How would I detect lighting on fragments?

Godot Version

4.2.2 stable

Question

Simple question: how would I detect lighting on a given fragment?

Apparently it is very tough to answer. I am currently making a modified version of the pixel shader from this video. Here it is for reference (note that some of it may be intentionally broken or unused, since I am still testing things out).

This shader is on a post-processing quad.

shader_type spatial;

#include "res://addons/ShaderFunction-Extras/Color/color_adjustment.gdshaderinc"

uniform sampler2D screen_texture: hint_screen_texture, filter_nearest;
uniform sampler2D depth_texture: hint_depth_texture, filter_nearest;
uniform sampler2D normal_texture: hint_normal_roughness_texture, filter_nearest;
// no source_color on these: depth and normals are data, not color, and the
// screen texture is already linear, so sRGB decoding would distort them

uniform int light_texture_size = 24;

uniform sampler2D light_position_texture: filter_nearest;
// contains position: rgb (no source_color: this is data, and sRGB decoding would distort it)
uniform sampler2D light_data_texture: filter_nearest;
// contains energy, attenuation, and range: rgb
uniform sampler2D light_data2_texture: filter_nearest;
// contains steps and decay: rg (blue channel currently unused)
uniform sampler2D light_color_texture: source_color, filter_nearest;
// contains color: rgb → hsv

uniform float depth_threshold : hint_range(0,1) = 0.1;
uniform float normal_threshold : hint_range(0, 1) = 0.8;

uniform vec3 test_color: source_color;
varying vec3 local_vertex;

uniform float darken_mult: hint_range(0.0, 1.0, 0.1) = 0.3;
uniform float lightness_mult: hint_range(1.0, 2.0, 0.1) = 1.3;
uniform float dark_saturation_mult: hint_range(1.0, 2.0, 0.1) = 1.3;
uniform float light_saturation_mult: hint_range(0.0, 1.0, 0.1) = 0.7;
uniform float value_offset: hint_range(0.0, 1.0, 0.01) = 0.12; // to fix flickering

uniform vec3 normal_edge_bias = vec3(1.0, 1.0, 1.0);

vec3 WorldPosFromDepth(float depth, vec2 uv, mat4 inv_proj, mat4 inv_view) {
	// Godot 4's Forward+/Mobile renderers use the Vulkan convention:
	// NDC z is already in 0..1, so only x and y need remapping
	// (the original z = depth * 2.0 - 1.0 remap is an OpenGL-ism)
	vec4 clipSpacePosition = vec4(uv * 2.0 - 1.0, depth, 1.0);
	vec4 viewSpacePosition = inv_proj * clipSpacePosition;

	// Perspective division
	viewSpacePosition /= viewSpacePosition.w;

	vec4 worldSpacePosition = inv_view * viewSpacePosition;

	return worldSpacePosition.xyz;
}
//thx Andon M. Coleman

vec3 hue_shift_rgb(vec3 color, vec3 frag_pos){
	float norm_post_brightness = 0.0;
	float s = 0.0;
	// the accumulators must be initialized (reading them uninitialized is
	// undefined); 1.0 is a guess at a neutral baseline and may need tuning
	float sat_mult = 1.0;
	float val_mult = 1.0;

	for (int iy = 0; iy < 24; iy++){
		for (int ix = 0; ix < 24; ix++){
			// flip the y axis so row iy maps to 23 - iy (the original 1 - iy
			// went negative past row 1); n.b. in Godot UV (0,0) is actually
			// the top left, so this flip may turn out to be unnecessary
			vec2 light_uv = vec2(float(ix) / 24.0, float(23 - iy) / 24.0);

			vec3 light_color = texture(light_color_texture, light_uv).rgb;

			// a black texel marks the end of the light list
			if (light_color == vec3(0.0)){
				iy = 23;
				break;
			}

			vec3 light_pos = texture(light_position_texture, light_uv).rgb;
			vec3 light_data = texture(light_data_texture, light_uv).rgb;

			float energy = light_data.x;
			float attenuation = light_data.y;
			float range = light_data.z;

			float dist_to_light = distance(frag_pos, light_pos);
			s = dist_to_light / range;

			// out of range: skip this light only (the original broke out of
			// both loops, which also discarded every remaining light)
			if (dist_to_light > range){
				continue;
			}

			vec3 light_data2 = texture(light_data2_texture, light_uv).rgb;

			float steps = light_data2.x;
			float decay = light_data2.y;

			float brightness = energy * (pow(1.0 - pow(s, 2.0), 2.0) / (1.0 + decay * pow(s, 2.0)));
			// thx lisyarus

			float norm_brightness = brightness / energy;

			norm_post_brightness = round(norm_brightness * steps) / steps;

			sat_mult += 1.0 - norm_post_brightness;
			val_mult += 1.0 + norm_post_brightness;
		}
	}

	vec3 hsv_color = rgb_to_hsv(color);

	vec3 mixed_color = vec3(hsv_color.x, hsv_color.y * sat_mult, hsv_color.z * val_mult);

	return hsv_to_rgb(mixed_color);
}

float get_depth(vec2 screen_uv, mat4 inv_proj_mat){
	float depth = texture(depth_texture, screen_uv).x;
	vec3 ndc = vec3(screen_uv * 2.0 - 1.0, depth);
	vec4 view = inv_proj_mat * vec4(ndc, 1.0);
	view.xyz /= view.w;
	return -view.z;
}

void vertex(){
	POSITION = vec4(VERTEX, 1.0);
	local_vertex = VERTEX;
}

void fragment() {
	vec3 normal = texture(normal_texture, SCREEN_UV).rgb * 2.0 - 1.0;

	float depth = get_depth(SCREEN_UV, INV_PROJECTION_MATRIX);

	vec2 texel_size = 1.0 / VIEWPORT_SIZE.xy;

	vec2 uvs[8];
	uvs[0] = vec2(SCREEN_UV.x, SCREEN_UV.y + texel_size.y);
	uvs[1] = vec2(SCREEN_UV.x, SCREEN_UV.y - texel_size.y);
	uvs[2] = vec2(SCREEN_UV.x + texel_size.x, SCREEN_UV.y);
	uvs[3] = vec2(SCREEN_UV.x - texel_size.x, SCREEN_UV.y);
	uvs[4] = SCREEN_UV + texel_size;
	uvs[5] = SCREEN_UV - texel_size;
	uvs[6] = SCREEN_UV + vec2(-texel_size.x, texel_size.y);
	uvs[7] = SCREEN_UV + vec2(texel_size.x, -texel_size.y);

	float depth_diff = 0.0;
	float nearest_depth = depth;
	vec2 nearest_uv = SCREEN_UV;
	float normal_sum = 0.0;

	for (int i = 0; i < 8; i++){
		float d = get_depth(uvs[i], INV_PROJECTION_MATRIX);
		depth_diff += depth - d;

		if (d < nearest_depth){
			nearest_depth = d;
			nearest_uv = uvs[i];
		}

		vec3 n = texture(normal_texture, uvs[i]).xyz * 2.0 - 1.0;
		vec3 normal_diff = normal - n;

		// edge pixels yield to the normal closest to this direction
		float normal_bias_diff = dot(normal_diff, normal_edge_bias);
		float normal_indicator = smoothstep(-0.01, 0.01, normal_bias_diff);

		normal_sum += dot(normal_diff, normal_diff) * normal_indicator;
	}

	float depth_edge = step(depth_threshold, depth_diff);

	float indicator = sqrt(normal_sum);
	float normal_edge = step(normal_threshold, indicator);

	vec3 screen_color = texture(screen_texture, SCREEN_UV).rgb;
	vec3 altered_screen_color = texture(screen_texture, nearest_uv).rgb;
	vec3 edge_mix;

	mat3 view_to_world_normal_mat = mat3(
		INV_VIEW_MATRIX[0].xyz,
		INV_VIEW_MATRIX[1].xyz,
		INV_VIEW_MATRIX[2].xyz
	);

	vec3 world_pos = WorldPosFromDepth(texture(depth_texture, SCREEN_UV).x, SCREEN_UV, INV_PROJECTION_MATRIX, INV_VIEW_MATRIX);

	vec3 edge_color = hue_shift_rgb(altered_screen_color, world_pos);

	vec3 processed_color = mix(screen_color, edge_color, (depth_edge > 0.0 ? depth_edge : normal_edge));

	ALBEDO = processed_color;

	// thx Crigz Vs Game Dev
}

void light(){
	// runs once per light; assigning a constant keeps the quad's output at ALBEDO
	DIFFUSE_LIGHT = vec3(1.0);
}

My goal is to put shine on edges where there is light, and to darken edges where there is not. This would be achieved in the hue_shift_rgb() function (which doesn't actually hue-shift yet) by computing each light's brightness from its distance to the fragment.
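To illustrate the end goal: once a per-fragment light level is available, the edge treatment would be something like this hypothetical helper (not in the shader above; the names are mine):

vec3 shade_edge(vec3 edge_color, float brightness) {
	// brightness is the 0..1 quantized light level at this fragment;
	// bright areas push the edge toward white, dark areas toward black,
	// and 0.5 leaves it untouched
	float shine = smoothstep(0.5, 1.0, brightness);
	float darken = smoothstep(0.5, 0.0, brightness);
	return mix(mix(edge_color, vec3(0.0), darken), vec3(1.0), shine);
}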

I tried making an extra viewport that viewed the world unshaded. This was the closest I came to making it work: I compared the current viewport's color against the unshaded viewport's color. However, this is a "global" comparison. For example, with a tiny light in a dark room I would want the rims of nearby objects highlighted, but the value of those edge colors would still be lower than the value of the unshaded edges, so no highlights appeared. I also suspect that rendering the game twice would not be very performant.
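Concretely, that comparison amounted to something like this (unshaded_texture is my name for the second viewport's output, passed in as a uniform):

uniform sampler2D unshaded_texture: source_color, filter_nearest;

float lit_amount(vec2 uv) {
	vec3 lit = texture(screen_texture, uv).rgb;
	vec3 unlit = texture(unshaded_texture, uv).rgb;
	// compare luminance of the shaded scene against the unshaded render;
	// the ratio is relative to each surface's own albedo, not to nearby
	// lights, which is why dim rims near a small light never win
	float lum_lit = dot(lit, vec3(0.2126, 0.7152, 0.0722));
	float lum_unlit = dot(unlit, vec3(0.2126, 0.7152, 0.0722));
	return lum_lit / max(lum_unlit, 0.001);
}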

I tried getting light data from light() and pumping it back into fragment(), and I attempted passing data from light() into the next pass (which I found out is not possible, iirc).

I attempted passing arrays of data into the shader. I didn't know how many lights I would have, so I decided on a maximum of 512 and made four 512-element arrays (for position, data, more data, and color). The shader crashed before I even did anything with them.
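For completeness, a bounded version of the array idea would look roughly like this (the 64 cap and the packing are my guesses; four 512-element arrays may simply have exceeded the uniform-buffer limits):

uniform int light_count = 0;
uniform vec3 light_positions[64];
uniform vec4 light_params[64]; // x: energy, y: attenuation, z: range, w: steps
uniform vec3 light_colors[64];

// then loop only over the slots actually filled by the script:
// for (int i = 0; i < light_count; i++) { ... }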

I tried to see if I could get all the light nodes and their properties (energy, range, etc.), but I couldn't find a way to get a list of nodes directly into the shader.

I then thought I had found light at the end of the tunnel by packing the light data into textures and reading it back from their rgb channels.
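The reading side currently does manual UV math with texture(); texelFetch() would probably be more robust, since it takes integer texel coordinates directly and sidesteps filtering and wrapping entirely (a sketch, not yet in the shader above):

// read light i's position straight from its texel, row-major
vec3 get_light_position(int i) {
	ivec2 texel = ivec2(i % light_texture_size, i / light_texture_size);
	return texelFetch(light_position_texture, texel, 0).rgb;
}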

Then I ran into the issue of getting the world coordinates of the current fragment.

I finally decided that maybe I should ask the forum for help.

Now I am here.

I am not sure where exactly my ignorance lies, but this problem has been driving me crazy for a week now. My approach itself may be flawed, so any help is welcome.

In the video you posted, he gets the light direction from a uniform that is probably updated via a script.

He adds this uniform at the 8:20 mark.
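For reference, the shader side of that is just a uniform; a script pushes a new value each frame with set_shader_parameter() (the node paths here are made up):

uniform vec3 light_direction = vec3(0.0, -1.0, 0.0);

// updated from GDScript each frame, e.g.:
// $Quad.material_override.set_shader_parameter("light_direction",
//     -$DirectionalLight3D.global_transform.basis.z)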

If you don't like that approach: lighting is done after the fragment shader, so you may need to pass information forward to the light function rather than pull light information in before the light step.

He calculates the normal edges and lightens or darkens them based on the light direction, and maybe also on depth.

I would think that whenever the math detects an edge, you can flag that fragment with a varying boolean or magnitude. Then, in the light function, use that info to adjust the color of the edge fragment without having to calculate everything again.
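Something like this, assuming (per the shading-language docs) that a varying written in fragment() can be read in light():

shader_type spatial;

varying float edge_mask; // 0.0 = flat surface, 1.0 = detected edge

void fragment() {
	// ... run the depth/normal edge detection once, as in the shader above ...
	float depth_edge = 0.0;   // stand-ins for the real indicators
	float normal_edge = 0.0;
	edge_mask = max(depth_edge, normal_edge);
}

void light() {
	// ATTENUATION already includes distance falloff and shadowing,
	// so edges only brighten where this particular light reaches
	DIFFUSE_LIGHT += clamp(dot(NORMAL, LIGHT), 0.0, 1.0) * ATTENUATION * LIGHT_COLOR;
	DIFFUSE_LIGHT += edge_mask * ATTENUATION * LIGHT_COLOR;
}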

I saw that he added a light direction, but that was a global light direction; with omni lights it would not look correct. I tried your idea, but ran into the problem of getting the world position of a fragment in order to decide whether it is in shadow or in light.

If the fragment position is in local space, just multiply it by the model matrix to get it into world space:

MODEL_MATRIX
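i.e., using the local_vertex varying from the shader above:

vec3 world_pos = (MODEL_MATRIX * vec4(local_vertex, 1.0)).xyz;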

Fragments are in screen space.


Oh, okay, then just put the world coordinate into a varying variable as well.

That's the issue: how do I get the world coordinate of the fragment?

Oh okay, right. Well, I think the projection from view space to screen space is destructive and cannot be undone with math alone.

I guess if you have a depth texture, you sort of have the missing component needed to undo the projection.
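With the depth texture you have exactly that missing component. A sketch of the standard reconstruction, which is what the WorldPosFromDepth() helper above boils down to (Godot 4's Forward+/Mobile NDC depth is already 0..1, so only x and y get remapped):

float depth = texture(depth_texture, SCREEN_UV).x;
vec3 ndc = vec3(SCREEN_UV * 2.0 - 1.0, depth);
vec4 view = INV_PROJECTION_MATRIX * vec4(ndc, 1.0);
view.xyz /= view.w;
vec4 world = INV_VIEW_MATRIX * vec4(view.xyz, 1.0);
vec3 world_pos = world.xyz;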

Yeah, maybe this could work?

I’ve seen this before. I’ll try it again with your approach in the light() function.