Looks like it's the srgb parameter of RenderingServer.texture_get_rd_texture() that crashes the game – but only if srgb=true and the texture is used as an image uniform. Perhaps there’s a GitHub issue that can be formulated here. I would expect a warning or an error message of some kind, not a straight-up crash when running the game.
Related code examples
This is fine:
# Fetch the RD-side texture RID for the viewport, with srgb=true.
RenderingServer.texture_get_rd_texture(RenderingServer.viewport_get_texture(viewport), true)
# [...]
# Bind it as a combined sampler + texture uniform (needs both a sampler RID
# and the texture RID) — this combination works.
var u_vp: RDUniform = RDUniform.new()
u_vp.uniform_type = RenderingDevice.UNIFORM_TYPE_SAMPLER_WITH_TEXTURE
u_vp.binding = 1
u_vp.add_id(nearest_sampler)
u_vp.add_id(vp_tex)
This is not fine:
# Same fetch with srgb=true...
RenderingServer.texture_get_rd_texture(RenderingServer.viewport_get_texture(viewport), true)
# [...]
# ...but bound as a raw image uniform (no sampler). With srgb=true this
# crashes the game — the combination being reported here.
var u_vp: RDUniform = RDUniform.new()
u_vp.uniform_type = RenderingDevice.UNIFORM_TYPE_IMAGE
u_vp.binding = 1
u_vp.add_id(vp_tex)
Before changing my compositor effect to use an image (image2D) instead of a texture sampler (sampler2D), I wanted to see if I could fix the sampling artefact seen in my previous example (the weird aliasing). I tried setting the sampler’s RDSamplerState.unnormalized_uvw to true which, supposedly, should change the expected UV range from [0,1] to [0, texture_size] – essentially mimicking the same range expected when sampling image2Ds using imageLoad(). My thinking was that if the sampler were to use this range, it would be impossible for it to offset the sampling given that the UV would always be an integer-based vector (ivec2). From my testing though, it doesn’t seem to affect the sampler’s UV range whatsoever. Nothing changed.
As such, I decided to change the uniform to an image2D. The image is in sRGB color space though which, from my testing, is not the expected color space in the middle of the rendering pipeline. The image is, therefore, converted to linear color space to produce the expected results.
Original color space (left) and the image after srgb-to-linear conversion (right)
Shader conversion code
// ==================== COLOR CONVERSION HELPER FUNCTIONS ====================
// Source: https://physicallybased.info/tools/
// sRGB OETF: linear segment below the 0.0031308 knee, power curve above.
float lin_to_gamma(float value)
{
	return (value < 0.0031308)
		? value * 12.92
		: 1.055 * pow(value, 0.41666) - 0.055;
}
// sRGB EOTF (inverse of lin_to_gamma): linear below 0.04045, power above.
// 0.0773993808 = 1/12.92; 0.9478672986 = 1/1.055; 0.0521327014 = 0.055/1.055.
float gamma_to_lin(float value)
{
	return (value < 0.04045)
		? value * 0.0773993808
		: pow(value * 0.9478672986 + 0.0521327014, 2.4);
}
// Per-channel linear -> sRGB gamma conversion.
vec3 l2g(vec3 value)
{
	return vec3(lin_to_gamma(value.r), lin_to_gamma(value.g), lin_to_gamma(value.b));
}
// Per-channel sRGB gamma -> linear conversion.
vec3 g2l(vec3 value)
{
	return vec3(gamma_to_lin(value.r), gamma_to_lin(value.g), gamma_to_lin(value.b));
}
// ===========================================================================
I appreciate the help @normalized. It would just be nice if I didn’t have to jump through all these hoops to get it. I just wanted to know how to work with viewport textures in the compositor, and perhaps an explanation as to how it all works – not a constant insistence to go back to basics.
This has been long – but I feel like I am finally confident enough with the way viewports and their textures work in the context of the compositor. I still can’t get it to work in the editor (it still crashes), but that’s okay. The complete code used can be seen below.
CompositorEffectViewport.gd
extends CompositorEffect
class_name ViewportEffect
# Compute-shader compositor effect: blends a secondary viewport's render over
# the main color buffer, split at a movable vertical cutoff line.
var shader_file = preload("res://shaders/viewport_shader.glsl")
var rd: RenderingDevice
var shader: RID
var pipeline: RID
# RenderingDevice-side texture RID of the secondary viewport's color texture.
var vp_tex: RID
var linear_sampler: RID
var nearest_sampler: RID
# Secondary viewport and its camera (raw RenderingServer RIDs, not nodes).
var viewport: RID
var vp_cam: RID
# Horizontal position of the split line as a fraction of the image width.
@export_range(0.0, 1.0, 0.01) var cutoff_point: float = 0.5
# The viewport driving this effect: the 3D editor viewport when running in
# the editor, otherwise the scene tree's root viewport.
var root: Viewport:
	get:
		if Engine.is_editor_hint():
			return EditorInterface.get_editor_viewport_3d()
		var tree: SceneTree = Engine.get_main_loop() as SceneTree
		# Debug output kept from the original investigation.
		print("Main Loop is tree: %s" % (Engine.get_main_loop() is SceneTree))
		print("Root is valid: %s" % (tree.root != null))
		print("Root viewport is valid: %s" % (tree.root.get_viewport() != null))
		return tree.root.get_viewport()
# Creates the compute pipeline and the two samplers, then defers the
# viewport setup until the scene tree exists.
func _init():
	effect_callback_type = CompositorEffect.EFFECT_CALLBACK_TYPE_POST_SKY
	rd = RenderingServer.get_rendering_device()
	shader = rd.shader_create_from_spirv(shader_file.get_spirv())
	pipeline = rd.compute_pipeline_create(shader)
	# One sampler per filtering mode; the state object is reused and mutated
	# between the two sampler_create() calls.
	var ss := RDSamplerState.new()
	ss.min_filter = RenderingDevice.SAMPLER_FILTER_LINEAR
	ss.mag_filter = RenderingDevice.SAMPLER_FILTER_LINEAR
	linear_sampler = rd.sampler_create(ss)
	ss.min_filter = RenderingDevice.SAMPLER_FILTER_NEAREST
	ss.mag_filter = RenderingDevice.SAMPLER_FILTER_NEAREST
	nearest_sampler = rd.sampler_create(ss)
	# Defer viewport setup to make sure the scene tree has been created and initialized.
	call_deferred("setup_viewport")
# Creates a secondary RenderingServer viewport + camera mirroring the main
# camera, rendering only visual layer 2 (cull mask 1 << 1).
func setup_viewport():
	# Validate the camera BEFORE creating any RIDs: the original dereferenced
	# a possibly-null get_camera_3d() result (crash) and would have leaked
	# the freshly created camera/viewport RIDs on that failure.
	var cam_main = root.get_camera_3d()
	if cam_main == null:
		push_error("ViewportEffect: no active Camera3D found on the root viewport.")
		return
	vp_cam = RenderingServer.camera_create()
	viewport = RenderingServer.viewport_create()
	# Configure camera: mirror the main camera's transform and projection.
	RenderingServer.camera_set_transform(vp_cam, cam_main.global_transform)
	RenderingServer.camera_set_perspective(vp_cam, cam_main.fov, cam_main.near, cam_main.far)
	RenderingServer.camera_set_cull_mask(vp_cam, 1 << 1)
	# Configure viewport
	RenderingServer.viewport_attach_camera(viewport, vp_cam)
	RenderingServer.viewport_set_update_mode(viewport, RenderingServer.VIEWPORT_UPDATE_ALWAYS)
	RenderingServer.viewport_set_clear_mode(viewport, RenderingServer.VIEWPORT_CLEAR_ALWAYS)
	# Failing to set this crashes the game at runtime.
	RenderingServer.viewport_set_scenario(viewport, root.world_3d.scenario)
	# Setting this crashes the editor (i.e. when this script is a @tool script).
	RenderingServer.viewport_set_active(viewport, true)
	#RenderingServer.viewport_set_use_hdr_2d(viewport, true)
	#RenderingServer.viewport_set_parent_viewport(viewport, root)
	update_viewport_size(root.size)
# Resizes the secondary viewport and refreshes the cached RD texture RID
# (the underlying texture is recreated when the viewport size changes).
func update_viewport_size(size: Vector2i):
	print("Updating viewport size (new: %s)" % size)
	#mutex.lock()
	RenderingServer.viewport_set_size(viewport, size.x, size.y)
	#mutex.unlock()
	if (vp_tex.is_valid()):
		# NOTE(review): vp_tex came from texture_get_rd_texture(), i.e. it is a
		# RenderingDevice-side RID — freeing it through RenderingServer.free_rid()
		# may be the wrong owner (or it may not need freeing at all); confirm.
		RenderingServer.free_rid(vp_tex)
	# NOTE: 'srgb' parameter can not be true when using the texture as an image uniform.
	# Make the linear conversion in the shader (if this is used as an image2D).
	vp_tex = RenderingServer.texture_get_rd_texture(RenderingServer.viewport_get_texture(viewport))
# Per-frame compositor hook: dispatches the split-screen compute pass that
# blends the secondary viewport image over the main color buffer.
func _render_callback(callback_type, render_data):
	# Guard: only run for our configured stage, and only once the deferred
	# setup_viewport() has produced a valid texture. The original called
	# rd.texture_get_format() on an invalid vp_tex RID before setup finished.
	if callback_type != effect_callback_type or not vp_tex.is_valid():
		return
	var render_scene_buffers: RenderSceneBuffersRD = render_data.get_render_scene_buffers()
	if render_scene_buffers == null:
		return
	# Keep the secondary viewport the same size as the internal render target.
	var tf = rd.texture_get_format(vp_tex)
	var intern_size = render_scene_buffers.get_internal_size()
	if tf.width != intern_size.x or tf.height != intern_size.y:
		update_viewport_size(intern_size)
	# Binding 0: main color buffer (read/write image).
	var u_main: RDUniform = RDUniform.new()
	u_main.uniform_type = RenderingDevice.UNIFORM_TYPE_IMAGE
	u_main.binding = 0
	u_main.add_id(render_scene_buffers.get_color_layer(0))
	# Binding 1: secondary viewport texture as an image (no sampler; the
	# srgb->linear conversion happens in the shader instead).
	var u_vp: RDUniform = RDUniform.new()
	u_vp.uniform_type = RenderingDevice.UNIFORM_TYPE_IMAGE
	u_vp.binding = 1
	u_vp.add_id(vp_tex)
	# Cached uniform set (avoids recreating it every frame).
	var uniform_set = UniformSetCacheRD.get_cache(shader, 0, [u_main, u_vp])
	# One 8x8 workgroup per tile, rounded up so the whole image is covered.
	var image_size = render_scene_buffers.get_internal_size()
	var wgroups_count_x = (image_size.x - 1) / 8 + 1
	var wgroups_count_y = (image_size.y - 1) / 8 + 1
	# Push constant layout must match the shader's Params block:
	# vec2 image_size, then the cutoff in offset.x, plus padding to 16 bytes.
	var push_constant: PackedFloat32Array = PackedFloat32Array()
	push_constant.push_back(image_size.x)
	push_constant.push_back(image_size.y)
	push_constant.push_back(cutoff_point)
	push_constant.push_back(0.0)
	# Record and dispatch the compute pass.
	var compute_list := rd.compute_list_begin()
	rd.compute_list_bind_compute_pipeline(compute_list, pipeline)
	rd.compute_list_bind_uniform_set(compute_list, uniform_set, 0)
	rd.compute_list_set_push_constant(compute_list, push_constant.to_byte_array(), push_constant.size() * 4)
	rd.compute_list_dispatch(compute_list, wgroups_count_x, wgroups_count_y, 1)
	rd.compute_list_end()
viewport_shader.glsl
#[compute]
#version 460
// 8x8 threads per workgroup; the GDScript dispatch rounds workgroup counts up.
layout(local_size_x = 8, local_size_y = 8, local_size_z = 1) in;
// Binding 0: main color buffer (Godot's internal buffers are rgba16f, linear).
layout(rgba16f, set = 0, binding = 0) uniform image2D screen;
// Binding 1: secondary viewport texture, read as a raw image.
// NOTE(review): assumes the viewport texture format is rgba8 — confirm
// (e.g. enabling HDR 2D on the viewport would change this).
layout(rgba8, set = 0, binding = 1) uniform image2D viewport;
// Must match the 16-byte push constant built in _render_callback():
// image_size.xy, then the cutoff fraction in offset.x (offset.y is padding).
layout(push_constant) uniform Params {
vec2 image_size;
vec2 offset; // ...and padding (only x-value is used)
} params;
// ==================== COLOR CONVERSION HELPER FUNCTIONS ====================
// Source: https://physicallybased.info/tools/
// sRGB OETF: linear segment below the 0.0031308 knee, power curve above.
float lin_to_gamma(float value)
{
	return (value < 0.0031308)
		? value * 12.92
		: 1.055 * pow(value, 0.41666) - 0.055;
}
// sRGB EOTF (inverse of lin_to_gamma): linear below 0.04045, power above.
// 0.0773993808 = 1/12.92; 0.9478672986 = 1/1.055; 0.0521327014 = 0.055/1.055.
float gamma_to_lin(float value)
{
	return (value < 0.04045)
		? value * 0.0773993808
		: pow(value * 0.9478672986 + 0.0521327014, 2.4);
}
// Per-channel linear -> sRGB gamma conversion.
vec3 l2g(vec3 value)
{
	return vec3(lin_to_gamma(value.r), lin_to_gamma(value.g), lin_to_gamma(value.b));
}
// Per-channel sRGB gamma -> linear conversion.
vec3 g2l(vec3 value)
{
	return vec3(gamma_to_lin(value.r), gamma_to_lin(value.g), gamma_to_lin(value.b));
}
// ===========================================================================
void main()
{
	ivec2 uv = ivec2(gl_GlobalInvocationID.xy);
	// Discard out-of-bounds invocations first (workgroup counts are rounded
	// up) before doing any per-pixel work. The original also computed an
	// unused uv_norm here; it is folded into the sampler comment below.
	if (uv.x >= params.image_size.x || uv.y >= params.image_size.y) {
		return;
	}
	vec4 color = imageLoad(screen, uv);
	// The viewport image is sRGB-encoded; convert to linear to match the
	// rest of the (linear) rendering pipeline.
	vec4 vp = imageLoad(viewport, uv);
	vp.rgb = g2l(vp.rgb);
	// Sampler-based alternative (requires a sampler2D uniform instead):
	// vec2 uv_norm = vec2(uv) / params.image_size;
	// vec4 vp = texture(viewport, uv_norm);
	// Hard split: viewport content right of the cutoff, main render left.
	float t = step(params.offset.x * params.image_size.x, uv.x);
	vec4 newColor = vp * t + color * (1.0 - t);
	// Darken pixels within lineWidth of the cutoff to draw the divider
	// (note: the mask scales alpha as well — preserved original behavior).
	const float lineWidth = 10.0;
	float lineMask = clamp(abs((params.offset.x * params.image_size.x) - uv.x) / lineWidth, 0.0, 1.0);
	imageStore(screen, uv, newColor * lineMask);
}
EffectControls.gd
extends Camera3D

# Drives ViewportEffect.cutoff_point from the mouse: while the left button is
# held, the split line follows the horizontal mouse position (normalized 0-1).
func _process(_delta: float) -> void:
	if Input.is_mouse_button_pressed(MOUSE_BUTTON_LEFT):
		var vp := get_viewport()
		var norm_mouse_pos = vp.get_mouse_position() / Vector2(vp.size)
		# Guard the whole chain: the original indexed compositor_effects[0]
		# unconditionally, which errors when the camera has no compositor
		# assigned or the effects array is empty.
		if compositor != null \
				and compositor.compositor_effects.size() > 0 \
				and compositor.compositor_effects[0] != null:
			compositor.compositor_effects[0].cutoff_point = norm_mouse_pos.x