Use dual-source blending for rendering the sky (#17672)

# Objective

Previously, only the alpha channel was available, so we stored the
mean of the transmittance in the aerial view LUT, resulting in a grayer
fog than expected.

## Solution

- Calculate transmittance to scene in `render_sky` with two samples from
the transmittance lut
- Use dual-source blending to effectively get per-component alpha
blending
This commit is contained in:
Emerson Coskey 2025-02-10 15:53:53 -08:00 committed by GitHub
parent 69db29efb9
commit 83370e0a25
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 83 additions and 38 deletions

View File

@ -55,12 +55,9 @@ fn main(@builtin(global_invocation_id) idx: vec3<u32>) {
break;
}
}
// We only have one channel to store transmittance, so we store the mean
let mean_transmittance = (throughput.r + throughput.g + throughput.b) / 3.0;
// Store in log space to allow linear interpolation of exponential values between slices
let log_transmittance = -log(max(mean_transmittance, 1e-6)); // Avoid log(0)
let log_inscattering = log(max(total_inscattering, vec3(1e-6)));
textureStore(aerial_view_lut_out, vec3(vec2<u32>(idx.xy), slice_i), vec4(log_inscattering, log_transmittance));
textureStore(aerial_view_lut_out, vec3(vec2<u32>(idx.xy), slice_i), vec4(log_inscattering, 0.0));
}
}

View File

@ -118,6 +118,27 @@ fn sample_transmittance_lut(r: f32, mu: f32) -> vec3<f32> {
return textureSampleLevel(transmittance_lut, transmittance_lut_sampler, uv, 0.0).rgb;
}
// NOTICE: This function is copyrighted by Eric Bruneton and INRIA, and falls
// under the license reproduced in bruneton_functions.wgsl (variant of MIT license)
//
// FIXME: this function should be in bruneton_functions.wgsl, but because naga_oil doesn't
// support cyclic imports it's stuck here
//
// Returns the transmittance along a ray *segment* of length `t`, starting at
// radius `r` with view-zenith cosine `mu`. Computed as the ratio of two
// full-ray transmittance-lut samples: T(start..end) = T(start..inf) / T(end..inf).
fn sample_transmittance_lut_segment(r: f32, mu: f32, t: f32) -> vec3<f32> {
// Radius and view-zenith cosine at the far endpoint of the segment.
let r_t = get_local_r(r, mu, t);
// Clamp to a valid cosine range to guard against floating-point drift.
let mu_t = clamp((r * mu + t) / r_t, -1.0, 1.0);
if ray_intersects_ground(r, mu) {
// Ground-intersecting ray: sample both endpoints along the reversed
// (upward) direction, with the ratio flipped accordingly.
// NOTE(review): presumably the transmittance lut is only parameterized for
// non-ground-intersecting rays, which is why the lookup is mirrored here —
// confirm against bruneton_functions.wgsl.
return min(
sample_transmittance_lut(r_t, -mu_t) / sample_transmittance_lut(r, -mu),
vec3(1.0)
);
} else {
// Upward ray: straightforward ratio of the two endpoint transmittances.
// min() with 1.0 keeps the result physically plausible despite lut
// interpolation error (transmittance can never exceed 1).
return min(
sample_transmittance_lut(r, mu) / sample_transmittance_lut(r_t, mu_t), vec3(1.0)
);
}
}
fn sample_multiscattering_lut(r: f32, mu: f32) -> vec3<f32> {
let uv = multiscattering_lut_r_mu_to_uv(r, mu);
return textureSampleLevel(multiscattering_lut, multiscattering_lut_sampler, uv, 0.0).rgb;
@ -130,23 +151,31 @@ fn sample_sky_view_lut(r: f32, ray_dir_as: vec3<f32>) -> vec3<f32> {
return textureSampleLevel(sky_view_lut, sky_view_lut_sampler, uv, 0.0).rgb;
}
// Distance in meters from the camera to a point given in NDC space.
fn ndc_to_camera_dist(ndc: vec3<f32>) -> f32 {
    // Unproject the NDC point into view space, then apply the perspective divide.
    let unprojected = view.view_from_clip * vec4(ndc, 1.0);
    let view_pos = unprojected.xyz / unprojected.w;
    // Convert from scene units to meters.
    return length(view_pos) * settings.scene_units_to_m;
}
// RGB channels: total inscattered light along the camera ray up to distance `t`
// (in meters). Transmittance is no longer stored in the alpha channel; callers
// reconstruct it per-component from the transmittance lut instead.
//
// NOTE(review): this span contained the pre- and post-change variants of the
// function interleaved (two signatures, conflicting `uvw`/`fade`/`return`
// statements) — reconstructed to the post-change version per the diff.
fn sample_aerial_view_lut(uv: vec2<f32>, t: f32) -> vec3<f32> {
    let t_max = settings.aerial_view_lut_max_distance;
    let num_slices = f32(settings.aerial_view_lut_size.z);
    // Each texel stores the value of the scattering integral over the whole slice,
    // which requires us to offset the w coordinate by half a slice. For
    // example, if we wanted the value of the integral at the boundary between slices,
    // we'd need to sample at the center of the previous slice, and vice-versa for
    // sampling in the center of a slice.
    let uvw = vec3(uv, saturate(t / t_max - 0.5 / num_slices));
    let sample = textureSampleLevel(aerial_view_lut, aerial_view_lut_sampler, uvw, 0.0);
    // Since sampling anywhere between w=0 and w=t_slice will clamp to the first slice,
    // we need to do a linear step over the first slice towards zero at the camera's
    // position to recover the correct integral value.
    let t_slice = t_max / num_slices;
    let fade = saturate(t / t_slice);
    // Recover the values from log space (the lut stores log(inscattering)).
    return exp(sample.rgb) * fade;
}
// PHASE FUNCTIONS

View File

@ -46,6 +46,8 @@ use bevy_reflect::Reflect;
use bevy_render::{
extract_component::UniformComponentPlugin,
render_resource::{DownlevelFlags, ShaderType, SpecializedRenderPipelines},
renderer::RenderDevice,
settings::WgpuFeatures,
};
use bevy_render::{
extract_component::{ExtractComponent, ExtractComponentPlugin},
@ -157,6 +159,15 @@ impl Plugin for AtmospherePlugin {
};
let render_adapter = render_app.world().resource::<RenderAdapter>();
let render_device = render_app.world().resource::<RenderDevice>();
if !render_device
.features()
.contains(WgpuFeatures::DUAL_SOURCE_BLENDING)
{
warn!("AtmospherePlugin not loaded. GPU lacks support for dual-source blending.");
return;
}
if !render_adapter
.get_downlevel_capabilities()

View File

@ -2,10 +2,10 @@
types::{Atmosphere, AtmosphereSettings},
bindings::{atmosphere, view, atmosphere_transforms},
functions::{
sample_transmittance_lut, sample_sky_view_lut,
direction_world_to_atmosphere, uv_to_ray_direction,
uv_to_ndc, sample_aerial_view_lut, view_radius,
sample_sun_illuminance,
sample_transmittance_lut, sample_transmittance_lut_segment,
sample_sky_view_lut, direction_world_to_atmosphere,
uv_to_ray_direction, uv_to_ndc, sample_aerial_view_lut,
view_radius, sample_sun_illuminance, ndc_to_camera_dist
},
};
#import bevy_render::view::View;
@ -18,22 +18,30 @@
@group(0) @binding(13) var depth_texture: texture_depth_2d;
#endif
// Dual-source blending output: the pipeline combines these as
// scene_color * transmittance + inscattering, giving per-component
// (wavelength-dependent) fog instead of a single grayscale alpha.
struct RenderSkyOutput {
    // Inscattered light added on top of the scene.
    @location(0) inscattering: vec4<f32>,
    // Per-component transmittance that attenuates the existing scene color.
    @location(0) @second_blend_source transmittance: vec4<f32>,
}

// NOTE(review): this span contained the pre- and post-change variants of the
// fragment entry point interleaved (two `@fragment fn main` signatures,
// duplicated depth loads and returns) — reconstructed to the post-change
// version per the diff.
@fragment
fn main(in: FullscreenVertexOutput) -> RenderSkyOutput {
    let depth = textureLoad(depth_texture, vec2<i32>(in.position.xy), 0);
    let ray_dir_ws = uv_to_ray_direction(in.uv);
    let r = view_radius();
    let mu = ray_dir_ws.y;

    var transmittance: vec3<f32>;
    // Explicitly zero-initialized so the sky branch can accumulate with `+=`.
    var inscattering = vec3(0.0);
    if depth == 0.0 {
        // Sky pixel (nothing in the depth buffer): use the full-ray luts.
        let ray_dir_as = direction_world_to_atmosphere(ray_dir_ws.xyz);
        transmittance = sample_transmittance_lut(r, mu);
        inscattering += sample_sky_view_lut(r, ray_dir_as);
        inscattering += sample_sun_illuminance(ray_dir_ws.xyz, transmittance);
    } else {
        // Scene pixel: only integrate up to the depth-buffer distance, and
        // compute transmittance over just that camera-to-scene segment.
        let t = ndc_to_camera_dist(vec3(uv_to_ndc(in.uv), depth));
        inscattering = sample_aerial_view_lut(in.uv, t);
        transmittance = sample_transmittance_lut_segment(r, mu, t);
    }
    return RenderSkyOutput(vec4(inscattering, 0.0), vec4(transmittance, 1.0));
}

View File

@ -367,7 +367,7 @@ impl SpecializedRenderPipeline for RenderSkyBindGroupLayouts {
blend: Some(BlendState {
color: BlendComponent {
src_factor: BlendFactor::One,
dst_factor: BlendFactor::SrcAlpha,
dst_factor: BlendFactor::Src1,
operation: BlendOperation::Add,
},
alpha: BlendComponent {