bevy/crates/bevy_pbr/src/render/pbr.wgsl
Marco Buono 1a96d820fd Add Distance and Atmospheric Fog support (#6412)
<img width="1392" alt="image" src="https://user-images.githubusercontent.com/418473/203873533-44c029af-13b7-4740-8ea3-af96bd5867c9.png">
<img width="1392" alt="image" src="https://user-images.githubusercontent.com/418473/203873549-36be7a23-b341-42a2-8a9f-ceea8ac7a2b8.png">


# Objective

- Add support for the “classic” distance fog effect, as well as a more advanced atmospheric fog effect.

## Solution

This PR:

- Introduces a new `FogSettings` component that controls distance fog per-camera (see the usage sketch after this list);
- Adds support for three widely used “traditional” fog falloff modes: `Linear`, `Exponential` and `ExponentialSquared`, as well as a more advanced `Atmospheric` fog;
- Adds support for directional light influence over fog color;
- Extracts fog via `ExtractComponent`, then uses a prepare system that sets up a new dynamic uniform struct (`Fog`), similar to other mesh view types;
- Renders fog in the PBR material shader as a final adjustment to `output_color`, after PBR is computed (but before tone mapping);
- Adds a new `StandardMaterial` flag (`fog_enabled`) for enabling fog per-material;
- Adds convenience methods for easier artistic control when creating non-linear fog types;
- Adds documentation around fog.
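
For illustration, here is a minimal sketch of the intended per-camera usage, assuming the `FogSettings`/`FogFalloff` API matches the description above (the specific field names, values, and `Camera3dBundle` setup are assumptions, not taken from this diff):

```rust
use bevy::pbr::{FogFalloff, FogSettings};
use bevy::prelude::*;

fn setup(mut commands: Commands) {
    // Fog is configured per-camera by inserting `FogSettings` alongside the camera.
    commands.spawn((
        Camera3dBundle::default(),
        FogSettings {
            // Base fog color; directional light influence is layered on top of this.
            color: Color::rgba(0.25, 0.25, 0.25, 1.0),
            // One of the falloff modes listed above.
            falloff: FogFalloff::Linear { start: 5.0, end: 20.0 },
            ..default()
        },
    ));
}

fn main() {
    App::new()
        .add_plugins(DefaultPlugins)
        .add_startup_system(setup)
        .run();
}
```

Because fog is extracted with `ExtractComponent` and written into a dynamic uniform during prepare, no extra render-world setup should be needed beyond inserting the component.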

---

## Changelog

### Added

- Added support for distance-based fog effects for PBR materials, controllable per-camera via the new `FogSettings` component;
- Added `FogFalloff` enum for selecting between three widely used “traditional” fog falloff modes: `Linear`, `Exponential` and `ExponentialSquared`, as well as a more advanced `Atmospheric` fog (a sketch of the variants follows).
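
As a rough illustration of the new enum, one value of each falloff mode might be constructed as below; the variant fields (`start`/`end`, `density`, `extinction`/`inscattering`) and numbers are assumptions inferred from the description rather than copied from this diff:

```rust
use bevy::math::Vec3;
use bevy::pbr::FogFalloff;

// One example value per falloff mode (field names and numbers are illustrative).
fn example_falloffs() -> Vec<FogFalloff> {
    vec![
        // Fully transparent before `start`, fully opaque past `end`.
        FogFalloff::Linear { start: 5.0, end: 20.0 },
        // Density-based: fog thickens exponentially with distance.
        FogFalloff::Exponential { density: 0.15 },
        // Like `Exponential`, but ramps up faster with distance.
        FogFalloff::ExponentialSquared { density: 0.05 },
        // Per-channel extinction and inscattering, enabling light-aware atmospheric fog.
        FogFalloff::Atmospheric {
            extinction: Vec3::splat(0.05),
            inscattering: Vec3::splat(0.05),
        },
    ]
}
```

The convenience methods mentioned in the solution section presumably derive these non-linear parameters from more artist-friendly inputs (e.g. a target visibility distance).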


#import bevy_pbr::mesh_view_bindings
#import bevy_pbr::pbr_bindings
#import bevy_pbr::mesh_bindings
#import bevy_pbr::utils
#import bevy_pbr::clustered_forward
#import bevy_pbr::lighting
#import bevy_pbr::shadows
#import bevy_pbr::fog
#import bevy_pbr::pbr_functions

struct FragmentInput {
    @builtin(front_facing) is_front: bool,
    @builtin(position) frag_coord: vec4<f32>,
    #import bevy_pbr::mesh_vertex_output
};

@fragment
fn fragment(in: FragmentInput) -> @location(0) vec4<f32> {
    var output_color: vec4<f32> = material.base_color;

#ifdef VERTEX_COLORS
    output_color = output_color * in.color;
#endif
#ifdef VERTEX_UVS
    if ((material.flags & STANDARD_MATERIAL_FLAGS_BASE_COLOR_TEXTURE_BIT) != 0u) {
        output_color = output_color * textureSample(base_color_texture, base_color_sampler, in.uv);
    }
#endif

    // NOTE: Unlit bit not set means == 0 is true, so the true case is if lit
    if ((material.flags & STANDARD_MATERIAL_FLAGS_UNLIT_BIT) == 0u) {
        // Prepare a 'processed' StandardMaterial by sampling all textures to resolve
        // the material members
        var pbr_input: PbrInput;

        pbr_input.material.base_color = output_color;
        pbr_input.material.reflectance = material.reflectance;
        pbr_input.material.flags = material.flags;
        pbr_input.material.alpha_cutoff = material.alpha_cutoff;

        // TODO use .a for exposure compensation in HDR
        var emissive: vec4<f32> = material.emissive;
#ifdef VERTEX_UVS
        if ((material.flags & STANDARD_MATERIAL_FLAGS_EMISSIVE_TEXTURE_BIT) != 0u) {
            emissive = vec4<f32>(emissive.rgb * textureSample(emissive_texture, emissive_sampler, in.uv).rgb, 1.0);
        }
#endif
        pbr_input.material.emissive = emissive;

        var metallic: f32 = material.metallic;
        var perceptual_roughness: f32 = material.perceptual_roughness;
#ifdef VERTEX_UVS
        if ((material.flags & STANDARD_MATERIAL_FLAGS_METALLIC_ROUGHNESS_TEXTURE_BIT) != 0u) {
            let metallic_roughness = textureSample(metallic_roughness_texture, metallic_roughness_sampler, in.uv);
            // Sampling from GLTF standard channels for now
            metallic = metallic * metallic_roughness.b;
            perceptual_roughness = perceptual_roughness * metallic_roughness.g;
        }
#endif
        pbr_input.material.metallic = metallic;
        pbr_input.material.perceptual_roughness = perceptual_roughness;

        var occlusion: f32 = 1.0;
#ifdef VERTEX_UVS
        if ((material.flags & STANDARD_MATERIAL_FLAGS_OCCLUSION_TEXTURE_BIT) != 0u) {
            occlusion = textureSample(occlusion_texture, occlusion_sampler, in.uv).r;
        }
#endif
        pbr_input.occlusion = occlusion;

        pbr_input.frag_coord = in.frag_coord;
        pbr_input.world_position = in.world_position;
        pbr_input.world_normal = prepare_world_normal(
            in.world_normal,
            (material.flags & STANDARD_MATERIAL_FLAGS_DOUBLE_SIDED_BIT) != 0u,
            in.is_front,
        );

        pbr_input.is_orthographic = view.projection[3].w == 1.0;

        pbr_input.N = apply_normal_mapping(
            material.flags,
            pbr_input.world_normal,
#ifdef VERTEX_TANGENTS
#ifdef STANDARDMATERIAL_NORMAL_MAP
            in.world_tangent,
#endif
#endif
#ifdef VERTEX_UVS
            in.uv,
#endif
        );
        pbr_input.V = calculate_view(in.world_position, pbr_input.is_orthographic);

        output_color = pbr(pbr_input);
    } else {
        output_color = alpha_discard(material, output_color);
    }

    // fog
    if (fog.mode != FOG_MODE_OFF && (material.flags & STANDARD_MATERIAL_FLAGS_FOG_ENABLED_BIT) != 0u) {
        output_color = apply_fog(output_color, in.world_position.xyz, view.world_position.xyz);
    }

#ifdef TONEMAP_IN_SHADER
    output_color = tone_mapping(output_color);
#endif
#ifdef DEBAND_DITHER
    var output_rgb = output_color.rgb;
    output_rgb = pow(output_rgb, vec3<f32>(1.0 / 2.2));
    output_rgb = output_rgb + screen_space_dither(in.frag_coord.xy);
    // This conversion back to linear space is required because our output texture format is
    // SRGB; the GPU will assume our output is linear and will apply an SRGB conversion.
    output_rgb = pow(output_rgb, vec3<f32>(2.2));
    output_color = vec4(output_rgb, output_color.a);
#endif
#ifdef PREMULTIPLY_ALPHA
    output_color = premultiply_alpha(material.flags, output_color);
#endif
    return output_color;
}