Merge branch 'main' into proper-json-schema
commit 16c7db42aa

.github/workflows/ci.yml (vendored, 7 changes)
@@ -15,7 +15,7 @@ env:
# If nightly is breaking CI, modify this variable to target a specific nightly version.
NIGHTLY_TOOLCHAIN: nightly
RUSTFLAGS: "-D warnings"
BINSTALL_VERSION: "v1.12.5"
BINSTALL_VERSION: "v1.14.1"

concurrency:
group: ${{github.workflow}}-${{github.ref}}
@@ -260,7 +260,7 @@ jobs:
# Full git history is needed to get a proper list of changed files within `super-linter`
fetch-depth: 0
- name: Run Markdown Lint
uses: super-linter/super-linter/slim@v7.3.0
uses: super-linter/super-linter/slim@v7.4.0
env:
MULTI_STATUS: false
VALIDATE_ALL_CODEBASE: false
@@ -272,7 +272,8 @@ jobs:
timeout-minutes: 30
steps:
- uses: actions/checkout@v4
- uses: cargo-bins/cargo-binstall@v1.12.5
# Update in sync with BINSTALL_VERSION
- uses: cargo-bins/cargo-binstall@v1.14.1
- name: Install taplo
run: cargo binstall taplo-cli@0.9.3 --locked
- name: Run Taplo

Cargo.toml (74 changes)
@@ -165,6 +165,7 @@ default = [
"webgl2",
"x11",
"debug",
"zstd_rust",
]

# Recommended defaults for no_std applications
@@ -305,6 +306,9 @@ bevy_input_focus = ["bevy_internal/bevy_input_focus"]
# Headless widget collection for Bevy UI.
bevy_core_widgets = ["bevy_internal/bevy_core_widgets"]

# Feathers widget collection.
experimental_bevy_feathers = ["bevy_internal/bevy_feathers"]

# Enable passthrough loading for SPIR-V shaders (Only supported on Vulkan, shader capabilities and extensions must agree with the platform implementation)
spirv_shader_passthrough = ["bevy_internal/spirv_shader_passthrough"]

@@ -381,8 +385,11 @@ webp = ["bevy_internal/webp"]
# For KTX2 supercompression
zlib = ["bevy_internal/zlib"]

# For KTX2 supercompression
zstd = ["bevy_internal/zstd"]
# For KTX2 Zstandard decompression using pure rust [ruzstd](https://crates.io/crates/ruzstd). This is the safe default. For maximum performance, use "zstd_c".
zstd_rust = ["bevy_internal/zstd_rust"]

# For KTX2 Zstandard decompression using [zstd](https://crates.io/crates/zstd). This is a faster backend, but uses unsafe C bindings. For the safe option, stick to the default backend with "zstd_rust".
zstd_c = ["bevy_internal/zstd_c"]

# FLAC audio format support
flac = ["bevy_internal/flac"]
@@ -451,7 +458,7 @@ android_shared_stdcxx = ["bevy_internal/android_shared_stdcxx"]
detailed_trace = ["bevy_internal/detailed_trace"]

# Include tonemapping Look Up Tables KTX2 files. If everything is pink, you need to enable this feature or change the `Tonemapping` method for your `Camera2d` or `Camera3d`.
tonemapping_luts = ["bevy_internal/tonemapping_luts", "ktx2", "zstd"]
tonemapping_luts = ["bevy_internal/tonemapping_luts", "ktx2", "bevy_image/zstd"]

# Include SMAA Look Up Tables KTX2 Files
smaa_luts = ["bevy_internal/smaa_luts"]
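A brief aside on the `tonemapping_luts` note above: if that feature is disabled, the fix is either to enable it or to pick a tonemapper that needs no LUTs. A minimal sketch of the latter, assuming Bevy's stable camera/tonemapping API (the system names here are illustrative, not part of this diff):

use bevy::{core_pipeline::tonemapping::Tonemapping, prelude::*};

// Spawn a 3D camera with a tonemapper that has no KTX2 LUT dependency.
// The default (TonyMcMapface) requires the LUTs shipped by `tonemapping_luts`.
fn setup(mut commands: Commands) {
    commands.spawn((Camera3d::default(), Tonemapping::ReinhardLuminance));
}

fn main() {
    App::new()
        .add_plugins(DefaultPlugins)
        .add_systems(Startup, setup)
        .run();
}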
@@ -480,6 +487,12 @@ shader_format_wesl = ["bevy_internal/shader_format_wesl"]
# Enable support for transmission-related textures in the `StandardMaterial`, at the risk of blowing past the global, per-shader texture limit on older/lower-end GPUs
pbr_transmission_textures = ["bevy_internal/pbr_transmission_textures"]

# Enable support for Clustered Decals
pbr_clustered_decals = ["bevy_internal/pbr_clustered_decals"]

# Enable support for Light Textures
pbr_light_textures = ["bevy_internal/pbr_light_textures"]

# Enable support for multi-layer material textures in the `StandardMaterial`, at the risk of blowing past the global, per-shader texture limit on older/lower-end GPUs
pbr_multi_layer_material_textures = [
"bevy_internal/pbr_multi_layer_material_textures",
@@ -557,6 +570,11 @@ web = ["bevy_internal/web"]
# Enable hotpatching of Bevy systems
hotpatching = ["bevy_internal/hotpatching"]

# Enable converting glTF coordinates to Bevy's coordinate system by default. This will be Bevy's default behavior starting in 0.18.
gltf_convert_coordinates_default = [
"bevy_internal/gltf_convert_coordinates_default",
]

# Enable collecting debug information about systems and components to help with diagnostics
debug = ["bevy_internal/debug"]

@@ -575,7 +593,7 @@ ron = "0.10"
flate2 = "1.0"
serde = { version = "1", features = ["derive"] }
serde_json = "1.0.140"
bytemuck = "1.7"
bytemuck = "1"
bevy_render = { path = "crates/bevy_render", version = "0.17.0-dev", default-features = false }
# The following explicit dependencies are needed for proc macros to work inside of examples as they are part of the bevy crate itself.
bevy_ecs = { path = "crates/bevy_ecs", version = "0.17.0-dev", default-features = false }
@ -1047,6 +1065,17 @@ description = "Showcases different blend modes"
|
||||
category = "3D Rendering"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "manual_material"
|
||||
path = "examples/3d/manual_material.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.manual_material]
|
||||
name = "Manual Material Implementation"
|
||||
description = "Demonstrates how to implement a material manually using the mid-level render APIs"
|
||||
category = "3D Rendering"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "edit_material_on_gltf"
|
||||
path = "examples/3d/edit_material_on_gltf.rs"
|
||||
@ -4397,6 +4426,7 @@ wasm = true
|
||||
name = "clustered_decals"
|
||||
path = "examples/3d/clustered_decals.rs"
|
||||
doc-scrape-examples = true
|
||||
required-features = ["pbr_clustered_decals"]
|
||||
|
||||
[package.metadata.example.clustered_decals]
|
||||
name = "Clustered Decals"
|
||||
@ -4404,6 +4434,18 @@ description = "Demonstrates clustered decals"
|
||||
category = "3D Rendering"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "light_textures"
|
||||
path = "examples/3d/light_textures.rs"
|
||||
doc-scrape-examples = true
|
||||
required-features = ["pbr_light_textures"]
|
||||
|
||||
[package.metadata.example.light_textures]
|
||||
name = "Light Textures"
|
||||
description = "Demonstrates light textures"
|
||||
category = "3D Rendering"
|
||||
wasm = false
|
||||
|
||||
[[example]]
|
||||
name = "occlusion_culling"
|
||||
path = "examples/3d/occlusion_culling.rs"
|
||||
@ -4506,3 +4548,27 @@ name = "Core Widgets (w/Observers)"
|
||||
description = "Demonstrates use of core (headless) widgets in Bevy UI, with Observers"
|
||||
category = "UI (User Interface)"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "scrollbars"
|
||||
path = "examples/ui/scrollbars.rs"
|
||||
doc-scrape-examples = true
|
||||
|
||||
[package.metadata.example.scrollbars]
|
||||
name = "Scrollbars"
|
||||
description = "Demonstrates use of core scrollbar in Bevy UI"
|
||||
category = "UI (User Interface)"
|
||||
wasm = true
|
||||
|
||||
[[example]]
|
||||
name = "feathers"
|
||||
path = "examples/ui/feathers.rs"
|
||||
doc-scrape-examples = true
|
||||
required-features = ["experimental_bevy_feathers"]
|
||||
|
||||
[package.metadata.example.feathers]
|
||||
name = "Feathers Widgets"
|
||||
description = "Gallery of Feathers Widgets"
|
||||
category = "UI (User Interface)"
|
||||
wasm = true
|
||||
hidden = true
|
||||

@@ -9,20 +9,20 @@
(
node_type: Blend,
mask: 0,
weight: 1.0,
weight: 0.5,
),
(
node_type: Clip(AssetPath("models/animated/Fox.glb#Animation0")),
node_type: Clip("models/animated/Fox.glb#Animation0"),
mask: 0,
weight: 1.0,
),
(
node_type: Clip(AssetPath("models/animated/Fox.glb#Animation1")),
node_type: Clip("models/animated/Fox.glb#Animation1"),
mask: 0,
weight: 1.0,
),
(
node_type: Clip(AssetPath("models/animated/Fox.glb#Animation2")),
node_type: Clip("models/animated/Fox.glb#Animation2"),
mask: 0,
weight: 1.0,
),
BIN assets/lightmaps/caustic_directional_texture.png (new file, 229 KiB; binary file not shown)
BIN assets/lightmaps/faces_pointlight_texture_blurred.png (new file, 551 KiB; binary file not shown)
BIN assets/lightmaps/torch_spotlight_texture.png (new file, 93 KiB; binary file not shown)
BIN assets/models/Faces/faces.glb (new file; binary file not shown)
@ -7,8 +7,8 @@
|
||||
}
|
||||
#import bevy_core_pipeline::tonemapping::tone_mapping
|
||||
|
||||
@group(2) @binding(0) var my_array_texture: texture_2d_array<f32>;
|
||||
@group(2) @binding(1) var my_array_texture_sampler: sampler;
|
||||
@group(3) @binding(0) var my_array_texture: texture_2d_array<f32>;
|
||||
@group(3) @binding(1) var my_array_texture_sampler: sampler;
|
||||
|
||||
@fragment
|
||||
fn fragment(
|
||||
|
@ -3,8 +3,8 @@
|
||||
view_transformations::position_world_to_clip
|
||||
}
|
||||
|
||||
@group(2) @binding(0) var texture: texture_2d<f32>;
|
||||
@group(2) @binding(1) var texture_sampler: sampler;
|
||||
@group(3) @binding(0) var texture: texture_2d<f32>;
|
||||
@group(3) @binding(1) var texture_sampler: sampler;
|
||||
|
||||
struct Vertex {
|
||||
@builtin(instance_index) instance_index: u32,
|
||||
|
@ -15,12 +15,12 @@ struct MaterialBindings {
|
||||
}
|
||||
|
||||
#ifdef BINDLESS
|
||||
@group(2) @binding(0) var<storage> materials: array<MaterialBindings>;
|
||||
@group(2) @binding(10) var<storage> material_color: binding_array<Color>;
|
||||
@group(3) @binding(0) var<storage> materials: array<MaterialBindings>;
|
||||
@group(3) @binding(10) var<storage> material_color: binding_array<Color>;
|
||||
#else // BINDLESS
|
||||
@group(2) @binding(0) var<uniform> material_color: Color;
|
||||
@group(2) @binding(1) var material_color_texture: texture_2d<f32>;
|
||||
@group(2) @binding(2) var material_color_sampler: sampler;
|
||||
@group(3) @binding(0) var<uniform> material_color: Color;
|
||||
@group(3) @binding(1) var material_color_texture: texture_2d<f32>;
|
||||
@group(3) @binding(2) var material_color_sampler: sampler;
|
||||
#endif // BINDLESS
|
||||
|
||||
@fragment
|
||||
|
@ -1,12 +1,12 @@
|
||||
#import bevy_pbr::forward_io::VertexOutput
|
||||
|
||||
#ifdef CUBEMAP_ARRAY
|
||||
@group(2) @binding(0) var base_color_texture: texture_cube_array<f32>;
|
||||
@group(3) @binding(0) var base_color_texture: texture_cube_array<f32>;
|
||||
#else
|
||||
@group(2) @binding(0) var base_color_texture: texture_cube<f32>;
|
||||
@group(3) @binding(0) var base_color_texture: texture_cube<f32>;
|
||||
#endif
|
||||
|
||||
@group(2) @binding(1) var base_color_sampler: sampler;
|
||||
@group(3) @binding(1) var base_color_sampler: sampler;
|
||||
|
||||
@fragment
|
||||
fn fragment(
|
||||
|
@ -3,10 +3,10 @@ layout(location = 0) in vec2 v_Uv;
|
||||
|
||||
layout(location = 0) out vec4 o_Target;
|
||||
|
||||
layout(set = 2, binding = 0) uniform vec4 CustomMaterial_color;
|
||||
layout(set = 3, binding = 0) uniform vec4 CustomMaterial_color;
|
||||
|
||||
layout(set = 2, binding = 1) uniform texture2D CustomMaterial_texture;
|
||||
layout(set = 2, binding = 2) uniform sampler CustomMaterial_sampler;
|
||||
layout(set = 3, binding = 1) uniform texture2D CustomMaterial_texture;
|
||||
layout(set = 3, binding = 2) uniform sampler CustomMaterial_sampler;
|
||||
|
||||
// wgsl modules can be imported and used in glsl
|
||||
// FIXME - this doesn't work any more ...
|
||||
|
@ -25,9 +25,9 @@ struct Mesh {
|
||||
};
|
||||
|
||||
#ifdef PER_OBJECT_BUFFER_BATCH_SIZE
|
||||
layout(set = 1, binding = 0) uniform Mesh Meshes[#{PER_OBJECT_BUFFER_BATCH_SIZE}];
|
||||
layout(set = 2, binding = 0) uniform Mesh Meshes[#{PER_OBJECT_BUFFER_BATCH_SIZE}];
|
||||
#else
|
||||
layout(set = 1, binding = 0) readonly buffer _Meshes {
|
||||
layout(set = 2, binding = 0) readonly buffer _Meshes {
|
||||
Mesh Meshes[];
|
||||
};
|
||||
#endif // PER_OBJECT_BUFFER_BATCH_SIZE
|
||||
|
@ -10,7 +10,7 @@ struct CustomMaterial {
|
||||
time: vec4<f32>,
|
||||
}
|
||||
|
||||
@group(2) @binding(0) var<uniform> material: CustomMaterial;
|
||||
@group(3) @binding(0) var<uniform> material: CustomMaterial;
|
||||
|
||||
@fragment
|
||||
fn fragment(
|
||||
|
@ -2,9 +2,9 @@
|
||||
// we can import items from shader modules in the assets folder with a quoted path
|
||||
#import "shaders/custom_material_import.wgsl"::COLOR_MULTIPLIER
|
||||
|
||||
@group(2) @binding(0) var<uniform> material_color: vec4<f32>;
|
||||
@group(2) @binding(1) var material_color_texture: texture_2d<f32>;
|
||||
@group(2) @binding(2) var material_color_sampler: sampler;
|
||||
@group(3) @binding(0) var<uniform> material_color: vec4<f32>;
|
||||
@group(3) @binding(1) var material_color_texture: texture_2d<f32>;
|
||||
@group(3) @binding(2) var material_color_sampler: sampler;
|
||||
|
||||
@fragment
|
||||
fn fragment(
|
||||
|
@ -4,8 +4,8 @@
|
||||
utils::coords_to_viewport_uv,
|
||||
}
|
||||
|
||||
@group(2) @binding(0) var texture: texture_2d<f32>;
|
||||
@group(2) @binding(1) var texture_sampler: sampler;
|
||||
@group(3) @binding(0) var texture: texture_2d<f32>;
|
||||
@group(3) @binding(1) var texture_sampler: sampler;
|
||||
|
||||
@fragment
|
||||
fn fragment(
|
||||
|
@ -3,7 +3,7 @@
|
||||
struct CustomMaterial {
|
||||
color: vec4<f32>,
|
||||
};
|
||||
@group(2) @binding(0) var<uniform> material: CustomMaterial;
|
||||
@group(3) @binding(0) var<uniform> material: CustomMaterial;
|
||||
|
||||
struct Vertex {
|
||||
@builtin(instance_index) instance_index: u32,
|
||||
|
@ -19,7 +19,7 @@ struct MyExtendedMaterial {
|
||||
quantize_steps: u32,
|
||||
}
|
||||
|
||||
@group(2) @binding(100)
|
||||
@group(3) @binding(100)
|
||||
var<uniform> my_extended_material: MyExtendedMaterial;
|
||||
|
||||
@fragment
|
||||
|
@ -42,19 +42,19 @@ struct ExampleBindlessExtendedMaterial {
|
||||
|
||||
// The indices of the bindless resources in the bindless resource arrays, for
|
||||
// the `ExampleBindlessExtension` fields.
|
||||
@group(2) @binding(100) var<storage> example_extended_material_indices:
|
||||
@group(3) @binding(100) var<storage> example_extended_material_indices:
|
||||
array<ExampleBindlessExtendedMaterialIndices>;
|
||||
// An array that holds the `ExampleBindlessExtendedMaterial` plain old data,
|
||||
// indexed by `ExampleBindlessExtendedMaterialIndices.material`.
|
||||
@group(2) @binding(101) var<storage> example_extended_material:
|
||||
@group(3) @binding(101) var<storage> example_extended_material:
|
||||
array<ExampleBindlessExtendedMaterial>;
|
||||
|
||||
#else // BINDLESS
|
||||
|
||||
// In non-bindless mode, we simply use a uniform for the plain old data.
|
||||
@group(2) @binding(50) var<uniform> example_extended_material: ExampleBindlessExtendedMaterial;
|
||||
@group(2) @binding(51) var modulate_texture: texture_2d<f32>;
|
||||
@group(2) @binding(52) var modulate_sampler: sampler;
|
||||
@group(3) @binding(50) var<uniform> example_extended_material: ExampleBindlessExtendedMaterial;
|
||||
@group(3) @binding(51) var modulate_texture: texture_2d<f32>;
|
||||
@group(3) @binding(52) var modulate_sampler: sampler;
|
||||
|
||||
#endif // BINDLESS
|
||||
|
||||
|
@ -1,22 +1,22 @@
|
||||
#import bevy_pbr::forward_io::VertexOutput
|
||||
|
||||
@group(2) @binding(0) var test_texture_1d: texture_1d<f32>;
|
||||
@group(2) @binding(1) var test_texture_1d_sampler: sampler;
|
||||
@group(3) @binding(0) var test_texture_1d: texture_1d<f32>;
|
||||
@group(3) @binding(1) var test_texture_1d_sampler: sampler;
|
||||
|
||||
@group(2) @binding(2) var test_texture_2d: texture_2d<f32>;
|
||||
@group(2) @binding(3) var test_texture_2d_sampler: sampler;
|
||||
@group(3) @binding(2) var test_texture_2d: texture_2d<f32>;
|
||||
@group(3) @binding(3) var test_texture_2d_sampler: sampler;
|
||||
|
||||
@group(2) @binding(4) var test_texture_2d_array: texture_2d_array<f32>;
|
||||
@group(2) @binding(5) var test_texture_2d_array_sampler: sampler;
|
||||
@group(3) @binding(4) var test_texture_2d_array: texture_2d_array<f32>;
|
||||
@group(3) @binding(5) var test_texture_2d_array_sampler: sampler;
|
||||
|
||||
@group(2) @binding(6) var test_texture_cube: texture_cube<f32>;
|
||||
@group(2) @binding(7) var test_texture_cube_sampler: sampler;
|
||||
@group(3) @binding(6) var test_texture_cube: texture_cube<f32>;
|
||||
@group(3) @binding(7) var test_texture_cube_sampler: sampler;
|
||||
|
||||
@group(2) @binding(8) var test_texture_cube_array: texture_cube_array<f32>;
|
||||
@group(2) @binding(9) var test_texture_cube_array_sampler: sampler;
|
||||
@group(3) @binding(8) var test_texture_cube_array: texture_cube_array<f32>;
|
||||
@group(3) @binding(9) var test_texture_cube_array_sampler: sampler;
|
||||
|
||||
@group(2) @binding(10) var test_texture_3d: texture_3d<f32>;
|
||||
@group(2) @binding(11) var test_texture_3d_sampler: sampler;
|
||||
@group(3) @binding(10) var test_texture_3d: texture_3d<f32>;
|
||||
@group(3) @binding(11) var test_texture_3d_sampler: sampler;
|
||||
|
||||
@fragment
|
||||
fn fragment(in: VertexOutput) {}
|
||||
|
@ -12,7 +12,7 @@ struct VoxelVisualizationIrradianceVolumeInfo {
|
||||
intensity: f32,
|
||||
}
|
||||
|
||||
@group(2) @binding(100)
|
||||
@group(3) @binding(100)
|
||||
var<uniform> irradiance_volume_info: VoxelVisualizationIrradianceVolumeInfo;
|
||||
|
||||
@fragment
|
||||
|
@ -4,7 +4,7 @@ struct LineMaterial {
|
||||
color: vec4<f32>,
|
||||
};
|
||||
|
||||
@group(2) @binding(0) var<uniform> material: LineMaterial;
|
||||
@group(3) @binding(0) var<uniform> material: LineMaterial;
|
||||
|
||||
@fragment
|
||||
fn fragment(
|
||||

assets/shaders/manual_material.wgsl (new file, 11 lines)
@@ -0,0 +1,11 @@
#import bevy_pbr::forward_io::VertexOutput

@group(3) @binding(0) var material_color_texture: texture_2d<f32>;
@group(3) @binding(1) var material_color_sampler: sampler;

@fragment
fn fragment(
    mesh: VertexOutput,
) -> @location(0) vec4<f32> {
    return textureSample(material_color_texture, material_color_sampler, mesh.uv);
}
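For context on the recurring `@group(2)` to `@group(3)` change in these shader diffs: custom material bindings now live in bind group 3. The `manual_material` example above wires its bind group up by hand through the mid-level APIs, but in the common case the group index is supplied by Bevy's material plumbing and only the `@binding` numbers are authored by the user. A minimal sketch of that high-level route, assuming the stable `AsBindGroup`/`Material` derive API and a hypothetical shader path (not part of this diff):

use bevy::{
    prelude::*,
    render::render_resource::{AsBindGroup, ShaderRef},
};

// A texture-plus-sampler material matching @binding(0) and @binding(1) in the WGSL above.
#[derive(Asset, TypePath, AsBindGroup, Clone)]
struct SimpleTextureMaterial {
    #[texture(0)]
    #[sampler(1)]
    color_texture: Handle<Image>,
}

impl Material for SimpleTextureMaterial {
    fn fragment_shader() -> ShaderRef {
        // Hypothetical asset path, used only for this sketch.
        "shaders/manual_material.wgsl".into()
    }
}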
@ -4,7 +4,7 @@ struct CustomMaterial {
|
||||
color: vec4<f32>,
|
||||
};
|
||||
|
||||
@group(2) @binding(0) var<uniform> material: CustomMaterial;
|
||||
@group(3) @binding(0) var<uniform> material: CustomMaterial;
|
||||
|
||||
@fragment
|
||||
fn fragment(
|
||||
|
@ -11,7 +11,7 @@ struct ShowPrepassSettings {
|
||||
padding_1: u32,
|
||||
padding_2: u32,
|
||||
}
|
||||
@group(2) @binding(0) var<uniform> settings: ShowPrepassSettings;
|
||||
@group(3) @binding(0) var<uniform> settings: ShowPrepassSettings;
|
||||
|
||||
@fragment
|
||||
fn fragment(
|
||||
|
@ -3,7 +3,7 @@
|
||||
view_transformations::position_world_to_clip
|
||||
}
|
||||
|
||||
@group(2) @binding(0) var<storage, read> colors: array<vec4<f32>, 5>;
|
||||
@group(3) @binding(0) var<storage, read> colors: array<vec4<f32>, 5>;
|
||||
|
||||
struct Vertex {
|
||||
@builtin(instance_index) instance_index: u32,
|
||||
|
@ -1,7 +1,7 @@
|
||||
#import bevy_pbr::forward_io::VertexOutput
|
||||
|
||||
@group(2) @binding(0) var textures: binding_array<texture_2d<f32>>;
|
||||
@group(2) @binding(1) var nearest_sampler: sampler;
|
||||
@group(3) @binding(0) var textures: binding_array<texture_2d<f32>>;
|
||||
@group(3) @binding(1) var nearest_sampler: sampler;
|
||||
// We can also have array of samplers
|
||||
// var samplers: binding_array<sampler>;
|
||||
|
||||
|
@ -23,9 +23,9 @@ struct WaterSettings {
|
||||
|
||||
@group(0) @binding(1) var<uniform> globals: Globals;
|
||||
|
||||
@group(2) @binding(100) var water_normals_texture: texture_2d<f32>;
|
||||
@group(2) @binding(101) var water_normals_sampler: sampler;
|
||||
@group(2) @binding(102) var<uniform> water_settings: WaterSettings;
|
||||
@group(3) @binding(100) var water_normals_texture: texture_2d<f32>;
|
||||
@group(3) @binding(101) var water_normals_sampler: sampler;
|
||||
@group(3) @binding(102) var<uniform> water_settings: WaterSettings;
|
||||
|
||||
// Samples a single octave of noise and returns the resulting normal.
|
||||
fn sample_noise_octave(uv: vec2<f32>, strength: f32) -> vec3<f32> {
|
||||
|
@ -10,7 +10,7 @@ autobenches = false
|
||||
[dependencies]
|
||||
# The primary crate that runs and analyzes our benchmarks. This is a regular dependency because the
|
||||
# `bench!` macro refers to it in its documentation.
|
||||
criterion = { version = "0.5.1", features = ["html_reports"] }
|
||||
criterion = { version = "0.6.0", features = ["html_reports"] }
|
||||
|
||||
[dev-dependencies]
|
||||
# Bevy crates
|
||||
|
67
benches/benches/bevy_ecs/bundles/insert_many.rs
Normal file
67
benches/benches/bevy_ecs/bundles/insert_many.rs
Normal file
@ -0,0 +1,67 @@
|
||||
use benches::bench;
|
||||
use bevy_ecs::{component::Component, world::World};
|
||||
use criterion::Criterion;
|
||||
|
||||
const ENTITY_COUNT: usize = 2_000;
|
||||
|
||||
#[derive(Component)]
|
||||
struct C<const N: usize>(usize);
|
||||
|
||||
pub fn insert_many(criterion: &mut Criterion) {
|
||||
let mut group = criterion.benchmark_group(bench!("insert_many"));
|
||||
|
||||
group.bench_function("all", |bencher| {
|
||||
let mut world = World::new();
|
||||
bencher.iter(|| {
|
||||
for _ in 0..ENTITY_COUNT {
|
||||
world
|
||||
.spawn_empty()
|
||||
.insert(C::<0>(1))
|
||||
.insert(C::<1>(1))
|
||||
.insert(C::<2>(1))
|
||||
.insert(C::<3>(1))
|
||||
.insert(C::<4>(1))
|
||||
.insert(C::<5>(1))
|
||||
.insert(C::<6>(1))
|
||||
.insert(C::<7>(1))
|
||||
.insert(C::<8>(1))
|
||||
.insert(C::<9>(1))
|
||||
.insert(C::<10>(1))
|
||||
.insert(C::<11>(1))
|
||||
.insert(C::<12>(1))
|
||||
.insert(C::<13>(1))
|
||||
.insert(C::<14>(1));
|
||||
}
|
||||
world.clear_entities();
|
||||
});
|
||||
});
|
||||
|
||||
group.bench_function("only_last", |bencher| {
|
||||
let mut world = World::new();
|
||||
bencher.iter(|| {
|
||||
for _ in 0..ENTITY_COUNT {
|
||||
world
|
||||
.spawn((
|
||||
C::<0>(1),
|
||||
C::<1>(1),
|
||||
C::<2>(1),
|
||||
C::<3>(1),
|
||||
C::<4>(1),
|
||||
C::<5>(1),
|
||||
C::<6>(1),
|
||||
C::<7>(1),
|
||||
C::<8>(1),
|
||||
C::<9>(1),
|
||||
C::<10>(1),
|
||||
C::<11>(1),
|
||||
C::<12>(1),
|
||||
C::<13>(1),
|
||||
))
|
||||
.insert(C::<14>(1));
|
||||
}
|
||||
world.clear_entities();
|
||||
});
|
||||
});
|
||||
|
||||
group.finish();
|
||||
}
|
14
benches/benches/bevy_ecs/bundles/mod.rs
Normal file
14
benches/benches/bevy_ecs/bundles/mod.rs
Normal file
@ -0,0 +1,14 @@
|
||||
use criterion::criterion_group;
|
||||
|
||||
mod insert_many;
|
||||
mod spawn_many;
|
||||
mod spawn_many_zst;
|
||||
mod spawn_one_zst;
|
||||
|
||||
criterion_group!(
|
||||
benches,
|
||||
spawn_one_zst::spawn_one_zst,
|
||||
spawn_many_zst::spawn_many_zst,
|
||||
spawn_many::spawn_many,
|
||||
insert_many::insert_many,
|
||||
);
|
40
benches/benches/bevy_ecs/bundles/spawn_many.rs
Normal file
40
benches/benches/bevy_ecs/bundles/spawn_many.rs
Normal file
@ -0,0 +1,40 @@
|
||||
use benches::bench;
|
||||
use bevy_ecs::{component::Component, world::World};
|
||||
use criterion::Criterion;
|
||||
|
||||
const ENTITY_COUNT: usize = 2_000;
|
||||
|
||||
#[derive(Component)]
|
||||
struct C<const N: usize>(usize);
|
||||
|
||||
pub fn spawn_many(criterion: &mut Criterion) {
|
||||
let mut group = criterion.benchmark_group(bench!("spawn_many"));
|
||||
|
||||
group.bench_function("static", |bencher| {
|
||||
let mut world = World::new();
|
||||
bencher.iter(|| {
|
||||
for _ in 0..ENTITY_COUNT {
|
||||
world.spawn((
|
||||
C::<0>(1),
|
||||
C::<1>(1),
|
||||
C::<2>(1),
|
||||
C::<3>(1),
|
||||
C::<4>(1),
|
||||
C::<5>(1),
|
||||
C::<6>(1),
|
||||
C::<7>(1),
|
||||
C::<8>(1),
|
||||
C::<9>(1),
|
||||
C::<10>(1),
|
||||
C::<11>(1),
|
||||
C::<12>(1),
|
||||
C::<13>(1),
|
||||
C::<14>(1),
|
||||
));
|
||||
}
|
||||
world.clear_entities();
|
||||
});
|
||||
});
|
||||
|
||||
group.finish();
|
||||
}
|
27
benches/benches/bevy_ecs/bundles/spawn_many_zst.rs
Normal file
27
benches/benches/bevy_ecs/bundles/spawn_many_zst.rs
Normal file
@ -0,0 +1,27 @@
|
||||
use benches::bench;
|
||||
use bevy_ecs::{component::Component, world::World};
|
||||
use criterion::Criterion;
|
||||
|
||||
const ENTITY_COUNT: usize = 2_000;
|
||||
|
||||
#[derive(Component)]
|
||||
struct C<const N: usize>;
|
||||
|
||||
pub fn spawn_many_zst(criterion: &mut Criterion) {
|
||||
let mut group = criterion.benchmark_group(bench!("spawn_many_zst"));
|
||||
|
||||
group.bench_function("static", |bencher| {
|
||||
let mut world = World::new();
|
||||
bencher.iter(|| {
|
||||
for _ in 0..ENTITY_COUNT {
|
||||
world.spawn((
|
||||
C::<0>, C::<1>, C::<2>, C::<3>, C::<4>, C::<5>, C::<6>, C::<7>, C::<8>, C::<9>,
|
||||
C::<10>, C::<11>, C::<12>, C::<13>, C::<14>,
|
||||
));
|
||||
}
|
||||
world.clear_entities();
|
||||
});
|
||||
});
|
||||
|
||||
group.finish();
|
||||
}
|
24
benches/benches/bevy_ecs/bundles/spawn_one_zst.rs
Normal file
24
benches/benches/bevy_ecs/bundles/spawn_one_zst.rs
Normal file
@ -0,0 +1,24 @@
|
||||
use benches::bench;
|
||||
use bevy_ecs::{component::Component, world::World};
|
||||
use criterion::Criterion;
|
||||
|
||||
const ENTITY_COUNT: usize = 10_000;
|
||||
|
||||
#[derive(Component)]
|
||||
struct A;
|
||||
|
||||
pub fn spawn_one_zst(criterion: &mut Criterion) {
|
||||
let mut group = criterion.benchmark_group(bench!("spawn_one_zst"));
|
||||
|
||||
group.bench_function("static", |bencher| {
|
||||
let mut world = World::new();
|
||||
bencher.iter(|| {
|
||||
for _ in 0..ENTITY_COUNT {
|
||||
world.spawn(A);
|
||||
}
|
||||
world.clear_entities();
|
||||
});
|
||||
});
|
||||
|
||||
group.finish();
|
||||
}

@@ -10,19 +10,19 @@ fn send(c: &mut Criterion) {
group.warm_up_time(core::time::Duration::from_millis(500));
group.measurement_time(core::time::Duration::from_secs(4));
for count in [100, 1_000, 10_000] {
group.bench_function(format!("size_4_events_{}", count), |b| {
group.bench_function(format!("size_4_events_{count}"), |b| {
let mut bench = send::Benchmark::<4>::new(count);
b.iter(move || bench.run());
});
}
for count in [100, 1_000, 10_000] {
group.bench_function(format!("size_16_events_{}", count), |b| {
group.bench_function(format!("size_16_events_{count}"), |b| {
let mut bench = send::Benchmark::<16>::new(count);
b.iter(move || bench.run());
});
}
for count in [100, 1_000, 10_000] {
group.bench_function(format!("size_512_events_{}", count), |b| {
group.bench_function(format!("size_512_events_{count}"), |b| {
let mut bench = send::Benchmark::<512>::new(count);
b.iter(move || bench.run());
});
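The repeated change across these benchmark hunks is purely mechanical: positional `format!` arguments are replaced with inline (captured) format arguments, stable since Rust 1.58. A tiny self-contained illustration (the names are illustrative):

fn main() {
    let count = 10_000;
    // Old style: positional argument.
    let old = format!("size_4_events_{}", count);
    // New style: the variable is captured directly by name in the format string.
    let new = format!("size_4_events_{count}");
    assert_eq!(old, new);
}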
@ -35,19 +35,19 @@ fn iter(c: &mut Criterion) {
|
||||
group.warm_up_time(core::time::Duration::from_millis(500));
|
||||
group.measurement_time(core::time::Duration::from_secs(4));
|
||||
for count in [100, 1_000, 10_000] {
|
||||
group.bench_function(format!("size_4_events_{}", count), |b| {
|
||||
group.bench_function(format!("size_4_events_{count}"), |b| {
|
||||
let mut bench = iter::Benchmark::<4>::new(count);
|
||||
b.iter(move || bench.run());
|
||||
});
|
||||
}
|
||||
for count in [100, 1_000, 10_000] {
|
||||
group.bench_function(format!("size_16_events_{}", count), |b| {
|
||||
group.bench_function(format!("size_16_events_{count}"), |b| {
|
||||
let mut bench = iter::Benchmark::<4>::new(count);
|
||||
b.iter(move || bench.run());
|
||||
});
|
||||
}
|
||||
for count in [100, 1_000, 10_000] {
|
||||
group.bench_function(format!("size_512_events_{}", count), |b| {
|
||||
group.bench_function(format!("size_512_events_{count}"), |b| {
|
||||
let mut bench = iter::Benchmark::<512>::new(count);
|
||||
b.iter(move || bench.run());
|
||||
});
|
||||
|
@ -130,7 +130,7 @@ fn par_iter_simple(c: &mut Criterion) {
|
||||
group.warm_up_time(core::time::Duration::from_millis(500));
|
||||
group.measurement_time(core::time::Duration::from_secs(4));
|
||||
for f in [0, 10, 100, 1000] {
|
||||
group.bench_function(format!("with_{}_fragment", f), |b| {
|
||||
group.bench_function(format!("with_{f}_fragment"), |b| {
|
||||
let mut bench = par_iter_simple::Benchmark::new(f);
|
||||
b.iter(move || bench.run());
|
||||
});
|
||||
|
@ -5,6 +5,7 @@
|
||||
|
||||
use criterion::criterion_main;
|
||||
|
||||
mod bundles;
|
||||
mod change_detection;
|
||||
mod components;
|
||||
mod empty_archetypes;
|
||||
@ -18,6 +19,7 @@ mod scheduling;
|
||||
mod world;
|
||||
|
||||
criterion_main!(
|
||||
bundles::benches,
|
||||
change_detection::benches,
|
||||
components::benches,
|
||||
empty_archetypes::benches,
|
||||
|
@ -24,7 +24,7 @@ pub fn run_condition_yes(criterion: &mut Criterion) {
|
||||
}
|
||||
// run once to initialize systems
|
||||
schedule.run(&mut world);
|
||||
group.bench_function(format!("{}_systems", amount), |bencher| {
|
||||
group.bench_function(format!("{amount}_systems"), |bencher| {
|
||||
bencher.iter(|| {
|
||||
schedule.run(&mut world);
|
||||
});
|
||||
@ -46,7 +46,7 @@ pub fn run_condition_no(criterion: &mut Criterion) {
|
||||
}
|
||||
// run once to initialize systems
|
||||
schedule.run(&mut world);
|
||||
group.bench_function(format!("{}_systems", amount), |bencher| {
|
||||
group.bench_function(format!("{amount}_systems"), |bencher| {
|
||||
bencher.iter(|| {
|
||||
schedule.run(&mut world);
|
||||
});
|
||||
@ -77,7 +77,7 @@ pub fn run_condition_yes_with_query(criterion: &mut Criterion) {
|
||||
}
|
||||
// run once to initialize systems
|
||||
schedule.run(&mut world);
|
||||
group.bench_function(format!("{}_systems", amount), |bencher| {
|
||||
group.bench_function(format!("{amount}_systems"), |bencher| {
|
||||
bencher.iter(|| {
|
||||
schedule.run(&mut world);
|
||||
});
|
||||
@ -105,7 +105,7 @@ pub fn run_condition_yes_with_resource(criterion: &mut Criterion) {
|
||||
}
|
||||
// run once to initialize systems
|
||||
schedule.run(&mut world);
|
||||
group.bench_function(format!("{}_systems", amount), |bencher| {
|
||||
group.bench_function(format!("{amount}_systems"), |bencher| {
|
||||
bencher.iter(|| {
|
||||
schedule.run(&mut world);
|
||||
});
|
||||
|
@ -26,7 +26,7 @@ pub fn empty_systems(criterion: &mut Criterion) {
|
||||
schedule.add_systems(empty);
|
||||
}
|
||||
schedule.run(&mut world);
|
||||
group.bench_function(format!("{}_systems", amount), |bencher| {
|
||||
group.bench_function(format!("{amount}_systems"), |bencher| {
|
||||
bencher.iter(|| {
|
||||
schedule.run(&mut world);
|
||||
});
|
||||
@ -38,7 +38,7 @@ pub fn empty_systems(criterion: &mut Criterion) {
|
||||
schedule.add_systems((empty, empty, empty, empty, empty));
|
||||
}
|
||||
schedule.run(&mut world);
|
||||
group.bench_function(format!("{}_systems", amount), |bencher| {
|
||||
group.bench_function(format!("{amount}_systems"), |bencher| {
|
||||
bencher.iter(|| {
|
||||
schedule.run(&mut world);
|
||||
});
|
||||
@ -79,10 +79,7 @@ pub fn busy_systems(criterion: &mut Criterion) {
|
||||
}
|
||||
schedule.run(&mut world);
|
||||
group.bench_function(
|
||||
format!(
|
||||
"{:02}x_entities_{:02}_systems",
|
||||
entity_bunches, system_amount
|
||||
),
|
||||
format!("{entity_bunches:02}x_entities_{system_amount:02}_systems"),
|
||||
|bencher| {
|
||||
bencher.iter(|| {
|
||||
schedule.run(&mut world);
|
||||
@ -128,10 +125,7 @@ pub fn contrived(criterion: &mut Criterion) {
|
||||
}
|
||||
schedule.run(&mut world);
|
||||
group.bench_function(
|
||||
format!(
|
||||
"{:02}x_entities_{:02}_systems",
|
||||
entity_bunches, system_amount
|
||||
),
|
||||
format!("{entity_bunches:02}x_entities_{system_amount:02}_systems"),
|
||||
|bencher| {
|
||||
bencher.iter(|| {
|
||||
schedule.run(&mut world);
|
||||
|
@ -37,7 +37,7 @@ pub fn spawn_commands(criterion: &mut Criterion) {
|
||||
group.measurement_time(core::time::Duration::from_secs(4));
|
||||
|
||||
for entity_count in [100, 1_000, 10_000] {
|
||||
group.bench_function(format!("{}_entities", entity_count), |bencher| {
|
||||
group.bench_function(format!("{entity_count}_entities"), |bencher| {
|
||||
let mut world = World::default();
|
||||
let mut command_queue = CommandQueue::default();
|
||||
|
||||
@ -68,7 +68,7 @@ pub fn nonempty_spawn_commands(criterion: &mut Criterion) {
|
||||
group.measurement_time(core::time::Duration::from_secs(4));
|
||||
|
||||
for entity_count in [100, 1_000, 10_000] {
|
||||
group.bench_function(format!("{}_entities", entity_count), |bencher| {
|
||||
group.bench_function(format!("{entity_count}_entities"), |bencher| {
|
||||
let mut world = World::default();
|
||||
let mut command_queue = CommandQueue::default();
|
||||
|
||||
@ -162,7 +162,7 @@ pub fn fake_commands(criterion: &mut Criterion) {
|
||||
group.measurement_time(core::time::Duration::from_secs(4));
|
||||
|
||||
for command_count in [100, 1_000, 10_000] {
|
||||
group.bench_function(format!("{}_commands", command_count), |bencher| {
|
||||
group.bench_function(format!("{command_count}_commands"), |bencher| {
|
||||
let mut world = World::default();
|
||||
let mut command_queue = CommandQueue::default();
|
||||
|
||||
@ -207,7 +207,7 @@ pub fn sized_commands_impl<T: Default + Command>(criterion: &mut Criterion) {
|
||||
group.measurement_time(core::time::Duration::from_secs(4));
|
||||
|
||||
for command_count in [100, 1_000, 10_000] {
|
||||
group.bench_function(format!("{}_commands", command_count), |bencher| {
|
||||
group.bench_function(format!("{command_count}_commands"), |bencher| {
|
||||
let mut world = World::default();
|
||||
let mut command_queue = CommandQueue::default();
|
||||
|
||||
|
@ -13,7 +13,7 @@ pub fn world_despawn(criterion: &mut Criterion) {
|
||||
group.measurement_time(core::time::Duration::from_secs(4));
|
||||
|
||||
for entity_count in [1, 100, 10_000] {
|
||||
group.bench_function(format!("{}_entities", entity_count), |bencher| {
|
||||
group.bench_function(format!("{entity_count}_entities"), |bencher| {
|
||||
bencher.iter_batched_ref(
|
||||
|| {
|
||||
let mut world = World::default();
|
||||
|
@ -13,7 +13,7 @@ pub fn world_despawn_recursive(criterion: &mut Criterion) {
|
||||
group.measurement_time(core::time::Duration::from_secs(4));
|
||||
|
||||
for entity_count in [1, 100, 10_000] {
|
||||
group.bench_function(format!("{}_entities", entity_count), |bencher| {
|
||||
group.bench_function(format!("{entity_count}_entities"), |bencher| {
|
||||
bencher.iter_batched_ref(
|
||||
|| {
|
||||
let mut world = World::default();
|
||||
|
@ -13,7 +13,7 @@ pub fn world_spawn(criterion: &mut Criterion) {
|
||||
group.measurement_time(core::time::Duration::from_secs(4));
|
||||
|
||||
for entity_count in [1, 100, 10_000] {
|
||||
group.bench_function(format!("{}_entities", entity_count), |bencher| {
|
||||
group.bench_function(format!("{entity_count}_entities"), |bencher| {
|
||||
let mut world = World::default();
|
||||
bencher.iter(|| {
|
||||
for _ in 0..entity_count {
|
||||
|
@ -49,7 +49,7 @@ pub fn world_entity(criterion: &mut Criterion) {
|
||||
group.measurement_time(core::time::Duration::from_secs(4));
|
||||
|
||||
for entity_count in RANGE.map(|i| i * 10_000) {
|
||||
group.bench_function(format!("{}_entities", entity_count), |bencher| {
|
||||
group.bench_function(format!("{entity_count}_entities"), |bencher| {
|
||||
let world = setup::<Table>(entity_count);
|
||||
|
||||
bencher.iter(|| {
|
||||
@ -72,7 +72,7 @@ pub fn world_get(criterion: &mut Criterion) {
|
||||
group.measurement_time(core::time::Duration::from_secs(4));
|
||||
|
||||
for entity_count in RANGE.map(|i| i * 10_000) {
|
||||
group.bench_function(format!("{}_entities_table", entity_count), |bencher| {
|
||||
group.bench_function(format!("{entity_count}_entities_table"), |bencher| {
|
||||
let world = setup::<Table>(entity_count);
|
||||
|
||||
bencher.iter(|| {
|
||||
@ -84,7 +84,7 @@ pub fn world_get(criterion: &mut Criterion) {
|
||||
}
|
||||
});
|
||||
});
|
||||
group.bench_function(format!("{}_entities_sparse", entity_count), |bencher| {
|
||||
group.bench_function(format!("{entity_count}_entities_sparse"), |bencher| {
|
||||
let world = setup::<Sparse>(entity_count);
|
||||
|
||||
bencher.iter(|| {
|
||||
@ -107,7 +107,7 @@ pub fn world_query_get(criterion: &mut Criterion) {
|
||||
group.measurement_time(core::time::Duration::from_secs(4));
|
||||
|
||||
for entity_count in RANGE.map(|i| i * 10_000) {
|
||||
group.bench_function(format!("{}_entities_table", entity_count), |bencher| {
|
||||
group.bench_function(format!("{entity_count}_entities_table"), |bencher| {
|
||||
let mut world = setup::<Table>(entity_count);
|
||||
let mut query = world.query::<&Table>();
|
||||
|
||||
@ -120,7 +120,7 @@ pub fn world_query_get(criterion: &mut Criterion) {
|
||||
}
|
||||
});
|
||||
});
|
||||
group.bench_function(format!("{}_entities_table_wide", entity_count), |bencher| {
|
||||
group.bench_function(format!("{entity_count}_entities_table_wide"), |bencher| {
|
||||
let mut world = setup_wide::<(
|
||||
WideTable<0>,
|
||||
WideTable<1>,
|
||||
@ -147,7 +147,7 @@ pub fn world_query_get(criterion: &mut Criterion) {
|
||||
}
|
||||
});
|
||||
});
|
||||
group.bench_function(format!("{}_entities_sparse", entity_count), |bencher| {
|
||||
group.bench_function(format!("{entity_count}_entities_sparse"), |bencher| {
|
||||
let mut world = setup::<Sparse>(entity_count);
|
||||
let mut query = world.query::<&Sparse>();
|
||||
|
||||
@ -160,37 +160,33 @@ pub fn world_query_get(criterion: &mut Criterion) {
|
||||
}
|
||||
});
|
||||
});
|
||||
group.bench_function(
|
||||
format!("{}_entities_sparse_wide", entity_count),
|
||||
|bencher| {
|
||||
let mut world = setup_wide::<(
|
||||
WideSparse<0>,
|
||||
WideSparse<1>,
|
||||
WideSparse<2>,
|
||||
WideSparse<3>,
|
||||
WideSparse<4>,
|
||||
WideSparse<5>,
|
||||
)>(entity_count);
|
||||
let mut query = world.query::<(
|
||||
&WideSparse<0>,
|
||||
&WideSparse<1>,
|
||||
&WideSparse<2>,
|
||||
&WideSparse<3>,
|
||||
&WideSparse<4>,
|
||||
&WideSparse<5>,
|
||||
)>();
|
||||
group.bench_function(format!("{entity_count}_entities_sparse_wide"), |bencher| {
|
||||
let mut world = setup_wide::<(
|
||||
WideSparse<0>,
|
||||
WideSparse<1>,
|
||||
WideSparse<2>,
|
||||
WideSparse<3>,
|
||||
WideSparse<4>,
|
||||
WideSparse<5>,
|
||||
)>(entity_count);
|
||||
let mut query = world.query::<(
|
||||
&WideSparse<0>,
|
||||
&WideSparse<1>,
|
||||
&WideSparse<2>,
|
||||
&WideSparse<3>,
|
||||
&WideSparse<4>,
|
||||
&WideSparse<5>,
|
||||
)>();
|
||||
|
||||
bencher.iter(|| {
|
||||
for i in 0..entity_count {
|
||||
// SAFETY: Range is exclusive.
|
||||
let entity = Entity::from_raw(EntityRow::new(unsafe {
|
||||
NonMaxU32::new_unchecked(i)
|
||||
}));
|
||||
assert!(query.get(&world, entity).is_ok());
|
||||
}
|
||||
});
|
||||
},
|
||||
);
|
||||
bencher.iter(|| {
|
||||
for i in 0..entity_count {
|
||||
// SAFETY: Range is exclusive.
|
||||
let entity =
|
||||
Entity::from_raw(EntityRow::new(unsafe { NonMaxU32::new_unchecked(i) }));
|
||||
assert!(query.get(&world, entity).is_ok());
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
group.finish();
|
||||
@ -202,7 +198,7 @@ pub fn world_query_iter(criterion: &mut Criterion) {
|
||||
group.measurement_time(core::time::Duration::from_secs(4));
|
||||
|
||||
for entity_count in RANGE.map(|i| i * 10_000) {
|
||||
group.bench_function(format!("{}_entities_table", entity_count), |bencher| {
|
||||
group.bench_function(format!("{entity_count}_entities_table"), |bencher| {
|
||||
let mut world = setup::<Table>(entity_count);
|
||||
let mut query = world.query::<&Table>();
|
||||
|
||||
@ -216,7 +212,7 @@ pub fn world_query_iter(criterion: &mut Criterion) {
|
||||
assert_eq!(black_box(count), entity_count);
|
||||
});
|
||||
});
|
||||
group.bench_function(format!("{}_entities_sparse", entity_count), |bencher| {
|
||||
group.bench_function(format!("{entity_count}_entities_sparse"), |bencher| {
|
||||
let mut world = setup::<Sparse>(entity_count);
|
||||
let mut query = world.query::<&Sparse>();
|
||||
|
||||
@ -241,7 +237,7 @@ pub fn world_query_for_each(criterion: &mut Criterion) {
|
||||
group.measurement_time(core::time::Duration::from_secs(4));
|
||||
|
||||
for entity_count in RANGE.map(|i| i * 10_000) {
|
||||
group.bench_function(format!("{}_entities_table", entity_count), |bencher| {
|
||||
group.bench_function(format!("{entity_count}_entities_table"), |bencher| {
|
||||
let mut world = setup::<Table>(entity_count);
|
||||
let mut query = world.query::<&Table>();
|
||||
|
||||
@ -255,7 +251,7 @@ pub fn world_query_for_each(criterion: &mut Criterion) {
|
||||
assert_eq!(black_box(count), entity_count);
|
||||
});
|
||||
});
|
||||
group.bench_function(format!("{}_entities_sparse", entity_count), |bencher| {
|
||||
group.bench_function(format!("{entity_count}_entities_sparse"), |bencher| {
|
||||
let mut world = setup::<Sparse>(entity_count);
|
||||
let mut query = world.query::<&Sparse>();
|
||||
|
||||
@ -280,7 +276,7 @@ pub fn query_get(criterion: &mut Criterion) {
|
||||
group.measurement_time(core::time::Duration::from_secs(4));
|
||||
|
||||
for entity_count in RANGE.map(|i| i * 10_000) {
|
||||
group.bench_function(format!("{}_entities_table", entity_count), |bencher| {
|
||||
group.bench_function(format!("{entity_count}_entities_table"), |bencher| {
|
||||
let mut world = World::default();
|
||||
let mut entities: Vec<_> = world
|
||||
.spawn_batch((0..entity_count).map(|_| Table::default()))
|
||||
@ -299,7 +295,7 @@ pub fn query_get(criterion: &mut Criterion) {
|
||||
assert_eq!(black_box(count), entity_count);
|
||||
});
|
||||
});
|
||||
group.bench_function(format!("{}_entities_sparse", entity_count), |bencher| {
|
||||
group.bench_function(format!("{entity_count}_entities_sparse"), |bencher| {
|
||||
let mut world = World::default();
|
||||
let mut entities: Vec<_> = world
|
||||
.spawn_batch((0..entity_count).map(|_| Sparse::default()))
|
||||
@ -329,7 +325,7 @@ pub fn query_get_many<const N: usize>(criterion: &mut Criterion) {
|
||||
group.measurement_time(core::time::Duration::from_secs(2 * N as u64));
|
||||
|
||||
for entity_count in RANGE.map(|i| i * 10_000) {
|
||||
group.bench_function(format!("{}_calls_table", entity_count), |bencher| {
|
||||
group.bench_function(format!("{entity_count}_calls_table"), |bencher| {
|
||||
let mut world = World::default();
|
||||
let mut entity_groups: Vec<_> = (0..entity_count)
|
||||
.map(|_| [(); N].map(|_| world.spawn(Table::default()).id()))
|
||||
@ -352,7 +348,7 @@ pub fn query_get_many<const N: usize>(criterion: &mut Criterion) {
|
||||
assert_eq!(black_box(count), entity_count);
|
||||
});
|
||||
});
|
||||
group.bench_function(format!("{}_calls_sparse", entity_count), |bencher| {
|
||||
group.bench_function(format!("{entity_count}_calls_sparse"), |bencher| {
|
||||
let mut world = World::default();
|
||||
let mut entity_groups: Vec<_> = (0..entity_count)
|
||||
.map(|_| [(); N].map(|_| world.spawn(Sparse::default()).id()))
|
||||
|
@ -142,7 +142,7 @@ fn concrete_map_apply(criterion: &mut Criterion) {
|
||||
|
||||
fn u64_to_n_byte_key(k: u64, n: usize) -> String {
|
||||
let mut key = String::with_capacity(n);
|
||||
write!(&mut key, "{}", k).unwrap();
|
||||
write!(&mut key, "{k}").unwrap();
|
||||
|
||||
// Pad key to n bytes.
|
||||
key.extend(iter::repeat_n('\0', n - key.len()));
|
||||
|
@ -55,7 +55,7 @@ fn concrete_struct_field(criterion: &mut Criterion) {
|
||||
&s,
|
||||
|bencher, s| {
|
||||
let field_names = (0..field_count)
|
||||
.map(|i| format!("field_{}", i))
|
||||
.map(|i| format!("field_{i}"))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
bencher.iter(|| {
|
||||
@ -256,7 +256,7 @@ fn dynamic_struct_apply(criterion: &mut Criterion) {
|
||||
|
||||
let mut base = DynamicStruct::default();
|
||||
for i in 0..field_count {
|
||||
let field_name = format!("field_{}", i);
|
||||
let field_name = format!("field_{i}");
|
||||
base.insert(&field_name, 1u32);
|
||||
}
|
||||
|
||||
@ -283,7 +283,7 @@ fn dynamic_struct_apply(criterion: &mut Criterion) {
|
||||
let mut base = DynamicStruct::default();
|
||||
let mut patch = DynamicStruct::default();
|
||||
for i in 0..field_count {
|
||||
let field_name = format!("field_{}", i);
|
||||
let field_name = format!("field_{i}");
|
||||
base.insert(&field_name, 0u32);
|
||||
patch.insert(&field_name, 1u32);
|
||||
}
|
||||
@ -309,11 +309,11 @@ fn dynamic_struct_insert(criterion: &mut Criterion) {
|
||||
|bencher, field_count| {
|
||||
let mut s = DynamicStruct::default();
|
||||
for i in 0..*field_count {
|
||||
let field_name = format!("field_{}", i);
|
||||
let field_name = format!("field_{i}");
|
||||
s.insert(&field_name, ());
|
||||
}
|
||||
|
||||
let field = format!("field_{}", field_count);
|
||||
let field = format!("field_{field_count}");
|
||||
bencher.iter_batched(
|
||||
|| s.to_dynamic_struct(),
|
||||
|mut s| {
|
||||
@ -339,7 +339,7 @@ fn dynamic_struct_get_field(criterion: &mut Criterion) {
|
||||
|bencher, field_count| {
|
||||
let mut s = DynamicStruct::default();
|
||||
for i in 0..*field_count {
|
||||
let field_name = format!("field_{}", i);
|
||||
let field_name = format!("field_{i}");
|
||||
s.insert(&field_name, ());
|
||||
}
|
||||
|
||||
|
@ -31,7 +31,7 @@ bevy_platform = { path = "../bevy_platform", version = "0.17.0-dev", default-fea
|
||||
] }
|
||||
|
||||
# other
|
||||
petgraph = { version = "0.7", features = ["serde-1"] }
|
||||
petgraph = { version = "0.8", features = ["serde-1"] }
|
||||
ron = "0.10"
|
||||
serde = "1"
|
||||
blake3 = { version = "1.0" }
|
||||
|
@ -19,7 +19,7 @@ use bevy_ecs::{
|
||||
system::{Res, ResMut},
|
||||
};
|
||||
use bevy_platform::collections::HashMap;
|
||||
use bevy_reflect::{prelude::ReflectDefault, Reflect, ReflectSerialize};
|
||||
use bevy_reflect::{prelude::ReflectDefault, Reflect};
|
||||
use derive_more::derive::From;
|
||||
use petgraph::{
|
||||
graph::{DiGraph, NodeIndex},
|
||||
@ -29,6 +29,7 @@ use ron::de::SpannedError;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use smallvec::SmallVec;
|
||||
use thiserror::Error;
|
||||
use tracing::warn;
|
||||
|
||||
use crate::{AnimationClip, AnimationTargetId};
|
||||
|
||||
@ -108,9 +109,8 @@ use crate::{AnimationClip, AnimationTargetId};
|
||||
/// [RON]: https://github.com/ron-rs/ron
|
||||
///
|
||||
/// [RFC 51]: https://github.com/bevyengine/rfcs/blob/main/rfcs/51-animation-composition.md
|
||||
#[derive(Asset, Reflect, Clone, Debug, Serialize)]
|
||||
#[reflect(Serialize, Debug, Clone)]
|
||||
#[serde(into = "SerializedAnimationGraph")]
|
||||
#[derive(Asset, Reflect, Clone, Debug)]
|
||||
#[reflect(Debug, Clone)]
|
||||
pub struct AnimationGraph {
|
||||
/// The `petgraph` data structure that defines the animation graph.
|
||||
pub graph: AnimationDiGraph,
|
||||
@ -242,20 +242,40 @@ pub enum AnimationNodeType {
|
||||
#[derive(Default)]
|
||||
pub struct AnimationGraphAssetLoader;
|
||||
|
||||
/// Various errors that can occur when serializing or deserializing animation
|
||||
/// graphs to and from RON, respectively.
|
||||
/// Errors that can occur when serializing animation graphs to RON.
|
||||
#[derive(Error, Debug)]
|
||||
pub enum AnimationGraphSaveError {
|
||||
/// An I/O error occurred.
|
||||
#[error(transparent)]
|
||||
Io(#[from] io::Error),
|
||||
/// An error occurred in RON serialization.
|
||||
#[error(transparent)]
|
||||
Ron(#[from] ron::Error),
|
||||
/// An error occurred converting the graph to its serialization form.
|
||||
#[error(transparent)]
|
||||
ConvertToSerialized(#[from] NonPathHandleError),
|
||||
}
|
||||
|
||||
/// Errors that can occur when deserializing animation graphs from RON.
|
||||
#[derive(Error, Debug)]
|
||||
pub enum AnimationGraphLoadError {
|
||||
/// An I/O error occurred.
|
||||
#[error("I/O")]
|
||||
#[error(transparent)]
|
||||
Io(#[from] io::Error),
|
||||
/// An error occurred in RON serialization or deserialization.
|
||||
#[error("RON serialization")]
|
||||
/// An error occurred in RON deserialization.
|
||||
#[error(transparent)]
|
||||
Ron(#[from] ron::Error),
|
||||
/// An error occurred in RON deserialization, and the location of the error
|
||||
/// is supplied.
|
||||
#[error("RON serialization")]
|
||||
#[error(transparent)]
|
||||
SpannedRon(#[from] SpannedError),
|
||||
/// The deserialized graph contained legacy data that we no longer support.
|
||||
#[error(
|
||||
"The deserialized AnimationGraph contained an AnimationClip referenced by an AssetId, \
|
||||
which is no longer supported. Consider manually deserializing the SerializedAnimationGraph \
|
||||
type and determine how to migrate any SerializedAnimationClip::AssetId animation clips"
|
||||
)]
|
||||
GraphContainsLegacyAssetId,
|
||||
}
|
||||
|
||||
/// Acceleration structures for animation graphs that allows Bevy to evaluate
|
||||
@@ -388,18 +408,32 @@ pub struct SerializedAnimationGraphNode {
#[derive(Serialize, Deserialize)]
pub enum SerializedAnimationNodeType {
/// Corresponds to [`AnimationNodeType::Clip`].
Clip(SerializedAnimationClip),
Clip(MigrationSerializedAnimationClip),
/// Corresponds to [`AnimationNodeType::Blend`].
Blend,
/// Corresponds to [`AnimationNodeType::Add`].
Add,
}

/// A version of `Handle<AnimationClip>` suitable for serializing as an asset.
/// A type to facilitate migration from the legacy format of [`SerializedAnimationGraph`] to the
/// new format.
///
/// This replaces any handle that has a path with an [`AssetPath`]. Failing
/// that, the asset ID is serialized directly.
/// By using untagged serde deserialization, we can try to deserialize the modern form, then
/// fallback to the legacy form. Users must migrate to the modern form by Bevy 0.18.
// TODO: Delete this after Bevy 0.17.
#[derive(Serialize, Deserialize)]
#[serde(untagged)]
pub enum MigrationSerializedAnimationClip {
/// This is the new type of this field.
Modern(AssetPath<'static>),
/// This is the legacy type of this field. Users must migrate away from this.
#[serde(skip_serializing)]
Legacy(SerializedAnimationClip),
}

/// The legacy form of serialized animation clips. This allows raw asset IDs to be deserialized.
// TODO: Delete this after Bevy 0.17.
#[derive(Deserialize)]
pub enum SerializedAnimationClip {
/// Records an asset path.
AssetPath(AssetPath<'static>),
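The migration strategy above leans on `#[serde(untagged)]`, which tries each variant in declaration order until one deserializes. A simplified, self-contained sketch of that pattern (using JSON instead of RON and `String` in place of `AssetPath` purely to keep the sketch small; not the crate's actual types):

use serde::Deserialize;

#[derive(Deserialize, Debug)]
#[serde(untagged)]
enum MigrationClip {
    // Tried first: the modern form is just the asset path itself.
    Modern(String),
    // Fallback: the legacy, explicitly tagged form.
    Legacy(LegacyClip),
}

#[derive(Deserialize, Debug)]
enum LegacyClip {
    AssetPath(String),
}

fn main() {
    let modern: MigrationClip =
        serde_json::from_str(r#""models/animated/Fox.glb#Animation0""#).unwrap();
    let legacy: MigrationClip =
        serde_json::from_str(r#"{"AssetPath": "models/animated/Fox.glb#Animation0"}"#).unwrap();
    println!("{modern:?}\n{legacy:?}");
}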
@@ -648,12 +682,13 @@ impl AnimationGraph {
///
/// If writing to a file, it can later be loaded with the
/// [`AnimationGraphAssetLoader`] to reconstruct the graph.
pub fn save<W>(&self, writer: &mut W) -> Result<(), AnimationGraphLoadError>
pub fn save<W>(&self, writer: &mut W) -> Result<(), AnimationGraphSaveError>
where
W: Write,
{
let mut ron_serializer = ron::ser::Serializer::new(writer, None)?;
Ok(self.serialize(&mut ron_serializer)?)
let serialized_graph: SerializedAnimationGraph = self.clone().try_into()?;
Ok(serialized_graph.serialize(&mut ron_serializer)?)
}

/// Adds an animation target (bone) to the mask group with the given ID.
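A quick usage sketch for the new `save` signature (the file path is hypothetical, and the error is boxed only to keep the sketch short):

use std::{fs::File, io::BufWriter};
use bevy_animation::graph::AnimationGraph;

fn save_graph(graph: &AnimationGraph) -> Result<(), Box<dyn std::error::Error>> {
    let mut writer = BufWriter::new(File::create("assets/animation_graphs/Fox.animgraph.ron")?);
    // Now returns AnimationGraphSaveError; this fails with NonPathHandleError if any
    // clip handle in the graph is not backed by an asset path.
    graph.save(&mut writer)?;
    Ok(())
}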
@ -758,28 +793,55 @@ impl AssetLoader for AnimationGraphAssetLoader {
|
||||
let serialized_animation_graph = SerializedAnimationGraph::deserialize(&mut deserializer)
|
||||
.map_err(|err| deserializer.span_error(err))?;
|
||||
|
||||
// Load all `AssetPath`s to convert from a
|
||||
// `SerializedAnimationGraph` to a real `AnimationGraph`.
|
||||
Ok(AnimationGraph {
|
||||
graph: serialized_animation_graph.graph.map(
|
||||
|_, serialized_node| AnimationGraphNode {
|
||||
node_type: match serialized_node.node_type {
|
||||
SerializedAnimationNodeType::Clip(ref clip) => match clip {
|
||||
SerializedAnimationClip::AssetId(asset_id) => {
|
||||
AnimationNodeType::Clip(Handle::Weak(*asset_id))
|
||||
// Load all `AssetPath`s to convert from a `SerializedAnimationGraph` to a real
|
||||
// `AnimationGraph`. This is effectively a `DiGraph::map`, but this allows us to return
|
||||
// errors.
|
||||
let mut animation_graph = DiGraph::with_capacity(
|
||||
serialized_animation_graph.graph.node_count(),
|
||||
serialized_animation_graph.graph.edge_count(),
|
||||
);
|
||||
|
||||
let mut already_warned = false;
|
||||
for serialized_node in serialized_animation_graph.graph.node_weights() {
|
||||
animation_graph.add_node(AnimationGraphNode {
|
||||
node_type: match serialized_node.node_type {
|
||||
SerializedAnimationNodeType::Clip(ref clip) => match clip {
|
||||
MigrationSerializedAnimationClip::Modern(path) => {
|
||||
AnimationNodeType::Clip(load_context.load(path.clone()))
|
||||
}
|
||||
MigrationSerializedAnimationClip::Legacy(
|
||||
SerializedAnimationClip::AssetPath(path),
|
||||
) => {
|
||||
if !already_warned {
|
||||
let path = load_context.asset_path();
|
||||
warn!(
|
||||
"Loaded an AnimationGraph asset at \"{path}\" which contains a \
|
||||
legacy-style SerializedAnimationClip. Please re-save the asset \
|
||||
using AnimationGraph::save to automatically migrate to the new \
|
||||
format"
|
||||
);
|
||||
already_warned = true;
|
||||
}
|
||||
SerializedAnimationClip::AssetPath(asset_path) => {
|
||||
AnimationNodeType::Clip(load_context.load(asset_path))
|
||||
}
|
||||
},
|
||||
SerializedAnimationNodeType::Blend => AnimationNodeType::Blend,
|
||||
SerializedAnimationNodeType::Add => AnimationNodeType::Add,
|
||||
AnimationNodeType::Clip(load_context.load(path.clone()))
|
||||
}
|
||||
MigrationSerializedAnimationClip::Legacy(
|
||||
SerializedAnimationClip::AssetId(_),
|
||||
) => {
|
||||
return Err(AnimationGraphLoadError::GraphContainsLegacyAssetId);
|
||||
}
|
||||
},
|
||||
mask: serialized_node.mask,
|
||||
weight: serialized_node.weight,
|
||||
SerializedAnimationNodeType::Blend => AnimationNodeType::Blend,
|
||||
SerializedAnimationNodeType::Add => AnimationNodeType::Add,
|
||||
},
|
||||
|_, _| (),
|
||||
),
|
||||
mask: serialized_node.mask,
|
||||
weight: serialized_node.weight,
|
||||
});
|
||||
}
|
||||
for edge in serialized_animation_graph.graph.raw_edges() {
|
||||
animation_graph.add_edge(edge.source(), edge.target(), ());
|
||||
}
|
||||
Ok(AnimationGraph {
|
||||
graph: animation_graph,
|
||||
root: serialized_animation_graph.root,
|
||||
mask_groups: serialized_animation_graph.mask_groups,
|
||||
})
|
||||
@ -790,37 +852,50 @@ impl AssetLoader for AnimationGraphAssetLoader {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<AnimationGraph> for SerializedAnimationGraph {
|
||||
fn from(animation_graph: AnimationGraph) -> Self {
|
||||
// If any of the animation clips have paths, then serialize them as
|
||||
// `SerializedAnimationClip::AssetPath` so that the
|
||||
// `AnimationGraphAssetLoader` can load them.
|
||||
Self {
|
||||
graph: animation_graph.graph.map(
|
||||
|_, node| SerializedAnimationGraphNode {
|
||||
weight: node.weight,
|
||||
mask: node.mask,
|
||||
node_type: match node.node_type {
|
||||
AnimationNodeType::Clip(ref clip) => match clip.path() {
|
||||
Some(path) => SerializedAnimationNodeType::Clip(
|
||||
SerializedAnimationClip::AssetPath(path.clone()),
|
||||
),
|
||||
None => SerializedAnimationNodeType::Clip(
|
||||
SerializedAnimationClip::AssetId(clip.id()),
|
||||
),
|
||||
},
|
||||
AnimationNodeType::Blend => SerializedAnimationNodeType::Blend,
|
||||
AnimationNodeType::Add => SerializedAnimationNodeType::Add,
|
||||
impl TryFrom<AnimationGraph> for SerializedAnimationGraph {
type Error = NonPathHandleError;

fn try_from(animation_graph: AnimationGraph) -> Result<Self, NonPathHandleError> {
// Convert all the `Handle<AnimationClip>` to AssetPath, so that
// `AnimationGraphAssetLoader` can load them. This is effectively just doing a
// `DiGraph::map`, except we need to return an error if any handles aren't associated to a
// path.
let mut serialized_graph = DiGraph::with_capacity(
animation_graph.graph.node_count(),
animation_graph.graph.edge_count(),
);
for node in animation_graph.graph.node_weights() {
serialized_graph.add_node(SerializedAnimationGraphNode {
weight: node.weight,
mask: node.mask,
node_type: match node.node_type {
AnimationNodeType::Clip(ref clip) => match clip.path() {
Some(path) => SerializedAnimationNodeType::Clip(
MigrationSerializedAnimationClip::Modern(path.clone()),
),
None => return Err(NonPathHandleError),
},
AnimationNodeType::Blend => SerializedAnimationNodeType::Blend,
AnimationNodeType::Add => SerializedAnimationNodeType::Add,
},
|_, _| (),
),
});
}
for edge in animation_graph.graph.raw_edges() {
serialized_graph.add_edge(edge.source(), edge.target(), ());
}
Ok(Self {
graph: serialized_graph,
root: animation_graph.root,
mask_groups: animation_graph.mask_groups,
}
})
}
}

/// Error for when only path [`Handle`]s are supported.
#[derive(Error, Debug)]
#[error("AnimationGraph contains a handle to an AnimationClip that does not correspond to an asset path")]
pub struct NonPathHandleError;
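A short, hedged sketch of how this fallible conversion could be consumed; only `SerializedAnimationGraph::try_from` and `NonPathHandleError` come from the code above, the helper and its module paths are assumptions.

use bevy_animation::graph::{AnimationGraph, SerializedAnimationGraph};

// Convert a runtime graph into its serializable form, surfacing the error for
// graphs that still hold clip handles without an asset path.
fn to_serializable(graph: AnimationGraph) -> Option<SerializedAnimationGraph> {
    match SerializedAnimationGraph::try_from(graph) {
        Ok(serialized) => Some(serialized),
        Err(err) => {
            // NonPathHandleError: at least one clip handle has no asset path.
            eprintln!("cannot serialize animation graph: {err}");
            None
        }
    }
}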
|
||||
|
||||
/// A system that creates, updates, and removes [`ThreadedAnimationGraph`]
|
||||
/// structures for every changed [`AnimationGraph`].
|
||||
///
|
||||
|
@ -1,5 +1,5 @@
|
||||
use bevy_app::prelude::*;
|
||||
use bevy_asset::{embedded_asset, load_embedded_asset, Handle};
|
||||
use bevy_asset::{embedded_asset, load_embedded_asset, AssetServer, Handle};
|
||||
use bevy_core_pipeline::{
|
||||
core_2d::graph::{Core2d, Node2d},
|
||||
core_3d::graph::{Core3d, Node3d},
|
||||
@ -11,15 +11,16 @@ use bevy_reflect::{std_traits::ReflectDefault, Reflect};
|
||||
use bevy_render::{
|
||||
extract_component::{ExtractComponent, ExtractComponentPlugin, UniformComponentPlugin},
|
||||
prelude::Camera,
|
||||
render_graph::RenderGraphApp,
|
||||
render_graph::RenderGraphExt,
|
||||
render_resource::{
|
||||
binding_types::{sampler, texture_2d, uniform_buffer},
|
||||
*,
|
||||
},
|
||||
renderer::RenderDevice,
|
||||
view::{ExtractedView, ViewTarget},
|
||||
Render, RenderApp, RenderSystems,
|
||||
Render, RenderApp, RenderStartup, RenderSystems,
|
||||
};
|
||||
use bevy_utils::default;
|
||||
|
||||
mod node;
|
||||
|
||||
@ -113,6 +114,7 @@ impl Plugin for CasPlugin {
|
||||
};
|
||||
render_app
|
||||
.init_resource::<SpecializedRenderPipelines<CasPipeline>>()
|
||||
.add_systems(RenderStartup, init_cas_pipeline)
|
||||
.add_systems(Render, prepare_cas_pipelines.in_set(RenderSystems::Prepare));
|
||||
|
||||
{
|
||||
@ -150,13 +152,6 @@ impl Plugin for CasPlugin {
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn finish(&self, app: &mut App) {
|
||||
let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
|
||||
return;
|
||||
};
|
||||
render_app.init_resource::<CasPipeline>();
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Resource)]
|
||||
@ -167,34 +162,36 @@ pub struct CasPipeline {
|
||||
fragment_shader: Handle<Shader>,
|
||||
}
|
||||
|
||||
impl FromWorld for CasPipeline {
|
||||
fn from_world(render_world: &mut World) -> Self {
|
||||
let render_device = render_world.resource::<RenderDevice>();
|
||||
let texture_bind_group = render_device.create_bind_group_layout(
|
||||
"sharpening_texture_bind_group_layout",
|
||||
&BindGroupLayoutEntries::sequential(
|
||||
ShaderStages::FRAGMENT,
|
||||
(
|
||||
texture_2d(TextureSampleType::Float { filterable: true }),
|
||||
sampler(SamplerBindingType::Filtering),
|
||||
// CAS Settings
|
||||
uniform_buffer::<CasUniform>(true),
|
||||
),
|
||||
pub fn init_cas_pipeline(
|
||||
mut commands: Commands,
|
||||
render_device: Res<RenderDevice>,
|
||||
fullscreen_shader: Res<FullscreenShader>,
|
||||
asset_server: Res<AssetServer>,
|
||||
) {
|
||||
let texture_bind_group = render_device.create_bind_group_layout(
|
||||
"sharpening_texture_bind_group_layout",
|
||||
&BindGroupLayoutEntries::sequential(
|
||||
ShaderStages::FRAGMENT,
|
||||
(
|
||||
texture_2d(TextureSampleType::Float { filterable: true }),
|
||||
sampler(SamplerBindingType::Filtering),
|
||||
// CAS Settings
|
||||
uniform_buffer::<CasUniform>(true),
|
||||
),
|
||||
);
|
||||
),
|
||||
);
|
||||
|
||||
let sampler = render_device.create_sampler(&SamplerDescriptor::default());
|
||||
let sampler = render_device.create_sampler(&SamplerDescriptor::default());
|
||||
|
||||
CasPipeline {
|
||||
texture_bind_group,
|
||||
sampler,
|
||||
fullscreen_shader: render_world.resource::<FullscreenShader>().clone(),
|
||||
fragment_shader: load_embedded_asset!(
|
||||
render_world,
|
||||
"robust_contrast_adaptive_sharpening.wgsl"
|
||||
),
|
||||
}
|
||||
}
|
||||
commands.insert_resource(CasPipeline {
|
||||
texture_bind_group,
|
||||
sampler,
|
||||
fullscreen_shader: fullscreen_shader.clone(),
|
||||
fragment_shader: load_embedded_asset!(
|
||||
asset_server.as_ref(),
|
||||
"robust_contrast_adaptive_sharpening.wgsl"
|
||||
),
|
||||
});
|
||||
}
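Taken together, these hunks replace the `FromWorld` + `Plugin::finish` initialization with a `RenderStartup` system. A minimal, self-contained sketch of that pattern follows; `MyPipeline`, `init_my_pipeline`, and `MyRenderPlugin` are placeholder names, not items from this diff.

use bevy_app::{App, Plugin};
use bevy_ecs::prelude::{Commands, Res, Resource};
use bevy_render::{
    render_resource::{Sampler, SamplerDescriptor},
    renderer::RenderDevice,
    RenderApp, RenderStartup,
};

#[derive(Resource)]
struct MyPipeline {
    sampler: Sampler,
}

// Runs once at render-app startup, replacing a `FromWorld` impl.
fn init_my_pipeline(mut commands: Commands, render_device: Res<RenderDevice>) {
    let sampler = render_device.create_sampler(&SamplerDescriptor::default());
    commands.insert_resource(MyPipeline { sampler });
}

struct MyRenderPlugin;

impl Plugin for MyRenderPlugin {
    fn build(&self, app: &mut App) {
        let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
            return;
        };
        // No `finish` hook needed; the resource is created by this startup system.
        render_app.add_systems(RenderStartup, init_my_pipeline);
    }
}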
|
||||
|
||||
#[derive(PartialEq, Eq, Hash, Clone, Copy)]
|
||||
@ -218,18 +215,14 @@ impl SpecializedRenderPipeline for CasPipeline {
|
||||
fragment: Some(FragmentState {
|
||||
shader: self.fragment_shader.clone(),
|
||||
shader_defs,
|
||||
entry_point: "fragment".into(),
|
||||
targets: vec![Some(ColorTargetState {
|
||||
format: key.texture_format,
|
||||
blend: None,
|
||||
write_mask: ColorWrites::ALL,
|
||||
})],
|
||||
..default()
|
||||
}),
|
||||
primitive: PrimitiveState::default(),
|
||||
depth_stencil: None,
|
||||
multisample: MultisampleState::default(),
|
||||
push_constant_ranges: Vec::new(),
|
||||
zero_initialize_workgroup_memory: false,
|
||||
..default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,5 +1,5 @@
|
||||
use bevy_app::prelude::*;
|
||||
use bevy_asset::{embedded_asset, load_embedded_asset, Handle};
|
||||
use bevy_asset::{embedded_asset, load_embedded_asset, AssetServer, Handle};
|
||||
use bevy_core_pipeline::{
|
||||
core_2d::graph::{Core2d, Node2d},
|
||||
core_3d::graph::{Core3d, Node3d},
|
||||
@ -11,14 +11,14 @@ use bevy_reflect::{std_traits::ReflectDefault, Reflect};
|
||||
use bevy_render::{
|
||||
extract_component::{ExtractComponent, ExtractComponentPlugin},
|
||||
prelude::Camera,
|
||||
render_graph::{RenderGraphApp, ViewNodeRunner},
|
||||
render_graph::{RenderGraphExt, ViewNodeRunner},
|
||||
render_resource::{
|
||||
binding_types::{sampler, texture_2d},
|
||||
*,
|
||||
},
|
||||
renderer::RenderDevice,
|
||||
view::{ExtractedView, ViewTarget},
|
||||
Render, RenderApp, RenderSystems,
|
||||
Render, RenderApp, RenderStartup, RenderSystems,
|
||||
};
|
||||
use bevy_utils::default;
|
||||
|
||||
@ -94,6 +94,7 @@ impl Plugin for FxaaPlugin {
|
||||
};
|
||||
render_app
|
||||
.init_resource::<SpecializedRenderPipelines<FxaaPipeline>>()
|
||||
.add_systems(RenderStartup, init_fxaa_pipeline)
|
||||
.add_systems(
|
||||
Render,
|
||||
prepare_fxaa_pipelines.in_set(RenderSystems::Prepare),
|
||||
@ -117,13 +118,6 @@ impl Plugin for FxaaPlugin {
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
fn finish(&self, app: &mut App) {
|
||||
let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
|
||||
return;
|
||||
};
|
||||
render_app.init_resource::<FxaaPipeline>();
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Resource)]
|
||||
@ -134,34 +128,36 @@ pub struct FxaaPipeline {
|
||||
fragment_shader: Handle<Shader>,
|
||||
}
|
||||
|
||||
impl FromWorld for FxaaPipeline {
|
||||
fn from_world(render_world: &mut World) -> Self {
|
||||
let render_device = render_world.resource::<RenderDevice>();
|
||||
let texture_bind_group = render_device.create_bind_group_layout(
|
||||
"fxaa_texture_bind_group_layout",
|
||||
&BindGroupLayoutEntries::sequential(
|
||||
ShaderStages::FRAGMENT,
|
||||
(
|
||||
texture_2d(TextureSampleType::Float { filterable: true }),
|
||||
sampler(SamplerBindingType::Filtering),
|
||||
),
|
||||
pub fn init_fxaa_pipeline(
|
||||
mut commands: Commands,
|
||||
render_device: Res<RenderDevice>,
|
||||
fullscreen_shader: Res<FullscreenShader>,
|
||||
asset_server: Res<AssetServer>,
|
||||
) {
|
||||
let texture_bind_group = render_device.create_bind_group_layout(
|
||||
"fxaa_texture_bind_group_layout",
|
||||
&BindGroupLayoutEntries::sequential(
|
||||
ShaderStages::FRAGMENT,
|
||||
(
|
||||
texture_2d(TextureSampleType::Float { filterable: true }),
|
||||
sampler(SamplerBindingType::Filtering),
|
||||
),
|
||||
);
|
||||
),
|
||||
);
|
||||
|
||||
let sampler = render_device.create_sampler(&SamplerDescriptor {
|
||||
mipmap_filter: FilterMode::Linear,
|
||||
mag_filter: FilterMode::Linear,
|
||||
min_filter: FilterMode::Linear,
|
||||
..default()
|
||||
});
|
||||
let sampler = render_device.create_sampler(&SamplerDescriptor {
|
||||
mipmap_filter: FilterMode::Linear,
|
||||
mag_filter: FilterMode::Linear,
|
||||
min_filter: FilterMode::Linear,
|
||||
..default()
|
||||
});
|
||||
|
||||
FxaaPipeline {
|
||||
texture_bind_group,
|
||||
sampler,
|
||||
fullscreen_shader: render_world.resource::<FullscreenShader>().clone(),
|
||||
fragment_shader: load_embedded_asset!(render_world, "fxaa.wgsl"),
|
||||
}
|
||||
}
|
||||
commands.insert_resource(FxaaPipeline {
|
||||
texture_bind_group,
|
||||
sampler,
|
||||
fullscreen_shader: fullscreen_shader.clone(),
|
||||
fragment_shader: load_embedded_asset!(asset_server.as_ref(), "fxaa.wgsl"),
|
||||
});
|
||||
}
|
||||
|
||||
#[derive(Component)]
|
||||
@ -190,18 +186,14 @@ impl SpecializedRenderPipeline for FxaaPipeline {
|
||||
format!("EDGE_THRESH_{}", key.edge_threshold.get_str()).into(),
|
||||
format!("EDGE_THRESH_MIN_{}", key.edge_threshold_min.get_str()).into(),
|
||||
],
|
||||
entry_point: "fragment".into(),
|
||||
targets: vec![Some(ColorTargetState {
|
||||
format: key.texture_format,
|
||||
blend: None,
|
||||
write_mask: ColorWrites::ALL,
|
||||
})],
|
||||
..default()
|
||||
}),
|
||||
primitive: PrimitiveState::default(),
|
||||
depth_stencil: None,
|
||||
multisample: MultisampleState::default(),
|
||||
push_constant_ranges: Vec::new(),
|
||||
zero_initialize_workgroup_memory: false,
|
||||
..default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -32,7 +32,7 @@
|
||||
use bevy_app::{App, Plugin};
|
||||
#[cfg(feature = "smaa_luts")]
|
||||
use bevy_asset::load_internal_binary_asset;
|
||||
use bevy_asset::{embedded_asset, load_embedded_asset, weak_handle, Handle};
|
||||
use bevy_asset::{embedded_asset, load_embedded_asset, uuid_handle, AssetServer, Handle};
|
||||
#[cfg(not(feature = "smaa_luts"))]
|
||||
use bevy_core_pipeline::tonemapping::lut_placeholder;
|
||||
use bevy_core_pipeline::{
|
||||
@ -48,9 +48,9 @@ use bevy_ecs::{
|
||||
resource::Resource,
|
||||
schedule::IntoScheduleConfigs as _,
|
||||
system::{lifetimeless::Read, Commands, Query, Res, ResMut},
|
||||
world::{FromWorld, World},
|
||||
world::World,
|
||||
};
|
||||
use bevy_image::{BevyDefault, Image};
|
||||
use bevy_image::{BevyDefault, Image, ToExtents};
|
||||
use bevy_math::{vec4, Vec4};
|
||||
use bevy_reflect::{std_traits::ReflectDefault, Reflect};
|
||||
use bevy_render::{
|
||||
@ -58,34 +58,33 @@ use bevy_render::{
|
||||
extract_component::{ExtractComponent, ExtractComponentPlugin},
|
||||
render_asset::RenderAssets,
|
||||
render_graph::{
|
||||
NodeRunError, RenderGraphApp as _, RenderGraphContext, ViewNode, ViewNodeRunner,
|
||||
NodeRunError, RenderGraphContext, RenderGraphExt as _, ViewNode, ViewNodeRunner,
|
||||
},
|
||||
render_resource::{
|
||||
binding_types::{sampler, texture_2d, uniform_buffer},
|
||||
AddressMode, BindGroup, BindGroupEntries, BindGroupLayout, BindGroupLayoutEntries,
|
||||
CachedRenderPipelineId, ColorTargetState, ColorWrites, CompareFunction, DepthStencilState,
|
||||
DynamicUniformBuffer, Extent3d, FilterMode, FragmentState, LoadOp, MultisampleState,
|
||||
Operations, PipelineCache, PrimitiveState, RenderPassColorAttachment,
|
||||
RenderPassDepthStencilAttachment, RenderPassDescriptor, RenderPipeline,
|
||||
RenderPipelineDescriptor, SamplerBindingType, SamplerDescriptor, Shader, ShaderDefVal,
|
||||
ShaderStages, ShaderType, SpecializedRenderPipeline, SpecializedRenderPipelines,
|
||||
StencilFaceState, StencilOperation, StencilState, StoreOp, TextureDescriptor,
|
||||
TextureDimension, TextureFormat, TextureSampleType, TextureUsages, TextureView,
|
||||
VertexState,
|
||||
DynamicUniformBuffer, FilterMode, FragmentState, LoadOp, Operations, PipelineCache,
|
||||
RenderPassColorAttachment, RenderPassDepthStencilAttachment, RenderPassDescriptor,
|
||||
RenderPipeline, RenderPipelineDescriptor, SamplerBindingType, SamplerDescriptor, Shader,
|
||||
ShaderDefVal, ShaderStages, ShaderType, SpecializedRenderPipeline,
|
||||
SpecializedRenderPipelines, StencilFaceState, StencilOperation, StencilState, StoreOp,
|
||||
TextureDescriptor, TextureDimension, TextureFormat, TextureSampleType, TextureUsages,
|
||||
TextureView, VertexState,
|
||||
},
|
||||
renderer::{RenderContext, RenderDevice, RenderQueue},
|
||||
texture::{CachedTexture, GpuImage, TextureCache},
|
||||
view::{ExtractedView, ViewTarget},
|
||||
Render, RenderApp, RenderSystems,
|
||||
Render, RenderApp, RenderStartup, RenderSystems,
|
||||
};
|
||||
use bevy_utils::prelude::default;
|
||||
|
||||
/// The handle of the area LUT, a KTX2 format texture that SMAA uses internally.
|
||||
const SMAA_AREA_LUT_TEXTURE_HANDLE: Handle<Image> =
|
||||
weak_handle!("569c4d67-c7fa-4958-b1af-0836023603c0");
|
||||
uuid_handle!("569c4d67-c7fa-4958-b1af-0836023603c0");
|
||||
/// The handle of the search LUT, a KTX2 format texture that SMAA uses internally.
|
||||
const SMAA_SEARCH_LUT_TEXTURE_HANDLE: Handle<Image> =
|
||||
weak_handle!("43b97515-252e-4c8a-b9af-f2fc528a1c27");
|
||||
uuid_handle!("43b97515-252e-4c8a-b9af-f2fc528a1c27");
|
||||
|
||||
/// Adds support for subpixel morphological antialiasing, or SMAA.
|
||||
pub struct SmaaPlugin;
|
||||
@ -347,6 +346,7 @@ impl Plugin for SmaaPlugin {
|
||||
render_app
|
||||
.init_resource::<SmaaSpecializedRenderPipelines>()
|
||||
.init_resource::<SmaaInfoUniformBuffer>()
|
||||
.add_systems(RenderStartup, init_smaa_pipelines)
|
||||
.add_systems(
|
||||
Render,
|
||||
(
|
||||
@ -375,86 +375,79 @@ impl Plugin for SmaaPlugin {
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
fn finish(&self, app: &mut App) {
|
||||
if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
|
||||
render_app.init_resource::<SmaaPipelines>();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromWorld for SmaaPipelines {
|
||||
fn from_world(world: &mut World) -> Self {
|
||||
let render_device = world.resource::<RenderDevice>();
|
||||
|
||||
// Create the postprocess bind group layout (all passes, bind group 0).
|
||||
let postprocess_bind_group_layout = render_device.create_bind_group_layout(
|
||||
"SMAA postprocess bind group layout",
|
||||
&BindGroupLayoutEntries::sequential(
|
||||
ShaderStages::FRAGMENT,
|
||||
(
|
||||
texture_2d(TextureSampleType::Float { filterable: true }),
|
||||
uniform_buffer::<SmaaInfoUniform>(true)
|
||||
.visibility(ShaderStages::VERTEX_FRAGMENT),
|
||||
),
|
||||
pub fn init_smaa_pipelines(
|
||||
mut commands: Commands,
|
||||
render_device: Res<RenderDevice>,
|
||||
asset_server: Res<AssetServer>,
|
||||
) {
|
||||
// Create the postprocess bind group layout (all passes, bind group 0).
|
||||
let postprocess_bind_group_layout = render_device.create_bind_group_layout(
|
||||
"SMAA postprocess bind group layout",
|
||||
&BindGroupLayoutEntries::sequential(
|
||||
ShaderStages::FRAGMENT,
|
||||
(
|
||||
texture_2d(TextureSampleType::Float { filterable: true }),
|
||||
uniform_buffer::<SmaaInfoUniform>(true).visibility(ShaderStages::VERTEX_FRAGMENT),
|
||||
),
|
||||
);
|
||||
),
|
||||
);
|
||||
|
||||
// Create the edge detection bind group layout (pass 1, bind group 1).
|
||||
let edge_detection_bind_group_layout = render_device.create_bind_group_layout(
|
||||
"SMAA edge detection bind group layout",
|
||||
&BindGroupLayoutEntries::sequential(
|
||||
ShaderStages::FRAGMENT,
|
||||
(sampler(SamplerBindingType::Filtering),),
|
||||
// Create the edge detection bind group layout (pass 1, bind group 1).
|
||||
let edge_detection_bind_group_layout = render_device.create_bind_group_layout(
|
||||
"SMAA edge detection bind group layout",
|
||||
&BindGroupLayoutEntries::sequential(
|
||||
ShaderStages::FRAGMENT,
|
||||
(sampler(SamplerBindingType::Filtering),),
|
||||
),
|
||||
);
|
||||
|
||||
// Create the blending weight calculation bind group layout (pass 2, bind group 1).
|
||||
let blending_weight_calculation_bind_group_layout = render_device.create_bind_group_layout(
|
||||
"SMAA blending weight calculation bind group layout",
|
||||
&BindGroupLayoutEntries::sequential(
|
||||
ShaderStages::FRAGMENT,
|
||||
(
|
||||
texture_2d(TextureSampleType::Float { filterable: true }), // edges texture
|
||||
sampler(SamplerBindingType::Filtering), // edges sampler
|
||||
texture_2d(TextureSampleType::Float { filterable: true }), // search texture
|
||||
texture_2d(TextureSampleType::Float { filterable: true }), // area texture
|
||||
),
|
||||
);
|
||||
),
|
||||
);
|
||||
|
||||
// Create the blending weight calculation bind group layout (pass 2, bind group 1).
|
||||
let blending_weight_calculation_bind_group_layout = render_device.create_bind_group_layout(
|
||||
"SMAA blending weight calculation bind group layout",
|
||||
&BindGroupLayoutEntries::sequential(
|
||||
ShaderStages::FRAGMENT,
|
||||
(
|
||||
texture_2d(TextureSampleType::Float { filterable: true }), // edges texture
|
||||
sampler(SamplerBindingType::Filtering), // edges sampler
|
||||
texture_2d(TextureSampleType::Float { filterable: true }), // search texture
|
||||
texture_2d(TextureSampleType::Float { filterable: true }), // area texture
|
||||
),
|
||||
// Create the neighborhood blending bind group layout (pass 3, bind group 1).
|
||||
let neighborhood_blending_bind_group_layout = render_device.create_bind_group_layout(
|
||||
"SMAA neighborhood blending bind group layout",
|
||||
&BindGroupLayoutEntries::sequential(
|
||||
ShaderStages::FRAGMENT,
|
||||
(
|
||||
texture_2d(TextureSampleType::Float { filterable: true }),
|
||||
sampler(SamplerBindingType::Filtering),
|
||||
),
|
||||
);
|
||||
),
|
||||
);
|
||||
|
||||
// Create the neighborhood blending bind group layout (pass 3, bind group 1).
|
||||
let neighborhood_blending_bind_group_layout = render_device.create_bind_group_layout(
|
||||
"SMAA neighborhood blending bind group layout",
|
||||
&BindGroupLayoutEntries::sequential(
|
||||
ShaderStages::FRAGMENT,
|
||||
(
|
||||
texture_2d(TextureSampleType::Float { filterable: true }),
|
||||
sampler(SamplerBindingType::Filtering),
|
||||
),
|
||||
),
|
||||
);
|
||||
let shader = load_embedded_asset!(asset_server.as_ref(), "smaa.wgsl");
|
||||
|
||||
let shader = load_embedded_asset!(world, "smaa.wgsl");
|
||||
|
||||
SmaaPipelines {
|
||||
edge_detection: SmaaEdgeDetectionPipeline {
|
||||
postprocess_bind_group_layout: postprocess_bind_group_layout.clone(),
|
||||
edge_detection_bind_group_layout,
|
||||
shader: shader.clone(),
|
||||
},
|
||||
blending_weight_calculation: SmaaBlendingWeightCalculationPipeline {
|
||||
postprocess_bind_group_layout: postprocess_bind_group_layout.clone(),
|
||||
blending_weight_calculation_bind_group_layout,
|
||||
shader: shader.clone(),
|
||||
},
|
||||
neighborhood_blending: SmaaNeighborhoodBlendingPipeline {
|
||||
postprocess_bind_group_layout,
|
||||
neighborhood_blending_bind_group_layout,
|
||||
shader,
|
||||
},
|
||||
}
|
||||
}
|
||||
commands.insert_resource(SmaaPipelines {
|
||||
edge_detection: SmaaEdgeDetectionPipeline {
|
||||
postprocess_bind_group_layout: postprocess_bind_group_layout.clone(),
|
||||
edge_detection_bind_group_layout,
|
||||
shader: shader.clone(),
|
||||
},
|
||||
blending_weight_calculation: SmaaBlendingWeightCalculationPipeline {
|
||||
postprocess_bind_group_layout: postprocess_bind_group_layout.clone(),
|
||||
blending_weight_calculation_bind_group_layout,
|
||||
shader: shader.clone(),
|
||||
},
|
||||
neighborhood_blending: SmaaNeighborhoodBlendingPipeline {
|
||||
postprocess_bind_group_layout,
|
||||
neighborhood_blending_bind_group_layout,
|
||||
shader,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// Phase 1: edge detection.
|
||||
@ -483,21 +476,19 @@ impl SpecializedRenderPipeline for SmaaEdgeDetectionPipeline {
|
||||
vertex: VertexState {
|
||||
shader: self.shader.clone(),
|
||||
shader_defs: shader_defs.clone(),
|
||||
entry_point: "edge_detection_vertex_main".into(),
|
||||
entry_point: Some("edge_detection_vertex_main".into()),
|
||||
buffers: vec![],
|
||||
},
|
||||
fragment: Some(FragmentState {
|
||||
shader: self.shader.clone(),
|
||||
shader_defs,
|
||||
entry_point: "luma_edge_detection_fragment_main".into(),
|
||||
entry_point: Some("luma_edge_detection_fragment_main".into()),
|
||||
targets: vec![Some(ColorTargetState {
|
||||
format: TextureFormat::Rg8Unorm,
|
||||
blend: None,
|
||||
write_mask: ColorWrites::ALL,
|
||||
})],
|
||||
}),
|
||||
push_constant_ranges: vec![],
|
||||
primitive: PrimitiveState::default(),
|
||||
depth_stencil: Some(DepthStencilState {
|
||||
format: TextureFormat::Stencil8,
|
||||
depth_write_enabled: false,
|
||||
@ -510,8 +501,7 @@ impl SpecializedRenderPipeline for SmaaEdgeDetectionPipeline {
|
||||
},
|
||||
bias: default(),
|
||||
}),
|
||||
multisample: MultisampleState::default(),
|
||||
zero_initialize_workgroup_memory: false,
|
||||
..default()
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -543,21 +533,19 @@ impl SpecializedRenderPipeline for SmaaBlendingWeightCalculationPipeline {
|
||||
vertex: VertexState {
|
||||
shader: self.shader.clone(),
|
||||
shader_defs: shader_defs.clone(),
|
||||
entry_point: "blending_weight_calculation_vertex_main".into(),
|
||||
entry_point: Some("blending_weight_calculation_vertex_main".into()),
|
||||
buffers: vec![],
|
||||
},
|
||||
fragment: Some(FragmentState {
|
||||
shader: self.shader.clone(),
|
||||
shader_defs,
|
||||
entry_point: "blending_weight_calculation_fragment_main".into(),
|
||||
entry_point: Some("blending_weight_calculation_fragment_main".into()),
|
||||
targets: vec![Some(ColorTargetState {
|
||||
format: TextureFormat::Rgba8Unorm,
|
||||
blend: None,
|
||||
write_mask: ColorWrites::ALL,
|
||||
})],
|
||||
}),
|
||||
push_constant_ranges: vec![],
|
||||
primitive: PrimitiveState::default(),
|
||||
depth_stencil: Some(DepthStencilState {
|
||||
format: TextureFormat::Stencil8,
|
||||
depth_write_enabled: false,
|
||||
@ -570,8 +558,7 @@ impl SpecializedRenderPipeline for SmaaBlendingWeightCalculationPipeline {
|
||||
},
|
||||
bias: default(),
|
||||
}),
|
||||
multisample: MultisampleState::default(),
|
||||
zero_initialize_workgroup_memory: false,
|
||||
..default()
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -591,24 +578,20 @@ impl SpecializedRenderPipeline for SmaaNeighborhoodBlendingPipeline {
|
||||
vertex: VertexState {
|
||||
shader: self.shader.clone(),
|
||||
shader_defs: shader_defs.clone(),
|
||||
entry_point: "neighborhood_blending_vertex_main".into(),
|
||||
entry_point: Some("neighborhood_blending_vertex_main".into()),
|
||||
buffers: vec![],
|
||||
},
|
||||
fragment: Some(FragmentState {
|
||||
shader: self.shader.clone(),
|
||||
shader_defs,
|
||||
entry_point: "neighborhood_blending_fragment_main".into(),
|
||||
entry_point: Some("neighborhood_blending_fragment_main".into()),
|
||||
targets: vec![Some(ColorTargetState {
|
||||
format: key.texture_format,
|
||||
blend: None,
|
||||
write_mask: ColorWrites::ALL,
|
||||
})],
|
||||
}),
|
||||
push_constant_ranges: vec![],
|
||||
primitive: PrimitiveState::default(),
|
||||
depth_stencil: None,
|
||||
multisample: MultisampleState::default(),
|
||||
zero_initialize_workgroup_memory: false,
|
||||
..default()
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -704,18 +687,12 @@ fn prepare_smaa_textures(
|
||||
continue;
|
||||
};
|
||||
|
||||
let texture_size = Extent3d {
|
||||
width: texture_size.x,
|
||||
height: texture_size.y,
|
||||
depth_or_array_layers: 1,
|
||||
};
|
||||
|
||||
// Create the two-channel RG texture for phase 1 (edge detection).
|
||||
let edge_detection_color_texture = texture_cache.get(
|
||||
&render_device,
|
||||
TextureDescriptor {
|
||||
label: Some("SMAA edge detection color texture"),
|
||||
size: texture_size,
|
||||
size: texture_size.to_extents(),
|
||||
mip_level_count: 1,
|
||||
sample_count: 1,
|
||||
dimension: TextureDimension::D2,
|
||||
@ -730,7 +707,7 @@ fn prepare_smaa_textures(
|
||||
&render_device,
|
||||
TextureDescriptor {
|
||||
label: Some("SMAA edge detection stencil texture"),
|
||||
size: texture_size,
|
||||
size: texture_size.to_extents(),
|
||||
mip_level_count: 1,
|
||||
sample_count: 1,
|
||||
dimension: TextureDimension::D2,
|
||||
@ -746,7 +723,7 @@ fn prepare_smaa_textures(
|
||||
&render_device,
|
||||
TextureDescriptor {
|
||||
label: Some("SMAA blend texture"),
|
||||
size: texture_size,
|
||||
size: texture_size.to_extents(),
|
||||
mip_level_count: 1,
|
||||
sample_count: 1,
|
||||
dimension: TextureDimension::D2,
|
||||
|
@ -1,5 +1,5 @@
|
||||
use bevy_app::{App, Plugin};
|
||||
use bevy_asset::{embedded_asset, load_embedded_asset, Handle};
|
||||
use bevy_asset::{embedded_asset, load_embedded_asset, AssetServer, Handle};
|
||||
use bevy_core_pipeline::{
|
||||
core_3d::graph::{Core3d, Node3d},
|
||||
prelude::Camera3d,
|
||||
@ -13,31 +13,32 @@ use bevy_ecs::{
|
||||
resource::Resource,
|
||||
schedule::IntoScheduleConfigs,
|
||||
system::{Commands, Query, Res, ResMut},
|
||||
world::{FromWorld, World},
|
||||
world::World,
|
||||
};
|
||||
use bevy_image::BevyDefault as _;
|
||||
use bevy_image::{BevyDefault as _, ToExtents};
|
||||
use bevy_math::vec2;
|
||||
use bevy_reflect::{std_traits::ReflectDefault, Reflect};
|
||||
use bevy_render::{
|
||||
camera::{ExtractedCamera, MipBias, TemporalJitter},
|
||||
prelude::{Camera, Projection},
|
||||
render_graph::{NodeRunError, RenderGraphApp, RenderGraphContext, ViewNode, ViewNodeRunner},
|
||||
render_graph::{NodeRunError, RenderGraphContext, RenderGraphExt, ViewNode, ViewNodeRunner},
|
||||
render_resource::{
|
||||
binding_types::{sampler, texture_2d, texture_depth_2d},
|
||||
BindGroupEntries, BindGroupLayout, BindGroupLayoutEntries, CachedRenderPipelineId,
|
||||
ColorTargetState, ColorWrites, Extent3d, FilterMode, FragmentState, MultisampleState,
|
||||
Operations, PipelineCache, PrimitiveState, RenderPassColorAttachment, RenderPassDescriptor,
|
||||
RenderPipelineDescriptor, Sampler, SamplerBindingType, SamplerDescriptor, Shader,
|
||||
ShaderStages, SpecializedRenderPipeline, SpecializedRenderPipelines, TextureDescriptor,
|
||||
TextureDimension, TextureFormat, TextureSampleType, TextureUsages,
|
||||
ColorTargetState, ColorWrites, FilterMode, FragmentState, Operations, PipelineCache,
|
||||
RenderPassColorAttachment, RenderPassDescriptor, RenderPipelineDescriptor, Sampler,
|
||||
SamplerBindingType, SamplerDescriptor, Shader, ShaderStages, SpecializedRenderPipeline,
|
||||
SpecializedRenderPipelines, TextureDescriptor, TextureDimension, TextureFormat,
|
||||
TextureSampleType, TextureUsages,
|
||||
},
|
||||
renderer::{RenderContext, RenderDevice},
|
||||
sync_component::SyncComponentPlugin,
|
||||
sync_world::RenderEntity,
|
||||
texture::{CachedTexture, TextureCache},
|
||||
view::{ExtractedView, Msaa, ViewTarget},
|
||||
ExtractSchedule, MainWorld, Render, RenderApp, RenderSystems,
|
||||
ExtractSchedule, MainWorld, Render, RenderApp, RenderStartup, RenderSystems,
|
||||
};
|
||||
use bevy_utils::default;
|
||||
use tracing::warn;
|
||||
|
||||
/// Plugin for temporal anti-aliasing.
|
||||
@ -58,6 +59,7 @@ impl Plugin for TemporalAntiAliasPlugin {
|
||||
};
|
||||
render_app
|
||||
.init_resource::<SpecializedRenderPipelines<TaaPipeline>>()
|
||||
.add_systems(RenderStartup, init_taa_pipeline)
|
||||
.add_systems(ExtractSchedule, extract_taa_settings)
|
||||
.add_systems(
|
||||
Render,
|
||||
@ -79,14 +81,6 @@ impl Plugin for TemporalAntiAliasPlugin {
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
fn finish(&self, app: &mut App) {
|
||||
let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
|
||||
return;
|
||||
};
|
||||
|
||||
render_app.init_resource::<TaaPipeline>();
|
||||
}
|
||||
}
|
||||
|
||||
/// Component to apply temporal anti-aliasing to a 3D perspective camera.
|
||||
@ -242,52 +236,53 @@ struct TaaPipeline {
|
||||
fragment_shader: Handle<Shader>,
|
||||
}
|
||||
|
||||
impl FromWorld for TaaPipeline {
|
||||
fn from_world(world: &mut World) -> Self {
|
||||
let render_device = world.resource::<RenderDevice>();
|
||||
fn init_taa_pipeline(
|
||||
mut commands: Commands,
|
||||
render_device: Res<RenderDevice>,
|
||||
fullscreen_shader: Res<FullscreenShader>,
|
||||
asset_server: Res<AssetServer>,
|
||||
) {
|
||||
let nearest_sampler = render_device.create_sampler(&SamplerDescriptor {
|
||||
label: Some("taa_nearest_sampler"),
|
||||
mag_filter: FilterMode::Nearest,
|
||||
min_filter: FilterMode::Nearest,
|
||||
..SamplerDescriptor::default()
|
||||
});
|
||||
let linear_sampler = render_device.create_sampler(&SamplerDescriptor {
|
||||
label: Some("taa_linear_sampler"),
|
||||
mag_filter: FilterMode::Linear,
|
||||
min_filter: FilterMode::Linear,
|
||||
..SamplerDescriptor::default()
|
||||
});
|
||||
|
||||
let nearest_sampler = render_device.create_sampler(&SamplerDescriptor {
|
||||
label: Some("taa_nearest_sampler"),
|
||||
mag_filter: FilterMode::Nearest,
|
||||
min_filter: FilterMode::Nearest,
|
||||
..SamplerDescriptor::default()
|
||||
});
|
||||
let linear_sampler = render_device.create_sampler(&SamplerDescriptor {
|
||||
label: Some("taa_linear_sampler"),
|
||||
mag_filter: FilterMode::Linear,
|
||||
min_filter: FilterMode::Linear,
|
||||
..SamplerDescriptor::default()
|
||||
});
|
||||
|
||||
let taa_bind_group_layout = render_device.create_bind_group_layout(
|
||||
"taa_bind_group_layout",
|
||||
&BindGroupLayoutEntries::sequential(
|
||||
ShaderStages::FRAGMENT,
|
||||
(
|
||||
// View target (read)
|
||||
texture_2d(TextureSampleType::Float { filterable: true }),
|
||||
// TAA History (read)
|
||||
texture_2d(TextureSampleType::Float { filterable: true }),
|
||||
// Motion Vectors
|
||||
texture_2d(TextureSampleType::Float { filterable: true }),
|
||||
// Depth
|
||||
texture_depth_2d(),
|
||||
// Nearest sampler
|
||||
sampler(SamplerBindingType::NonFiltering),
|
||||
// Linear sampler
|
||||
sampler(SamplerBindingType::Filtering),
|
||||
),
|
||||
let taa_bind_group_layout = render_device.create_bind_group_layout(
|
||||
"taa_bind_group_layout",
|
||||
&BindGroupLayoutEntries::sequential(
|
||||
ShaderStages::FRAGMENT,
|
||||
(
|
||||
// View target (read)
|
||||
texture_2d(TextureSampleType::Float { filterable: true }),
|
||||
// TAA History (read)
|
||||
texture_2d(TextureSampleType::Float { filterable: true }),
|
||||
// Motion Vectors
|
||||
texture_2d(TextureSampleType::Float { filterable: true }),
|
||||
// Depth
|
||||
texture_depth_2d(),
|
||||
// Nearest sampler
|
||||
sampler(SamplerBindingType::NonFiltering),
|
||||
// Linear sampler
|
||||
sampler(SamplerBindingType::Filtering),
|
||||
),
|
||||
);
|
||||
),
|
||||
);
|
||||
|
||||
TaaPipeline {
|
||||
taa_bind_group_layout,
|
||||
nearest_sampler,
|
||||
linear_sampler,
|
||||
fullscreen_shader: world.resource::<FullscreenShader>().clone(),
|
||||
fragment_shader: load_embedded_asset!(world, "taa.wgsl"),
|
||||
}
|
||||
}
|
||||
commands.insert_resource(TaaPipeline {
|
||||
taa_bind_group_layout,
|
||||
nearest_sampler,
|
||||
linear_sampler,
|
||||
fullscreen_shader: fullscreen_shader.clone(),
|
||||
fragment_shader: load_embedded_asset!(asset_server.as_ref(), "taa.wgsl"),
|
||||
});
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Hash, Clone)]
|
||||
@ -320,7 +315,6 @@ impl SpecializedRenderPipeline for TaaPipeline {
|
||||
fragment: Some(FragmentState {
|
||||
shader: self.fragment_shader.clone(),
|
||||
shader_defs,
|
||||
entry_point: "taa".into(),
|
||||
targets: vec![
|
||||
Some(ColorTargetState {
|
||||
format,
|
||||
@ -333,12 +327,9 @@ impl SpecializedRenderPipeline for TaaPipeline {
|
||||
write_mask: ColorWrites::ALL,
|
||||
}),
|
||||
],
|
||||
..default()
|
||||
}),
|
||||
primitive: PrimitiveState::default(),
|
||||
depth_stencil: None,
|
||||
multisample: MultisampleState::default(),
|
||||
push_constant_ranges: Vec::new(),
|
||||
zero_initialize_workgroup_memory: false,
|
||||
..default()
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -420,11 +411,7 @@ fn prepare_taa_history_textures(
|
||||
if let Some(physical_target_size) = camera.physical_target_size {
|
||||
let mut texture_descriptor = TextureDescriptor {
|
||||
label: None,
|
||||
size: Extent3d {
|
||||
depth_or_array_layers: 1,
|
||||
width: physical_target_size.x,
|
||||
height: physical_target_size.y,
|
||||
},
|
||||
size: physical_target_size.to_extents(),
|
||||
mip_level_count: 1,
|
||||
sample_count: 1,
|
||||
dimension: TextureDimension::D2,
|
||||
|
@ -160,7 +160,7 @@ impl TaskPoolOptions {
|
||||
pub fn create_default_pools(&self) {
|
||||
let total_threads = bevy_tasks::available_parallelism()
|
||||
.clamp(self.min_total_threads, self.max_total_threads);
|
||||
trace!("Assigning {} cores to default task pools", total_threads);
|
||||
trace!("Assigning {total_threads} cores to default task pools");
|
||||
|
||||
let mut remaining_threads = total_threads;
|
||||
|
||||
@ -170,7 +170,7 @@ impl TaskPoolOptions {
|
||||
.io
|
||||
.get_number_of_threads(remaining_threads, total_threads);
|
||||
|
||||
trace!("IO Threads: {}", io_threads);
|
||||
trace!("IO Threads: {io_threads}");
|
||||
remaining_threads = remaining_threads.saturating_sub(io_threads);
|
||||
|
||||
IoTaskPool::get_or_init(|| {
|
||||
@ -200,7 +200,7 @@ impl TaskPoolOptions {
|
||||
.async_compute
|
||||
.get_number_of_threads(remaining_threads, total_threads);
|
||||
|
||||
trace!("Async Compute Threads: {}", async_compute_threads);
|
||||
trace!("Async Compute Threads: {async_compute_threads}");
|
||||
remaining_threads = remaining_threads.saturating_sub(async_compute_threads);
|
||||
|
||||
AsyncComputeTaskPool::get_or_init(|| {
|
||||
@ -231,7 +231,7 @@ impl TaskPoolOptions {
|
||||
.compute
|
||||
.get_number_of_threads(remaining_threads, total_threads);
|
||||
|
||||
trace!("Compute Threads: {}", compute_threads);
|
||||
trace!("Compute Threads: {compute_threads}");
|
||||
|
||||
ComputeTaskPool::get_or_init(|| {
|
||||
let builder = TaskPoolBuilder::default()
|
||||
|
@ -7,10 +7,12 @@ use bevy_reflect::{std_traits::ReflectDefault, Reflect, TypePath};
|
||||
use core::{
|
||||
any::TypeId,
|
||||
hash::{Hash, Hasher},
|
||||
marker::PhantomData,
|
||||
};
|
||||
use crossbeam_channel::{Receiver, Sender};
|
||||
use disqualified::ShortName;
|
||||
use thiserror::Error;
|
||||
use uuid::Uuid;
|
||||
|
||||
/// Provides [`Handle`] and [`UntypedHandle`] _for a specific asset type_.
|
||||
/// This should _only_ be used for one specific asset type.
|
||||
@ -117,7 +119,7 @@ impl core::fmt::Debug for StrongHandle {
|
||||
/// avoiding the need to store multiple copies of the same data.
|
||||
///
|
||||
/// If a [`Handle`] is [`Handle::Strong`], the [`Asset`] will be kept
|
||||
/// alive until the [`Handle`] is dropped. If a [`Handle`] is [`Handle::Weak`], it does not necessarily reference a live [`Asset`],
|
||||
/// alive until the [`Handle`] is dropped. If a [`Handle`] is [`Handle::Uuid`], it does not necessarily reference a live [`Asset`],
|
||||
/// nor will it keep assets alive.
|
||||
///
|
||||
/// Modifying a *handle* will change which existing asset is referenced, but modifying the *asset*
|
||||
@ -133,16 +135,16 @@ pub enum Handle<A: Asset> {
|
||||
/// A "strong" reference to a live (or loading) [`Asset`]. If a [`Handle`] is [`Handle::Strong`], the [`Asset`] will be kept
|
||||
/// alive until the [`Handle`] is dropped. Strong handles also provide access to additional asset metadata.
|
||||
Strong(Arc<StrongHandle>),
|
||||
/// A "weak" reference to an [`Asset`]. If a [`Handle`] is [`Handle::Weak`], it does not necessarily reference a live [`Asset`],
|
||||
/// nor will it keep assets alive.
|
||||
Weak(AssetId<A>),
|
||||
/// A reference to an [`Asset`] using a stable-across-runs / const identifier. Dropping this
|
||||
/// handle will not result in the asset being dropped.
|
||||
Uuid(Uuid, #[reflect(ignore, clone)] PhantomData<fn() -> A>),
|
||||
}
|
||||
|
||||
impl<T: Asset> Clone for Handle<T> {
|
||||
fn clone(&self) -> Self {
|
||||
match self {
|
||||
Handle::Strong(handle) => Handle::Strong(handle.clone()),
|
||||
Handle::Weak(id) => Handle::Weak(*id),
|
||||
Handle::Uuid(uuid, ..) => Handle::Uuid(*uuid, PhantomData),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -153,7 +155,7 @@ impl<A: Asset> Handle<A> {
|
||||
pub fn id(&self) -> AssetId<A> {
|
||||
match self {
|
||||
Handle::Strong(handle) => handle.id.typed_unchecked(),
|
||||
Handle::Weak(id) => *id,
|
||||
Handle::Uuid(uuid, ..) => AssetId::Uuid { uuid: *uuid },
|
||||
}
|
||||
}
|
||||
|
||||
@ -162,14 +164,14 @@ impl<A: Asset> Handle<A> {
|
||||
pub fn path(&self) -> Option<&AssetPath<'static>> {
|
||||
match self {
|
||||
Handle::Strong(handle) => handle.path.as_ref(),
|
||||
Handle::Weak(_) => None,
|
||||
Handle::Uuid(..) => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if this is a weak handle.
|
||||
/// Returns `true` if this is a uuid handle.
|
||||
#[inline]
|
||||
pub fn is_weak(&self) -> bool {
|
||||
matches!(self, Handle::Weak(_))
|
||||
pub fn is_uuid(&self) -> bool {
|
||||
matches!(self, Handle::Uuid(..))
|
||||
}
|
||||
|
||||
/// Returns `true` if this is a strong handle.
|
||||
@ -178,18 +180,9 @@ impl<A: Asset> Handle<A> {
|
||||
matches!(self, Handle::Strong(_))
|
||||
}
|
||||
|
||||
/// Creates a [`Handle::Weak`] clone of this [`Handle`], which will not keep the referenced [`Asset`] alive.
|
||||
#[inline]
|
||||
pub fn clone_weak(&self) -> Self {
|
||||
match self {
|
||||
Handle::Strong(handle) => Handle::Weak(handle.id.typed_unchecked::<A>()),
|
||||
Handle::Weak(id) => Handle::Weak(*id),
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts this [`Handle`] to an "untyped" / "generic-less" [`UntypedHandle`], which stores the [`Asset`] type information
|
||||
/// _inside_ [`UntypedHandle`]. This will return [`UntypedHandle::Strong`] for [`Handle::Strong`] and [`UntypedHandle::Weak`] for
|
||||
/// [`Handle::Weak`].
|
||||
/// _inside_ [`UntypedHandle`]. This will return [`UntypedHandle::Strong`] for [`Handle::Strong`] and [`UntypedHandle::Uuid`] for
|
||||
/// [`Handle::Uuid`].
|
||||
#[inline]
|
||||
pub fn untyped(self) -> UntypedHandle {
|
||||
self.into()
|
||||
@ -198,7 +191,7 @@ impl<A: Asset> Handle<A> {
|
||||
|
||||
impl<A: Asset> Default for Handle<A> {
|
||||
fn default() -> Self {
|
||||
Handle::Weak(AssetId::default())
|
||||
Handle::Uuid(AssetId::<A>::DEFAULT_UUID, PhantomData)
|
||||
}
|
||||
}
|
||||
|
||||
@ -214,7 +207,7 @@ impl<A: Asset> core::fmt::Debug for Handle<A> {
|
||||
handle.path
|
||||
)
|
||||
}
|
||||
Handle::Weak(id) => write!(f, "WeakHandle<{name}>({:?})", id.internal()),
|
||||
Handle::Uuid(uuid, ..) => write!(f, "UuidHandle<{name}>({uuid:?})"),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -284,8 +277,13 @@ impl<A: Asset> From<&mut Handle<A>> for UntypedAssetId {
|
||||
pub enum UntypedHandle {
|
||||
/// A strong handle, which will keep the referenced [`Asset`] alive until all strong handles are dropped.
|
||||
Strong(Arc<StrongHandle>),
|
||||
/// A weak handle, which does not keep the referenced [`Asset`] alive.
|
||||
Weak(UntypedAssetId),
|
||||
/// A UUID handle, which does not keep the referenced [`Asset`] alive.
|
||||
Uuid {
|
||||
/// An identifier that records the underlying asset type.
|
||||
type_id: TypeId,
|
||||
/// The UUID provided during asset registration.
|
||||
uuid: Uuid,
|
||||
},
|
||||
}
|
||||
|
||||
impl UntypedHandle {
|
||||
@ -294,7 +292,10 @@ impl UntypedHandle {
|
||||
pub fn id(&self) -> UntypedAssetId {
|
||||
match self {
|
||||
UntypedHandle::Strong(handle) => handle.id,
|
||||
UntypedHandle::Weak(id) => *id,
|
||||
UntypedHandle::Uuid { type_id, uuid } => UntypedAssetId::Uuid {
|
||||
uuid: *uuid,
|
||||
type_id: *type_id,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@ -303,16 +304,7 @@ impl UntypedHandle {
|
||||
pub fn path(&self) -> Option<&AssetPath<'static>> {
|
||||
match self {
|
||||
UntypedHandle::Strong(handle) => handle.path.as_ref(),
|
||||
UntypedHandle::Weak(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates an [`UntypedHandle::Weak`] clone of this [`UntypedHandle`], which will not keep the referenced [`Asset`] alive.
|
||||
#[inline]
|
||||
pub fn clone_weak(&self) -> UntypedHandle {
|
||||
match self {
|
||||
UntypedHandle::Strong(handle) => UntypedHandle::Weak(handle.id),
|
||||
UntypedHandle::Weak(id) => UntypedHandle::Weak(*id),
|
||||
UntypedHandle::Uuid { .. } => None,
|
||||
}
|
||||
}
|
||||
|
||||
@ -321,7 +313,7 @@ impl UntypedHandle {
|
||||
pub fn type_id(&self) -> TypeId {
|
||||
match self {
|
||||
UntypedHandle::Strong(handle) => handle.id.type_id(),
|
||||
UntypedHandle::Weak(id) => id.type_id(),
|
||||
UntypedHandle::Uuid { type_id, .. } => *type_id,
|
||||
}
|
||||
}
|
||||
|
||||
@ -330,7 +322,7 @@ impl UntypedHandle {
|
||||
pub fn typed_unchecked<A: Asset>(self) -> Handle<A> {
|
||||
match self {
|
||||
UntypedHandle::Strong(handle) => Handle::Strong(handle),
|
||||
UntypedHandle::Weak(id) => Handle::Weak(id.typed_unchecked::<A>()),
|
||||
UntypedHandle::Uuid { uuid, .. } => Handle::Uuid(uuid, PhantomData),
|
||||
}
|
||||
}
|
||||
|
||||
@ -345,10 +337,7 @@ impl UntypedHandle {
|
||||
TypeId::of::<A>(),
|
||||
"The target Handle<A>'s TypeId does not match the TypeId of this UntypedHandle"
|
||||
);
|
||||
match self {
|
||||
UntypedHandle::Strong(handle) => Handle::Strong(handle),
|
||||
UntypedHandle::Weak(id) => Handle::Weak(id.typed_unchecked::<A>()),
|
||||
}
|
||||
self.typed_unchecked()
|
||||
}
|
||||
|
||||
/// Converts to a typed Handle. This will panic if the internal [`TypeId`] does not match the given asset type `A`
|
||||
@ -376,7 +365,7 @@ impl UntypedHandle {
|
||||
pub fn meta_transform(&self) -> Option<&MetaTransform> {
|
||||
match self {
|
||||
UntypedHandle::Strong(handle) => handle.meta_transform.as_ref(),
|
||||
UntypedHandle::Weak(_) => None,
|
||||
UntypedHandle::Uuid { .. } => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -409,12 +398,9 @@ impl core::fmt::Debug for UntypedHandle {
|
||||
handle.path
|
||||
)
|
||||
}
|
||||
UntypedHandle::Weak(id) => write!(
|
||||
f,
|
||||
"WeakHandle{{ type_id: {:?}, id: {:?} }}",
|
||||
id.type_id(),
|
||||
id.internal()
|
||||
),
|
||||
UntypedHandle::Uuid { type_id, uuid } => {
|
||||
write!(f, "UuidHandle{{ type_id: {type_id:?}, uuid: {uuid:?} }}",)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -474,7 +460,10 @@ impl<A: Asset> From<Handle<A>> for UntypedHandle {
|
||||
fn from(value: Handle<A>) -> Self {
|
||||
match value {
|
||||
Handle::Strong(handle) => UntypedHandle::Strong(handle),
|
||||
Handle::Weak(id) => UntypedHandle::Weak(id.into()),
|
||||
Handle::Uuid(uuid, _) => UntypedHandle::Uuid {
|
||||
type_id: TypeId::of::<A>(),
|
||||
uuid,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -490,36 +479,37 @@ impl<A: Asset> TryFrom<UntypedHandle> for Handle<A> {
|
||||
return Err(UntypedAssetConversionError::TypeIdMismatch { expected, found });
|
||||
}
|
||||
|
||||
match value {
|
||||
UntypedHandle::Strong(handle) => Ok(Handle::Strong(handle)),
|
||||
UntypedHandle::Weak(id) => {
|
||||
let Ok(id) = id.try_into() else {
|
||||
return Err(UntypedAssetConversionError::TypeIdMismatch { expected, found });
|
||||
};
|
||||
Ok(Handle::Weak(id))
|
||||
}
|
||||
}
|
||||
Ok(match value {
|
||||
UntypedHandle::Strong(handle) => Handle::Strong(handle),
|
||||
UntypedHandle::Uuid { uuid, .. } => Handle::Uuid(uuid, PhantomData),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a weak [`Handle`] from a string literal containing a UUID.
/// Creates a [`Handle`] from a string literal containing a UUID.
///
/// # Examples
///
/// ```
/// # use bevy_asset::{Handle, weak_handle};
/// # use bevy_asset::{Handle, uuid_handle};
/// # type Shader = ();
/// const SHADER: Handle<Shader> = weak_handle!("1347c9b7-c46a-48e7-b7b8-023a354b7cac");
/// const SHADER: Handle<Shader> = uuid_handle!("1347c9b7-c46a-48e7-b7b8-023a354b7cac");
/// ```
#[macro_export]
macro_rules! weak_handle {
macro_rules! uuid_handle {
($uuid:expr) => {{
$crate::Handle::Weak($crate::AssetId::Uuid {
uuid: $crate::uuid::uuid!($uuid),
})
$crate::Handle::Uuid($crate::uuid::uuid!($uuid), core::marker::PhantomData)
}};
}

#[deprecated = "Use uuid_handle! instead"]
#[macro_export]
macro_rules! weak_handle {
($uuid:expr) => {
uuid_handle!($uuid)
};
}
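As a quick illustration of the replacement macro (the constant name and UUID below are made up, not from this diff):

use bevy_asset::{uuid_handle, Handle};
use bevy_image::Image;

// A const handle that identifies an asset by UUID; like the old weak handles,
// it does not keep the asset alive.
const PLACEHOLDER_IMAGE: Handle<Image> =
    uuid_handle!("7a9f3c52-1d2e-4b8a-9c64-0f1e2d3c4b5a");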
|
||||
|
||||
/// Errors preventing the conversion of to/from an [`UntypedHandle`] and a [`Handle`].
|
||||
#[derive(Error, Debug, PartialEq, Clone)]
|
||||
#[non_exhaustive]
|
||||
@ -559,15 +549,12 @@ mod tests {
|
||||
/// Typed and Untyped `Handles` should be equivalent to each other and themselves
|
||||
#[test]
|
||||
fn equality() {
|
||||
let typed = AssetId::<TestAsset>::Uuid { uuid: UUID_1 };
|
||||
let untyped = UntypedAssetId::Uuid {
|
||||
let typed = Handle::<TestAsset>::Uuid(UUID_1, PhantomData);
|
||||
let untyped = UntypedHandle::Uuid {
|
||||
type_id: TypeId::of::<TestAsset>(),
|
||||
uuid: UUID_1,
|
||||
};
|
||||
|
||||
let typed = Handle::Weak(typed);
|
||||
let untyped = UntypedHandle::Weak(untyped);
|
||||
|
||||
assert_eq!(
|
||||
Ok(typed.clone()),
|
||||
Handle::<TestAsset>::try_from(untyped.clone())
|
||||
@ -585,22 +572,17 @@ mod tests {
|
||||
fn ordering() {
|
||||
assert!(UUID_1 < UUID_2);
|
||||
|
||||
let typed_1 = AssetId::<TestAsset>::Uuid { uuid: UUID_1 };
|
||||
let typed_2 = AssetId::<TestAsset>::Uuid { uuid: UUID_2 };
|
||||
let untyped_1 = UntypedAssetId::Uuid {
|
||||
let typed_1 = Handle::<TestAsset>::Uuid(UUID_1, PhantomData);
|
||||
let typed_2 = Handle::<TestAsset>::Uuid(UUID_2, PhantomData);
|
||||
let untyped_1 = UntypedHandle::Uuid {
|
||||
type_id: TypeId::of::<TestAsset>(),
|
||||
uuid: UUID_1,
|
||||
};
|
||||
let untyped_2 = UntypedAssetId::Uuid {
|
||||
let untyped_2 = UntypedHandle::Uuid {
|
||||
type_id: TypeId::of::<TestAsset>(),
|
||||
uuid: UUID_2,
|
||||
};
|
||||
|
||||
let typed_1 = Handle::Weak(typed_1);
|
||||
let typed_2 = Handle::Weak(typed_2);
|
||||
let untyped_1 = UntypedHandle::Weak(untyped_1);
|
||||
let untyped_2 = UntypedHandle::Weak(untyped_2);
|
||||
|
||||
assert!(typed_1 < typed_2);
|
||||
assert!(untyped_1 < untyped_2);
|
||||
|
||||
@ -617,15 +599,12 @@ mod tests {
|
||||
/// Typed and Untyped `Handles` should be equivalently hashable to each other and themselves
|
||||
#[test]
|
||||
fn hashing() {
|
||||
let typed = AssetId::<TestAsset>::Uuid { uuid: UUID_1 };
|
||||
let untyped = UntypedAssetId::Uuid {
|
||||
let typed = Handle::<TestAsset>::Uuid(UUID_1, PhantomData);
|
||||
let untyped = UntypedHandle::Uuid {
|
||||
type_id: TypeId::of::<TestAsset>(),
|
||||
uuid: UUID_1,
|
||||
};
|
||||
|
||||
let typed = Handle::Weak(typed);
|
||||
let untyped = UntypedHandle::Weak(untyped);
|
||||
|
||||
assert_eq!(
|
||||
hash(&typed),
|
||||
hash(&Handle::<TestAsset>::try_from(untyped.clone()).unwrap())
|
||||
@ -637,15 +616,12 @@ mod tests {
|
||||
/// Typed and Untyped `Handles` should be interchangeable
|
||||
#[test]
|
||||
fn conversion() {
|
||||
let typed = AssetId::<TestAsset>::Uuid { uuid: UUID_1 };
|
||||
let untyped = UntypedAssetId::Uuid {
|
||||
let typed = Handle::<TestAsset>::Uuid(UUID_1, PhantomData);
|
||||
let untyped = UntypedHandle::Uuid {
|
||||
type_id: TypeId::of::<TestAsset>(),
|
||||
uuid: UUID_1,
|
||||
};
|
||||
|
||||
let typed = Handle::Weak(typed);
|
||||
let untyped = UntypedHandle::Weak(untyped);
|
||||
|
||||
assert_eq!(typed, Handle::try_from(untyped.clone()).unwrap());
|
||||
assert_eq!(UntypedHandle::from(typed.clone()), untyped);
|
||||
}
|
||||
|
@ -47,7 +47,7 @@ fn js_value_to_err(context: &str) -> impl FnOnce(JsValue) -> std::io::Error + '_
|
||||
}
|
||||
};
|
||||
|
||||
std::io::Error::new(std::io::ErrorKind::Other, message)
|
||||
std::io::Error::other(message)
|
||||
}
|
||||
}
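The same simplification applies anywhere an `ErrorKind::Other` error is built by hand; a standalone illustration:

use std::io;

// Equivalent to io::Error::new(io::ErrorKind::Other, message), just shorter.
fn make_error(message: String) -> io::Error {
    io::Error::other(message)
}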
|
||||
|
||||
@ -62,10 +62,7 @@ impl HttpWasmAssetReader {
|
||||
let worker: web_sys::WorkerGlobalScope = global.unchecked_into();
|
||||
worker.fetch_with_str(path.to_str().unwrap())
|
||||
} else {
|
||||
let error = std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
"Unsupported JavaScript global context",
|
||||
);
|
||||
let error = std::io::Error::other("Unsupported JavaScript global context");
|
||||
return Err(AssetReaderError::Io(error.into()));
|
||||
};
|
||||
let resp_value = JsFuture::from(promise)
|
||||
|
@ -2000,4 +2000,92 @@ mod tests {
|
||||
|
||||
app.world_mut().run_schedule(Update);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[ignore = "blocked on https://github.com/bevyengine/bevy/issues/11111"]
|
||||
fn same_asset_different_settings() {
|
||||
// Test loading the same asset twice with different settings. This should
|
||||
// produce two distinct assets.
|
||||
|
||||
// First, implement an asset that's a single u8, whose value is copied from
|
||||
// the loader settings.
|
||||
|
||||
#[derive(Asset, TypePath)]
|
||||
struct U8Asset(u8);
|
||||
|
||||
#[derive(Serialize, Deserialize, Default)]
|
||||
struct U8LoaderSettings(u8);
|
||||
|
||||
struct U8Loader;
|
||||
|
||||
impl AssetLoader for U8Loader {
|
||||
type Asset = U8Asset;
|
||||
type Settings = U8LoaderSettings;
|
||||
type Error = crate::loader::LoadDirectError;
|
||||
|
||||
async fn load(
|
||||
&self,
|
||||
_: &mut dyn Reader,
|
||||
settings: &Self::Settings,
|
||||
_: &mut LoadContext<'_>,
|
||||
) -> Result<Self::Asset, Self::Error> {
|
||||
Ok(U8Asset(settings.0))
|
||||
}
|
||||
|
||||
fn extensions(&self) -> &[&str] {
|
||||
&["u8"]
|
||||
}
|
||||
}
|
||||
|
||||
// Create a test asset.
|
||||
|
||||
let dir = Dir::default();
|
||||
dir.insert_asset(Path::new("test.u8"), &[]);
|
||||
|
||||
let asset_source = AssetSource::build()
|
||||
.with_reader(move || Box::new(MemoryAssetReader { root: dir.clone() }));
|
||||
|
||||
// Set up the app.
|
||||
|
||||
let mut app = App::new();
|
||||
|
||||
app.register_asset_source(AssetSourceId::Default, asset_source)
|
||||
.add_plugins((TaskPoolPlugin::default(), AssetPlugin::default()))
|
||||
.init_asset::<U8Asset>()
|
||||
.register_asset_loader(U8Loader);
|
||||
|
||||
let asset_server = app.world().resource::<AssetServer>();
|
||||
|
||||
// Load the test asset twice but with different settings.
|
||||
|
||||
fn load(asset_server: &AssetServer, path: &str, value: u8) -> Handle<U8Asset> {
|
||||
asset_server.load_with_settings::<U8Asset, U8LoaderSettings>(
|
||||
path,
|
||||
move |s: &mut U8LoaderSettings| s.0 = value,
|
||||
)
|
||||
}
|
||||
|
||||
let handle_1 = load(asset_server, "test.u8", 1);
|
||||
let handle_2 = load(asset_server, "test.u8", 2);
|
||||
|
||||
// Handles should be different.
|
||||
|
||||
assert_ne!(handle_1, handle_2);
|
||||
|
||||
run_app_until(&mut app, |world| {
|
||||
let (Some(asset_1), Some(asset_2)) = (
|
||||
world.resource::<Assets<U8Asset>>().get(&handle_1),
|
||||
world.resource::<Assets<U8Asset>>().get(&handle_2),
|
||||
) else {
|
||||
return None;
|
||||
};
|
||||
|
||||
// Values should match the settings.
|
||||
|
||||
assert_eq!(asset_1.0, 1);
|
||||
assert_eq!(asset_2.0, 2);
|
||||
|
||||
Some(())
|
||||
});
|
||||
}
|
||||
}
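Outside of tests, `load_with_settings` is the same API end users reach for. A hedged sketch using the image loader's settings; the path, module paths, and the exact settings type and field are assumptions, not part of this diff.

use bevy_asset::{AssetServer, Handle};
use bevy_ecs::prelude::Res;
use bevy_image::{Image, ImageLoaderSettings};

fn load_linear_texture(asset_server: Res<AssetServer>) -> Handle<Image> {
    // Two loads of the same path with different settings should yield distinct
    // assets once the issue referenced above is resolved.
    asset_server.load_with_settings("textures/noise.png", |settings: &mut ImageLoaderSettings| {
        settings.is_srgb = false;
    })
}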
|
||||
|
@ -553,7 +553,9 @@ impl AssetServer {
|
||||
path: impl Into<AssetPath<'a>>,
|
||||
) -> Result<UntypedHandle, AssetLoadError> {
|
||||
let path: AssetPath = path.into();
|
||||
self.load_internal(None, path, false, None).await
|
||||
self.load_internal(None, path, false, None)
|
||||
.await
|
||||
.map(|h| h.expect("handle must be returned, since we didn't pass in an input handle"))
|
||||
}
|
||||
|
||||
pub(crate) fn load_unknown_type_with_meta_transform<'a>(
|
||||
@ -643,21 +645,25 @@ impl AssetServer {
|
||||
|
||||
/// Performs an async asset load.
|
||||
///
|
||||
/// `input_handle` must only be [`Some`] if `should_load` was true when retrieving `input_handle`. This is an optimization to
|
||||
/// avoid looking up `should_load` twice, but it means you _must_ be sure a load is necessary when calling this function with [`Some`].
|
||||
/// `input_handle` must only be [`Some`] if `should_load` was true when retrieving
|
||||
/// `input_handle`. This is an optimization to avoid looking up `should_load` twice, but it
|
||||
/// means you _must_ be sure a load is necessary when calling this function with [`Some`].
|
||||
///
|
||||
/// Returns the handle of the asset if one was retrieved by this function. Otherwise, may return
|
||||
/// [`None`].
|
||||
async fn load_internal<'a>(
|
||||
&self,
|
||||
mut input_handle: Option<UntypedHandle>,
|
||||
input_handle: Option<UntypedHandle>,
|
||||
path: AssetPath<'a>,
|
||||
force: bool,
|
||||
meta_transform: Option<MetaTransform>,
|
||||
) -> Result<UntypedHandle, AssetLoadError> {
let asset_type_id = input_handle.as_ref().map(UntypedHandle::type_id);
) -> Result<Option<UntypedHandle>, AssetLoadError> {
let input_handle_type_id = input_handle.as_ref().map(UntypedHandle::type_id);

let path = path.into_owned();
let path_clone = path.clone();
let (mut meta, loader, mut reader) = self
.get_meta_loader_and_reader(&path_clone, asset_type_id)
.get_meta_loader_and_reader(&path_clone, input_handle_type_id)
.await
.inspect_err(|e| {
// if there was an input handle, a "load" operation has already started, so we must produce a "failure" event, if
@ -674,76 +680,90 @@ impl AssetServer {
if let Some(meta_transform) = input_handle.as_ref().and_then(|h| h.meta_transform()) {
(*meta_transform)(&mut *meta);
}
// downgrade the input handle so we don't keep the asset alive just because we're loading it
// note we can't just pass a weak handle in, as only strong handles contain the asset meta transform
input_handle = input_handle.map(|h| h.clone_weak());

// This contains Some(UntypedHandle), if it was retrievable
// If it is None, that is because it was _not_ retrievable, due to
// 1. The handle was not already passed in for this path, meaning we can't just use that
// 2. The asset has not been loaded yet, meaning there is no existing Handle for it
// 3. The path has a label, meaning the AssetLoader's root asset type is not the path's asset type
//
// In the None case, the only course of action is to wait for the asset to load so we can allocate the
// handle for that type.
//
// TODO: Note that in the None case, multiple asset loads for the same path can happen at the same time
// (rather than "early out-ing" in the "normal" case)
// This would be resolved by a universal asset id, as we would not need to resolve the asset type
// to generate the ID. See this issue: https://github.com/bevyengine/bevy/issues/10549
let handle_result = match input_handle {
Some(handle) => {
// if a handle was passed in, the "should load" check was already done
Some((handle, true))
}
None => {
let mut infos = self.data.infos.write();
let result = infos.get_or_create_path_handle_internal(
path.clone(),
path.label().is_none().then(|| loader.asset_type_id()),
HandleLoadingMode::Request,
meta_transform,
);
unwrap_with_context(result, Either::Left(loader.asset_type_name()))
}
};
let asset_id; // The asset ID of the asset we are trying to load.
let fetched_handle; // The handle if one was looked up/created.
let should_load; // Whether we need to load the asset.
if let Some(input_handle) = input_handle {
asset_id = Some(input_handle.id());
// In this case, we intentionally drop the input handle so we can cancel loading the
// asset if the handle gets dropped (externally) before it finishes loading.
fetched_handle = None;
// The handle was passed in, so the "should_load" check was already done.
should_load = true;
} else {
// TODO: multiple asset loads for the same path can happen at the same time (rather than
// "early out-ing" in the "normal" case). This would be resolved by a universal asset
// id, as we would not need to resolve the asset type to generate the ID. See this
// issue: https://github.com/bevyengine/bevy/issues/10549

let handle = if let Some((handle, should_load)) = handle_result {
if path.label().is_none() && handle.type_id() != loader.asset_type_id() {
let mut infos = self.data.infos.write();
let result = infos.get_or_create_path_handle_internal(
path.clone(),
path.label().is_none().then(|| loader.asset_type_id()),
HandleLoadingMode::Request,
meta_transform,
);
match unwrap_with_context(result, Either::Left(loader.asset_type_name())) {
// We couldn't figure out the correct handle without its type ID (which can only
// happen if we are loading a subasset).
None => {
// We don't know the expected type since the subasset may have a different type
// than the "root" asset (which is the type the loader will load).
asset_id = None;
fetched_handle = None;
// If we couldn't find an appropriate handle, then the asset certainly needs to
// be loaded.
should_load = true;
}
Some((handle, result_should_load)) => {
asset_id = Some(handle.id());
fetched_handle = Some(handle);
should_load = result_should_load;
}
}
}
// Verify that the expected type matches the loader's type.
if let Some(asset_type_id) = asset_id.map(|id| id.type_id()) {
// If we are loading a subasset, then the subasset's type almost certainly doesn't match
// the loader's type - and that's ok.
if path.label().is_none() && asset_type_id != loader.asset_type_id() {
error!(
"Expected {:?}, got {:?}",
handle.type_id(),
asset_type_id,
loader.asset_type_id()
);
return Err(AssetLoadError::RequestedHandleTypeMismatch {
path: path.into_owned(),
requested: handle.type_id(),
requested: asset_type_id,
actual_asset_name: loader.asset_type_name(),
loader_name: loader.type_name(),
});
}
if !should_load && !force {
return Ok(handle);
}
Some(handle)
} else {
None
};
// if the handle result is None, we definitely need to load the asset
}
// Bail out earlier if we don't need to load the asset.
if !should_load && !force {
return Ok(fetched_handle);
}

let (base_handle, base_path) = if path.label().is_some() {
// We don't actually need to use _base_handle, but we do need to keep the handle alive.
// Dropping it would cancel the load of the base asset, which would make the load of this
// subasset never complete.
let (base_asset_id, _base_handle, base_path) = if path.label().is_some() {
let mut infos = self.data.infos.write();
let base_path = path.without_label().into_owned();
let (base_handle, _) = infos.get_or_create_path_handle_erased(
base_path.clone(),
loader.asset_type_id(),
Some(loader.asset_type_name()),
HandleLoadingMode::Force,
None,
);
(base_handle, base_path)
let base_handle = infos
.get_or_create_path_handle_erased(
base_path.clone(),
loader.asset_type_id(),
Some(loader.asset_type_name()),
HandleLoadingMode::Force,
None,
)
.0;
(base_handle.id(), Some(base_handle), base_path)
} else {
(handle.clone().unwrap(), path.clone())
(asset_id.unwrap(), None, path.clone())
};

match self
@ -760,7 +780,7 @@ impl AssetServer {
Ok(loaded_asset) => {
let final_handle = if let Some(label) = path.label_cow() {
match loaded_asset.labeled_assets.get(&label) {
Some(labeled_asset) => labeled_asset.handle.clone(),
Some(labeled_asset) => Some(labeled_asset.handle.clone()),
None => {
let mut all_labels: Vec<String> = loaded_asset
.labeled_assets
@ -776,16 +796,15 @@ impl AssetServer {
}
}
} else {
// if the path does not have a label, the handle must exist at this point
handle.unwrap()
fetched_handle
};

self.send_loaded_asset(base_handle.id(), loaded_asset);
self.send_loaded_asset(base_asset_id, loaded_asset);
Ok(final_handle)
}
Err(err) => {
self.send_asset_event(InternalAssetEvent::Failed {
id: base_handle.id(),
id: base_asset_id,
error: err.clone(),
path: path.into_owned(),
});
@ -1931,7 +1950,7 @@ pub enum AssetLoadError {
base_path,
label,
all_labels.len(),
all_labels.iter().map(|l| format!("'{}'", l)).collect::<Vec<_>>().join(", "))]
all_labels.iter().map(|l| format!("'{l}'")).collect::<Vec<_>>().join(", "))]
MissingLabel {
base_path: AssetPath<'static>,
label: String,
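
The three variables introduced in this hunk (asset_id, fetched_handle, should_load) follow a simple decision table; below is a minimal standalone sketch of that resolution, using a placeholder id type rather than Bevy's actual handle types.

// Standalone sketch of the resolution logic described in the comments above.
// `Id` is a placeholder standing in for Bevy's untyped asset id, not the real type.
type Id = u64;

struct ResolvedLoad {
    asset_id: Option<Id>,        // id of the asset we are trying to load, if known
    fetched_handle: Option<Id>,  // handle we looked up/created, kept alive for the load
    should_load: bool,           // whether the load actually needs to run
}

fn resolve(input_handle: Option<Id>, looked_up: Option<(Id, bool)>) -> ResolvedLoad {
    match (input_handle, looked_up) {
        // A handle was passed in: the caller already made the "should load" decision, and we
        // hold no second handle so dropping the caller's handle can cancel the load.
        (Some(id), _) => ResolvedLoad { asset_id: Some(id), fetched_handle: None, should_load: true },
        // No input handle, but the path lookup produced one (possibly already loading).
        (None, Some((id, should_load))) => ResolvedLoad { asset_id: Some(id), fetched_handle: Some(id), should_load },
        // Labeled subasset with an unknown type: we must run the load to learn the handle's type.
        (None, None) => ResolvedLoad { asset_id: None, fetched_handle: None, should_load: true },
    }
}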

@ -344,17 +344,11 @@ mod tests {

assert!(
db_delta.abs() < 1e-2,
"Expected ~{}dB, got {}dB (delta {})",
db,
db_test,
db_delta
"Expected ~{db}dB, got {db_test}dB (delta {db_delta})",
);
assert!(
linear_relative_delta.abs() < 1e-3,
"Expected ~{}, got {} (relative delta {})",
linear,
linear_test,
linear_relative_delta
"Expected ~{linear}, got {linear_test} (relative delta {linear_relative_delta})",
);
}
}
@ -474,15 +468,11 @@ mod tests {
match (a, b) {
(Decibels(a), Decibels(b)) | (Linear(a), Linear(b)) => assert!(
(a - b).abs() < EPSILON,
"Expected {:?} to be approximately equal to {:?}",
a,
b
"Expected {a:?} to be approximately equal to {b:?}",
),
(a, b) => assert!(
(a.to_decibels() - b.to_decibels()).abs() < EPSILON,
"Expected {:?} to be approximately equal to {:?}",
a,
b
"Expected {a:?} to be approximately equal to {b:?}",
),
}
}
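
For reference, the conversions these volume tests assert against follow the standard amplitude formulas; this is a sketch of those formulas, not necessarily Bevy's exact implementation.

// Standard amplitude dB <-> linear conversions (sketch; function names are illustrative,
// not Bevy's API).
fn to_decibels(linear: f32) -> f32 {
    20.0 * linear.log10()
}

fn to_linear(db: f32) -> f32 {
    10.0_f32.powf(db / 20.0)
}

fn main() {
    // Round-trips stay within the tolerances used by the tests above.
    let db = -6.0_f32;
    assert!((to_decibels(to_linear(db)) - db).abs() < 1e-2);
}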
@ -20,7 +20,7 @@ serde = { version = "1.0", features = [
], default-features = false, optional = true }
thiserror = { version = "2", default-features = false }
derive_more = { version = "2", default-features = false, features = ["from"] }
wgpu-types = { version = "24", default-features = false, optional = true }
wgpu-types = { version = "25", default-features = false, optional = true }
encase = { version = "0.10", default-features = false, optional = true }

[features]

@ -4,7 +4,7 @@ use bevy_ecs::prelude::*;
use bevy_render::{
extract_component::ExtractComponentPlugin,
render_asset::RenderAssetPlugin,
render_graph::RenderGraphApp,
render_graph::RenderGraphExt,
render_resource::{
Buffer, BufferDescriptor, BufferUsages, PipelineCache, SpecializedComputePipelines,
},

@ -10,6 +10,7 @@ use bevy_render::{
renderer::RenderDevice,
view::ViewUniform,
};
use bevy_utils::default;
use core::num::NonZero;

#[derive(Resource)]

@ -82,12 +83,11 @@ impl SpecializedComputePipeline for AutoExposurePipeline {
layout: vec![self.histogram_layout.clone()],
shader: self.histogram_shader.clone(),
shader_defs: vec![],
entry_point: match pass {
entry_point: Some(match pass {
AutoExposurePass::Histogram => "compute_histogram".into(),
AutoExposurePass::Average => "compute_average".into(),
},
push_constant_ranges: vec![],
zero_initialize_workgroup_memory: false,
}),
..default()
}
}
}
@ -1,3 +1,4 @@
use crate::FullscreenShader;
use bevy_app::{App, Plugin};
use bevy_asset::{embedded_asset, load_embedded_asset, Handle};
use bevy_ecs::prelude::*;
@ -9,8 +10,7 @@ use bevy_render::{
renderer::RenderDevice,
RenderApp,
};

use crate::FullscreenShader;
use bevy_utils::default;

/// Adds support for specialized "blit pipelines", which can be used to write one texture to another.
pub struct BlitPlugin;
@ -85,22 +85,18 @@ impl SpecializedRenderPipeline for BlitPipeline {
vertex: self.fullscreen_shader.to_vertex_state(),
fragment: Some(FragmentState {
shader: self.fragment_shader.clone(),
shader_defs: vec![],
entry_point: "fs_main".into(),
targets: vec![Some(ColorTargetState {
format: key.texture_format,
blend: key.blend_state,
write_mask: ColorWrites::ALL,
})],
..default()
}),
primitive: PrimitiveState::default(),
depth_stencil: None,
multisample: MultisampleState {
count: key.samples,
..Default::default()
..default()
},
push_constant_ranges: Vec::new(),
zero_initialize_workgroup_memory: false,
..default()
}
}
}
|
@ -16,6 +16,7 @@ use bevy_render::{
|
||||
},
|
||||
renderer::RenderDevice,
|
||||
};
|
||||
use bevy_utils::default;
|
||||
|
||||
#[derive(Component)]
|
||||
pub struct BloomDownsamplingPipelineIds {
|
||||
@ -130,18 +131,14 @@ impl SpecializedRenderPipeline for BloomDownsamplingPipeline {
|
||||
fragment: Some(FragmentState {
|
||||
shader: self.fragment_shader.clone(),
|
||||
shader_defs,
|
||||
entry_point,
|
||||
entry_point: Some(entry_point),
|
||||
targets: vec![Some(ColorTargetState {
|
||||
format: BLOOM_TEXTURE_FORMAT,
|
||||
blend: None,
|
||||
write_mask: ColorWrites::ALL,
|
||||
})],
|
||||
}),
|
||||
primitive: PrimitiveState::default(),
|
||||
depth_stencil: None,
|
||||
multisample: MultisampleState::default(),
|
||||
push_constant_ranges: Vec::new(),
|
||||
zero_initialize_workgroup_memory: false,
|
||||
..default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2,6 +2,7 @@ mod downsampling_pipeline;
|
||||
mod settings;
|
||||
mod upsampling_pipeline;
|
||||
|
||||
use bevy_image::ToExtents;
|
||||
pub use settings::{Bloom, BloomCompositeMode, BloomPrefilter};
|
||||
|
||||
use crate::{
|
||||
@ -19,7 +20,7 @@ use bevy_render::{
|
||||
extract_component::{
|
||||
ComponentUniforms, DynamicUniformIndex, ExtractComponentPlugin, UniformComponentPlugin,
|
||||
},
|
||||
render_graph::{NodeRunError, RenderGraphApp, RenderGraphContext, ViewNode, ViewNodeRunner},
|
||||
render_graph::{NodeRunError, RenderGraphContext, RenderGraphExt, ViewNode, ViewNodeRunner},
|
||||
render_resource::*,
|
||||
renderer::{RenderContext, RenderDevice},
|
||||
texture::{CachedTexture, TextureCache},
|
||||
@ -347,26 +348,22 @@ fn prepare_bloom_textures(
|
||||
views: Query<(Entity, &ExtractedCamera, &Bloom)>,
|
||||
) {
|
||||
for (entity, camera, bloom) in &views {
|
||||
if let Some(UVec2 {
|
||||
x: width,
|
||||
y: height,
|
||||
}) = camera.physical_viewport_size
|
||||
{
|
||||
if let Some(viewport) = camera.physical_viewport_size {
|
||||
// How many times we can halve the resolution minus one so we don't go unnecessarily low
|
||||
let mip_count = bloom.max_mip_dimension.ilog2().max(2) - 1;
|
||||
let mip_height_ratio = if height != 0 {
|
||||
bloom.max_mip_dimension as f32 / height as f32
|
||||
let mip_height_ratio = if viewport.y != 0 {
|
||||
bloom.max_mip_dimension as f32 / viewport.y as f32
|
||||
} else {
|
||||
0.
|
||||
};
|
||||
|
||||
let texture_descriptor = TextureDescriptor {
|
||||
label: Some("bloom_texture"),
|
||||
size: Extent3d {
|
||||
width: ((width as f32 * mip_height_ratio).round() as u32).max(1),
|
||||
height: ((height as f32 * mip_height_ratio).round() as u32).max(1),
|
||||
depth_or_array_layers: 1,
|
||||
},
|
||||
size: (viewport.as_vec2() * mip_height_ratio)
|
||||
.round()
|
||||
.as_uvec2()
|
||||
.max(UVec2::ONE)
|
||||
.to_extents(),
|
||||
mip_level_count: mip_count,
|
||||
sample_count: 1,
|
||||
dimension: TextureDimension::D2,
|
||||
|
@ -18,6 +18,7 @@ use bevy_render::{
|
||||
renderer::RenderDevice,
|
||||
view::ViewTarget,
|
||||
};
|
||||
use bevy_utils::default;
|
||||
|
||||
#[derive(Component)]
|
||||
pub struct UpsamplingPipelineIds {
|
||||
@ -115,8 +116,7 @@ impl SpecializedRenderPipeline for BloomUpsamplingPipeline {
|
||||
vertex: self.fullscreen_shader.to_vertex_state(),
|
||||
fragment: Some(FragmentState {
|
||||
shader: self.fragment_shader.clone(),
|
||||
shader_defs: vec![],
|
||||
entry_point: "upsample".into(),
|
||||
entry_point: Some("upsample".into()),
|
||||
targets: vec![Some(ColorTargetState {
|
||||
format: texture_format,
|
||||
blend: Some(BlendState {
|
||||
@ -129,12 +129,9 @@ impl SpecializedRenderPipeline for BloomUpsamplingPipeline {
|
||||
}),
|
||||
write_mask: ColorWrites::ALL,
|
||||
})],
|
||||
..default()
|
||||
}),
|
||||
primitive: PrimitiveState::default(),
|
||||
depth_stencil: None,
|
||||
multisample: MultisampleState::default(),
|
||||
push_constant_ranges: Vec::new(),
|
||||
zero_initialize_workgroup_memory: false,
|
||||
..default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -34,6 +34,7 @@ pub mod graph {
|
||||
use core::ops::Range;
|
||||
|
||||
use bevy_asset::UntypedAssetId;
|
||||
use bevy_image::ToExtents;
|
||||
use bevy_platform::collections::{HashMap, HashSet};
|
||||
use bevy_render::{
|
||||
batching::gpu_preprocessing::GpuPreprocessingMode,
|
||||
@ -51,15 +52,15 @@ use bevy_math::FloatOrd;
|
||||
use bevy_render::{
|
||||
camera::{Camera, ExtractedCamera},
|
||||
extract_component::ExtractComponentPlugin,
|
||||
render_graph::{EmptyNode, RenderGraphApp, ViewNodeRunner},
|
||||
render_graph::{EmptyNode, RenderGraphExt, ViewNodeRunner},
|
||||
render_phase::{
|
||||
sort_phase_system, BinnedPhaseItem, CachedRenderPipelinePhaseItem, DrawFunctionId,
|
||||
DrawFunctions, PhaseItem, PhaseItemExtraIndex, SortedPhaseItem, ViewBinnedRenderPhases,
|
||||
ViewSortedRenderPhases,
|
||||
},
|
||||
render_resource::{
|
||||
BindGroupId, CachedRenderPipelineId, Extent3d, TextureDescriptor, TextureDimension,
|
||||
TextureFormat, TextureUsages,
|
||||
BindGroupId, CachedRenderPipelineId, TextureDescriptor, TextureDimension, TextureFormat,
|
||||
TextureUsages,
|
||||
},
|
||||
renderer::RenderDevice,
|
||||
sync_world::MainEntity,
|
||||
@ -474,16 +475,10 @@ pub fn prepare_core_2d_depth_textures(
|
||||
let cached_texture = textures
|
||||
.entry(camera.target.clone())
|
||||
.or_insert_with(|| {
|
||||
// The size of the depth texture
|
||||
let size = Extent3d {
|
||||
depth_or_array_layers: 1,
|
||||
width: physical_target_size.x,
|
||||
height: physical_target_size.y,
|
||||
};
|
||||
|
||||
let descriptor = TextureDescriptor {
|
||||
label: Some("view_depth_texture"),
|
||||
size,
|
||||
// The size of the depth texture
|
||||
size: physical_target_size.to_extents(),
|
||||
mip_level_count: 1,
|
||||
sample_count: msaa.samples(),
|
||||
dimension: TextureDimension::D2,
|
||||
|
@ -4,7 +4,7 @@ use crate::{
|
||||
};
|
||||
use bevy_ecs::{prelude::World, query::QueryItem};
|
||||
use bevy_render::{
|
||||
camera::ExtractedCamera,
|
||||
camera::{ExtractedCamera, MainPassResolutionOverride},
|
||||
diagnostic::RecordDiagnostics,
|
||||
render_graph::{NodeRunError, RenderGraphContext, ViewNode},
|
||||
render_phase::{TrackedRenderPass, ViewBinnedRenderPhases},
|
||||
@ -31,6 +31,7 @@ impl ViewNode for MainOpaquePass3dNode {
|
||||
Option<&'static SkyboxPipelineId>,
|
||||
Option<&'static SkyboxBindGroup>,
|
||||
&'static ViewUniformOffset,
|
||||
Option<&'static MainPassResolutionOverride>,
|
||||
);
|
||||
|
||||
fn run<'w>(
|
||||
@ -45,6 +46,7 @@ impl ViewNode for MainOpaquePass3dNode {
|
||||
skybox_pipeline,
|
||||
skybox_bind_group,
|
||||
view_uniform_offset,
|
||||
resolution_override,
|
||||
): QueryItem<'w, '_, Self::ViewQuery>,
|
||||
world: &'w World,
|
||||
) -> Result<(), NodeRunError> {
|
||||
@ -90,7 +92,7 @@ impl ViewNode for MainOpaquePass3dNode {
|
||||
let pass_span = diagnostics.pass_span(&mut render_pass, "main_opaque_pass_3d");
|
||||
|
||||
if let Some(viewport) = camera.viewport.as_ref() {
|
||||
render_pass.set_camera_viewport(viewport);
|
||||
render_pass.set_camera_viewport(&viewport.with_override(resolution_override));
|
||||
}
|
||||
|
||||
// Opaque draws
|
||||
|
@ -1,11 +1,12 @@
|
||||
use super::{Camera3d, ViewTransmissionTexture};
|
||||
use crate::core_3d::Transmissive3d;
|
||||
use bevy_ecs::{prelude::*, query::QueryItem};
|
||||
use bevy_image::ToExtents;
|
||||
use bevy_render::{
|
||||
camera::ExtractedCamera,
|
||||
camera::{ExtractedCamera, MainPassResolutionOverride},
|
||||
render_graph::{NodeRunError, RenderGraphContext, ViewNode},
|
||||
render_phase::ViewSortedRenderPhases,
|
||||
render_resource::{Extent3d, RenderPassDescriptor, StoreOp},
|
||||
render_resource::{RenderPassDescriptor, StoreOp},
|
||||
renderer::RenderContext,
|
||||
view::{ExtractedView, ViewDepthTexture, ViewTarget},
|
||||
};
|
||||
@ -27,13 +28,16 @@ impl ViewNode for MainTransmissivePass3dNode {
|
||||
&'static ViewTarget,
|
||||
Option<&'static ViewTransmissionTexture>,
|
||||
&'static ViewDepthTexture,
|
||||
Option<&'static MainPassResolutionOverride>,
|
||||
);
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
graph: &mut RenderGraphContext,
|
||||
render_context: &mut RenderContext,
|
||||
(camera, view, camera_3d, target, transmission, depth): QueryItem<Self::ViewQuery>,
|
||||
(camera, view, camera_3d, target, transmission, depth, resolution_override): QueryItem<
|
||||
Self::ViewQuery,
|
||||
>,
|
||||
world: &World,
|
||||
) -> Result<(), NodeRunError> {
|
||||
let view_entity = graph.view_entity();
|
||||
@ -85,11 +89,7 @@ impl ViewNode for MainTransmissivePass3dNode {
|
||||
render_context.command_encoder().copy_texture_to_texture(
|
||||
target.main_texture().as_image_copy(),
|
||||
transmission.texture.as_image_copy(),
|
||||
Extent3d {
|
||||
width: physical_target_size.x,
|
||||
height: physical_target_size.y,
|
||||
depth_or_array_layers: 1,
|
||||
},
|
||||
physical_target_size.to_extents(),
|
||||
);
|
||||
|
||||
let mut render_pass =
|
||||
@ -111,7 +111,7 @@ impl ViewNode for MainTransmissivePass3dNode {
|
||||
render_context.begin_tracked_render_pass(render_pass_descriptor);
|
||||
|
||||
if let Some(viewport) = camera.viewport.as_ref() {
|
||||
render_pass.set_camera_viewport(viewport);
|
||||
render_pass.set_camera_viewport(&viewport.with_override(resolution_override));
|
||||
}
|
||||
|
||||
if let Err(err) = transmissive_phase.render(&mut render_pass, world, view_entity) {
|
||||
|
@ -1,7 +1,7 @@
|
||||
use crate::core_3d::Transparent3d;
|
||||
use bevy_ecs::{prelude::*, query::QueryItem};
|
||||
use bevy_render::{
|
||||
camera::ExtractedCamera,
|
||||
camera::{ExtractedCamera, MainPassResolutionOverride},
|
||||
diagnostic::RecordDiagnostics,
|
||||
render_graph::{NodeRunError, RenderGraphContext, ViewNode},
|
||||
render_phase::ViewSortedRenderPhases,
|
||||
@ -24,12 +24,13 @@ impl ViewNode for MainTransparentPass3dNode {
|
||||
&'static ExtractedView,
|
||||
&'static ViewTarget,
|
||||
&'static ViewDepthTexture,
|
||||
Option<&'static MainPassResolutionOverride>,
|
||||
);
|
||||
fn run(
|
||||
&self,
|
||||
graph: &mut RenderGraphContext,
|
||||
render_context: &mut RenderContext,
|
||||
(camera, view, target, depth): QueryItem<Self::ViewQuery>,
|
||||
(camera, view, target, depth, resolution_override): QueryItem<Self::ViewQuery>,
|
||||
world: &World,
|
||||
) -> Result<(), NodeRunError> {
|
||||
let view_entity = graph.view_entity();
|
||||
@ -69,7 +70,7 @@ impl ViewNode for MainTransparentPass3dNode {
|
||||
let pass_span = diagnostics.pass_span(&mut render_pass, "main_transparent_pass_3d");
|
||||
|
||||
if let Some(viewport) = camera.viewport.as_ref() {
|
||||
render_pass.set_camera_viewport(viewport);
|
||||
render_pass.set_camera_viewport(&viewport.with_override(resolution_override));
|
||||
}
|
||||
|
||||
if let Err(err) = transparent_phase.render(&mut render_pass, world, view_entity) {
|
||||
|
@ -85,22 +85,22 @@ use bevy_app::{App, Plugin, PostUpdate};
|
||||
use bevy_asset::UntypedAssetId;
|
||||
use bevy_color::LinearRgba;
|
||||
use bevy_ecs::prelude::*;
|
||||
use bevy_image::BevyDefault;
|
||||
use bevy_image::{BevyDefault, ToExtents};
|
||||
use bevy_math::FloatOrd;
|
||||
use bevy_platform::collections::{HashMap, HashSet};
|
||||
use bevy_render::{
|
||||
camera::{Camera, ExtractedCamera},
|
||||
extract_component::ExtractComponentPlugin,
|
||||
prelude::Msaa,
|
||||
render_graph::{EmptyNode, RenderGraphApp, ViewNodeRunner},
|
||||
render_graph::{EmptyNode, RenderGraphExt, ViewNodeRunner},
|
||||
render_phase::{
|
||||
sort_phase_system, BinnedPhaseItem, CachedRenderPipelinePhaseItem, DrawFunctionId,
|
||||
DrawFunctions, PhaseItem, PhaseItemExtraIndex, SortedPhaseItem, ViewBinnedRenderPhases,
|
||||
ViewSortedRenderPhases,
|
||||
},
|
||||
render_resource::{
|
||||
CachedRenderPipelineId, Extent3d, FilterMode, Sampler, SamplerDescriptor, Texture,
|
||||
TextureDescriptor, TextureDimension, TextureFormat, TextureUsages, TextureView,
|
||||
CachedRenderPipelineId, FilterMode, Sampler, SamplerDescriptor, Texture, TextureDescriptor,
|
||||
TextureDimension, TextureFormat, TextureUsages, TextureView,
|
||||
},
|
||||
renderer::RenderDevice,
|
||||
sync_world::{MainEntity, RenderEntity},
|
||||
@ -811,20 +811,14 @@ pub fn prepare_core_3d_depth_textures(
|
||||
let cached_texture = textures
|
||||
.entry((camera.target.clone(), msaa))
|
||||
.or_insert_with(|| {
|
||||
// The size of the depth texture
|
||||
let size = Extent3d {
|
||||
depth_or_array_layers: 1,
|
||||
width: physical_target_size.x,
|
||||
height: physical_target_size.y,
|
||||
};
|
||||
|
||||
let usage = *render_target_usage
|
||||
.get(&camera.target.clone())
|
||||
.expect("The depth texture usage should already exist for this target");
|
||||
|
||||
let descriptor = TextureDescriptor {
|
||||
label: Some("view_depth_texture"),
|
||||
size,
|
||||
// The size of the depth texture
|
||||
size: physical_target_size.to_extents(),
|
||||
mip_level_count: 1,
|
||||
sample_count: msaa.samples(),
|
||||
dimension: TextureDimension::D2,
|
||||
@ -897,13 +891,6 @@ pub fn prepare_core_3d_transmission_textures(
|
||||
.or_insert_with(|| {
|
||||
let usage = TextureUsages::TEXTURE_BINDING | TextureUsages::COPY_DST;
|
||||
|
||||
// The size of the transmission texture
|
||||
let size = Extent3d {
|
||||
depth_or_array_layers: 1,
|
||||
width: physical_target_size.x,
|
||||
height: physical_target_size.y,
|
||||
};
|
||||
|
||||
let format = if view.hdr {
|
||||
ViewTarget::TEXTURE_FORMAT_HDR
|
||||
} else {
|
||||
@ -912,7 +899,8 @@ pub fn prepare_core_3d_transmission_textures(
|
||||
|
||||
let descriptor = TextureDescriptor {
|
||||
label: Some("view_transmission_texture"),
|
||||
size,
|
||||
// The size of the transmission texture
|
||||
size: physical_target_size.to_extents(),
|
||||
mip_level_count: 1,
|
||||
sample_count: 1, // No need for MSAA, as we'll only copy the main texture here
|
||||
dimension: TextureDimension::D2,
|
||||
@ -1023,11 +1011,7 @@ pub fn prepare_prepass_textures(
|
||||
continue;
|
||||
};
|
||||
|
||||
let size = Extent3d {
|
||||
depth_or_array_layers: 1,
|
||||
width: physical_target_size.x,
|
||||
height: physical_target_size.y,
|
||||
};
|
||||
let size = physical_target_size.to_extents();
|
||||
|
||||
let cached_depth_texture = depth_prepass.then(|| {
|
||||
depth_textures
|
||||
@ -1042,7 +1026,8 @@ pub fn prepare_prepass_textures(
|
||||
format: CORE_3D_DEPTH_FORMAT,
|
||||
usage: TextureUsages::COPY_DST
|
||||
| TextureUsages::RENDER_ATTACHMENT
|
||||
| TextureUsages::TEXTURE_BINDING,
|
||||
| TextureUsages::TEXTURE_BINDING
|
||||
| TextureUsages::COPY_SRC, // TODO: Remove COPY_SRC, double buffer instead (for bevy_solari)
|
||||
view_formats: &[],
|
||||
};
|
||||
texture_cache.get(&render_device, descriptor)
|
||||
@ -1108,7 +1093,8 @@ pub fn prepare_prepass_textures(
|
||||
dimension: TextureDimension::D2,
|
||||
format: DEFERRED_PREPASS_FORMAT,
|
||||
usage: TextureUsages::RENDER_ATTACHMENT
|
||||
| TextureUsages::TEXTURE_BINDING,
|
||||
| TextureUsages::TEXTURE_BINDING
|
||||
| TextureUsages::COPY_SRC, // TODO: Remove COPY_SRC, double buffer instead (for bevy_solari)
|
||||
view_formats: &[],
|
||||
},
|
||||
)
|
||||
|
@ -5,7 +5,7 @@ use crate::{
|
||||
use bevy_app::prelude::*;
|
||||
use bevy_asset::{embedded_asset, load_embedded_asset};
|
||||
use bevy_ecs::prelude::*;
|
||||
use bevy_math::UVec2;
|
||||
use bevy_image::ToExtents;
|
||||
use bevy_render::{
|
||||
camera::ExtractedCamera,
|
||||
render_resource::{binding_types::texture_2d, *},
|
||||
@ -15,13 +15,13 @@ use bevy_render::{
|
||||
Render, RenderApp, RenderSystems,
|
||||
};
|
||||
|
||||
use super::DEFERRED_LIGHTING_PASS_ID_DEPTH_FORMAT;
|
||||
use bevy_ecs::query::QueryItem;
|
||||
use bevy_render::{
|
||||
render_graph::{NodeRunError, RenderGraphContext, ViewNode},
|
||||
renderer::RenderContext,
|
||||
};
|
||||
|
||||
use super::DEFERRED_LIGHTING_PASS_ID_DEPTH_FORMAT;
|
||||
use bevy_utils::default;
|
||||
|
||||
pub struct CopyDeferredLightingIdPlugin;
|
||||
|
||||
@ -142,11 +142,8 @@ impl FromWorld for CopyDeferredLightingIdPipeline {
|
||||
vertex: vertex_state,
|
||||
fragment: Some(FragmentState {
|
||||
shader,
|
||||
shader_defs: vec![],
|
||||
entry_point: "fragment".into(),
|
||||
targets: vec![],
|
||||
..default()
|
||||
}),
|
||||
primitive: PrimitiveState::default(),
|
||||
depth_stencil: Some(DepthStencilState {
|
||||
format: DEFERRED_LIGHTING_PASS_ID_DEPTH_FORMAT,
|
||||
depth_write_enabled: true,
|
||||
@ -154,9 +151,7 @@ impl FromWorld for CopyDeferredLightingIdPipeline {
|
||||
stencil: StencilState::default(),
|
||||
bias: DepthBiasState::default(),
|
||||
}),
|
||||
multisample: MultisampleState::default(),
|
||||
push_constant_ranges: vec![],
|
||||
zero_initialize_workgroup_memory: false,
|
||||
..default()
|
||||
});
|
||||
|
||||
Self {
|
||||
@ -178,18 +173,10 @@ fn prepare_deferred_lighting_id_textures(
|
||||
views: Query<(Entity, &ExtractedCamera), With<DeferredPrepass>>,
|
||||
) {
|
||||
for (entity, camera) in &views {
|
||||
if let Some(UVec2 {
|
||||
x: width,
|
||||
y: height,
|
||||
}) = camera.physical_target_size
|
||||
{
|
||||
if let Some(physical_target_size) = camera.physical_target_size {
|
||||
let texture_descriptor = TextureDescriptor {
|
||||
label: Some("deferred_lighting_id_depth_texture_a"),
|
||||
size: Extent3d {
|
||||
width,
|
||||
height,
|
||||
depth_or_array_layers: 1,
|
||||
},
|
||||
size: physical_target_size.to_extents(),
|
||||
mip_level_count: 1,
|
||||
sample_count: 1,
|
||||
dimension: TextureDimension::D2,
|
||||
|
@ -1,4 +1,5 @@
|
||||
use bevy_ecs::{prelude::*, query::QueryItem};
|
||||
use bevy_render::camera::MainPassResolutionOverride;
|
||||
use bevy_render::experimental::occlusion_culling::OcclusionCulling;
|
||||
use bevy_render::render_graph::ViewNode;
|
||||
|
||||
@ -66,6 +67,7 @@ impl ViewNode for LateDeferredGBufferPrepassNode {
|
||||
&'static ExtractedView,
|
||||
&'static ViewDepthTexture,
|
||||
&'static ViewPrepassTextures,
|
||||
Option<&'static MainPassResolutionOverride>,
|
||||
Has<OcclusionCulling>,
|
||||
Has<NoIndirectDrawing>,
|
||||
);
|
||||
@ -77,7 +79,7 @@ impl ViewNode for LateDeferredGBufferPrepassNode {
|
||||
view_query: QueryItem<'w, '_, Self::ViewQuery>,
|
||||
world: &'w World,
|
||||
) -> Result<(), NodeRunError> {
|
||||
let (_, _, _, _, occlusion_culling, no_indirect_drawing) = view_query;
|
||||
let (.., occlusion_culling, no_indirect_drawing) = view_query;
|
||||
if !occlusion_culling || no_indirect_drawing {
|
||||
return Ok(());
|
||||
}
|
||||
@ -105,7 +107,7 @@ impl ViewNode for LateDeferredGBufferPrepassNode {
|
||||
fn run_deferred_prepass<'w>(
|
||||
graph: &mut RenderGraphContext,
|
||||
render_context: &mut RenderContext<'w>,
|
||||
(camera, extracted_view, view_depth_texture, view_prepass_textures, _, _): QueryItem<
|
||||
(camera, extracted_view, view_depth_texture, view_prepass_textures, resolution_override, _, _): QueryItem<
|
||||
'w,
|
||||
'_,
|
||||
<LateDeferredGBufferPrepassNode as ViewNode>::ViewQuery,
|
||||
@ -220,7 +222,7 @@ fn run_deferred_prepass<'w>(
|
||||
});
|
||||
let mut render_pass = TrackedRenderPass::new(&render_device, render_pass);
|
||||
if let Some(viewport) = camera.viewport.as_ref() {
|
||||
render_pass.set_camera_viewport(viewport);
|
||||
render_pass.set_camera_viewport(&viewport.with_override(resolution_override));
|
||||
}
|
||||
|
||||
// Opaque draws
|
||||
|
@ -34,7 +34,7 @@ use bevy_render::{
|
||||
camera::{PhysicalCameraParameters, Projection},
|
||||
extract_component::{ComponentUniforms, DynamicUniformIndex, UniformComponentPlugin},
|
||||
render_graph::{
|
||||
NodeRunError, RenderGraphApp as _, RenderGraphContext, ViewNode, ViewNodeRunner,
|
||||
NodeRunError, RenderGraphContext, RenderGraphExt as _, ViewNode, ViewNodeRunner,
|
||||
},
|
||||
render_resource::{
|
||||
binding_types::{
|
||||
@ -800,23 +800,19 @@ impl SpecializedRenderPipeline for DepthOfFieldPipeline {
|
||||
RenderPipelineDescriptor {
|
||||
label: Some("depth of field pipeline".into()),
|
||||
layout,
|
||||
push_constant_ranges: vec![],
|
||||
vertex: self.fullscreen_shader.to_vertex_state(),
|
||||
primitive: default(),
|
||||
depth_stencil: None,
|
||||
multisample: default(),
|
||||
fragment: Some(FragmentState {
|
||||
shader: self.fragment_shader.clone(),
|
||||
shader_defs,
|
||||
entry_point: match key.pass {
|
||||
entry_point: Some(match key.pass {
|
||||
DofPass::GaussianHorizontal => "gaussian_horizontal".into(),
|
||||
DofPass::GaussianVertical => "gaussian_vertical".into(),
|
||||
DofPass::BokehPass0 => "bokeh_pass_0".into(),
|
||||
DofPass::BokehPass1 => "bokeh_pass_1".into(),
|
||||
},
|
||||
}),
|
||||
targets,
|
||||
}),
|
||||
zero_initialize_workgroup_memory: false,
|
||||
..default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -12,7 +12,7 @@ use crate::core_3d::{
|
||||
prepare_core_3d_depth_textures,
|
||||
};
|
||||
use bevy_app::{App, Plugin};
|
||||
use bevy_asset::{load_internal_asset, weak_handle, Handle};
|
||||
use bevy_asset::{load_internal_asset, uuid_handle, Handle};
|
||||
use bevy_derive::{Deref, DerefMut};
|
||||
use bevy_ecs::{
|
||||
component::Component,
|
||||
@ -30,7 +30,7 @@ use bevy_render::{
|
||||
experimental::occlusion_culling::{
|
||||
OcclusionCulling, OcclusionCullingSubview, OcclusionCullingSubviewEntities,
|
||||
},
|
||||
render_graph::{Node, NodeRunError, RenderGraphApp, RenderGraphContext},
|
||||
render_graph::{Node, NodeRunError, RenderGraphContext, RenderGraphExt},
|
||||
render_resource::{
|
||||
binding_types::{sampler, texture_2d, texture_2d_multisampled, texture_storage_2d},
|
||||
BindGroup, BindGroupEntries, BindGroupLayout, BindGroupLayoutEntries,
|
||||
@ -46,12 +46,13 @@ use bevy_render::{
|
||||
view::{ExtractedView, NoIndirectDrawing, ViewDepthTexture},
|
||||
Render, RenderApp, RenderSystems,
|
||||
};
|
||||
use bevy_utils::default;
|
||||
use bitflags::bitflags;
|
||||
use tracing::debug;
|
||||
|
||||
/// Identifies the `downsample_depth.wgsl` shader.
|
||||
pub const DOWNSAMPLE_DEPTH_SHADER_HANDLE: Handle<Shader> =
|
||||
weak_handle!("a09a149e-5922-4fa4-9170-3c1a13065364");
|
||||
uuid_handle!("a09a149e-5922-4fa4-9170-3c1a13065364");
|
||||
|
||||
/// The maximum number of mip levels that we can produce.
|
||||
///
|
||||
@ -492,12 +493,12 @@ impl SpecializedComputePipeline for DownsampleDepthPipeline {
|
||||
}],
|
||||
shader: DOWNSAMPLE_DEPTH_SHADER_HANDLE,
|
||||
shader_defs,
|
||||
entry_point: if key.contains(DownsampleDepthPipelineKey::SECOND_PHASE) {
|
||||
entry_point: Some(if key.contains(DownsampleDepthPipelineKey::SECOND_PHASE) {
|
||||
"downsample_depth_second".into()
|
||||
} else {
|
||||
"downsample_depth_first".into()
|
||||
},
|
||||
zero_initialize_workgroup_memory: false,
|
||||
}),
|
||||
..default()
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -529,11 +530,7 @@ pub fn create_depth_pyramid_dummy_texture(
|
||||
render_device
|
||||
.create_texture(&TextureDescriptor {
|
||||
label: Some(texture_label),
|
||||
size: Extent3d {
|
||||
width: 1,
|
||||
height: 1,
|
||||
depth_or_array_layers: 1,
|
||||
},
|
||||
size: Extent3d::default(),
|
||||
mip_level_count: 1,
|
||||
sample_count: 1,
|
||||
dimension: TextureDimension::D2,
|
||||
|
@ -33,7 +33,7 @@ impl FullscreenShader {
|
||||
VertexState {
|
||||
shader: self.0.clone(),
|
||||
shader_defs: Vec::new(),
|
||||
entry_point: "fullscreen_vertex_shader".into(),
|
||||
entry_point: Some("fullscreen_vertex_shader".into()),
|
||||
buffers: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
@ -18,7 +18,7 @@ use bevy_reflect::{std_traits::ReflectDefault, Reflect};
|
||||
use bevy_render::{
|
||||
camera::Camera,
|
||||
extract_component::{ExtractComponent, ExtractComponentPlugin, UniformComponentPlugin},
|
||||
render_graph::{RenderGraphApp, ViewNodeRunner},
|
||||
render_graph::{RenderGraphExt, ViewNodeRunner},
|
||||
render_resource::{ShaderType, SpecializedRenderPipelines},
|
||||
Render, RenderApp, RenderSystems,
|
||||
};
|
||||
|
@ -1,3 +1,4 @@
|
||||
use crate::FullscreenShader;
|
||||
use bevy_asset::{load_embedded_asset, Handle};
|
||||
use bevy_ecs::{
|
||||
component::Component,
|
||||
@ -16,16 +17,14 @@ use bevy_render::{
|
||||
texture_depth_2d_multisampled, uniform_buffer_sized,
|
||||
},
|
||||
BindGroupLayout, BindGroupLayoutEntries, CachedRenderPipelineId, ColorTargetState,
|
||||
ColorWrites, FragmentState, MultisampleState, PipelineCache, PrimitiveState,
|
||||
RenderPipelineDescriptor, Sampler, SamplerBindingType, SamplerDescriptor, Shader,
|
||||
ShaderDefVal, ShaderStages, ShaderType, SpecializedRenderPipeline,
|
||||
SpecializedRenderPipelines, TextureFormat, TextureSampleType,
|
||||
ColorWrites, FragmentState, PipelineCache, RenderPipelineDescriptor, Sampler,
|
||||
SamplerBindingType, SamplerDescriptor, Shader, ShaderDefVal, ShaderStages, ShaderType,
|
||||
SpecializedRenderPipeline, SpecializedRenderPipelines, TextureFormat, TextureSampleType,
|
||||
},
|
||||
renderer::RenderDevice,
|
||||
view::{ExtractedView, Msaa, ViewTarget},
|
||||
};
|
||||
|
||||
use crate::FullscreenShader;
|
||||
use bevy_utils::default;
|
||||
|
||||
use super::MotionBlurUniform;
|
||||
|
||||
@ -139,7 +138,6 @@ impl SpecializedRenderPipeline for MotionBlurPipeline {
|
||||
fragment: Some(FragmentState {
|
||||
shader: self.fragment_shader.clone(),
|
||||
shader_defs,
|
||||
entry_point: "fragment".into(),
|
||||
targets: vec![Some(ColorTargetState {
|
||||
format: if key.hdr {
|
||||
ViewTarget::TEXTURE_FORMAT_HDR
|
||||
@ -149,12 +147,9 @@ impl SpecializedRenderPipeline for MotionBlurPipeline {
|
||||
blend: None,
|
||||
write_mask: ColorWrites::ALL,
|
||||
})],
|
||||
..default()
|
||||
}),
|
||||
primitive: PrimitiveState::default(),
|
||||
depth_stencil: None,
|
||||
multisample: MultisampleState::default(),
|
||||
push_constant_ranges: vec![],
|
||||
zero_initialize_workgroup_memory: false,
|
||||
..default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -8,7 +8,7 @@ use bevy_color::LinearRgba;
|
||||
use bevy_ecs::{prelude::*, query::QueryItem};
|
||||
use bevy_render::{
|
||||
camera::ExtractedCamera,
|
||||
render_graph::{NodeRunError, RenderGraphApp, RenderGraphContext, ViewNode, ViewNodeRunner},
|
||||
render_graph::{NodeRunError, RenderGraphContext, RenderGraphExt, ViewNode, ViewNodeRunner},
|
||||
render_resource::*,
|
||||
renderer::RenderContext,
|
||||
view::{Msaa, ViewTarget},
|
||||
|
@ -10,7 +10,7 @@ use bevy_render::{
|
||||
camera::{Camera, ExtractedCamera},
|
||||
extract_component::{ExtractComponent, ExtractComponentPlugin},
|
||||
load_shader_library,
|
||||
render_graph::{RenderGraphApp, ViewNodeRunner},
|
||||
render_graph::{RenderGraphExt, ViewNodeRunner},
|
||||
render_resource::{BufferUsages, BufferVec, DynamicUniformBuffer, ShaderType, TextureUsages},
|
||||
renderer::{RenderDevice, RenderQueue},
|
||||
view::Msaa,
|
||||
|
@ -1,3 +1,4 @@
|
||||
use super::OitBuffers;
|
||||
use crate::{oit::OrderIndependentTransparencySettings, FullscreenShader};
|
||||
use bevy_app::Plugin;
|
||||
use bevy_asset::{embedded_asset, load_embedded_asset, AssetServer};
|
||||
@ -12,17 +13,16 @@ use bevy_render::{
|
||||
binding_types::{storage_buffer_sized, texture_depth_2d, uniform_buffer},
|
||||
BindGroup, BindGroupEntries, BindGroupLayout, BindGroupLayoutEntries, BlendComponent,
|
||||
BlendState, CachedRenderPipelineId, ColorTargetState, ColorWrites, DownlevelFlags,
|
||||
FragmentState, MultisampleState, PipelineCache, PrimitiveState, RenderPipelineDescriptor,
|
||||
ShaderDefVal, ShaderStages, TextureFormat,
|
||||
FragmentState, PipelineCache, RenderPipelineDescriptor, ShaderDefVal, ShaderStages,
|
||||
TextureFormat,
|
||||
},
|
||||
renderer::{RenderAdapter, RenderDevice},
|
||||
view::{ExtractedView, ViewTarget, ViewUniform, ViewUniforms},
|
||||
Render, RenderApp, RenderSystems,
|
||||
};
|
||||
use bevy_utils::default;
|
||||
use tracing::warn;
|
||||
|
||||
use super::OitBuffers;
|
||||
|
||||
/// Contains the render node used to run the resolve pass.
|
||||
pub mod node;
|
||||
|
||||
@ -213,7 +213,6 @@ fn specialize_oit_resolve_pipeline(
|
||||
resolve_pipeline.oit_depth_bind_group_layout.clone(),
|
||||
],
|
||||
fragment: Some(FragmentState {
|
||||
entry_point: "fragment".into(),
|
||||
shader: load_embedded_asset!(asset_server, "oit_resolve.wgsl"),
|
||||
shader_defs: vec![ShaderDefVal::UInt(
|
||||
"LAYER_COUNT".into(),
|
||||
@ -227,13 +226,10 @@ fn specialize_oit_resolve_pipeline(
|
||||
}),
|
||||
write_mask: ColorWrites::ALL,
|
||||
})],
|
||||
..default()
|
||||
}),
|
||||
vertex: fullscreen_shader.to_vertex_state(),
|
||||
primitive: PrimitiveState::default(),
|
||||
depth_stencil: None,
|
||||
multisample: MultisampleState::default(),
|
||||
push_constant_ranges: vec![],
|
||||
zero_initialize_workgroup_memory: false,
|
||||
..default()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,6 +1,6 @@
|
||||
use bevy_ecs::{prelude::*, query::QueryItem};
|
||||
use bevy_render::{
|
||||
camera::ExtractedCamera,
|
||||
camera::{ExtractedCamera, MainPassResolutionOverride},
|
||||
render_graph::{NodeRunError, RenderGraphContext, RenderLabel, ViewNode},
|
||||
render_resource::{BindGroupEntries, PipelineCache, RenderPassDescriptor},
|
||||
renderer::RenderContext,
|
||||
@ -23,13 +23,14 @@ impl ViewNode for OitResolveNode {
|
||||
&'static ViewUniformOffset,
|
||||
&'static OitResolvePipelineId,
|
||||
&'static ViewDepthTexture,
|
||||
Option<&'static MainPassResolutionOverride>,
|
||||
);
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
_graph: &mut RenderGraphContext,
|
||||
render_context: &mut RenderContext,
|
||||
(camera, view_target, view_uniform, oit_resolve_pipeline_id, depth): QueryItem<
|
||||
(camera, view_target, view_uniform, oit_resolve_pipeline_id, depth, resolution_override): QueryItem<
|
||||
Self::ViewQuery,
|
||||
>,
|
||||
world: &World,
|
||||
@ -63,7 +64,7 @@ impl ViewNode for OitResolveNode {
|
||||
});
|
||||
|
||||
if let Some(viewport) = camera.viewport.as_ref() {
|
||||
render_pass.set_camera_viewport(viewport);
|
||||
render_pass.set_camera_viewport(&viewport.with_override(resolution_override));
|
||||
}
|
||||
|
||||
render_pass.set_render_pipeline(pipeline);
|
||||
|
@ -3,7 +3,7 @@
|
||||
//! Currently, this consists only of chromatic aberration.
|
||||
|
||||
use bevy_app::{App, Plugin};
|
||||
use bevy_asset::{embedded_asset, load_embedded_asset, weak_handle, Assets, Handle};
|
||||
use bevy_asset::{embedded_asset, load_embedded_asset, uuid_handle, Assets, Handle};
|
||||
use bevy_derive::{Deref, DerefMut};
|
||||
use bevy_ecs::{
|
||||
component::Component,
|
||||
@ -23,7 +23,7 @@ use bevy_render::{
|
||||
load_shader_library,
|
||||
render_asset::{RenderAssetUsages, RenderAssets},
|
||||
render_graph::{
|
||||
NodeRunError, RenderGraphApp as _, RenderGraphContext, ViewNode, ViewNodeRunner,
|
||||
NodeRunError, RenderGraphContext, RenderGraphExt as _, ViewNode, ViewNodeRunner,
|
||||
},
|
||||
render_resource::{
|
||||
binding_types::{sampler, texture_2d, uniform_buffer},
|
||||
@ -52,7 +52,7 @@ use crate::{
|
||||
/// This is just a 3x1 image consisting of one red pixel, one green pixel, and
|
||||
/// one blue pixel, in that order.
|
||||
const DEFAULT_CHROMATIC_ABERRATION_LUT_HANDLE: Handle<Image> =
|
||||
weak_handle!("dc3e3307-40a1-49bb-be6d-e0634e8836b2");
|
||||
uuid_handle!("dc3e3307-40a1-49bb-be6d-e0634e8836b2");
|
||||
|
||||
/// The default chromatic aberration intensity amount, in a fraction of the
|
||||
/// window size.
|
||||
@ -326,19 +326,14 @@ impl SpecializedRenderPipeline for PostProcessingPipeline {
|
||||
vertex: self.fullscreen_shader.to_vertex_state(),
|
||||
fragment: Some(FragmentState {
|
||||
shader: self.fragment_shader.clone(),
|
||||
shader_defs: vec![],
|
||||
entry_point: "fragment_main".into(),
|
||||
targets: vec![Some(ColorTargetState {
|
||||
format: key.texture_format,
|
||||
blend: None,
|
||||
write_mask: ColorWrites::ALL,
|
||||
})],
|
||||
..default()
|
||||
}),
|
||||
primitive: default(),
|
||||
depth_stencil: None,
|
||||
multisample: default(),
|
||||
push_constant_ranges: vec![],
|
||||
zero_initialize_workgroup_memory: false,
|
||||
..default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,6 +1,6 @@
|
||||
use bevy_ecs::{prelude::*, query::QueryItem};
|
||||
use bevy_render::{
|
||||
camera::ExtractedCamera,
|
||||
camera::{ExtractedCamera, MainPassResolutionOverride},
|
||||
diagnostic::RecordDiagnostics,
|
||||
experimental::occlusion_culling::OcclusionCulling,
|
||||
render_graph::{NodeRunError, RenderGraphContext, ViewNode},
|
||||
@ -55,18 +55,25 @@ pub struct LatePrepassNode;
|
||||
|
||||
impl ViewNode for LatePrepassNode {
|
||||
type ViewQuery = (
|
||||
&'static ExtractedCamera,
|
||||
&'static ExtractedView,
|
||||
&'static ViewDepthTexture,
|
||||
&'static ViewPrepassTextures,
|
||||
&'static ViewUniformOffset,
|
||||
Option<&'static DeferredPrepass>,
|
||||
Option<&'static RenderSkyboxPrepassPipeline>,
|
||||
Option<&'static SkyboxPrepassBindGroup>,
|
||||
Option<&'static PreviousViewUniformOffset>,
|
||||
Has<OcclusionCulling>,
|
||||
Has<NoIndirectDrawing>,
|
||||
Has<DeferredPrepass>,
|
||||
(
|
||||
&'static ExtractedCamera,
|
||||
&'static ExtractedView,
|
||||
&'static ViewDepthTexture,
|
||||
&'static ViewPrepassTextures,
|
||||
&'static ViewUniformOffset,
|
||||
),
|
||||
(
|
||||
Option<&'static DeferredPrepass>,
|
||||
Option<&'static RenderSkyboxPrepassPipeline>,
|
||||
Option<&'static SkyboxPrepassBindGroup>,
|
||||
Option<&'static PreviousViewUniformOffset>,
|
||||
Option<&'static MainPassResolutionOverride>,
|
||||
),
|
||||
(
|
||||
Has<OcclusionCulling>,
|
||||
Has<NoIndirectDrawing>,
|
||||
Has<DeferredPrepass>,
|
||||
),
|
||||
);
|
||||
|
||||
fn run<'w>(
|
||||
@ -78,7 +85,7 @@ impl ViewNode for LatePrepassNode {
|
||||
) -> Result<(), NodeRunError> {
|
||||
// We only need a late prepass if we have occlusion culling and indirect
|
||||
// drawing.
|
||||
let (_, _, _, _, _, _, _, _, _, occlusion_culling, no_indirect_drawing, _) = query;
|
||||
let (_, _, (occlusion_culling, no_indirect_drawing, _)) = query;
|
||||
if !occlusion_culling || no_indirect_drawing {
|
||||
return Ok(());
|
||||
}
|
||||
@ -100,18 +107,15 @@ fn run_prepass<'w>(
|
||||
graph: &mut RenderGraphContext,
|
||||
render_context: &mut RenderContext<'w>,
|
||||
(
|
||||
camera,
|
||||
extracted_view,
|
||||
view_depth_texture,
|
||||
view_prepass_textures,
|
||||
view_uniform_offset,
|
||||
deferred_prepass,
|
||||
skybox_prepass_pipeline,
|
||||
skybox_prepass_bind_group,
|
||||
view_prev_uniform_offset,
|
||||
_,
|
||||
_,
|
||||
has_deferred,
|
||||
(camera, extracted_view, view_depth_texture, view_prepass_textures, view_uniform_offset),
|
||||
(
|
||||
deferred_prepass,
|
||||
skybox_prepass_pipeline,
|
||||
skybox_prepass_bind_group,
|
||||
view_prev_uniform_offset,
|
||||
resolution_override,
|
||||
),
|
||||
(_, _, has_deferred),
|
||||
): QueryItem<'w, '_, <LatePrepassNode as ViewNode>::ViewQuery>,
|
||||
world: &'w World,
|
||||
label: &'static str,
|
||||
@ -183,7 +187,7 @@ fn run_prepass<'w>(
|
||||
let pass_span = diagnostics.pass_span(&mut render_pass, label);
|
||||
|
||||
if let Some(viewport) = camera.viewport.as_ref() {
|
||||
render_pass.set_camera_viewport(viewport);
|
||||
render_pass.set_camera_viewport(&viewport.with_override(resolution_override));
|
||||
}
|
||||
|
||||
// Opaque draws
|
||||
|
@ -28,6 +28,7 @@ use bevy_render::{
|
||||
Render, RenderApp, RenderSystems,
|
||||
};
|
||||
use bevy_transform::components::Transform;
|
||||
use bevy_utils::default;
|
||||
use prepass::SkyboxPrepassPipeline;
|
||||
|
||||
use crate::{core_3d::CORE_3D_DEPTH_FORMAT, prepass::PreviousViewUniforms};
|
||||
@ -192,14 +193,10 @@ impl SpecializedRenderPipeline for SkyboxPipeline {
|
||||
RenderPipelineDescriptor {
|
||||
label: Some("skybox_pipeline".into()),
|
||||
layout: vec![self.bind_group_layout.clone()],
|
||||
push_constant_ranges: Vec::new(),
|
||||
vertex: VertexState {
|
||||
shader: self.shader.clone(),
|
||||
shader_defs: Vec::new(),
|
||||
entry_point: "skybox_vertex".into(),
|
||||
buffers: Vec::new(),
|
||||
..default()
|
||||
},
|
||||
primitive: PrimitiveState::default(),
|
||||
depth_stencil: Some(DepthStencilState {
|
||||
format: key.depth_format,
|
||||
depth_write_enabled: false,
|
||||
@ -223,8 +220,6 @@ impl SpecializedRenderPipeline for SkyboxPipeline {
|
||||
},
|
||||
fragment: Some(FragmentState {
|
||||
shader: self.shader.clone(),
|
||||
shader_defs: Vec::new(),
|
||||
entry_point: "skybox_fragment".into(),
|
||||
targets: vec![Some(ColorTargetState {
|
||||
format: if key.hdr {
|
||||
ViewTarget::TEXTURE_FORMAT_HDR
|
||||
@ -235,8 +230,9 @@ impl SpecializedRenderPipeline for SkyboxPipeline {
|
||||
blend: None,
|
||||
write_mask: ColorWrites::ALL,
|
||||
})],
|
||||
..default()
|
||||
}),
|
||||
zero_initialize_workgroup_memory: false,
|
||||
..default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -87,9 +87,7 @@ impl SpecializedRenderPipeline for SkyboxPrepassPipeline {
|
||||
RenderPipelineDescriptor {
|
||||
label: Some("skybox_prepass_pipeline".into()),
|
||||
layout: vec![self.bind_group_layout.clone()],
|
||||
push_constant_ranges: vec![],
|
||||
vertex: self.fullscreen_shader.to_vertex_state(),
|
||||
primitive: default(),
|
||||
depth_stencil: Some(DepthStencilState {
|
||||
format: CORE_3D_DEPTH_FORMAT,
|
||||
depth_write_enabled: false,
|
||||
@ -104,11 +102,10 @@ impl SpecializedRenderPipeline for SkyboxPrepassPipeline {
|
||||
},
|
||||
fragment: Some(FragmentState {
|
||||
shader: self.fragment_shader.clone(),
|
||||
shader_defs: vec![],
|
||||
entry_point: "fragment".into(),
|
||||
targets: prepass_target_descriptors(key.normal_prepass, true, false),
|
||||
..default()
|
||||
}),
|
||||
zero_initialize_workgroup_memory: false,
|
||||
..default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -5,6 +5,9 @@
|
||||
struct PreviousViewUniforms {
|
||||
view_from_world: mat4x4<f32>,
|
||||
clip_from_world: mat4x4<f32>,
|
||||
clip_from_view: mat4x4<f32>,
|
||||
world_from_clip: mat4x4<f32>,
|
||||
view_from_clip: mat4x4<f32>,
|
||||
}
|
||||
|
||||
@group(0) @binding(0) var<uniform> view: View;
|
||||
|
@ -279,18 +279,14 @@ impl SpecializedRenderPipeline for TonemappingPipeline {
|
||||
fragment: Some(FragmentState {
|
||||
shader: self.fragment_shader.clone(),
|
||||
shader_defs,
|
||||
entry_point: "fragment".into(),
|
||||
targets: vec![Some(ColorTargetState {
|
||||
format: ViewTarget::TEXTURE_FORMAT_HDR,
|
||||
blend: None,
|
||||
write_mask: ColorWrites::ALL,
|
||||
})],
|
||||
..default()
|
||||
}),
|
||||
primitive: PrimitiveState::default(),
|
||||
depth_stencil: None,
|
||||
multisample: MultisampleState::default(),
|
||||
push_constant_ranges: Vec::new(),
|
||||
zero_initialize_workgroup_memory: false,
|
||||
..default()
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -447,12 +443,9 @@ pub fn lut_placeholder() -> Image {
|
||||
let data = vec![255, 0, 255, 255];
|
||||
Image {
|
||||
data: Some(data),
|
||||
data_order: TextureDataOrder::default(),
|
||||
texture_descriptor: TextureDescriptor {
|
||||
size: Extent3d {
|
||||
width: 1,
|
||||
height: 1,
|
||||
depth_or_array_layers: 1,
|
||||
},
|
||||
size: Extent3d::default(),
|
||||
format,
|
||||
dimension: TextureDimension::D3,
|
||||
label: None,
|
||||
|
@ -3,7 +3,7 @@ name = "bevy_core_widgets"
version = "0.17.0-dev"
edition = "2024"
description = "Unstyled common widgets for Bevy Engine"
homepage = "https://bevyengine.org"
homepage = "https://bevy.org"
repository = "https://github.com/bevyengine/bevy"
license = "MIT OR Apache-2.0"
keywords = ["bevy"]

113
crates/bevy_core_widgets/src/callback.rs
Normal file
@ -0,0 +1,113 @@
use bevy_ecs::system::{Commands, SystemId, SystemInput};
use bevy_ecs::world::{DeferredWorld, World};

/// A callback defines how we want to be notified when a widget changes state. Unlike an event
/// or observer, callbacks are intended for "point-to-point" communication that cuts across the
/// hierarchy of entities. Callbacks can be created in advance of the entity they are attached
/// to, and can be passed around as parameters.
///
/// Example:
/// ```
/// use bevy_app::App;
/// use bevy_core_widgets::{Callback, Notify};
/// use bevy_ecs::system::{Commands, IntoSystem};
///
/// let mut app = App::new();
///
/// // Register a one-shot system
/// fn my_callback_system() {
///     println!("Callback executed!");
/// }
///
/// let system_id = app.world_mut().register_system(my_callback_system);
///
/// // Wrap system in a callback
/// let callback = Callback::System(system_id);
///
/// // Later, when we want to execute the callback:
/// app.world_mut().commands().notify(&callback);
/// ```
#[derive(Default, Debug)]
pub enum Callback<I: SystemInput = ()> {
/// Invoke a one-shot system
System(SystemId<I>),
/// Ignore this notification
#[default]
Ignore,
}

/// Trait used to invoke a [`Callback`], unifying the API across callers.
pub trait Notify {
/// Invoke the callback with no arguments.
fn notify(&mut self, callback: &Callback<()>);

/// Invoke the callback with one argument.
fn notify_with<I>(&mut self, callback: &Callback<I>, input: I::Inner<'static>)
where
I: SystemInput<Inner<'static>: Send> + 'static;
}

impl<'w, 's> Notify for Commands<'w, 's> {
fn notify(&mut self, callback: &Callback<()>) {
match callback {
Callback::System(system_id) => self.run_system(*system_id),
Callback::Ignore => (),
}
}

fn notify_with<I>(&mut self, callback: &Callback<I>, input: I::Inner<'static>)
where
I: SystemInput<Inner<'static>: Send> + 'static,
{
match callback {
Callback::System(system_id) => self.run_system_with(*system_id, input),
Callback::Ignore => (),
}
}
}

impl Notify for World {
fn notify(&mut self, callback: &Callback<()>) {
match callback {
Callback::System(system_id) => {
let _ = self.run_system(*system_id);
}
Callback::Ignore => (),
}
}

fn notify_with<I>(&mut self, callback: &Callback<I>, input: I::Inner<'static>)
where
I: SystemInput<Inner<'static>: Send> + 'static,
{
match callback {
Callback::System(system_id) => {
let _ = self.run_system_with(*system_id, input);
}
Callback::Ignore => (),
}
}
}

impl Notify for DeferredWorld<'_> {
fn notify(&mut self, callback: &Callback<()>) {
match callback {
Callback::System(system_id) => {
self.commands().run_system(*system_id);
}
Callback::Ignore => (),
}
}

fn notify_with<I>(&mut self, callback: &Callback<I>, input: I::Inner<'static>)
where
I: SystemInput<Inner<'static>: Send> + 'static,
{
match callback {
Callback::System(system_id) => {
self.commands().run_system_with(*system_id, input);
}
Callback::Ignore => (),
}
}
}

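As a usage sketch of the `Notify` impls above with a one-argument callback (assuming `Callback` and `Notify` are imported from this crate; `set_value` and `demo` are made-up example names):

use bevy_ecs::prelude::*;

// Hypothetical one-shot system taking a single `bool` input.
fn set_value(In(value): In<bool>) {
    println!("new value: {value}");
}

fn demo(world: &mut World) {
    // Register the system and wrap its id in a Callback, as in the doc example above.
    let id = world.register_system(set_value);
    let callback: Callback<In<bool>> = Callback::System(id);
    // Dispatches through `Notify::notify_with`; a `Callback::Ignore` would be a no-op.
    world.notify_with(&callback, true);
}
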
@ -7,7 +7,7 @@ use bevy_ecs::{
entity::Entity,
observer::On,
query::With,
system::{Commands, Query, SystemId},
system::{Commands, Query},
};
use bevy_input::keyboard::{KeyCode, KeyboardInput};
use bevy_input::ButtonState;
@ -15,16 +15,17 @@ use bevy_input_focus::FocusedInput;
use bevy_picking::events::{Cancel, Click, DragEnd, Pointer, Press, Release};
use bevy_ui::{InteractionDisabled, Pressed};

use crate::{Callback, Notify};

/// Headless button widget. This widget maintains a "pressed" state, which is used to
/// indicate whether the button is currently being pressed by the user. It emits a `ButtonClicked`
/// event when the button is un-pressed.
#[derive(Component, Debug)]
#[derive(Component, Default, Debug)]
#[require(AccessibilityNode(accesskit::Node::new(Role::Button)))]
pub struct CoreButton {
/// Optional system to run when the button is clicked, or when the Enter or Space key
/// is pressed while the button is focused. If this field is `None`, the button will
/// emit a `ButtonClicked` event when clicked.
pub on_click: Option<SystemId>,
/// Callback to invoke when the button is clicked, or when the `Enter` or `Space` key
/// is pressed while the button is focused.
pub on_activate: Callback,
}

fn button_on_key_event(
@ -39,10 +40,8 @@ fn button_on_key_event(
&& event.state == ButtonState::Pressed
&& (event.key_code == KeyCode::Enter || event.key_code == KeyCode::Space)
{
if let Some(on_click) = bstate.on_click {
trigger.propagate(false);
commands.run_system(on_click);
}
trigger.propagate(false);
commands.notify(&bstate.on_activate);
}
}
}
@ -56,9 +55,7 @@ fn button_on_pointer_click(
if let Ok((bstate, pressed, disabled)) = q_state.get_mut(trigger.target()) {
trigger.propagate(false);
if pressed && !disabled {
if let Some(on_click) = bstate.on_click {
commands.run_system(on_click);
}
commands.notify(&bstate.on_activate);
}
}
}

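A hedged sketch of wiring a click handler to this headless button; the helper and the system id are illustrative, and `CoreButton` plus `Callback` are assumed to be in scope from this crate:

use bevy_ecs::system::{Commands, SystemId};

// `on_click_system` is a previously registered one-shot system id (hypothetical).
fn spawn_button(commands: &mut Commands, on_click_system: SystemId) {
    commands.spawn(CoreButton {
        on_activate: Callback::System(on_click_system),
    });
}
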
@ -7,7 +7,7 @@ use bevy_ecs::system::{In, ResMut};
use bevy_ecs::{
    component::Component,
    observer::On,
    system::{Commands, Query, SystemId},
    system::{Commands, Query},
};
use bevy_input::keyboard::{KeyCode, KeyboardInput};
use bevy_input::ButtonState;
@ -15,11 +15,13 @@ use bevy_input_focus::{FocusedInput, InputFocus, InputFocusVisible};
use bevy_picking::events::{Click, Pointer};
use bevy_ui::{Checkable, Checked, InteractionDisabled};

use crate::{Callback, Notify as _};

/// Headless widget implementation for checkboxes. The [`Checked`] component represents the current
/// state of the checkbox. The `on_change` field is an optional system id that will be run when the
/// checkbox is clicked, or when the `Enter` or `Space` key is pressed while the checkbox is
/// focused. If the `on_change` field is `None`, then instead of calling a callback, the checkbox
/// will update its own [`Checked`] state directly.
/// focused. If the `on_change` field is `Callback::Ignore`, then instead of calling a callback, the
/// checkbox will update its own [`Checked`] state directly.
///
/// # Toggle switches
///
@ -29,8 +31,10 @@ use bevy_ui::{Checkable, Checked, InteractionDisabled};
#[derive(Component, Debug, Default)]
#[require(AccessibilityNode(accesskit::Node::new(Role::CheckBox)), Checkable)]
pub struct CoreCheckbox {
    /// One-shot system that is run when the checkbox state needs to be changed.
    pub on_change: Option<SystemId<In<bool>>>,
    /// One-shot system that is run when the checkbox state needs to be changed. If this value is
    /// `Callback::Ignore`, then the checkbox will update its own internal [`Checked`] state
    /// without notification.
    pub on_change: Callback<In<bool>>,
}

fn checkbox_on_key_input(
@ -157,8 +161,8 @@ fn set_checkbox_state(
    checkbox: &CoreCheckbox,
    new_state: bool,
) {
    if let Some(on_change) = checkbox.on_change {
        commands.run_system_with(on_change, new_state);
    if !matches!(checkbox.on_change, Callback::Ignore) {
        commands.notify_with(&checkbox.on_change, new_state);
    } else if new_state {
        commands.entity(entity.into()).insert(Checked);
    } else {
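A minimal usage sketch for the checkbox (not part of this diff): with `Callback::Ignore`, `set_checkbox_state` above toggles the `Checked` marker on the checkbox entity itself, so no external system is needed.

// Hedged sketch: a self-managed checkbox using the Callback::Ignore path
// shown in the hunks above.
use bevy::prelude::*;
use bevy_core_widgets::{Callback, CoreCheckbox};

fn setup(mut commands: Commands) {
    commands.spawn((
        Node::default(),
        CoreCheckbox {
            on_change: Callback::Ignore,
        },
    ));
}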
@ -9,13 +9,15 @@ use bevy_ecs::{
    entity::Entity,
    observer::On,
    query::With,
    system::{Commands, Query, SystemId},
    system::{Commands, Query},
};
use bevy_input::keyboard::{KeyCode, KeyboardInput};
use bevy_input::ButtonState;
use bevy_input_focus::FocusedInput;
use bevy_picking::events::{Click, Pointer};
use bevy_ui::{Checked, InteractionDisabled};
use bevy_ui::{Checkable, Checked, InteractionDisabled};

use crate::{Callback, Notify};

/// Headless widget implementation for a "radio button group". This component is used to group
/// multiple [`CoreRadio`] components together, allowing them to behave as a single unit. It
@ -36,7 +38,7 @@ use bevy_ui::{Checked, InteractionDisabled};
#[require(AccessibilityNode(accesskit::Node::new(Role::RadioGroup)))]
pub struct CoreRadioGroup {
    /// Callback which is called when the selected radio button changes.
    pub on_change: Option<SystemId<In<Entity>>>,
    pub on_change: Callback<In<Entity>>,
}

/// Headless widget implementation for radio buttons. These should be enclosed within a
@ -46,7 +48,7 @@ pub struct CoreRadioGroup {
/// but rather the enclosing group should be focusable.
/// See <https://www.w3.org/WAI/ARIA/apg/patterns/radio>/
#[derive(Component, Debug)]
#[require(AccessibilityNode(accesskit::Node::new(Role::RadioButton)), Checked)]
#[require(AccessibilityNode(accesskit::Node::new(Role::RadioButton)), Checkable)]
pub struct CoreRadio;

fn radio_group_on_key_input(
@ -131,9 +133,7 @@ fn radio_group_on_key_input(
            let (next_id, _) = radio_buttons[next_index];

            // Trigger the on_change event for the newly checked radio button
            if let Some(on_change) = on_change {
                commands.run_system_with(*on_change, next_id);
            }
            commands.notify_with(on_change, next_id);
        }
    }
}
@ -170,6 +170,11 @@ fn radio_group_on_button_click(
        }
    };

    // Radio button is disabled.
    if q_radio.get(radio_id).unwrap().1 {
        return;
    }

    // Gather all the enabled radio group descendants for exclusion.
    let radio_buttons = q_children
        .iter_descendants(ev.target())
@ -196,9 +201,7 @@ fn radio_group_on_button_click(
        }

        // Trigger the on_change event for the newly checked radio button
        if let Some(on_change) = on_change {
            commands.run_system_with(*on_change, radio_id);
        }
        commands.notify_with(on_change, radio_id);
    }
}
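A minimal usage sketch for a radio group (not part of this diff); as with the button sketch, `Callback::System` is an assumption about the new callback module, while `Callback<In<Entity>>` and the group/radio components are taken from the hunks above.

// Hedged sketch: a radio group that reports the newly selected radio entity
// to a one-shot system.
use bevy::prelude::*;
use bevy_core_widgets::{Callback, CoreRadio, CoreRadioGroup};

fn setup(mut commands: Commands) {
    // One-shot system receiving the selected radio's Entity as input.
    let on_change = commands.register_system(|selected: In<Entity>| {
        info!("selected radio: {}", *selected);
    });
    commands
        .spawn((
            Node::default(),
            CoreRadioGroup {
                on_change: Callback::System(on_change),
            },
        ))
        .with_children(|group| {
            group.spawn((Node::default(), CoreRadio));
            group.spawn((Node::default(), CoreRadio));
        });
}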
329
crates/bevy_core_widgets/src/core_scrollbar.rs
Normal file
@ -0,0 +1,329 @@
use bevy_app::{App, Plugin, PostUpdate};
use bevy_ecs::{
    component::Component,
    entity::Entity,
    hierarchy::{ChildOf, Children},
    observer::On,
    query::{With, Without},
    system::{Query, Res},
};
use bevy_math::Vec2;
use bevy_picking::events::{Cancel, Drag, DragEnd, DragStart, Pointer, Press};
use bevy_ui::{
    ComputedNode, ComputedNodeTarget, Node, ScrollPosition, UiGlobalTransform, UiScale, Val,
};

/// Used to select the orientation of a scrollbar, slider, or other oriented control.
// TODO: Move this to a more central place.
#[derive(Debug, Default, Clone, Copy, PartialEq)]
pub enum ControlOrientation {
    /// Horizontal orientation (stretching from left to right)
    Horizontal,
    /// Vertical orientation (stretching from top to bottom)
    #[default]
    Vertical,
}

/// A headless scrollbar widget, which can be used to build custom scrollbars.
///
/// Scrollbars operate differently than the other core widgets in a number of respects.
///
/// Unlike sliders, scrollbars don't have an [`AccessibilityNode`](bevy_a11y::AccessibilityNode)
/// component, nor can they have keyboard focus. This is because scrollbars are usually used in
/// conjunction with a scrollable container, which is itself accessible and focusable. This also
/// means that scrollbars don't accept keyboard events, which is also the responsibility of the
/// scrollable container.
///
/// Scrollbars don't emit notification events; instead they modify the scroll position of the target
/// entity directly.
///
/// A scrollbar can have any number of child entities, but one entity must be the scrollbar thumb,
/// which is marked with the [`CoreScrollbarThumb`] component. Other children are ignored. The core
/// scrollbar will directly update the position and size of this entity; the application is free to
/// set any other style properties as desired.
///
/// The application is free to position the scrollbars relative to the scrolling container however
/// it wants: it can overlay them on top of the scrolling content, or use a grid layout to displace
/// the content to make room for the scrollbars.
#[derive(Component, Debug)]
pub struct CoreScrollbar {
    /// Entity being scrolled.
    pub target: Entity,
    /// Whether the scrollbar is vertical or horizontal.
    pub orientation: ControlOrientation,
    /// Minimum length of the scrollbar thumb, in pixel units, in the direction parallel to the main
    /// scrollbar axis. The scrollbar will resize the thumb entity based on the proportion of
    /// visible size to content size, but no smaller than this. This prevents the thumb from
    /// disappearing in cases where the ratio of content size to visible size is large.
    pub min_thumb_length: f32,
}

/// Marker component to indicate that the entity is a scrollbar thumb (the moving, draggable part of
/// the scrollbar). This should be a child of the scrollbar entity.
#[derive(Component, Debug)]
#[require(CoreScrollbarDragState)]
pub struct CoreScrollbarThumb;

impl CoreScrollbar {
    /// Construct a new scrollbar.
    ///
    /// # Arguments
    ///
    /// * `target` - The scrollable entity that this scrollbar will control.
    /// * `orientation` - The orientation of the scrollbar (horizontal or vertical).
    /// * `min_thumb_length` - The minimum size of the scrollbar's thumb, in pixels.
    pub fn new(target: Entity, orientation: ControlOrientation, min_thumb_length: f32) -> Self {
        Self {
            target,
            orientation,
            min_thumb_length,
        }
    }
}

/// Component used to manage the state of a scrollbar during dragging. This component is
/// inserted on the thumb entity.
#[derive(Component, Default)]
pub struct CoreScrollbarDragState {
    /// Whether the scrollbar is currently being dragged.
    pub dragging: bool,
    /// The value of the scrollbar when dragging started.
    drag_origin: f32,
}

fn scrollbar_on_pointer_down(
    mut ev: On<Pointer<Press>>,
    q_thumb: Query<&ChildOf, With<CoreScrollbarThumb>>,
    mut q_scrollbar: Query<(
        &CoreScrollbar,
        &ComputedNode,
        &ComputedNodeTarget,
        &UiGlobalTransform,
    )>,
    mut q_scroll_pos: Query<(&mut ScrollPosition, &ComputedNode), Without<CoreScrollbar>>,
    ui_scale: Res<UiScale>,
) {
    if q_thumb.contains(ev.target()) {
        // If they click on the thumb, do nothing. This will be handled by the drag event.
        ev.propagate(false);
    } else if let Ok((scrollbar, node, node_target, transform)) = q_scrollbar.get_mut(ev.target()) {
        // If they click on the scrollbar track, page up or down.
        ev.propagate(false);

        // Convert to widget-local coordinates.
        let local_pos = transform.try_inverse().unwrap().transform_point2(
            ev.event().pointer_location.position * node_target.scale_factor() / ui_scale.0,
        ) + node.size() * 0.5;

        // Bail if we don't find the target entity.
        let Ok((mut scroll_pos, scroll_content)) = q_scroll_pos.get_mut(scrollbar.target) else {
            return;
        };

        // Convert the click coordinates into a scroll position. If it's greater than the
        // current scroll position, scroll forward by one step (visible size) otherwise scroll
        // back.
        let visible_size = scroll_content.size() * scroll_content.inverse_scale_factor;
        let content_size = scroll_content.content_size() * scroll_content.inverse_scale_factor;
        let max_range = (content_size - visible_size).max(Vec2::ZERO);

        fn adjust_scroll_pos(scroll_pos: &mut f32, click_pos: f32, step: f32, range: f32) {
            *scroll_pos =
                (*scroll_pos + if click_pos > *scroll_pos { step } else { -step }).clamp(0., range);
        }

        match scrollbar.orientation {
            ControlOrientation::Horizontal => {
                if node.size().x > 0. {
                    let click_pos = local_pos.x * content_size.x / node.size().x;
                    adjust_scroll_pos(&mut scroll_pos.x, click_pos, visible_size.x, max_range.x);
                }
            }
            ControlOrientation::Vertical => {
                if node.size().y > 0. {
                    let click_pos = local_pos.y * content_size.y / node.size().y;
                    adjust_scroll_pos(&mut scroll_pos.y, click_pos, visible_size.y, max_range.y);
                }
            }
        }
    }
}

fn scrollbar_on_drag_start(
    mut ev: On<Pointer<DragStart>>,
    mut q_thumb: Query<(&ChildOf, &mut CoreScrollbarDragState), With<CoreScrollbarThumb>>,
    q_scrollbar: Query<&CoreScrollbar>,
    q_scroll_area: Query<&ScrollPosition>,
) {
    if let Ok((ChildOf(thumb_parent), mut drag)) = q_thumb.get_mut(ev.target()) {
        ev.propagate(false);
        if let Ok(scrollbar) = q_scrollbar.get(*thumb_parent) {
            if let Ok(scroll_area) = q_scroll_area.get(scrollbar.target) {
                drag.dragging = true;
                drag.drag_origin = match scrollbar.orientation {
                    ControlOrientation::Horizontal => scroll_area.x,
                    ControlOrientation::Vertical => scroll_area.y,
                };
            }
        }
    }
}

fn scrollbar_on_drag(
    mut ev: On<Pointer<Drag>>,
    mut q_thumb: Query<(&ChildOf, &mut CoreScrollbarDragState), With<CoreScrollbarThumb>>,
    mut q_scrollbar: Query<(&ComputedNode, &CoreScrollbar)>,
    mut q_scroll_pos: Query<(&mut ScrollPosition, &ComputedNode), Without<CoreScrollbar>>,
    ui_scale: Res<UiScale>,
) {
    if let Ok((ChildOf(thumb_parent), drag)) = q_thumb.get_mut(ev.target()) {
        if let Ok((node, scrollbar)) = q_scrollbar.get_mut(*thumb_parent) {
            ev.propagate(false);
            let Ok((mut scroll_pos, scroll_content)) = q_scroll_pos.get_mut(scrollbar.target)
            else {
                return;
            };

            if drag.dragging {
                let distance = ev.event().distance / ui_scale.0;
                let visible_size = scroll_content.size() * scroll_content.inverse_scale_factor;
                let content_size =
                    scroll_content.content_size() * scroll_content.inverse_scale_factor;
                let scrollbar_size = (node.size() * node.inverse_scale_factor).max(Vec2::ONE);

                match scrollbar.orientation {
                    ControlOrientation::Horizontal => {
                        let range = (content_size.x - visible_size.x).max(0.);
                        scroll_pos.x = (drag.drag_origin
                            + (distance.x * content_size.x) / scrollbar_size.x)
                            .clamp(0., range);
                    }
                    ControlOrientation::Vertical => {
                        let range = (content_size.y - visible_size.y).max(0.);
                        scroll_pos.y = (drag.drag_origin
                            + (distance.y * content_size.y) / scrollbar_size.y)
                            .clamp(0., range);
                    }
                };
            }
        }
    }
}

fn scrollbar_on_drag_end(
    mut ev: On<Pointer<DragEnd>>,
    mut q_thumb: Query<&mut CoreScrollbarDragState, With<CoreScrollbarThumb>>,
) {
    if let Ok(mut drag) = q_thumb.get_mut(ev.target()) {
        ev.propagate(false);
        if drag.dragging {
            drag.dragging = false;
        }
    }
}

fn scrollbar_on_drag_cancel(
    mut ev: On<Pointer<Cancel>>,
    mut q_thumb: Query<&mut CoreScrollbarDragState, With<CoreScrollbarThumb>>,
) {
    if let Ok(mut drag) = q_thumb.get_mut(ev.target()) {
        ev.propagate(false);
        if drag.dragging {
            drag.dragging = false;
        }
    }
}

fn update_scrollbar_thumb(
    q_scroll_area: Query<(&ScrollPosition, &ComputedNode)>,
    q_scrollbar: Query<(&CoreScrollbar, &ComputedNode, &Children)>,
    mut q_thumb: Query<&mut Node, With<CoreScrollbarThumb>>,
) {
    for (scrollbar, scrollbar_node, children) in q_scrollbar.iter() {
        let Ok(scroll_area) = q_scroll_area.get(scrollbar.target) else {
            continue;
        };

        // Size of the visible scrolling area.
        let visible_size = scroll_area.1.size() * scroll_area.1.inverse_scale_factor;

        // Size of the scrolling content.
        let content_size = scroll_area.1.content_size() * scroll_area.1.inverse_scale_factor;

        // Length of the scrollbar track.
        let track_length = scrollbar_node.size() * scrollbar_node.inverse_scale_factor;

        fn size_and_pos(
            content_size: f32,
            visible_size: f32,
            track_length: f32,
            min_size: f32,
            offset: f32,
        ) -> (f32, f32) {
            let thumb_size = if content_size > visible_size {
                (track_length * visible_size / content_size)
                    .max(min_size)
                    .min(track_length)
            } else {
                track_length
            };

            let thumb_pos = if content_size > visible_size {
                offset * (track_length - thumb_size) / (content_size - visible_size)
            } else {
                0.
            };

            (thumb_size, thumb_pos)
        }

        for child in children {
            if let Ok(mut thumb) = q_thumb.get_mut(*child) {
                match scrollbar.orientation {
                    ControlOrientation::Horizontal => {
                        let (thumb_size, thumb_pos) = size_and_pos(
                            content_size.x,
                            visible_size.x,
                            track_length.x,
                            scrollbar.min_thumb_length,
                            scroll_area.0.x,
                        );

                        thumb.top = Val::Px(0.);
                        thumb.bottom = Val::Px(0.);
                        thumb.left = Val::Px(thumb_pos);
                        thumb.width = Val::Px(thumb_size);
                    }
                    ControlOrientation::Vertical => {
                        let (thumb_size, thumb_pos) = size_and_pos(
                            content_size.y,
                            visible_size.y,
                            track_length.y,
                            scrollbar.min_thumb_length,
                            scroll_area.0.y,
                        );

                        thumb.left = Val::Px(0.);
                        thumb.right = Val::Px(0.);
                        thumb.top = Val::Px(thumb_pos);
                        thumb.height = Val::Px(thumb_size);
                    }
                };
            }
        }
    }
}

/// Plugin that adds the observers for the [`CoreScrollbar`] widget.
pub struct CoreScrollbarPlugin;

impl Plugin for CoreScrollbarPlugin {
    fn build(&self, app: &mut App) {
        app.add_observer(scrollbar_on_pointer_down)
            .add_observer(scrollbar_on_drag_start)
            .add_observer(scrollbar_on_drag_end)
            .add_observer(scrollbar_on_drag_cancel)
            .add_observer(scrollbar_on_drag)
            .add_systems(PostUpdate, update_scrollbar_thumb);
    }
}
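A minimal usage sketch for the new scrollbar (not part of this diff): a vertical scrollbar targeting a scrollable container, with a thumb child that `update_scrollbar_thumb` positions and resizes. `Overflow::scroll_y` and `PositionType::Absolute` are assumptions about the public bevy_ui API.

// Hedged sketch: scroll area plus a vertical scrollbar with a 20 px minimum thumb.
use bevy::prelude::*;
use bevy_core_widgets::{ControlOrientation, CoreScrollbar, CoreScrollbarThumb};

fn setup(mut commands: Commands) {
    // The scrollable content area that the scrollbar will control.
    let scroll_area = commands
        .spawn(Node {
            overflow: Overflow::scroll_y(),
            ..Default::default()
        })
        .id();

    // The scrollbar entity, with its thumb as a child.
    commands
        .spawn((
            Node::default(),
            CoreScrollbar::new(scroll_area, ControlOrientation::Vertical, 20.0),
        ))
        .with_children(|bar| {
            bar.spawn((
                Node {
                    position_type: PositionType::Absolute,
                    ..Default::default()
                },
                CoreScrollbarThumb,
            ));
        });
}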
@ -13,7 +13,7 @@ use bevy_ecs::{
    component::Component,
    observer::On,
    query::With,
    system::{Commands, Query, SystemId},
    system::{Commands, Query},
};
use bevy_input::keyboard::{KeyCode, KeyboardInput};
use bevy_input::ButtonState;
@ -22,6 +22,8 @@ use bevy_log::warn_once;
use bevy_picking::events::{Drag, DragEnd, DragStart, Pointer, Press};
use bevy_ui::{ComputedNode, ComputedNodeTarget, InteractionDisabled, UiGlobalTransform, UiScale};

use crate::{Callback, Notify};

/// Defines how the slider should behave when you click on the track (not the thumb).
#[derive(Debug, Default, PartialEq, Clone, Copy)]
pub enum TrackClick {
@ -72,8 +74,9 @@ pub enum TrackClick {
)]
pub struct CoreSlider {
    /// Callback which is called when the slider is dragged or the value is changed via other user
    /// interaction. If this value is `None`, then the slider will self-update.
    pub on_change: Option<SystemId<In<f32>>>,
    /// interaction. If this value is `Callback::Ignore`, then the slider will update its own
    /// internal [`SliderValue`] state without notification.
    pub on_change: Callback<In<f32>>,
    /// Set the track-clicking behavior for this slider.
    pub track_click: TrackClick,
    // TODO: Think about whether we want a "vertical" option.
@ -92,7 +95,9 @@ pub struct SliderValue(pub f32);
#[derive(Component, Debug, PartialEq, Clone, Copy)]
#[component(immutable)]
pub struct SliderRange {
    /// The beginning of the allowed range for the slider value.
    start: f32,
    /// The end of the allowed range for the slider value.
    end: f32,
}

@ -255,12 +260,12 @@ pub(crate) fn slider_on_pointer_down(
            TrackClick::Snap => click_val,
        });

        if let Some(on_change) = slider.on_change {
            commands.run_system_with(on_change, new_value);
        } else {
        if matches!(slider.on_change, Callback::Ignore) {
            commands
                .entity(trigger.target())
                .insert(SliderValue(new_value));
        } else {
            commands.notify_with(&slider.on_change, new_value);
        }
    }
}
@ -320,12 +325,12 @@ pub(crate) fn slider_on_drag(
            range.start() + span * 0.5
        };

        if let Some(on_change) = slider.on_change {
            commands.run_system_with(on_change, new_value);
        } else {
        if matches!(slider.on_change, Callback::Ignore) {
            commands
                .entity(trigger.target())
                .insert(SliderValue(new_value));
        } else {
            commands.notify_with(&slider.on_change, new_value);
        }
    }
}
@ -367,12 +372,12 @@ fn slider_on_key_input(
            }
        };
        trigger.propagate(false);
        if let Some(on_change) = slider.on_change {
            commands.run_system_with(on_change, new_value);
        } else {
        if matches!(slider.on_change, Callback::Ignore) {
            commands
                .entity(trigger.target())
                .insert(SliderValue(new_value));
        } else {
            commands.notify_with(&slider.on_change, new_value);
        }
    }
}
@ -459,12 +464,12 @@ fn slider_on_set_value(
            range.clamp(value.0 + *delta * step.map(|s| s.0).unwrap_or_default())
        }
    };
    if let Some(on_change) = slider.on_change {
        commands.run_system_with(on_change, new_value);
    } else {
    if matches!(slider.on_change, Callback::Ignore) {
        commands
            .entity(trigger.target())
            .insert(SliderValue(new_value));
    } else {
        commands.notify_with(&slider.on_change, new_value);
    }
}
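A minimal usage sketch for the slider (not part of this diff): with `Callback::Ignore`, the observers above write `SliderValue` on the slider entity directly. The `..Default::default()` assumes `CoreSlider` implements `Default`, which is not visible in these hunks.

// Hedged sketch: a self-updating slider that snaps to the clicked track position.
use bevy::prelude::*;
use bevy_core_widgets::{Callback, CoreSlider, SliderValue, TrackClick};

fn setup(mut commands: Commands) {
    commands.spawn((
        Node::default(),
        CoreSlider {
            on_change: Callback::Ignore, // the slider manages SliderValue itself
            track_click: TrackClick::Snap,
            ..Default::default() // assumes a Default impl for CoreSlider
        },
        SliderValue(0.5),
    ));
}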
@ -14,16 +14,23 @@
// styled/opinionated widgets that use them. Components which are directly exposed to users above
// the widget level, like `SliderValue`, should not have the `Core` prefix.

mod callback;
mod core_button;
mod core_checkbox;
mod core_radio;
mod core_scrollbar;
mod core_slider;

use bevy_app::{App, Plugin};

pub use callback::{Callback, Notify};
pub use core_button::{CoreButton, CoreButtonPlugin};
pub use core_checkbox::{CoreCheckbox, CoreCheckboxPlugin, SetChecked, ToggleChecked};
pub use core_radio::{CoreRadio, CoreRadioGroup, CoreRadioGroupPlugin};
pub use core_scrollbar::{
    ControlOrientation, CoreScrollbar, CoreScrollbarDragState, CoreScrollbarPlugin,
    CoreScrollbarThumb,
};
pub use core_slider::{
    CoreSlider, CoreSliderDragState, CoreSliderPlugin, CoreSliderThumb, SetSliderValue,
    SliderRange, SliderStep, SliderValue, TrackClick,
@ -39,6 +46,7 @@ impl Plugin for CoreWidgetsPlugin {
            CoreButtonPlugin,
            CoreCheckboxPlugin,
            CoreRadioGroupPlugin,
            CoreScrollbarPlugin,
            CoreSliderPlugin,
        ));
    }
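A minimal sketch of wiring all of the core widget plugins (including the new CoreScrollbarPlugin) into an app (not part of this diff); it assumes `CoreWidgetsPlugin` is the unit-struct plugin whose `build` is shown above.

// Hedged sketch: registering the whole core widget set in one call.
use bevy::prelude::*;
use bevy_core_widgets::CoreWidgetsPlugin;

fn main() {
    App::new()
        .add_plugins((DefaultPlugins, CoreWidgetsPlugin))
        .run();
}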
@ -37,6 +37,9 @@ pub enum CiTestingEvent {
    /// Takes a screenshot of the entire screen, and saves the results to
    /// `screenshot-{current_frame}.png`.
    Screenshot,
    /// Takes a screenshot of the entire screen, saves the results to
    /// `screenshot-{current_frame}.png`, and exits once the screenshot is taken.
    ScreenshotAndExit,
    /// Takes a screenshot of the entire screen, and saves the results to
    /// `screenshot-{name}.png`.
    NamedScreenshot(String),
@ -21,6 +21,19 @@ pub(crate) fn send_events(world: &mut World, mut current_frame: Local<u32>) {
            world.send_event(AppExit::Success);
            info!("Exiting after {} frames. Test successful!", *current_frame);
        }
        CiTestingEvent::ScreenshotAndExit => {
            let this_frame = *current_frame;
            world.spawn(Screenshot::primary_window()).observe(
                move |captured: On<bevy_render::view::screenshot::ScreenshotCaptured>,
                      mut exit_event: EventWriter<AppExit>| {
                    let path = format!("./screenshot-{this_frame}.png");
                    save_to_disk(path)(captured);
                    info!("Exiting. Test successful!");
                    exit_event.write(AppExit::Success);
                },
            );
            info!("Took a screenshot at frame {}.", *current_frame);
        }
        CiTestingEvent::Screenshot => {
            let path = format!("./screenshot-{}.png", *current_frame);
            world
@ -29,7 +42,7 @@ pub(crate) fn send_events(world: &mut World, mut current_frame: Local<u32>) {
            info!("Took a screenshot at frame {}.", *current_frame);
        }
        CiTestingEvent::NamedScreenshot(name) => {
            let path = format!("./screenshot-{}.png", name);
            let path = format!("./screenshot-{name}.png");
            world
                .spawn(Screenshot::primary_window())
                .observe(save_to_disk(path));
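The same screenshot-and-save pattern used above can be driven from an ordinary system via `Commands`; a hedged sketch follows, assuming the `bevy::render::view::screenshot` re-export path.

// Hedged sketch: spawn a Screenshot request and save the capture to disk.
use bevy::prelude::*;
use bevy::render::view::screenshot::{save_to_disk, Screenshot};

fn take_screenshot(mut commands: Commands) {
    commands
        .spawn(Screenshot::primary_window())
        .observe(save_to_disk("./screenshot-manual.png"));
}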
@ -202,7 +202,7 @@ impl LogDiagnosticsPlugin {
    ) {
        if state.timer.tick(time.delta()).is_finished() {
            Self::for_each_diagnostic(&state, &diagnostics, |diagnostic| {
                debug!("{:#?}\n", diagnostic);
                debug!("{diagnostic:#?}\n");
            });
        }
    }
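A hedged sketch of how the debug-formatted logging path above is typically exercised; the public `debug` field and `FrameTimeDiagnosticsPlugin::default()` are assumptions about the surrounding Bevy API, not shown in this diff.

// Hedged sketch: log diagnostics with the Debug formatting branch enabled.
use bevy::diagnostic::{FrameTimeDiagnosticsPlugin, LogDiagnosticsPlugin};
use bevy::prelude::*;

fn main() {
    App::new()
        .add_plugins((
            DefaultPlugins,
            FrameTimeDiagnosticsPlugin::default(),
            LogDiagnosticsPlugin {
                debug: true, // assumed public field selecting the debug! path
                ..Default::default()
            },
        ))
        .run();
}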
@ -236,7 +236,7 @@ pub mod internal {
            memory: format!("{:.1} GiB", sys.total_memory() as f64 * BYTES_TO_GIB),
        };

        info!("{:?}", system_info);
        info!("{system_info:?}");
        system_info
    }
}
|
@ -40,19 +40,26 @@ pub fn derive_entity_event(input: TokenStream) -> TokenStream {
|
||||
let mut traversal: Type = parse_quote!(());
|
||||
let bevy_ecs_path: Path = crate::bevy_ecs_path();
|
||||
|
||||
let mut processed_attrs = Vec::new();
|
||||
|
||||
ast.generics
|
||||
.make_where_clause()
|
||||
.predicates
|
||||
.push(parse_quote! { Self: Send + Sync + 'static });
|
||||
|
||||
if let Some(attr) = ast.attrs.iter().find(|attr| attr.path().is_ident(EVENT)) {
|
||||
for attr in ast.attrs.iter().filter(|attr| attr.path().is_ident(EVENT)) {
|
||||
if let Err(e) = attr.parse_nested_meta(|meta| match meta.path.get_ident() {
|
||||
Some(ident) if processed_attrs.iter().any(|i| ident == i) => {
|
||||
Err(meta.error(format!("duplicate attribute: {ident}")))
|
||||
}
|
||||
Some(ident) if ident == AUTO_PROPAGATE => {
|
||||
auto_propagate = true;
|
||||
processed_attrs.push(AUTO_PROPAGATE);
|
||||
Ok(())
|
||||
}
|
||||
Some(ident) if ident == TRAVERSAL => {
|
||||
traversal = meta.value()?.parse()?;
|
||||
processed_attrs.push(TRAVERSAL);
|
||||
Ok(())
|
||||
}
|
||||
Some(ident) => Err(meta.error(format!("unsupported attribute: {ident}"))),
|
||||
@ -108,6 +115,7 @@ pub fn derive_resource(input: TokenStream) -> TokenStream {
|
||||
})
|
||||
}
|
||||
|
||||
/// Component derive syntax is documented on both the macro and the trait.
|
||||
pub fn derive_component(input: TokenStream) -> TokenStream {
|
||||
let mut ast = parse_macro_input!(input as DeriveInput);
|
||||
let bevy_ecs_path: Path = crate::bevy_ecs_path();
|
||||
@ -446,7 +454,11 @@ pub const MAP_ENTITIES: &str = "map_entities";
|
||||
pub const IMMUTABLE: &str = "immutable";
|
||||
pub const CLONE_BEHAVIOR: &str = "clone_behavior";
|
||||
|
||||
/// All allowed attribute value expression kinds for component hooks
|
||||
/// All allowed attribute value expression kinds for component hooks.
|
||||
/// This doesn't simply use general expressions because of conflicting needs:
|
||||
/// - we want to be able to use `Self` & generic parameters in paths
|
||||
/// - call expressions producing a closure need to be wrapped in a function
|
||||
/// to turn them into function pointers, which prevents access to the outer generic params
|
||||
#[derive(Debug)]
|
||||
enum HookAttributeKind {
|
||||
/// expressions like function or struct names
|
||||
Some files were not shown because too many files have changed in this diff