Merge branch 'main' into dlss3

JMS55 2025-07-08 18:57:26 -07:00 committed by GitHub
commit 8084745c25
19 changed files with 268 additions and 315 deletions


@ -30,9 +30,7 @@
//! //!
//! [SMAA]: https://www.iryoku.com/smaa/ //! [SMAA]: https://www.iryoku.com/smaa/
use bevy_app::{App, Plugin}; use bevy_app::{App, Plugin};
#[cfg(feature = "smaa_luts")] use bevy_asset::{embedded_asset, load_embedded_asset, AssetServer, Handle};
use bevy_asset::load_internal_binary_asset;
use bevy_asset::{embedded_asset, load_embedded_asset, uuid_handle, AssetServer, Handle};
#[cfg(not(feature = "smaa_luts"))] #[cfg(not(feature = "smaa_luts"))]
use bevy_core_pipeline::tonemapping::lut_placeholder; use bevy_core_pipeline::tonemapping::lut_placeholder;
use bevy_core_pipeline::{ use bevy_core_pipeline::{
@ -79,13 +77,6 @@ use bevy_render::{
}; };
use bevy_utils::prelude::default; use bevy_utils::prelude::default;
/// The handle of the area LUT, a KTX2 format texture that SMAA uses internally.
const SMAA_AREA_LUT_TEXTURE_HANDLE: Handle<Image> =
uuid_handle!("569c4d67-c7fa-4958-b1af-0836023603c0");
/// The handle of the search LUT, a KTX2 format texture that SMAA uses internally.
const SMAA_SEARCH_LUT_TEXTURE_HANDLE: Handle<Image> =
uuid_handle!("43b97515-252e-4c8a-b9af-f2fc528a1c27");
/// Adds support for subpixel morphological antialiasing, or SMAA. /// Adds support for subpixel morphological antialiasing, or SMAA.
pub struct SmaaPlugin; pub struct SmaaPlugin;
@ -125,6 +116,14 @@ pub enum SmaaPreset {
Ultra, Ultra,
} }
#[derive(Resource)]
struct SmaaLuts {
/// The handle of the area LUT, a KTX2 format texture that SMAA uses internally.
area_lut: Handle<Image>,
/// The handle of the search LUT, a KTX2 format texture that SMAA uses internally.
search_lut: Handle<Image>,
}
/// A render world resource that holds all render pipeline data needed for SMAA. /// A render world resource that holds all render pipeline data needed for SMAA.
/// ///
/// There are three separate passes, so we need three separate pipelines. /// There are three separate passes, so we need three separate pipelines.
@ -292,49 +291,26 @@ impl Plugin for SmaaPlugin {
// Load the shader. // Load the shader.
embedded_asset!(app, "smaa.wgsl"); embedded_asset!(app, "smaa.wgsl");
// Load the two lookup textures. These are compressed textures in KTX2
// format.
#[cfg(feature = "smaa_luts")] #[cfg(feature = "smaa_luts")]
load_internal_binary_asset!( let smaa_luts = {
app, // Load the two lookup textures. These are compressed textures in KTX2 format.
SMAA_AREA_LUT_TEXTURE_HANDLE, embedded_asset!(app, "SMAAAreaLUT.ktx2");
"SMAAAreaLUT.ktx2", embedded_asset!(app, "SMAASearchLUT.ktx2");
|bytes, _: String| Image::from_buffer(
bytes,
bevy_image::ImageType::Format(bevy_image::ImageFormat::Ktx2),
bevy_image::CompressedImageFormats::NONE,
false,
bevy_image::ImageSampler::Default,
bevy_asset::RenderAssetUsages::RENDER_WORLD,
)
.expect("Failed to load SMAA area LUT")
);
#[cfg(feature = "smaa_luts")]
load_internal_binary_asset!(
app,
SMAA_SEARCH_LUT_TEXTURE_HANDLE,
"SMAASearchLUT.ktx2",
|bytes, _: String| Image::from_buffer(
bytes,
bevy_image::ImageType::Format(bevy_image::ImageFormat::Ktx2),
bevy_image::CompressedImageFormats::NONE,
false,
bevy_image::ImageSampler::Default,
bevy_asset::RenderAssetUsages::RENDER_WORLD,
)
.expect("Failed to load SMAA search LUT")
);
SmaaLuts {
area_lut: load_embedded_asset!(app, "SMAAAreaLUT.ktx2"),
search_lut: load_embedded_asset!(app, "SMAASearchLUT.ktx2"),
}
};
#[cfg(not(feature = "smaa_luts"))] #[cfg(not(feature = "smaa_luts"))]
app.world_mut() let smaa_luts = {
.resource_mut::<bevy_asset::Assets<Image>>() let mut images = app.world_mut().resource_mut::<bevy_asset::Assets<Image>>();
.insert(SMAA_AREA_LUT_TEXTURE_HANDLE.id(), lut_placeholder()); let handle = images.add(lut_placeholder());
SmaaLuts {
#[cfg(not(feature = "smaa_luts"))] area_lut: handle.clone(),
app.world_mut() search_lut: handle.clone(),
.resource_mut::<bevy_asset::Assets<Image>>() }
.insert(SMAA_SEARCH_LUT_TEXTURE_HANDLE.id(), lut_placeholder()); };
app.add_plugins(ExtractComponentPlugin::<Smaa>::default()) app.add_plugins(ExtractComponentPlugin::<Smaa>::default())
.register_type::<Smaa>(); .register_type::<Smaa>();
@ -344,6 +320,7 @@ impl Plugin for SmaaPlugin {
}; };
render_app render_app
.insert_resource(smaa_luts)
.init_resource::<SmaaSpecializedRenderPipelines>() .init_resource::<SmaaSpecializedRenderPipelines>()
.init_resource::<SmaaInfoUniformBuffer>() .init_resource::<SmaaInfoUniformBuffer>()
.add_systems(RenderStartup, init_smaa_pipelines) .add_systems(RenderStartup, init_smaa_pipelines)
@ -747,13 +724,14 @@ fn prepare_smaa_bind_groups(
mut commands: Commands, mut commands: Commands,
render_device: Res<RenderDevice>, render_device: Res<RenderDevice>,
smaa_pipelines: Res<SmaaPipelines>, smaa_pipelines: Res<SmaaPipelines>,
smaa_luts: Res<SmaaLuts>,
images: Res<RenderAssets<GpuImage>>, images: Res<RenderAssets<GpuImage>>,
view_targets: Query<(Entity, &SmaaTextures), (With<ExtractedView>, With<Smaa>)>, view_targets: Query<(Entity, &SmaaTextures), (With<ExtractedView>, With<Smaa>)>,
) { ) {
// Fetch the two lookup textures. These are bundled in this library. // Fetch the two lookup textures. These are bundled in this library.
let (Some(search_texture), Some(area_texture)) = ( let (Some(search_texture), Some(area_texture)) = (
images.get(&SMAA_SEARCH_LUT_TEXTURE_HANDLE), images.get(&smaa_luts.search_lut),
images.get(&SMAA_AREA_LUT_TEXTURE_HANDLE), images.get(&smaa_luts.area_lut),
) else { ) else {
return; return;
}; };


@ -492,8 +492,8 @@ impl<A: Asset> TryFrom<UntypedHandle> for Handle<A> {
/// ///
/// ``` /// ```
/// # use bevy_asset::{Handle, uuid_handle}; /// # use bevy_asset::{Handle, uuid_handle};
/// # type Shader = (); /// # type Image = ();
/// const SHADER: Handle<Shader> = uuid_handle!("1347c9b7-c46a-48e7-b7b8-023a354b7cac"); /// const IMAGE: Handle<Image> = uuid_handle!("1347c9b7-c46a-48e7-b7b8-023a354b7cac");
/// ``` /// ```
#[macro_export] #[macro_export]
macro_rules! uuid_handle { macro_rules! uuid_handle {


@ -12,7 +12,7 @@ use crate::core_3d::{
prepare_core_3d_depth_textures, prepare_core_3d_depth_textures,
}; };
use bevy_app::{App, Plugin}; use bevy_app::{App, Plugin};
use bevy_asset::{load_internal_asset, uuid_handle, Handle}; use bevy_asset::{embedded_asset, load_embedded_asset, Handle};
use bevy_derive::{Deref, DerefMut}; use bevy_derive::{Deref, DerefMut};
use bevy_ecs::{ use bevy_ecs::{
component::Component, component::Component,
@ -51,8 +51,8 @@ use bitflags::bitflags;
use tracing::debug; use tracing::debug;
/// Identifies the `downsample_depth.wgsl` shader. /// Identifies the `downsample_depth.wgsl` shader.
pub const DOWNSAMPLE_DEPTH_SHADER_HANDLE: Handle<Shader> = #[derive(Resource, Deref)]
uuid_handle!("a09a149e-5922-4fa4-9170-3c1a13065364"); pub struct DownsampleDepthShader(Handle<Shader>);
/// The maximum number of mip levels that we can produce. /// The maximum number of mip levels that we can produce.
/// ///
@ -69,18 +69,16 @@ pub struct MipGenerationPlugin;
impl Plugin for MipGenerationPlugin { impl Plugin for MipGenerationPlugin {
fn build(&self, app: &mut App) { fn build(&self, app: &mut App) {
load_internal_asset!( embedded_asset!(app, "downsample_depth.wgsl");
app,
DOWNSAMPLE_DEPTH_SHADER_HANDLE, let downsample_depth_shader = load_embedded_asset!(app, "downsample_depth.wgsl");
"downsample_depth.wgsl",
Shader::from_wgsl
);
let Some(render_app) = app.get_sub_app_mut(RenderApp) else { let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
return; return;
}; };
render_app render_app
.insert_resource(DownsampleDepthShader(downsample_depth_shader))
.init_resource::<SpecializedComputePipelines<DownsampleDepthPipeline>>() .init_resource::<SpecializedComputePipelines<DownsampleDepthPipeline>>()
.add_render_graph_node::<DownsampleDepthNode>(Core3d, Node3d::EarlyDownsampleDepth) .add_render_graph_node::<DownsampleDepthNode>(Core3d, Node3d::EarlyDownsampleDepth)
.add_render_graph_node::<DownsampleDepthNode>(Core3d, Node3d::LateDownsampleDepth) .add_render_graph_node::<DownsampleDepthNode>(Core3d, Node3d::LateDownsampleDepth)
@ -294,17 +292,21 @@ pub struct DownsampleDepthPipeline {
bind_group_layout: BindGroupLayout, bind_group_layout: BindGroupLayout,
/// A handle that identifies the compiled shader. /// A handle that identifies the compiled shader.
pipeline_id: Option<CachedComputePipelineId>, pipeline_id: Option<CachedComputePipelineId>,
/// The shader asset handle.
shader: Handle<Shader>,
} }
impl DownsampleDepthPipeline { impl DownsampleDepthPipeline {
/// Creates a new [`DownsampleDepthPipeline`] from a bind group layout. /// Creates a new [`DownsampleDepthPipeline`] from a bind group layout and the downsample
/// shader.
/// ///
/// This doesn't actually specialize the pipeline; that must be done /// This doesn't actually specialize the pipeline; that must be done
/// afterward. /// afterward.
fn new(bind_group_layout: BindGroupLayout) -> DownsampleDepthPipeline { fn new(bind_group_layout: BindGroupLayout, shader: Handle<Shader>) -> DownsampleDepthPipeline {
DownsampleDepthPipeline { DownsampleDepthPipeline {
bind_group_layout, bind_group_layout,
pipeline_id: None, pipeline_id: None,
shader,
} }
} }
} }
@ -335,6 +337,7 @@ fn create_downsample_depth_pipelines(
pipeline_cache: Res<PipelineCache>, pipeline_cache: Res<PipelineCache>,
mut specialized_compute_pipelines: ResMut<SpecializedComputePipelines<DownsampleDepthPipeline>>, mut specialized_compute_pipelines: ResMut<SpecializedComputePipelines<DownsampleDepthPipeline>>,
gpu_preprocessing_support: Res<GpuPreprocessingSupport>, gpu_preprocessing_support: Res<GpuPreprocessingSupport>,
downsample_depth_shader: Res<DownsampleDepthShader>,
mut has_run: Local<bool>, mut has_run: Local<bool>,
) { ) {
// Only run once. // Only run once.
@ -368,10 +371,22 @@ fn create_downsample_depth_pipelines(
// Initialize the pipelines. // Initialize the pipelines.
let mut downsample_depth_pipelines = DownsampleDepthPipelines { let mut downsample_depth_pipelines = DownsampleDepthPipelines {
first: DownsampleDepthPipeline::new(standard_bind_group_layout.clone()), first: DownsampleDepthPipeline::new(
second: DownsampleDepthPipeline::new(standard_bind_group_layout.clone()), standard_bind_group_layout.clone(),
first_multisample: DownsampleDepthPipeline::new(multisampled_bind_group_layout.clone()), downsample_depth_shader.0.clone(),
second_multisample: DownsampleDepthPipeline::new(multisampled_bind_group_layout.clone()), ),
second: DownsampleDepthPipeline::new(
standard_bind_group_layout.clone(),
downsample_depth_shader.0.clone(),
),
first_multisample: DownsampleDepthPipeline::new(
multisampled_bind_group_layout.clone(),
downsample_depth_shader.0.clone(),
),
second_multisample: DownsampleDepthPipeline::new(
multisampled_bind_group_layout.clone(),
downsample_depth_shader.0.clone(),
),
sampler, sampler,
}; };
@ -491,7 +506,7 @@ impl SpecializedComputePipeline for DownsampleDepthPipeline {
stages: ShaderStages::COMPUTE, stages: ShaderStages::COMPUTE,
range: 0..4, range: 0..4,
}], }],
shader: DOWNSAMPLE_DEPTH_SHADER_HANDLE, shader: self.shader.clone(),
shader_defs, shader_defs,
entry_point: Some(if key.contains(DownsampleDepthPipelineKey::SECOND_PHASE) { entry_point: Some(if key.contains(DownsampleDepthPipelineKey::SECOND_PHASE) {
"downsample_depth_second".into() "downsample_depth_second".into()


@ -3,7 +3,7 @@
//! Currently, this consists only of chromatic aberration. //! Currently, this consists only of chromatic aberration.
use bevy_app::{App, Plugin}; use bevy_app::{App, Plugin};
use bevy_asset::{embedded_asset, load_embedded_asset, uuid_handle, Assets, Handle}; use bevy_asset::{embedded_asset, load_embedded_asset, Assets, Handle};
use bevy_derive::{Deref, DerefMut}; use bevy_derive::{Deref, DerefMut};
use bevy_ecs::{ use bevy_ecs::{
component::Component, component::Component,
@ -47,13 +47,6 @@ use crate::{
FullscreenShader, FullscreenShader,
}; };
/// The handle to the default chromatic aberration lookup texture.
///
/// This is just a 3x1 image consisting of one red pixel, one green pixel, and
/// one blue pixel, in that order.
const DEFAULT_CHROMATIC_ABERRATION_LUT_HANDLE: Handle<Image> =
uuid_handle!("dc3e3307-40a1-49bb-be6d-e0634e8836b2");
/// The default chromatic aberration intensity amount, in a fraction of the /// The default chromatic aberration intensity amount, in a fraction of the
/// window size. /// window size.
const DEFAULT_CHROMATIC_ABERRATION_INTENSITY: f32 = 0.02; const DEFAULT_CHROMATIC_ABERRATION_INTENSITY: f32 = 0.02;
@ -68,6 +61,9 @@ const DEFAULT_CHROMATIC_ABERRATION_MAX_SAMPLES: u32 = 8;
static DEFAULT_CHROMATIC_ABERRATION_LUT_DATA: [u8; 12] = static DEFAULT_CHROMATIC_ABERRATION_LUT_DATA: [u8; 12] =
[255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255]; [255, 0, 0, 255, 0, 255, 0, 255, 0, 0, 255, 255];
#[derive(Resource)]
struct DefaultChromaticAberrationLut(Handle<Image>);
/// A plugin that implements a built-in postprocessing stack with some common /// A plugin that implements a built-in postprocessing stack with some common
/// effects. /// effects.
/// ///
@ -96,14 +92,14 @@ pub struct PostProcessingPlugin;
pub struct ChromaticAberration { pub struct ChromaticAberration {
/// The lookup texture that determines the color gradient. /// The lookup texture that determines the color gradient.
/// ///
/// By default, this is a 3×1 texel texture consisting of one red pixel, one /// By default (if None), this is a 3×1 texel texture consisting of one red
/// green pixel, and one blue texel, in that order. This recreates the most /// pixel, one green pixel, and one blue texel, in that order. This
/// typical chromatic aberration pattern. However, you can change it to /// recreates the most typical chromatic aberration pattern. However, you
/// achieve different artistic effects. /// can change it to achieve different artistic effects.
/// ///
/// The texture is always sampled in its vertical center, so it should /// The texture is always sampled in its vertical center, so it should
/// ordinarily have a height of 1 texel. /// ordinarily have a height of 1 texel.
pub color_lut: Handle<Image>, pub color_lut: Option<Handle<Image>>,
/// The size of the streaks around the edges of objects, as a fraction of /// The size of the streaks around the edges of objects, as a fraction of
/// the window size. /// the window size.
@ -192,20 +188,17 @@ impl Plugin for PostProcessingPlugin {
// Load the default chromatic aberration LUT. // Load the default chromatic aberration LUT.
let mut assets = app.world_mut().resource_mut::<Assets<_>>(); let mut assets = app.world_mut().resource_mut::<Assets<_>>();
assets.insert( let default_lut = assets.add(Image::new(
DEFAULT_CHROMATIC_ABERRATION_LUT_HANDLE.id(), Extent3d {
Image::new( width: 3,
Extent3d { height: 1,
width: 3, depth_or_array_layers: 1,
height: 1, },
depth_or_array_layers: 1, TextureDimension::D2,
}, DEFAULT_CHROMATIC_ABERRATION_LUT_DATA.to_vec(),
TextureDimension::D2, TextureFormat::Rgba8UnormSrgb,
DEFAULT_CHROMATIC_ABERRATION_LUT_DATA.to_vec(), RenderAssetUsages::RENDER_WORLD,
TextureFormat::Rgba8UnormSrgb, ));
RenderAssetUsages::RENDER_WORLD,
),
);
app.register_type::<ChromaticAberration>(); app.register_type::<ChromaticAberration>();
app.add_plugins(ExtractComponentPlugin::<ChromaticAberration>::default()); app.add_plugins(ExtractComponentPlugin::<ChromaticAberration>::default());
@ -215,6 +208,7 @@ impl Plugin for PostProcessingPlugin {
}; };
render_app render_app
.insert_resource(DefaultChromaticAberrationLut(default_lut))
.init_resource::<SpecializedRenderPipelines<PostProcessingPipeline>>() .init_resource::<SpecializedRenderPipelines<PostProcessingPipeline>>()
.init_resource::<PostProcessingUniformBuffers>() .init_resource::<PostProcessingUniformBuffers>()
.add_systems( .add_systems(
@ -258,7 +252,7 @@ impl Plugin for PostProcessingPlugin {
impl Default for ChromaticAberration { impl Default for ChromaticAberration {
fn default() -> Self { fn default() -> Self {
Self { Self {
color_lut: DEFAULT_CHROMATIC_ABERRATION_LUT_HANDLE, color_lut: None,
intensity: DEFAULT_CHROMATIC_ABERRATION_INTENSITY, intensity: DEFAULT_CHROMATIC_ABERRATION_INTENSITY,
max_samples: DEFAULT_CHROMATIC_ABERRATION_MAX_SAMPLES, max_samples: DEFAULT_CHROMATIC_ABERRATION_MAX_SAMPLES,
} }
@ -357,6 +351,7 @@ impl ViewNode for PostProcessingNode {
let post_processing_pipeline = world.resource::<PostProcessingPipeline>(); let post_processing_pipeline = world.resource::<PostProcessingPipeline>();
let post_processing_uniform_buffers = world.resource::<PostProcessingUniformBuffers>(); let post_processing_uniform_buffers = world.resource::<PostProcessingUniformBuffers>();
let gpu_image_assets = world.resource::<RenderAssets<GpuImage>>(); let gpu_image_assets = world.resource::<RenderAssets<GpuImage>>();
let default_lut = world.resource::<DefaultChromaticAberrationLut>();
// We need a render pipeline to be prepared. // We need a render pipeline to be prepared.
let Some(pipeline) = pipeline_cache.get_render_pipeline(**pipeline_id) else { let Some(pipeline) = pipeline_cache.get_render_pipeline(**pipeline_id) else {
@ -364,8 +359,12 @@ impl ViewNode for PostProcessingNode {
}; };
// We need the chromatic aberration LUT to be present. // We need the chromatic aberration LUT to be present.
let Some(chromatic_aberration_lut) = gpu_image_assets.get(&chromatic_aberration.color_lut) let Some(chromatic_aberration_lut) = gpu_image_assets.get(
else { chromatic_aberration
.color_lut
.as_ref()
.unwrap_or(&default_lut.0),
) else {
return Ok(()); return Ok(());
}; };


@ -21,7 +21,7 @@ mod core_radio;
mod core_scrollbar; mod core_scrollbar;
mod core_slider; mod core_slider;
use bevy_app::{App, Plugin}; use bevy_app::{PluginGroup, PluginGroupBuilder};
pub use callback::{Callback, Notify}; pub use callback::{Callback, Notify};
pub use core_button::{CoreButton, CoreButtonPlugin}; pub use core_button::{CoreButton, CoreButtonPlugin};
@ -36,18 +36,17 @@ pub use core_slider::{
SliderRange, SliderStep, SliderValue, TrackClick, SliderRange, SliderStep, SliderValue, TrackClick,
}; };
/// A plugin that registers the observers for all of the core widgets. If you don't want to /// A plugin group that registers the observers for all of the core widgets. If you don't want to
/// use all of the widgets, you can import the individual widget plugins instead. /// use all of the widgets, you can import the individual widget plugins instead.
pub struct CoreWidgetsPlugin; pub struct CoreWidgetsPlugins;
impl Plugin for CoreWidgetsPlugin { impl PluginGroup for CoreWidgetsPlugins {
fn build(&self, app: &mut App) { fn build(self) -> PluginGroupBuilder {
app.add_plugins(( PluginGroupBuilder::start::<Self>()
CoreButtonPlugin, .add(CoreButtonPlugin)
CoreCheckboxPlugin, .add(CoreCheckboxPlugin)
CoreRadioGroupPlugin, .add(CoreRadioGroupPlugin)
CoreScrollbarPlugin, .add(CoreScrollbarPlugin)
CoreSliderPlugin, .add(CoreSliderPlugin)
));
} }
} }
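For context, a minimal sketch of how an app opts in after this change (the `DefaultPlugins` setup is illustrative; the plugin-group and plugin names are taken from the diff above):

```rust
use bevy::core_widgets::CoreWidgetsPlugins;
use bevy::prelude::*;

fn main() {
    App::new()
        // Register observers for all core widgets via the new plugin group...
        .add_plugins((DefaultPlugins, CoreWidgetsPlugins))
        // ...or, instead of the group, add only the pieces you need, e.g.
        // `.add_plugins(bevy::core_widgets::CoreButtonPlugin)`.
        .run();
}
```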


@ -563,10 +563,21 @@ mod tests {
use super::*; use super::*;
#[test] #[test]
#[should_panic]
fn dependency_loop() { fn dependency_loop() {
let mut schedule = Schedule::default(); let mut schedule = Schedule::default();
schedule.configure_sets(TestSystems::X.after(TestSystems::X)); schedule.configure_sets(TestSystems::X.after(TestSystems::X));
let mut world = World::new();
let result = schedule.initialize(&mut world);
assert!(matches!(result, Err(ScheduleBuildError::DependencyLoop(_))));
}
#[test]
fn dependency_loop_from_chain() {
let mut schedule = Schedule::default();
schedule.configure_sets((TestSystems::X, TestSystems::X).chain());
let mut world = World::new();
let result = schedule.initialize(&mut world);
assert!(matches!(result, Err(ScheduleBuildError::DependencyLoop(_))));
} }
#[test] #[test]
@ -598,10 +609,12 @@ mod tests {
} }
#[test] #[test]
#[should_panic]
fn hierarchy_loop() { fn hierarchy_loop() {
let mut schedule = Schedule::default(); let mut schedule = Schedule::default();
schedule.configure_sets(TestSystems::X.in_set(TestSystems::X)); schedule.configure_sets(TestSystems::X.in_set(TestSystems::X));
let mut world = World::new();
let result = schedule.initialize(&mut world);
assert!(matches!(result, Err(ScheduleBuildError::HierarchyLoop(_))));
} }
#[test] #[test]


@ -390,14 +390,14 @@ impl Schedule {
let a = a.into_system_set(); let a = a.into_system_set();
let b = b.into_system_set(); let b = b.into_system_set();
let Some(&a_id) = self.graph.system_set_ids.get(&a.intern()) else { let Some(&a_id) = self.graph.system_sets.ids.get(&a.intern()) else {
panic!( panic!(
"Could not mark system as ambiguous, `{:?}` was not found in the schedule. "Could not mark system as ambiguous, `{:?}` was not found in the schedule.
Did you try to call `ambiguous_with` before adding the system to the world?", Did you try to call `ambiguous_with` before adding the system to the world?",
a a
); );
}; };
let Some(&b_id) = self.graph.system_set_ids.get(&b.intern()) else { let Some(&b_id) = self.graph.system_sets.ids.get(&b.intern()) else {
panic!( panic!(
"Could not mark system as ambiguous, `{:?}` was not found in the schedule. "Could not mark system as ambiguous, `{:?}` was not found in the schedule.
Did you try to call `ambiguous_with` before adding the system to the world?", Did you try to call `ambiguous_with` before adding the system to the world?",
@ -760,6 +760,27 @@ enum UninitializedId {
}, },
} }
/// Metadata for system sets in a schedule.
#[derive(Default)]
struct SystemSets {
/// List of system sets in the schedule
sets: SlotMap<SystemSetKey, SystemSetNode>,
/// List of conditions for each system set, in the same order as `system_sets`
conditions: SecondaryMap<SystemSetKey, Vec<ConditionWithAccess>>,
/// Map from system set to node id
ids: HashMap<InternedSystemSet, SystemSetKey>,
}
impl SystemSets {
fn get_or_add_set(&mut self, set: InternedSystemSet) -> SystemSetKey {
*self.ids.entry(set).or_insert_with(|| {
let key = self.sets.insert(SystemSetNode::new(set));
self.conditions.insert(key, Vec::new());
key
})
}
}
/// Metadata for a [`Schedule`]. /// Metadata for a [`Schedule`].
/// ///
/// The order isn't optimized; calling `ScheduleGraph::build_schedule` will return a /// The order isn't optimized; calling `ScheduleGraph::build_schedule` will return a
@ -770,12 +791,8 @@ pub struct ScheduleGraph {
pub systems: SlotMap<SystemKey, SystemNode>, pub systems: SlotMap<SystemKey, SystemNode>,
/// List of conditions for each system, in the same order as `systems` /// List of conditions for each system, in the same order as `systems`
pub system_conditions: SecondaryMap<SystemKey, Vec<ConditionWithAccess>>, pub system_conditions: SecondaryMap<SystemKey, Vec<ConditionWithAccess>>,
/// List of system sets in the schedule /// Data about system sets in the schedule
system_sets: SlotMap<SystemSetKey, SystemSetNode>, system_sets: SystemSets,
/// List of conditions for each system set, in the same order as `system_sets`
system_set_conditions: SecondaryMap<SystemSetKey, Vec<ConditionWithAccess>>,
/// Map from system set to node id
system_set_ids: HashMap<InternedSystemSet, SystemSetKey>,
/// Systems that have not been initialized yet; for system sets, we store the index of the first uninitialized condition /// Systems that have not been initialized yet; for system sets, we store the index of the first uninitialized condition
/// (all the conditions after that index still need to be initialized) /// (all the conditions after that index still need to be initialized)
uninit: Vec<UninitializedId>, uninit: Vec<UninitializedId>,
@ -800,9 +817,7 @@ impl ScheduleGraph {
Self { Self {
systems: SlotMap::with_key(), systems: SlotMap::with_key(),
system_conditions: SecondaryMap::new(), system_conditions: SecondaryMap::new(),
system_sets: SlotMap::with_key(), system_sets: SystemSets::default(),
system_set_conditions: SecondaryMap::new(),
system_set_ids: HashMap::default(),
uninit: Vec::new(), uninit: Vec::new(),
hierarchy: Dag::new(), hierarchy: Dag::new(),
dependency: Dag::new(), dependency: Dag::new(),
@ -826,7 +841,7 @@ impl ScheduleGraph {
/// Returns `true` if the given system set is part of the graph. Otherwise, returns `false`. /// Returns `true` if the given system set is part of the graph. Otherwise, returns `false`.
pub fn contains_set(&self, set: impl SystemSet) -> bool { pub fn contains_set(&self, set: impl SystemSet) -> bool {
self.system_set_ids.contains_key(&set.intern()) self.system_sets.ids.contains_key(&set.intern())
} }
/// Returns the system at the given [`NodeId`]. /// Returns the system at the given [`NodeId`].
@ -840,7 +855,7 @@ impl ScheduleGraph {
/// Returns the set at the given [`NodeId`], if it exists. /// Returns the set at the given [`NodeId`], if it exists.
pub fn get_set_at(&self, key: SystemSetKey) -> Option<&dyn SystemSet> { pub fn get_set_at(&self, key: SystemSetKey) -> Option<&dyn SystemSet> {
self.system_sets.get(key).map(|set| &*set.inner) self.system_sets.sets.get(key).map(|set| &*set.inner)
} }
/// Returns the set at the given [`NodeId`]. /// Returns the set at the given [`NodeId`].
@ -854,7 +869,7 @@ impl ScheduleGraph {
/// Returns the conditions for the set at the given [`SystemSetKey`], if it exists. /// Returns the conditions for the set at the given [`SystemSetKey`], if it exists.
pub fn get_set_conditions_at(&self, key: SystemSetKey) -> Option<&[ConditionWithAccess]> { pub fn get_set_conditions_at(&self, key: SystemSetKey) -> Option<&[ConditionWithAccess]> {
self.system_set_conditions.get(key).map(Vec::as_slice) self.system_sets.conditions.get(key).map(Vec::as_slice)
} }
/// Returns the conditions for the set at the given [`SystemSetKey`]. /// Returns the conditions for the set at the given [`SystemSetKey`].
@ -882,9 +897,9 @@ impl ScheduleGraph {
pub fn system_sets( pub fn system_sets(
&self, &self,
) -> impl Iterator<Item = (SystemSetKey, &dyn SystemSet, &[ConditionWithAccess])> { ) -> impl Iterator<Item = (SystemSetKey, &dyn SystemSet, &[ConditionWithAccess])> {
self.system_sets.iter().filter_map(|(key, set_node)| { self.system_sets.sets.iter().filter_map(|(key, set_node)| {
let set = &*set_node.inner; let set = &*set_node.inner;
let conditions = self.system_set_conditions.get(key)?.as_slice(); let conditions = self.system_sets.conditions.get(key)?.as_slice();
Some((key, set, conditions)) Some((key, set, conditions))
}) })
} }
@ -946,7 +961,7 @@ impl ScheduleGraph {
} }
let mut set_config = InternedSystemSet::into_config(set.intern()); let mut set_config = InternedSystemSet::into_config(set.intern());
set_config.conditions.extend(collective_conditions); set_config.conditions.extend(collective_conditions);
self.configure_set_inner(set_config).unwrap(); self.configure_set_inner(set_config);
} }
} }
} }
@ -1047,10 +1062,7 @@ impl ScheduleGraph {
} }
/// Add a [`ScheduleConfig`] to the graph, including its dependencies and conditions. /// Add a [`ScheduleConfig`] to the graph, including its dependencies and conditions.
fn add_system_inner( fn add_system_inner(&mut self, config: ScheduleConfig<ScheduleSystem>) -> SystemKey {
&mut self,
config: ScheduleConfig<ScheduleSystem>,
) -> Result<NodeId, ScheduleBuildError> {
let key = self.systems.insert(SystemNode::new(config.node)); let key = self.systems.insert(SystemNode::new(config.node));
self.system_conditions.insert( self.system_conditions.insert(
key, key,
@ -1064,9 +1076,9 @@ impl ScheduleGraph {
self.uninit.push(UninitializedId::System(key)); self.uninit.push(UninitializedId::System(key));
// graph updates are immediate // graph updates are immediate
self.update_graphs(NodeId::System(key), config.metadata)?; self.update_graphs(NodeId::System(key), config.metadata);
Ok(NodeId::System(key)) key
} }
#[track_caller] #[track_caller]
@ -1075,39 +1087,26 @@ impl ScheduleGraph {
} }
/// Add a single `ScheduleConfig` to the graph, including its dependencies and conditions. /// Add a single `ScheduleConfig` to the graph, including its dependencies and conditions.
fn configure_set_inner( fn configure_set_inner(&mut self, set: ScheduleConfig<InternedSystemSet>) -> SystemSetKey {
&mut self,
set: ScheduleConfig<InternedSystemSet>,
) -> Result<NodeId, ScheduleBuildError> {
let ScheduleConfig { let ScheduleConfig {
node: set, node: set,
metadata, metadata,
conditions, conditions,
} = set; } = set;
let key = match self.system_set_ids.get(&set) { let key = self.system_sets.get_or_add_set(set);
Some(&id) => id,
None => self.add_set(set),
};
// graph updates are immediate // graph updates are immediate
self.update_graphs(NodeId::Set(key), metadata)?; self.update_graphs(NodeId::Set(key), metadata);
// system init has to be deferred (need `&mut World`) // system init has to be deferred (need `&mut World`)
let system_set_conditions = self.system_set_conditions.entry(key).unwrap().or_default(); let system_set_conditions = self.system_sets.conditions.entry(key).unwrap().or_default();
self.uninit.push(UninitializedId::Set { self.uninit.push(UninitializedId::Set {
key, key,
first_uninit_condition: system_set_conditions.len(), first_uninit_condition: system_set_conditions.len(),
}); });
system_set_conditions.extend(conditions.into_iter().map(ConditionWithAccess::new)); system_set_conditions.extend(conditions.into_iter().map(ConditionWithAccess::new));
Ok(NodeId::Set(key))
}
fn add_set(&mut self, set: InternedSystemSet) -> SystemSetKey {
let key = self.system_sets.insert(SystemSetNode::new(set));
self.system_set_conditions.insert(key, Vec::new());
self.system_set_ids.insert(set, key);
key key
} }
@ -1117,78 +1116,8 @@ impl ScheduleGraph {
AnonymousSet::new(id) AnonymousSet::new(id)
} }
/// Check that no set is included in itself.
/// Add all the sets from the [`GraphInfo`]'s hierarchy to the graph.
fn check_hierarchy_sets(
&mut self,
id: NodeId,
graph_info: &GraphInfo,
) -> Result<(), ScheduleBuildError> {
for &set in &graph_info.hierarchy {
if let Some(&set_id) = self.system_set_ids.get(&set) {
if let NodeId::Set(key) = id
&& set_id == key
{
{
return Err(ScheduleBuildError::HierarchyLoop(
self.get_node_name(&NodeId::Set(key)),
));
}
}
} else {
// If the set is not in the graph, we add it
self.add_set(set);
}
}
Ok(())
}
/// Checks that no system set is dependent on itself.
/// Add all the sets from the [`GraphInfo`]'s dependencies to the graph.
fn check_edges(
&mut self,
id: NodeId,
graph_info: &GraphInfo,
) -> Result<(), ScheduleBuildError> {
for Dependency { set, .. } in &graph_info.dependencies {
if let Some(&set_id) = self.system_set_ids.get(set) {
if let NodeId::Set(key) = id
&& set_id == key
{
return Err(ScheduleBuildError::DependencyLoop(
self.get_node_name(&NodeId::Set(key)),
));
}
} else {
// If the set is not in the graph, we add it
self.add_set(*set);
}
}
Ok(())
}
/// Add all the sets from the [`GraphInfo`]'s ambiguity to the graph.
fn add_ambiguities(&mut self, graph_info: &GraphInfo) {
if let Ambiguity::IgnoreWithSet(ambiguous_with) = &graph_info.ambiguous_with {
for set in ambiguous_with {
if !self.system_set_ids.contains_key(set) {
self.add_set(*set);
}
}
}
}
/// Update the internal graphs (hierarchy, dependency, ambiguity) by adding a single [`GraphInfo`] /// Update the internal graphs (hierarchy, dependency, ambiguity) by adding a single [`GraphInfo`]
fn update_graphs( fn update_graphs(&mut self, id: NodeId, graph_info: GraphInfo) {
&mut self,
id: NodeId,
graph_info: GraphInfo,
) -> Result<(), ScheduleBuildError> {
self.check_hierarchy_sets(id, &graph_info)?;
self.check_edges(id, &graph_info)?;
self.add_ambiguities(&graph_info);
self.changed = true; self.changed = true;
let GraphInfo { let GraphInfo {
@ -1201,16 +1130,22 @@ impl ScheduleGraph {
self.hierarchy.graph.add_node(id); self.hierarchy.graph.add_node(id);
self.dependency.graph.add_node(id); self.dependency.graph.add_node(id);
for key in sets.into_iter().map(|set| self.system_set_ids[&set]) { for key in sets
.into_iter()
.map(|set| self.system_sets.get_or_add_set(set))
{
self.hierarchy.graph.add_edge(NodeId::Set(key), id); self.hierarchy.graph.add_edge(NodeId::Set(key), id);
// ensure set also appears in dependency graph // ensure set also appears in dependency graph
self.dependency.graph.add_node(NodeId::Set(key)); self.dependency.graph.add_node(NodeId::Set(key));
} }
for (kind, key, options) in dependencies for (kind, key, options) in
.into_iter() dependencies
.map(|Dependency { kind, set, options }| (kind, self.system_set_ids[&set], options)) .into_iter()
.map(|Dependency { kind, set, options }| {
(kind, self.system_sets.get_or_add_set(set), options)
})
{ {
let (lhs, rhs) = match kind { let (lhs, rhs) = match kind {
DependencyKind::Before => (id, NodeId::Set(key)), DependencyKind::Before => (id, NodeId::Set(key)),
@ -1230,7 +1165,7 @@ impl ScheduleGraph {
Ambiguity::IgnoreWithSet(ambiguous_with) => { Ambiguity::IgnoreWithSet(ambiguous_with) => {
for key in ambiguous_with for key in ambiguous_with
.into_iter() .into_iter()
.map(|set| self.system_set_ids[&set]) .map(|set| self.system_sets.get_or_add_set(set))
{ {
self.ambiguous_with.add_edge(id, NodeId::Set(key)); self.ambiguous_with.add_edge(id, NodeId::Set(key));
} }
@ -1239,8 +1174,6 @@ impl ScheduleGraph {
self.ambiguous_with_all.insert(id); self.ambiguous_with_all.insert(id);
} }
} }
Ok(())
} }
/// Initializes any newly-added systems and conditions by calling [`System::initialize`](crate::system::System) /// Initializes any newly-added systems and conditions by calling [`System::initialize`](crate::system::System)
@ -1258,7 +1191,7 @@ impl ScheduleGraph {
key, key,
first_uninit_condition, first_uninit_condition,
} => { } => {
for condition in self.system_set_conditions[key] for condition in self.system_sets.conditions[key]
.iter_mut() .iter_mut()
.skip(first_uninit_condition) .skip(first_uninit_condition)
{ {
@ -1358,9 +1291,9 @@ impl ScheduleGraph {
HashMap<SystemSetKey, HashSet<SystemKey>>, HashMap<SystemSetKey, HashSet<SystemKey>>,
) { ) {
let mut set_systems: HashMap<SystemSetKey, Vec<SystemKey>> = let mut set_systems: HashMap<SystemSetKey, Vec<SystemKey>> =
HashMap::with_capacity_and_hasher(self.system_sets.len(), Default::default()); HashMap::with_capacity_and_hasher(self.system_sets.sets.len(), Default::default());
let mut set_system_sets: HashMap<SystemSetKey, HashSet<SystemKey>> = let mut set_system_sets: HashMap<SystemSetKey, HashSet<SystemKey>> =
HashMap::with_capacity_and_hasher(self.system_sets.len(), Default::default()); HashMap::with_capacity_and_hasher(self.system_sets.sets.len(), Default::default());
for &id in hierarchy_topsort.iter().rev() { for &id in hierarchy_topsort.iter().rev() {
let NodeId::Set(set_key) = id else { let NodeId::Set(set_key) = id else {
continue; continue;
@ -1559,7 +1492,7 @@ impl ScheduleGraph {
// ignore system sets that have no conditions // ignore system sets that have no conditions
// ignore system type sets (already covered, they don't have conditions) // ignore system type sets (already covered, they don't have conditions)
let key = id.as_set()?; let key = id.as_set()?;
(!self.system_set_conditions[key].is_empty()).then_some((i, key)) (!self.system_sets.conditions[key].is_empty()).then_some((i, key))
}) })
.unzip(); .unzip();
@ -1659,7 +1592,7 @@ impl ScheduleGraph {
.drain(..) .drain(..)
.zip(schedule.set_conditions.drain(..)) .zip(schedule.set_conditions.drain(..))
{ {
self.system_set_conditions[key] = conditions; self.system_sets.conditions[key] = conditions;
} }
*schedule = self.build_schedule(world, schedule_label, ignored_ambiguities)?; *schedule = self.build_schedule(world, schedule_label, ignored_ambiguities)?;
@ -1673,7 +1606,7 @@ impl ScheduleGraph {
} }
for &key in &schedule.set_ids { for &key in &schedule.set_ids {
let conditions = core::mem::take(&mut self.system_set_conditions[key]); let conditions = core::mem::take(&mut self.system_sets.conditions[key]);
schedule.set_conditions.push(conditions); schedule.set_conditions.push(conditions);
} }
@ -1700,13 +1633,13 @@ trait ProcessScheduleConfig: Schedulable + Sized {
impl ProcessScheduleConfig for ScheduleSystem { impl ProcessScheduleConfig for ScheduleSystem {
fn process_config(schedule_graph: &mut ScheduleGraph, config: ScheduleConfig<Self>) -> NodeId { fn process_config(schedule_graph: &mut ScheduleGraph, config: ScheduleConfig<Self>) -> NodeId {
schedule_graph.add_system_inner(config).unwrap() NodeId::System(schedule_graph.add_system_inner(config))
} }
} }
impl ProcessScheduleConfig for InternedSystemSet { impl ProcessScheduleConfig for InternedSystemSet {
fn process_config(schedule_graph: &mut ScheduleGraph, config: ScheduleConfig<Self>) -> NodeId { fn process_config(schedule_graph: &mut ScheduleGraph, config: ScheduleConfig<Self>) -> NodeId {
schedule_graph.configure_set_inner(config).unwrap() NodeId::Set(schedule_graph.configure_set_inner(config))
} }
} }
@ -1748,7 +1681,7 @@ impl ScheduleGraph {
} }
} }
NodeId::Set(key) => { NodeId::Set(key) => {
let set = &self.system_sets[key]; let set = &self.system_sets.sets[key];
if set.is_anonymous() { if set.is_anonymous() {
self.anonymous_set_name(id) self.anonymous_set_name(id)
} else { } else {
@ -1833,6 +1766,17 @@ impl ScheduleGraph {
graph: &DiGraph, graph: &DiGraph,
report: ReportCycles, report: ReportCycles,
) -> Result<Vec<NodeId>, ScheduleBuildError> { ) -> Result<Vec<NodeId>, ScheduleBuildError> {
// Check explicitly for self-edges.
// `iter_sccs` won't report them as cycles because they still form components of one node.
if let Some((node, _)) = graph.all_edges().find(|(left, right)| left == right) {
let name = self.get_node_name(&node);
let error = match report {
ReportCycles::Hierarchy => ScheduleBuildError::HierarchyLoop(name),
ReportCycles::Dependency => ScheduleBuildError::DependencyLoop(name),
};
return Err(error);
}
// Tarjan's SCC algorithm returns elements in *reverse* topological order. // Tarjan's SCC algorithm returns elements in *reverse* topological order.
let mut top_sorted_nodes = Vec::with_capacity(graph.node_count()); let mut top_sorted_nodes = Vec::with_capacity(graph.node_count());
let mut sccs_with_cycles = Vec::new(); let mut sccs_with_cycles = Vec::new();
@ -1963,7 +1907,7 @@ impl ScheduleGraph {
set_systems: &HashMap<SystemSetKey, Vec<SystemKey>>, set_systems: &HashMap<SystemSetKey, Vec<SystemKey>>,
) -> Result<(), ScheduleBuildError> { ) -> Result<(), ScheduleBuildError> {
for (&key, systems) in set_systems { for (&key, systems) in set_systems {
let set = &self.system_sets[key]; let set = &self.system_sets.sets[key];
if set.is_system_type() { if set.is_system_type() {
let instances = systems.len(); let instances = systems.len();
let ambiguous_with = self.ambiguous_with.edges(NodeId::Set(key)); let ambiguous_with = self.ambiguous_with.edges(NodeId::Set(key));
@ -2070,7 +2014,7 @@ impl ScheduleGraph {
fn names_of_sets_containing_node(&self, id: &NodeId) -> Vec<String> { fn names_of_sets_containing_node(&self, id: &NodeId) -> Vec<String> {
let mut sets = <HashSet<_>>::default(); let mut sets = <HashSet<_>>::default();
self.traverse_sets_containing_node(*id, &mut |key| { self.traverse_sets_containing_node(*id, &mut |key| {
!self.system_sets[key].is_system_type() && sets.insert(key) !self.system_sets.sets[key].is_system_type() && sets.insert(key)
}); });
let mut sets: Vec<_> = sets let mut sets: Vec<_> = sets
.into_iter() .into_iter()


@ -2,17 +2,13 @@
use { use {
super::{Measured2d, Triangle2d}, super::{Measured2d, Triangle2d},
alloc::{collections::BTreeMap, vec::Vec}, alloc::{collections::BTreeMap, vec::Vec},
core::cmp::Ordering,
}; };
use core::cmp::Ordering;
use crate::Vec2; use crate::Vec2;
#[cfg_attr(
not(feature = "alloc"),
expect(dead_code, reason = "this type is only used with the alloc feature")
)]
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
#[cfg(feature = "alloc")]
enum Endpoint { enum Endpoint {
Left, Left,
Right, Right,
@ -24,22 +20,16 @@ enum Endpoint {
/// If `e1.position().x == e2.position().x` the events are ordered from bottom to top. /// If `e1.position().x == e2.position().x` the events are ordered from bottom to top.
/// ///
/// This is the order expected by the [`SweepLine`]. /// This is the order expected by the [`SweepLine`].
#[cfg(feature = "alloc")]
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
#[cfg_attr(
not(feature = "alloc"),
allow(dead_code, reason = "this type is only used with the alloc feature")
)]
struct SweepLineEvent { struct SweepLineEvent {
segment: Segment, segment: Segment,
/// Type of the vertex (left or right) /// Type of the vertex (left or right)
endpoint: Endpoint, endpoint: Endpoint,
} }
#[cfg(feature = "alloc")]
impl SweepLineEvent { impl SweepLineEvent {
#[cfg_attr(
not(feature = "alloc"),
allow(dead_code, reason = "this type is only used with the alloc feature")
)]
fn position(&self) -> Vec2 { fn position(&self) -> Vec2 {
match self.endpoint { match self.endpoint {
Endpoint::Left => self.segment.left, Endpoint::Left => self.segment.left,
@ -48,20 +38,24 @@ impl SweepLineEvent {
} }
} }
#[cfg(feature = "alloc")]
impl PartialEq for SweepLineEvent { impl PartialEq for SweepLineEvent {
fn eq(&self, other: &Self) -> bool { fn eq(&self, other: &Self) -> bool {
self.position() == other.position() self.position() == other.position()
} }
} }
#[cfg(feature = "alloc")]
impl Eq for SweepLineEvent {} impl Eq for SweepLineEvent {}
#[cfg(feature = "alloc")]
impl PartialOrd for SweepLineEvent { impl PartialOrd for SweepLineEvent {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> { fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other)) Some(self.cmp(other))
} }
} }
#[cfg(feature = "alloc")]
impl Ord for SweepLineEvent { impl Ord for SweepLineEvent {
fn cmp(&self, other: &Self) -> Ordering { fn cmp(&self, other: &Self) -> Ordering {
xy_order(self.position(), other.position()) xy_order(self.position(), other.position())
@ -69,10 +63,7 @@ impl Ord for SweepLineEvent {
} }
/// Orders 2D points according to the order expected by the sweep line and event queue from -X to +X and then -Y to Y. /// Orders 2D points according to the order expected by the sweep line and event queue from -X to +X and then -Y to Y.
#[cfg_attr( #[cfg(feature = "alloc")]
not(feature = "alloc"),
allow(dead_code, reason = "this type is only used with the alloc feature")
)]
fn xy_order(a: Vec2, b: Vec2) -> Ordering { fn xy_order(a: Vec2, b: Vec2) -> Ordering {
a.x.total_cmp(&b.x).then_with(|| a.y.total_cmp(&b.y)) a.x.total_cmp(&b.x).then_with(|| a.y.total_cmp(&b.y))
} }
@ -129,26 +120,31 @@ impl EventQueue {
/// Segments are ordered from bottom to top based on their left vertices if possible. /// Segments are ordered from bottom to top based on their left vertices if possible.
/// If their y values are identical, the segments are ordered based on the y values of their right vertices. /// If their y values are identical, the segments are ordered based on the y values of their right vertices.
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
#[cfg(feature = "alloc")]
struct Segment { struct Segment {
edge_index: usize, edge_index: usize,
left: Vec2, left: Vec2,
right: Vec2, right: Vec2,
} }
#[cfg(feature = "alloc")]
impl PartialEq for Segment { impl PartialEq for Segment {
fn eq(&self, other: &Self) -> bool { fn eq(&self, other: &Self) -> bool {
self.edge_index == other.edge_index self.edge_index == other.edge_index
} }
} }
#[cfg(feature = "alloc")]
impl Eq for Segment {} impl Eq for Segment {}
#[cfg(feature = "alloc")]
impl PartialOrd for Segment { impl PartialOrd for Segment {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> { fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other)) Some(self.cmp(other))
} }
} }
#[cfg(feature = "alloc")]
impl Ord for Segment { impl Ord for Segment {
fn cmp(&self, other: &Self) -> Ordering { fn cmp(&self, other: &Self) -> Ordering {
self.left self.left
@ -159,10 +155,7 @@ impl Ord for Segment {
} }
/// Holds information about which segment is above and which is below a given [`Segment`] /// Holds information about which segment is above and which is below a given [`Segment`]
#[cfg_attr( #[cfg(feature = "alloc")]
not(feature = "alloc"),
expect(dead_code, reason = "this type is only used with the alloc feature")
)]
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
struct SegmentOrder { struct SegmentOrder {
above: Option<usize>, above: Option<usize>,
@ -173,8 +166,8 @@ struct SegmentOrder {
/// ///
/// It can be thought of as a vertical line sweeping from -X to +X across the polygon that keeps track of the order of the segments /// It can be thought of as a vertical line sweeping from -X to +X across the polygon that keeps track of the order of the segments
/// the sweep line is intersecting at any given moment. /// the sweep line is intersecting at any given moment.
#[cfg(feature = "alloc")]
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
#[cfg(feature = "alloc")]
struct SweepLine<'a> { struct SweepLine<'a> {
vertices: &'a [Vec2], vertices: &'a [Vec2],
tree: BTreeMap<Segment, SegmentOrder>, tree: BTreeMap<Segment, SegmentOrder>,


@ -1,7 +1,7 @@
use super::resource_manager::ResourceManager; use super::resource_manager::ResourceManager;
use bevy_asset::{load_embedded_asset, Handle}; use bevy_asset::{load_embedded_asset, Handle};
use bevy_core_pipeline::{ use bevy_core_pipeline::{
core_3d::CORE_3D_DEPTH_FORMAT, experimental::mip_generation::DOWNSAMPLE_DEPTH_SHADER_HANDLE, core_3d::CORE_3D_DEPTH_FORMAT, experimental::mip_generation::DownsampleDepthShader,
FullscreenShader, FullscreenShader,
}; };
use bevy_ecs::{ use bevy_ecs::{
@ -84,6 +84,7 @@ impl FromWorld for MeshletPipelines {
.remap_1d_to_2d_dispatch_bind_group_layout .remap_1d_to_2d_dispatch_bind_group_layout
.clone(); .clone();
let downsample_depth_shader = (*world.resource::<DownsampleDepthShader>()).clone();
let vertex_state = world.resource::<FullscreenShader>().to_vertex_state(); let vertex_state = world.resource::<FullscreenShader>().to_vertex_state();
let fill_counts_layout = resource_manager.fill_counts_bind_group_layout.clone(); let fill_counts_layout = resource_manager.fill_counts_bind_group_layout.clone();
@ -230,7 +231,7 @@ impl FromWorld for MeshletPipelines {
stages: ShaderStages::COMPUTE, stages: ShaderStages::COMPUTE,
range: 0..4, range: 0..4,
}], }],
shader: DOWNSAMPLE_DEPTH_SHADER_HANDLE, shader: downsample_depth_shader.clone(),
shader_defs: vec![ shader_defs: vec![
"MESHLET_VISIBILITY_BUFFER_RASTER_PASS_OUTPUT".into(), "MESHLET_VISIBILITY_BUFFER_RASTER_PASS_OUTPUT".into(),
"MESHLET".into(), "MESHLET".into(),
@ -248,7 +249,7 @@ impl FromWorld for MeshletPipelines {
stages: ShaderStages::COMPUTE, stages: ShaderStages::COMPUTE,
range: 0..4, range: 0..4,
}], }],
shader: DOWNSAMPLE_DEPTH_SHADER_HANDLE, shader: downsample_depth_shader.clone(),
shader_defs: vec![ shader_defs: vec![
"MESHLET_VISIBILITY_BUFFER_RASTER_PASS_OUTPUT".into(), "MESHLET_VISIBILITY_BUFFER_RASTER_PASS_OUTPUT".into(),
"MESHLET".into(), "MESHLET".into(),
@ -266,7 +267,7 @@ impl FromWorld for MeshletPipelines {
stages: ShaderStages::COMPUTE, stages: ShaderStages::COMPUTE,
range: 0..4, range: 0..4,
}], }],
shader: DOWNSAMPLE_DEPTH_SHADER_HANDLE, shader: downsample_depth_shader.clone(),
shader_defs: vec!["MESHLET".into()], shader_defs: vec!["MESHLET".into()],
entry_point: Some("downsample_depth_first".into()), entry_point: Some("downsample_depth_first".into()),
..default() ..default()
@ -281,7 +282,7 @@ impl FromWorld for MeshletPipelines {
stages: ShaderStages::COMPUTE, stages: ShaderStages::COMPUTE,
range: 0..4, range: 0..4,
}], }],
shader: DOWNSAMPLE_DEPTH_SHADER_HANDLE, shader: downsample_depth_shader,
shader_defs: vec!["MESHLET".into()], shader_defs: vec!["MESHLET".into()],
entry_point: Some("downsample_depth_second".into()), entry_point: Some("downsample_depth_second".into()),
zero_initialize_workgroup_memory: false, zero_initialize_workgroup_memory: false,


@ -30,12 +30,12 @@
//! [Henyey-Greenstein phase function]: https://www.pbr-book.org/4ed/Volume_Scattering/Phase_Functions#TheHenyeyndashGreensteinPhaseFunction //! [Henyey-Greenstein phase function]: https://www.pbr-book.org/4ed/Volume_Scattering/Phase_Functions#TheHenyeyndashGreensteinPhaseFunction
use bevy_app::{App, Plugin}; use bevy_app::{App, Plugin};
use bevy_asset::{embedded_asset, Assets}; use bevy_asset::{embedded_asset, Assets, Handle};
use bevy_core_pipeline::core_3d::{ use bevy_core_pipeline::core_3d::{
graph::{Core3d, Node3d}, graph::{Core3d, Node3d},
prepare_core_3d_depth_textures, prepare_core_3d_depth_textures,
}; };
use bevy_ecs::schedule::IntoScheduleConfigs as _; use bevy_ecs::{resource::Resource, schedule::IntoScheduleConfigs as _};
use bevy_light::FogVolume; use bevy_light::FogVolume;
use bevy_math::{ use bevy_math::{
primitives::{Cuboid, Plane3d}, primitives::{Cuboid, Plane3d},
@ -48,9 +48,7 @@ use bevy_render::{
sync_component::SyncComponentPlugin, sync_component::SyncComponentPlugin,
ExtractSchedule, Render, RenderApp, RenderSystems, ExtractSchedule, Render, RenderApp, RenderSystems,
}; };
use render::{ use render::{VolumetricFogNode, VolumetricFogPipeline, VolumetricFogUniformBuffer};
VolumetricFogNode, VolumetricFogPipeline, VolumetricFogUniformBuffer, CUBE_MESH, PLANE_MESH,
};
use crate::graph::NodePbr; use crate::graph::NodePbr;
@ -59,13 +57,19 @@ pub mod render;
/// A plugin that implements volumetric fog. /// A plugin that implements volumetric fog.
pub struct VolumetricFogPlugin; pub struct VolumetricFogPlugin;
#[derive(Resource)]
pub struct FogAssets {
plane_mesh: Handle<Mesh>,
cube_mesh: Handle<Mesh>,
}
impl Plugin for VolumetricFogPlugin { impl Plugin for VolumetricFogPlugin {
fn build(&self, app: &mut App) { fn build(&self, app: &mut App) {
embedded_asset!(app, "volumetric_fog.wgsl"); embedded_asset!(app, "volumetric_fog.wgsl");
let mut meshes = app.world_mut().resource_mut::<Assets<Mesh>>(); let mut meshes = app.world_mut().resource_mut::<Assets<Mesh>>();
meshes.insert(&PLANE_MESH, Plane3d::new(Vec3::Z, Vec2::ONE).mesh().into()); let plane_mesh = meshes.add(Plane3d::new(Vec3::Z, Vec2::ONE).mesh());
meshes.insert(&CUBE_MESH, Cuboid::new(1.0, 1.0, 1.0).mesh().into()); let cube_mesh = meshes.add(Cuboid::new(1.0, 1.0, 1.0).mesh());
app.add_plugins(SyncComponentPlugin::<FogVolume>::default()); app.add_plugins(SyncComponentPlugin::<FogVolume>::default());
@ -74,6 +78,10 @@ impl Plugin for VolumetricFogPlugin {
}; };
render_app render_app
.insert_resource(FogAssets {
plane_mesh,
cube_mesh,
})
.init_resource::<SpecializedRenderPipelines<VolumetricFogPipeline>>() .init_resource::<SpecializedRenderPipelines<VolumetricFogPipeline>>()
.init_resource::<VolumetricFogUniformBuffer>() .init_resource::<VolumetricFogUniformBuffer>()
.add_systems(ExtractSchedule, render::extract_volumetric_fog) .add_systems(ExtractSchedule, render::extract_volumetric_fog)


@ -2,7 +2,7 @@
use core::array; use core::array;
use bevy_asset::{load_embedded_asset, uuid_handle, AssetId, Handle}; use bevy_asset::{load_embedded_asset, AssetId, Handle};
use bevy_color::ColorToComponents as _; use bevy_color::ColorToComponents as _;
use bevy_core_pipeline::{ use bevy_core_pipeline::{
core_3d::Camera3d, core_3d::Camera3d,
@ -54,6 +54,8 @@ use crate::{
VolumetricLight, VolumetricLight,
}; };
use super::FogAssets;
bitflags! { bitflags! {
/// Flags that describe the bind group layout used to render volumetric fog. /// Flags that describe the bind group layout used to render volumetric fog.
#[derive(Clone, Copy, PartialEq)] #[derive(Clone, Copy, PartialEq)]
@ -77,20 +79,6 @@ bitflags! {
} }
} }
/// The plane mesh, which is used to render a fog volume that the camera is
/// inside.
///
/// This mesh is simply stretched to the size of the framebuffer, as when the
/// camera is inside a fog volume it's essentially a full-screen effect.
pub const PLANE_MESH: Handle<Mesh> = uuid_handle!("92523617-c708-4fd0-b42f-ceb4300c930b");
/// The cube mesh, which is used to render a fog volume that the camera is
/// outside.
///
/// Note that only the front faces of this cuboid will be rasterized in
/// hardware. The back faces will be calculated in the shader via raytracing.
pub const CUBE_MESH: Handle<Mesh> = uuid_handle!("4a1dd661-2d91-4377-a17a-a914e21e277e");
/// The total number of bind group layouts. /// The total number of bind group layouts.
/// ///
/// This is the total number of combinations of all /// This is the total number of combinations of all
@ -370,6 +358,7 @@ impl ViewNode for VolumetricFogNode {
return Ok(()); return Ok(());
}; };
let fog_assets = world.resource::<FogAssets>();
let render_meshes = world.resource::<RenderAssets<RenderMesh>>(); let render_meshes = world.resource::<RenderAssets<RenderMesh>>();
for view_fog_volume in view_fog_volumes.iter() { for view_fog_volume in view_fog_volumes.iter() {
@ -377,9 +366,9 @@ impl ViewNode for VolumetricFogNode {
// otherwise, pick the plane mesh. In the latter case we'll be // otherwise, pick the plane mesh. In the latter case we'll be
// effectively rendering a full-screen quad. // effectively rendering a full-screen quad.
let mesh_handle = if view_fog_volume.exterior { let mesh_handle = if view_fog_volume.exterior {
CUBE_MESH.clone() fog_assets.cube_mesh.clone()
} else { } else {
PLANE_MESH.clone() fog_assets.plane_mesh.clone()
}; };
let Some(vertex_buffer_slice) = mesh_allocator.mesh_vertex_slice(&mesh_handle.id()) let Some(vertex_buffer_slice) = mesh_allocator.mesh_vertex_slice(&mesh_handle.id())
@ -615,6 +604,7 @@ pub fn prepare_volumetric_fog_pipelines(
pipeline_cache: Res<PipelineCache>, pipeline_cache: Res<PipelineCache>,
mut pipelines: ResMut<SpecializedRenderPipelines<VolumetricFogPipeline>>, mut pipelines: ResMut<SpecializedRenderPipelines<VolumetricFogPipeline>>,
volumetric_lighting_pipeline: Res<VolumetricFogPipeline>, volumetric_lighting_pipeline: Res<VolumetricFogPipeline>,
fog_assets: Res<FogAssets>,
view_targets: Query< view_targets: Query<
( (
Entity, Entity,
@ -629,7 +619,7 @@ pub fn prepare_volumetric_fog_pipelines(
>, >,
meshes: Res<RenderAssets<RenderMesh>>, meshes: Res<RenderAssets<RenderMesh>>,
) { ) {
let Some(plane_mesh) = meshes.get(&PLANE_MESH) else { let Some(plane_mesh) = meshes.get(&fog_assets.plane_mesh) else {
// There's an off chance that the mesh won't be prepared yet if `RenderAssetBytesPerFrame` limiting is in use. // There's an off chance that the mesh won't be prepared yet if `RenderAssetBytesPerFrame` limiting is in use.
return; return;
}; };


@ -30,6 +30,7 @@ bevy_platform = { path = "../bevy_platform", version = "0.17.0-dev", default-fea
"serialize", "serialize",
] } ] }
bevy_asset = { path = "../bevy_asset", version = "0.17.0-dev", optional = true } bevy_asset = { path = "../bevy_asset", version = "0.17.0-dev", optional = true }
bevy_log = { path = "../bevy_log", version = "0.17.0-dev" }
# other # other
anyhow = "1" anyhow = "1"
@ -38,7 +39,6 @@ serde = { version = "1", features = ["derive"] }
serde_json = "1.0.140" serde_json = "1.0.140"
http-body-util = "0.1" http-body-util = "0.1"
async-channel = "2" async-channel = "2"
bevy_log = { version = "0.17.0-dev", path = "../bevy_log" }
# dependencies that will not compile on wasm # dependencies that will not compile on wasm
[target.'cfg(not(target_family = "wasm"))'.dependencies] [target.'cfg(not(target_family = "wasm"))'.dependencies]


@ -14,7 +14,7 @@ proc-macro = true
[dependencies] [dependencies]
bevy_macro_utils = { path = "../../bevy_macro_utils", version = "0.17.0-dev" } bevy_macro_utils = { path = "../../bevy_macro_utils", version = "0.17.0-dev" }
syn = "2.0" syn = { version = "2.0", features = ["full"] }
proc-macro2 = "1.0" proc-macro2 = "1.0"
quote = "1.0" quote = "1.0"


@ -6,7 +6,6 @@
//! [`Material2d`]: bevy::sprite::Material2d //! [`Material2d`]: bevy::sprite::Material2d
use bevy::{ use bevy::{
asset::uuid_handle,
color::palettes::basic::YELLOW, color::palettes::basic::YELLOW,
core_pipeline::core_2d::{Transparent2d, CORE_2D_DEPTH_FORMAT}, core_pipeline::core_2d::{Transparent2d, CORE_2D_DEPTH_FORMAT},
math::{ops, FloatOrd}, math::{ops, FloatOrd},
@ -129,12 +128,16 @@ pub struct ColoredMesh2d;
pub struct ColoredMesh2dPipeline { pub struct ColoredMesh2dPipeline {
/// This pipeline wraps the standard [`Mesh2dPipeline`] /// This pipeline wraps the standard [`Mesh2dPipeline`]
mesh2d_pipeline: Mesh2dPipeline, mesh2d_pipeline: Mesh2dPipeline,
/// The shader asset handle.
shader: Handle<Shader>,
} }
impl FromWorld for ColoredMesh2dPipeline { impl FromWorld for ColoredMesh2dPipeline {
fn from_world(world: &mut World) -> Self { fn from_world(world: &mut World) -> Self {
Self { Self {
mesh2d_pipeline: Mesh2dPipeline::from_world(world), mesh2d_pipeline: Mesh2dPipeline::from_world(world),
// Get the shader from the shader resource we inserted in the plugin.
shader: world.resource::<ColoredMesh2dShader>().0.clone(),
} }
} }
} }
@ -164,14 +167,14 @@ impl SpecializedRenderPipeline for ColoredMesh2dPipeline {
RenderPipelineDescriptor { RenderPipelineDescriptor {
vertex: VertexState { vertex: VertexState {
// Use our custom shader // Use our custom shader
shader: COLORED_MESH2D_SHADER_HANDLE, shader: self.shader.clone(),
// Use our custom vertex buffer // Use our custom vertex buffer
buffers: vec![vertex_layout], buffers: vec![vertex_layout],
..default() ..default()
}, },
fragment: Some(FragmentState { fragment: Some(FragmentState {
// Use our custom shader // Use our custom shader
shader: COLORED_MESH2D_SHADER_HANDLE, shader: self.shader.clone(),
targets: vec![Some(ColorTargetState { targets: vec![Some(ColorTargetState {
format, format,
blend: Some(BlendState::ALPHA_BLENDING), blend: Some(BlendState::ALPHA_BLENDING),
@ -278,9 +281,10 @@ fn fragment(in: FragmentInput) -> @location(0) vec4<f32> {
/// Plugin that renders [`ColoredMesh2d`]s /// Plugin that renders [`ColoredMesh2d`]s
pub struct ColoredMesh2dPlugin; pub struct ColoredMesh2dPlugin;
/// Handle to the custom shader with a unique random ID /// A resource holding the shader asset handle for the pipeline to take. There are many ways to get
pub const COLORED_MESH2D_SHADER_HANDLE: Handle<Shader> = /// the shader into the pipeline - this is just one option.
uuid_handle!("f48b148f-7373-4638-9900-392b3b3ccc66"); #[derive(Resource)]
struct ColoredMesh2dShader(Handle<Shader>);
/// Our custom pipeline needs its own instance storage /// Our custom pipeline needs its own instance storage
#[derive(Resource, Deref, DerefMut, Default)] #[derive(Resource, Deref, DerefMut, Default)]
@ -290,15 +294,16 @@ impl Plugin for ColoredMesh2dPlugin {
fn build(&self, app: &mut App) { fn build(&self, app: &mut App) {
// Load our custom shader // Load our custom shader
let mut shaders = app.world_mut().resource_mut::<Assets<Shader>>(); let mut shaders = app.world_mut().resource_mut::<Assets<Shader>>();
shaders.insert( // Here, we construct and add the shader asset manually. There are many ways to load this
&COLORED_MESH2D_SHADER_HANDLE, // shader, including `embedded_asset`/`load_embedded_asset`.
Shader::from_wgsl(COLORED_MESH2D_SHADER, file!()), let shader = shaders.add(Shader::from_wgsl(COLORED_MESH2D_SHADER, file!()));
);
app.add_plugins(SyncComponentPlugin::<ColoredMesh2d>::default()); app.add_plugins(SyncComponentPlugin::<ColoredMesh2d>::default());
// Register our custom draw function, and add our render systems // Register our custom draw function, and add our render systems
app.get_sub_app_mut(RenderApp) app.get_sub_app_mut(RenderApp)
.unwrap() .unwrap()
.insert_resource(ColoredMesh2dShader(shader))
.add_render_command::<Transparent2d, DrawColoredMesh2d>() .add_render_command::<Transparent2d, DrawColoredMesh2d>()
.init_resource::<SpecializedRenderPipelines<ColoredMesh2dPipeline>>() .init_resource::<SpecializedRenderPipelines<ColoredMesh2dPipeline>>()
.init_resource::<RenderColoredMesh2dInstances>() .init_resource::<RenderColoredMesh2dInstances>()


@ -4,7 +4,7 @@ use bevy::{
color::palettes::basic::*, color::palettes::basic::*,
core_widgets::{ core_widgets::{
Callback, CoreButton, CoreCheckbox, CoreRadio, CoreRadioGroup, CoreSlider, Callback, CoreButton, CoreCheckbox, CoreRadio, CoreRadioGroup, CoreSlider,
CoreSliderDragState, CoreSliderThumb, CoreWidgetsPlugin, SliderRange, SliderValue, CoreSliderDragState, CoreSliderThumb, CoreWidgetsPlugins, SliderRange, SliderValue,
TrackClick, TrackClick,
}, },
input_focus::{ input_focus::{
@ -21,7 +21,7 @@ fn main() {
App::new() App::new()
.add_plugins(( .add_plugins((
DefaultPlugins, DefaultPlugins,
CoreWidgetsPlugin, CoreWidgetsPlugins,
InputDispatchPlugin, InputDispatchPlugin,
TabNavigationPlugin, TabNavigationPlugin,
)) ))


@ -3,7 +3,7 @@
use bevy::{ use bevy::{
color::palettes::basic::*, color::palettes::basic::*,
core_widgets::{ core_widgets::{
Callback, CoreButton, CoreCheckbox, CoreSlider, CoreSliderThumb, CoreWidgetsPlugin, Callback, CoreButton, CoreCheckbox, CoreSlider, CoreSliderThumb, CoreWidgetsPlugins,
SliderRange, SliderValue, SliderRange, SliderValue,
}, },
ecs::system::SystemId, ecs::system::SystemId,
@ -21,7 +21,7 @@ fn main() {
App::new() App::new()
.add_plugins(( .add_plugins((
DefaultPlugins, DefaultPlugins,
CoreWidgetsPlugin, CoreWidgetsPlugins,
InputDispatchPlugin, InputDispatchPlugin,
TabNavigationPlugin, TabNavigationPlugin,
)) ))


@ -1,7 +1,7 @@
//! This example shows off the various Bevy Feathers widgets. //! This example shows off the various Bevy Feathers widgets.
use bevy::{ use bevy::{
core_widgets::{Callback, CoreRadio, CoreRadioGroup, CoreWidgetsPlugin, SliderStep}, core_widgets::{Callback, CoreRadio, CoreRadioGroup, CoreWidgetsPlugins, SliderStep},
feathers::{ feathers::{
controls::{ controls::{
button, checkbox, radio, slider, toggle_switch, ButtonProps, ButtonVariant, button, checkbox, radio, slider, toggle_switch, ButtonProps, ButtonVariant,
@ -25,7 +25,7 @@ fn main() {
App::new() App::new()
.add_plugins(( .add_plugins((
DefaultPlugins, DefaultPlugins,
CoreWidgetsPlugin, CoreWidgetsPlugins,
InputDispatchPlugin, InputDispatchPlugin,
TabNavigationPlugin, TabNavigationPlugin,
FeathersPlugin, FeathersPlugin,


@ -0,0 +1,8 @@
---
title: ChromaticAberration LUT is now Option
pull_requests: [19408]
---
The `ChromaticAberration` component's `color_lut` field used to be a plain `Handle<Image>`. It is now an
`Option<Handle<Image>>` that falls back to the default lookup texture (the built-in 3×1 red/green/blue LUT)
when `None`. If you assign a custom LUT, wrap the value in `Some`.
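For reference, a minimal before/after sketch (the module path and asset path here are illustrative, not taken from this commit):

```rust
use bevy::core_pipeline::post_process::ChromaticAberration;
use bevy::prelude::*;

fn setup(mut commands: Commands, asset_server: Res<AssetServer>) {
    // Illustrative asset path; any `Handle<Image>` works.
    let custom_lut: Handle<Image> = asset_server.load("luts/aberration.ktx2");
    commands.spawn((
        Camera3d::default(),
        ChromaticAberration {
            // Previously: `color_lut: custom_lut,`
            color_lut: Some(custom_lut),
            ..default()
        },
    ));
}
```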


@ -1,7 +1,7 @@
--- ---
title: Headless Widgets title: Headless Widgets
authors: ["@viridia", "@ickshonpe", "@alice-i-cecile"] authors: ["@viridia", "@ickshonpe", "@alice-i-cecile"]
pull_requests: [19366, 19584, 19665, 19778, 19803] pull_requests: [19366, 19584, 19665, 19778, 19803, 20036]
--- ---
Bevy's `Button` and `Interaction` components have been around for a long time. Unfortunately Bevy's `Button` and `Interaction` components have been around for a long time. Unfortunately