Merge branch 'main' of https://github.com/bevyengine/bevy into reflect-auto-registration

This commit is contained in:
eugineerd 2025-02-25 17:10:01 +00:00
commit c6be4fa012
277 changed files with 4097 additions and 1691 deletions

View File

@ -244,7 +244,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Check for typos
uses: crate-ci/typos@v1.29.7
uses: crate-ci/typos@v1.29.9
- name: Typos info
if: failure()
run: |
@ -335,6 +335,7 @@ jobs:
timeout-minutes: 30
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- name: check for missing metadata
id: missing-metadata
run: cargo run -p build-templated-pages -- check-missing examples
@ -369,6 +370,7 @@ jobs:
needs: check-missing-examples-in-docs
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- name: check for missing features
id: missing-features
run: cargo run -p build-templated-pages -- check-missing features
@ -412,6 +414,7 @@ jobs:
~/.cargo/git/db/
target/
key: ${{ runner.os }}-cargo-msrv-${{ hashFiles('**/Cargo.toml') }}
- uses: dtolnay/rust-toolchain@stable
- name: get MSRV
id: msrv
run: |

View File

@ -219,13 +219,6 @@ jobs:
target/
key: ${{ runner.os }}-wasm-run-examples-${{ hashFiles('**/Cargo.toml') }}
- name: install xvfb, llvmpipe and lavapipe
run: |
sudo apt-get update -y -qq
sudo add-apt-repository ppa:kisak/turtle -y
sudo apt-get update
sudo apt install -y xvfb libgl1-mesa-dri libxcb-xfixes0-dev mesa-vulkan-drivers
- name: Install wasm-bindgen
run: cargo install --force wasm-bindgen-cli

View File

@ -1,7 +1,7 @@
[package]
name = "bevy"
version = "0.16.0-dev"
edition = "2021"
edition = "2024"
categories = ["game-engines", "graphics", "gui", "rendering"]
description = "A refreshingly simple data-driven game engine and app framework"
exclude = ["assets/", "tools/", ".github/", "crates/", "examples/wasm/assets/"]
@ -10,7 +10,7 @@ keywords = ["game", "engine", "gamedev", "graphics", "bevy"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/bevyengine/bevy"
documentation = "https://docs.rs/bevy"
rust-version = "1.83.0"
rust-version = "1.85.0"
[workspace]
resolver = "2"
@ -46,6 +46,7 @@ undocumented_unsafe_blocks = "warn"
unwrap_or_default = "warn"
needless_lifetimes = "allow"
too_many_arguments = "allow"
nonstandard_macro_braces = "warn"
ptr_as_ptr = "warn"
ptr_cast_constness = "warn"
@ -91,6 +92,7 @@ undocumented_unsafe_blocks = "warn"
unwrap_or_default = "warn"
needless_lifetimes = "allow"
too_many_arguments = "allow"
nonstandard_macro_braces = "warn"
ptr_as_ptr = "warn"
ptr_cast_constness = "warn"
@ -474,6 +476,9 @@ track_location = ["bevy_internal/track_location"]
# Enable function reflection
reflect_functions = ["bevy_internal/reflect_functions"]
# Enable documentation reflection
reflect_documentation = ["bevy_internal/reflect_documentation"]
# Enable automatic reflect registration
reflect_auto_register = ["bevy_internal/reflect_auto_register"]
@ -782,6 +787,17 @@ description = "Used to test alpha modes with mesh2d"
category = "2D Rendering"
wasm = true
[[example]]
name = "mesh2d_repeated_texture"
path = "examples/2d/mesh2d_repeated_texture.rs"
doc-scrape-examples = true
[package.metadata.example.mesh2d_repeated_texture]
name = "Mesh2d Repeated Texture"
description = "Showcase of using `uv_transform` on the `ColorMaterial` of a `Mesh2d`"
category = "2D Rendering"
wasm = true
[[example]]
name = "pixel_grid_snap"
path = "examples/2d/pixel_grid_snap.rs"

View File

@ -1,6 +1,6 @@
[package]
name = "benches"
edition = "2021"
edition = "2024"
description = "Benchmarks that test Bevy's performance"
publish = false
license = "MIT OR Apache-2.0"
@ -50,6 +50,7 @@ undocumented_unsafe_blocks = "warn"
unwrap_or_default = "warn"
needless_lifetimes = "allow"
too_many_arguments = "allow"
nonstandard_macro_braces = "warn"
ptr_as_ptr = "warn"
ptr_cast_constness = "warn"

View File

@ -95,7 +95,7 @@ fn all_added_detection_generic<T: Component + Default>(group: &mut BenchGroup, e
let query = generic_filter_query::<Added<T>>(&mut world);
(world, query)
},
|(ref mut world, ref mut query)| {
|(world, query)| {
let mut count = 0;
for entity in query.iter(world) {
black_box(entity);
@ -143,7 +143,7 @@ fn all_changed_detection_generic<T: Component<Mutability = Mutable> + Default +
let query = generic_filter_query::<Changed<T>>(&mut world);
(world, query)
},
|(ref mut world, ref mut query)| {
|(world, query)| {
let mut count = 0;
for entity in query.iter(world) {
black_box(entity);
@ -196,7 +196,7 @@ fn few_changed_detection_generic<T: Component<Mutability = Mutable> + Default +
let query = generic_filter_query::<Changed<T>>(&mut world);
(world, query)
},
|(ref mut world, ref mut query)| {
|(world, query)| {
for entity in query.iter(world) {
black_box(entity);
}
@ -237,7 +237,7 @@ fn none_changed_detection_generic<T: Component<Mutability = Mutable> + Default>(
let query = generic_filter_query::<Changed<T>>(&mut world);
(world, query)
},
|(ref mut world, ref mut query)| {
|(world, query)| {
let mut count = 0;
for entity in query.iter(world) {
black_box(entity);
@ -343,7 +343,7 @@ fn multiple_archetype_none_changed_detection_generic<
let query = generic_filter_query::<Changed<T>>(&mut world);
(world, query)
},
|(ref mut world, ref mut query)| {
|(world, query)| {
let mut count = 0;
for entity in query.iter(world) {
black_box(entity);

View File

@ -12,7 +12,7 @@ impl Benchmark {
let mut world = World::default();
let entities = world
.spawn_batch(core::iter::repeat(A(0.)).take(10000))
.spawn_batch(core::iter::repeat_n(A(0.), 10_000))
.collect();
Self(world, entities)
}

View File

@ -19,15 +19,15 @@ impl<'w> Benchmark<'w> {
pub fn new() -> Self {
let mut world = World::new();
world.spawn_batch(
core::iter::repeat((
world.spawn_batch(core::iter::repeat_n(
(
Transform(Mat4::from_scale(Vec3::ONE)),
Position(Vec3::X),
Rotation(Vec3::X),
Velocity(Vec3::X),
))
.take(10_000),
);
),
10_000,
));
let query = world.query::<(&Velocity, &mut Position)>();
Self(world, query)

View File

@ -19,15 +19,15 @@ impl<'w> Benchmark<'w> {
pub fn new() -> Self {
let mut world = World::new();
world.spawn_batch(
core::iter::repeat((
world.spawn_batch(core::iter::repeat_n(
(
Transform(Mat4::from_scale(Vec3::ONE)),
Position(Vec3::X),
Rotation(Vec3::X),
Velocity(Vec3::X),
))
.take(10_000),
);
),
10_000,
));
let query = world.query::<(&Velocity, &mut Position)>();
Self(world, query)

View File

@ -21,15 +21,15 @@ impl<'w> Benchmark<'w> {
pub fn new() -> Self {
let mut world = World::new();
world.spawn_batch(
core::iter::repeat((
world.spawn_batch(core::iter::repeat_n(
(
Transform(Mat4::from_scale(Vec3::ONE)),
Position(Vec3::X),
Rotation(Vec3::X),
Velocity(Vec3::X),
))
.take(10_000),
);
),
10_000,
));
let query = world.query::<(&Velocity, &mut Position)>();
Self(world, query)

View File

@ -33,8 +33,8 @@ impl<'w> Benchmark<'w> {
pub fn new() -> Self {
let mut world = World::new();
world.spawn_batch(
core::iter::repeat((
world.spawn_batch(core::iter::repeat_n(
(
Transform(Mat4::from_scale(Vec3::ONE)),
Rotation(Vec3::X),
Position::<0>(Vec3::X),
@ -47,9 +47,9 @@ impl<'w> Benchmark<'w> {
Velocity::<3>(Vec3::X),
Position::<4>(Vec3::X),
Velocity::<4>(Vec3::X),
))
.take(10_000),
);
),
10_000,
));
let query = world.query();
Self(world, query)

View File

@ -35,8 +35,8 @@ impl<'w> Benchmark<'w> {
pub fn new() -> Self {
let mut world = World::new();
world.spawn_batch(
core::iter::repeat((
world.spawn_batch(core::iter::repeat_n(
(
Transform(Mat4::from_scale(Vec3::ONE)),
Rotation(Vec3::X),
Position::<0>(Vec3::X),
@ -49,9 +49,9 @@ impl<'w> Benchmark<'w> {
Velocity::<3>(Vec3::X),
Position::<4>(Vec3::X),
Velocity::<4>(Vec3::X),
))
.take(10_000),
);
),
10_000,
));
let query = world.query();
Self(world, query)

View File

@ -21,15 +21,15 @@ impl<'w> Benchmark<'w> {
pub fn new() -> Self {
let mut world = World::new();
world.spawn_batch(
core::iter::repeat((
world.spawn_batch(core::iter::repeat_n(
(
Transform(Mat4::from_scale(Vec3::ONE)),
Position(Vec3::X),
Rotation(Vec3::X),
Velocity(Vec3::X),
))
.take(10_000),
);
),
10_000,
));
let query = world.query::<(&Velocity, &mut Position)>();
Self(world, query)

View File

@ -19,15 +19,15 @@ impl Benchmark {
pub fn new() -> Self {
let mut world = World::new();
world.spawn_batch(
core::iter::repeat((
world.spawn_batch(core::iter::repeat_n(
(
Transform(Mat4::from_scale(Vec3::ONE)),
Position(Vec3::X),
Rotation(Vec3::X),
Velocity(Vec3::X),
))
.take(10_000),
);
),
10_000,
));
fn query_system(mut query: Query<(&Velocity, &mut Position)>) {
for (velocity, mut position) in &mut query {

View File

@ -33,8 +33,8 @@ impl<'w> Benchmark<'w> {
pub fn new() -> Self {
let mut world = World::new();
world.spawn_batch(
core::iter::repeat((
world.spawn_batch(core::iter::repeat_n(
(
Transform(Mat4::from_scale(Vec3::ONE)),
Rotation(Vec3::X),
Position::<0>(Vec3::X),
@ -47,9 +47,9 @@ impl<'w> Benchmark<'w> {
Velocity::<3>(Vec3::X),
Position::<4>(Vec3::X),
Velocity::<4>(Vec3::X),
))
.take(10_000),
);
),
10_000,
));
let query = world.query();
Self(world, query)

View File

@ -35,8 +35,8 @@ impl<'w> Benchmark<'w> {
pub fn new() -> Self {
let mut world = World::new();
world.spawn_batch(
core::iter::repeat((
world.spawn_batch(core::iter::repeat_n(
(
Transform(Mat4::from_scale(Vec3::ONE)),
Rotation(Vec3::X),
Position::<0>(Vec3::X),
@ -49,9 +49,9 @@ impl<'w> Benchmark<'w> {
Velocity::<3>(Vec3::X),
Position::<4>(Vec3::X),
Velocity::<4>(Vec3::X),
))
.take(10_000),
);
),
10_000,
));
let query = world.query();
Self(world, query)

View File

@ -30,15 +30,15 @@ impl<'w> Benchmark<'w> {
let mut world = World::new();
let iter = world.spawn_batch(
core::iter::repeat((
let iter = world.spawn_batch(core::iter::repeat_n(
(
Transform(Mat4::from_scale(Vec3::ONE)),
Position(Vec3::X),
Rotation(Vec3::X),
Velocity(Vec3::X),
))
.take(100_000),
);
),
100_000,
));
let entities = iter.into_iter().collect::<Vec<Entity>>();
for i in 0..fragment {
let mut e = world.entity_mut(entities[i as usize]);

View File

@ -1,6 +1,10 @@
use core::hint::black_box;
use bevy_ecs::{entity::Entity, event::Event, observer::Trigger, world::World};
use bevy_ecs::{
event::Event,
observer::{Trigger, TriggerTargets},
world::World,
};
use criterion::Criterion;
use rand::{prelude::SliceRandom, SeedableRng};
@ -46,6 +50,6 @@ fn empty_listener_base(trigger: Trigger<EventBase>) {
black_box(trigger);
}
fn send_base_event(world: &mut World, entities: &Vec<Entity>) {
fn send_base_event(world: &mut World, entities: impl TriggerTargets) {
world.trigger_targets(EventBase, entities);
}

View File

@ -11,16 +11,16 @@ fn make_entity(rng: &mut impl Rng, size: usize) -> Entity {
// * For ids, half are in [0, size), half are unboundedly larger.
// * For generations, half are in [1, 3), half are unboundedly larger.
let x: f64 = rng.gen();
let x: f64 = rng.r#gen();
let id = -(1.0 - x).log2() * (size as f64);
let x: f64 = rng.gen();
let gen = 1.0 + -(1.0 - x).log2() * 2.0;
let x: f64 = rng.r#gen();
let generation = 1.0 + -(1.0 - x).log2() * 2.0;
// this is not reliable, but we're internal so a hack is ok
let bits = ((gen as u64) << 32) | (id as u64);
let bits = ((generation as u64) << 32) | (id as u64);
let e = Entity::from_bits(bits);
assert_eq!(e.index(), id as u32);
assert_eq!(e.generation(), gen as u32);
assert_eq!(e.generation(), generation as u32);
e
}

View File

@ -75,8 +75,8 @@ fn concrete_list_apply(criterion: &mut Criterion) {
let mut group = create_group(criterion, bench!("concrete_list_apply"));
let empty_base = |_: usize| Vec::<u64>::new;
let full_base = |size: usize| move || iter::repeat(0).take(size).collect::<Vec<u64>>();
let patch = |size: usize| iter::repeat(1).take(size).collect::<Vec<u64>>();
let full_base = |size: usize| move || iter::repeat_n(0, size).collect::<Vec<u64>>();
let patch = |size: usize| iter::repeat_n(1, size).collect::<Vec<u64>>();
list_apply(&mut group, "empty_base_concrete_patch", empty_base, patch);
@ -103,7 +103,7 @@ fn concrete_list_clone_dynamic(criterion: &mut Criterion) {
BenchmarkId::from_parameter(size),
&size,
|bencher, &size| {
let v = iter::repeat(0).take(size).collect::<Vec<_>>();
let v = iter::repeat_n(0, size).collect::<Vec<_>>();
bencher.iter(|| black_box(&v).clone_dynamic());
},
@ -123,7 +123,7 @@ fn dynamic_list_push(criterion: &mut Criterion) {
BenchmarkId::from_parameter(size),
&size,
|bencher, &size| {
let src = iter::repeat(()).take(size).collect::<Vec<_>>();
let src = iter::repeat_n((), size).collect::<Vec<_>>();
let dst = DynamicList::default();
bencher.iter_batched(
@ -146,8 +146,8 @@ fn dynamic_list_apply(criterion: &mut Criterion) {
let mut group = create_group(criterion, bench!("dynamic_list_apply"));
let empty_base = |_: usize| || Vec::<u64>::new().clone_dynamic();
let full_base = |size: usize| move || iter::repeat(0).take(size).collect::<Vec<u64>>();
let patch = |size: usize| iter::repeat(1).take(size).collect::<Vec<u64>>();
let full_base = |size: usize| move || iter::repeat_n(0, size).collect::<Vec<u64>>();
let patch = |size: usize| iter::repeat_n(1, size).collect::<Vec<u64>>();
list_apply(&mut group, "empty_base_concrete_patch", empty_base, patch);

View File

@ -145,7 +145,7 @@ fn u64_to_n_byte_key(k: u64, n: usize) -> String {
write!(&mut key, "{}", k).unwrap();
// Pad key to n bytes.
key.extend(iter::repeat('\0').take(n - key.len()));
key.extend(iter::repeat_n('\0', n - key.len()));
key
}

View File

@ -43,3 +43,6 @@ disallowed-methods = [
{ path = "f32::atanh", reason = "use bevy_math::ops::atanh instead for libm determinism" },
{ path = "criterion::black_box", reason = "use core::hint::black_box instead" },
]
# Require `bevy_ecs::children!` to use `[]` braces, instead of `()` or `{}`.
standard-macro-braces = [{ name = "children", brace = "[" }]

View File

@ -1,7 +1,7 @@
[package]
name = "bevy_a11y"
version = "0.16.0-dev"
edition = "2021"
edition = "2024"
description = "Provides accessibility support for Bevy Engine"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"
@ -50,15 +50,6 @@ critical-section = [
"bevy_input_focus/critical-section",
]
## `portable-atomic` provides additional platform support for atomic types and
## operations, even on targets without native support.
portable-atomic = [
"bevy_app/portable-atomic",
"bevy_ecs/portable-atomic",
"bevy_reflect?/portable-atomic",
"bevy_input_focus/portable-atomic",
]
## Uses the `libm` maths library instead of the one provided in `std` and `core`.
libm = ["bevy_input_focus/libm"]

View File

@ -1,7 +1,7 @@
[package]
name = "bevy_animation"
version = "0.16.0-dev"
edition = "2021"
edition = "2024"
description = "Provides animation functionality for Bevy Engine"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"

View File

@ -884,10 +884,10 @@ impl ThreadedAnimationGraph {
self.sorted_edge_ranges.clear();
self.sorted_edge_ranges
.extend(iter::repeat(0..0).take(node_count));
.extend(iter::repeat_n(0..0, node_count));
self.computed_masks.clear();
self.computed_masks.extend(iter::repeat(0).take(node_count));
self.computed_masks.extend(iter::repeat_n(0, node_count));
}
/// Recursively constructs the [`ThreadedAnimationGraph`] for the subtree

View File

@ -1,7 +1,7 @@
[package]
name = "bevy_app"
version = "0.16.0-dev"
edition = "2021"
edition = "2024"
description = "Provides core App functionality for Bevy Engine"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"
@ -65,15 +65,6 @@ critical-section = [
"bevy_reflect?/critical-section",
]
## `portable-atomic` provides additional platform support for atomic types and
## operations, even on targets without native support.
portable-atomic = [
"bevy_tasks?/portable-atomic",
"bevy_ecs/portable-atomic",
"bevy_platform_support/portable-atomic",
"bevy_reflect?/portable-atomic",
]
[dependencies]
# bevy
bevy_derive = { path = "../bevy_derive", version = "0.16.0-dev" }

View File

@ -1440,7 +1440,7 @@ impl Termination for AppExit {
#[cfg(test)]
mod tests {
use core::{iter, marker::PhantomData};
use core::marker::PhantomData;
use std::sync::Mutex;
use bevy_ecs::{
@ -1664,7 +1664,7 @@ mod tests {
struct Foo;
let mut app = App::new();
app.world_mut().spawn_batch(iter::repeat(Foo).take(5));
app.world_mut().spawn_batch(core::iter::repeat_n(Foo, 5));
fn despawn_one_foo(mut commands: Commands, foos: Query<Entity, With<Foo>>) {
if let Some(e) = foos.iter().next() {
@ -1718,9 +1718,9 @@ mod tests {
fn raise_exits(mut exits: EventWriter<AppExit>) {
// Exit codes chosen by a fair dice roll.
// Unlikely to overlap with default values.
exits.send(AppExit::Success);
exits.send(AppExit::from_code(4));
exits.send(AppExit::from_code(73));
exits.write(AppExit::Success);
exits.write(AppExit::from_code(4));
exits.write(AppExit::from_code(73));
}
let exit = App::new().add_systems(Update, raise_exits).run();

View File

@ -11,7 +11,7 @@ use crate::{App, Plugin};
/// Adds sensible panic handlers to Apps. This plugin is part of the `DefaultPlugins`. Adding
/// this plugin will setup a panic hook appropriate to your target platform:
/// * On Wasm, uses [`console_error_panic_hook`](https://crates.io/crates/console_error_panic_hook), logging
/// to the browser console.
/// to the browser console.
/// * Other platforms are currently not setup.
///
/// ```no_run

View File

@ -1,11 +1,3 @@
#![cfg_attr(
feature = "portable-atomic",
expect(
clippy::redundant_closure,
reason = "bevy_platform_support::sync::Arc has subtly different implicit behavior"
)
)]
use crate::{App, Plugin};
use alloc::string::ToString;

View File

@ -50,7 +50,7 @@ impl TerminalCtrlCHandlerPlugin {
/// Sends a [`AppExit`] event when the user presses `Ctrl+C` on the terminal.
pub fn exit_on_flag(mut events: EventWriter<AppExit>) {
if SHOULD_EXIT.load(Ordering::Relaxed) {
events.send(AppExit::from_code(130));
events.write(AppExit::from_code(130));
}
}
}

View File

@ -1,7 +1,7 @@
[package]
name = "bevy_asset"
version = "0.16.0-dev"
edition = "2021"
edition = "2024"
description = "Provides asset functionality for Bevy Engine"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"

View File

@ -1,7 +1,7 @@
[package]
name = "bevy_asset_macros"
version = "0.16.0-dev"
edition = "2021"
edition = "2024"
description = "Derive implementations for bevy_asset"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"

View File

@ -330,7 +330,7 @@ mod tests {
_query: Query<&mut MyComponent, AssetChanged<MyComponent>>,
mut exit: EventWriter<AppExit>,
) {
exit.send(AppExit::Error(NonZero::<u8>::MIN));
exit.write(AppExit::Error(NonZero::<u8>::MIN));
}
run_app(compatible_filter);
}

View File

@ -579,7 +579,7 @@ impl<A: Asset> Assets<A> {
};
}
}
events.send_batch(assets.queued_events.drain(..));
events.write_batch(assets.queued_events.drain(..));
}
/// A run condition for [`asset_events`]. The system will not run if there are no events to

View File

@ -499,8 +499,8 @@ pub trait AssetApp {
/// * Initializing the [`AssetEvent`] resource for the [`Asset`]
/// * Adding other relevant systems and resources for the [`Asset`]
/// * Ignoring schedule ambiguities in [`Assets`] resource. Any time a system takes
/// mutable access to this resource this causes a conflict, but they rarely actually
/// modify the same underlying asset.
/// mutable access to this resource this causes a conflict, but they rarely actually
/// modify the same underlying asset.
fn init_asset<A: Asset>(&mut self) -> &mut Self;
/// Registers the asset type `T` using `[App::register]`,
/// and adds [`ReflectAsset`] type data to `T` and [`ReflectHandle`] type data to [`Handle<T>`] in the type registry.
@ -639,7 +639,7 @@ mod tests {
},
loader::{AssetLoader, LoadContext},
Asset, AssetApp, AssetEvent, AssetId, AssetLoadError, AssetLoadFailedEvent, AssetPath,
AssetPlugin, AssetServer, Assets,
AssetPlugin, AssetServer, Assets, DuplicateLabelAssetError, LoadState,
};
use alloc::{
boxed::Box,
@ -695,6 +695,8 @@ mod tests {
CannotLoadDependency { dependency: AssetPath<'static> },
#[error("A RON error occurred during loading")]
RonSpannedError(#[from] ron::error::SpannedError),
#[error(transparent)]
DuplicateLabelAssetError(#[from] DuplicateLabelAssetError),
#[error("An IO error occurred during loading")]
Io(#[from] std::io::Error),
}
@ -740,7 +742,7 @@ mod tests {
.sub_texts
.drain(..)
.map(|text| load_context.add_labeled_asset(text.clone(), SubText { text }))
.collect(),
.collect::<Result<Vec<_>, _>>()?,
})
}
@ -1778,6 +1780,49 @@ mod tests {
app.world_mut().run_schedule(Update);
}
#[test]
fn fails_to_load_for_duplicate_subasset_labels() {
let mut app = App::new();
let dir = Dir::default();
dir.insert_asset_text(
Path::new("a.ron"),
r#"(
text: "b",
dependencies: [],
embedded_dependencies: [],
sub_texts: ["A", "A"],
)"#,
);
app.register_asset_source(
AssetSourceId::Default,
AssetSource::build()
.with_reader(move || Box::new(MemoryAssetReader { root: dir.clone() })),
)
.add_plugins((
TaskPoolPlugin::default(),
LogPlugin::default(),
AssetPlugin::default(),
));
app.init_asset::<CoolText>()
.init_asset::<SubText>()
.register_asset_loader(CoolTextLoader);
let asset_server = app.world().resource::<AssetServer>().clone();
let handle = asset_server.load::<CoolText>("a.ron");
run_app_until(&mut app, |_world| match asset_server.load_state(&handle) {
LoadState::Loading => None,
LoadState::Failed(err) => {
assert!(matches!(*err, AssetLoadError::AssetLoaderError(_)));
Some(())
}
state => panic!("Unexpected asset state: {state:?}"),
});
}
// validate the Asset derive macro for various asset types
#[derive(Asset, TypePath)]
pub struct TestAsset;

View File

@ -13,7 +13,6 @@ use alloc::{
};
use atomicow::CowArc;
use bevy_ecs::world::World;
use bevy_log::warn;
use bevy_platform_support::collections::{HashMap, HashSet};
use bevy_tasks::{BoxedFuture, ConditionalSendFuture};
use core::any::{Any, TypeId};
@ -458,7 +457,7 @@ impl<'a> LoadContext<'a> {
&mut self,
label: String,
load: impl FnOnce(&mut LoadContext) -> A,
) -> Handle<A> {
) -> Result<Handle<A>, DuplicateLabelAssetError> {
let mut context = self.begin_labeled_asset();
let asset = load(&mut context);
let complete_asset = context.finish(asset);
@ -475,7 +474,11 @@ impl<'a> LoadContext<'a> {
/// new [`LoadContext`] to track the dependencies for the labeled asset.
///
/// See [`AssetPath`] for more on labeled assets.
pub fn add_labeled_asset<A: Asset>(&mut self, label: String, asset: A) -> Handle<A> {
pub fn add_labeled_asset<A: Asset>(
&mut self,
label: String,
asset: A,
) -> Result<Handle<A>, DuplicateLabelAssetError> {
self.labeled_asset_scope(label, |_| asset)
}
@ -488,7 +491,7 @@ impl<'a> LoadContext<'a> {
&mut self,
label: impl Into<CowArc<'static, str>>,
loaded_asset: CompleteLoadedAsset<A>,
) -> Handle<A> {
) -> Result<Handle<A>, DuplicateLabelAssetError> {
let label = label.into();
let CompleteLoadedAsset {
asset,
@ -499,19 +502,25 @@ impl<'a> LoadContext<'a> {
let handle = self
.asset_server
.get_or_create_path_handle(labeled_path, None);
self.labeled_assets.insert(
label,
LabeledAsset {
asset: loaded_asset,
handle: handle.clone().untyped(),
},
);
let has_duplicate = self
.labeled_assets
.insert(
label.clone(),
LabeledAsset {
asset: loaded_asset,
handle: handle.clone().untyped(),
},
)
.is_some();
if has_duplicate {
return Err(DuplicateLabelAssetError(label.to_string()));
}
for (label, asset) in labeled_assets {
if self.labeled_assets.insert(label.clone(), asset).is_some() {
warn!("A labeled asset with the label \"{label}\" already exists. Replacing with the new asset.");
return Err(DuplicateLabelAssetError(label.to_string()));
}
}
handle
Ok(handle)
}
/// Returns `true` if an asset with the label `label` exists in this context.
@ -552,8 +561,8 @@ impl<'a> LoadContext<'a> {
let path = path.into();
let source = self.asset_server.get_source(path.source())?;
let asset_reader = match self.asset_server.mode() {
AssetServerMode::Unprocessed { .. } => source.reader(),
AssetServerMode::Processed { .. } => source.processed_reader()?,
AssetServerMode::Unprocessed => source.reader(),
AssetServerMode::Processed => source.processed_reader()?,
};
let mut reader = asset_reader.read(path.path()).await?;
let hash = if self.populate_hashes {
@ -661,3 +670,8 @@ pub enum ReadAssetBytesError {
#[error("The LoadContext for this read_asset_bytes call requires hash metadata, but it was not provided. This is likely an internal implementation error.")]
MissingAssetHash,
}
/// An error when labeled assets have the same label, containing the duplicate label.
#[derive(Error, Debug)]
#[error("Encountered a duplicate label while loading an asset: \"{0}\"")]
pub struct DuplicateLabelAssetError(pub String);

View File

@ -18,10 +18,10 @@ use thiserror::Error;
///
/// Asset paths consist of three main parts:
/// * [`AssetPath::source`]: The name of the [`AssetSource`](crate::io::AssetSource) to load the asset from.
/// This is optional. If one is not set the default source will be used (which is the `assets` folder by default).
/// This is optional. If one is not set the default source will be used (which is the `assets` folder by default).
/// * [`AssetPath::path`]: The "virtual filesystem path" pointing to an asset source file.
/// * [`AssetPath::label`]: An optional "named sub asset". When assets are loaded, they are
/// allowed to load "sub assets" of any type, which are identified by a named "label".
/// allowed to load "sub assets" of any type, which are identified by a named "label".
///
/// Asset paths are generally constructed (and visualized) as strings:
///

View File

@ -207,10 +207,13 @@ impl AssetProcessor {
/// Processes all assets. This will:
/// * For each "processed [`AssetSource`]:
/// * Scan the [`ProcessorTransactionLog`] and recover from any failures detected
/// * Scan the processed [`AssetReader`](crate::io::AssetReader) to build the current view of already processed assets.
/// * Scan the unprocessed [`AssetReader`](crate::io::AssetReader) and remove any final processed assets that are invalid or no longer exist.
/// * For each asset in the unprocessed [`AssetReader`](crate::io::AssetReader), kick off a new "process job", which will process the asset
/// (if the latest version of the asset has not been processed).
/// * Scan the processed [`AssetReader`](crate::io::AssetReader) to build the current view of
/// already processed assets.
/// * Scan the unprocessed [`AssetReader`](crate::io::AssetReader) and remove any final
/// processed assets that are invalid or no longer exist.
/// * For each asset in the unprocessed [`AssetReader`](crate::io::AssetReader), kick off a new
/// "process job", which will process the asset
/// (if the latest version of the asset has not been processed).
#[cfg(all(not(target_arch = "wasm32"), feature = "multi_threaded"))]
pub fn process_assets(&self) {
let start_time = std::time::Instant::now();

View File

@ -38,12 +38,13 @@ use std::path::{Path, PathBuf};
use thiserror::Error;
use tracing::{error, info};
/// Loads and tracks the state of [`Asset`] values from a configured [`AssetReader`](crate::io::AssetReader). This can be used to kick off new asset loads and
/// retrieve their current load states.
/// Loads and tracks the state of [`Asset`] values from a configured [`AssetReader`](crate::io::AssetReader).
/// This can be used to kick off new asset loads and retrieve their current load states.
///
/// The general process to load an asset is:
/// 1. Initialize a new [`Asset`] type with the [`AssetServer`] via [`AssetApp::init_asset`], which will internally call [`AssetServer::register_asset`]
/// and set up related ECS [`Assets`] storage and systems.
/// 1. Initialize a new [`Asset`] type with the [`AssetServer`] via [`AssetApp::init_asset`], which
/// will internally call [`AssetServer::register_asset`] and set up related ECS [`Assets`]
/// storage and systems.
/// 2. Register one or more [`AssetLoader`]s for that asset with [`AssetApp::init_asset_loader`]
/// 3. Add the asset to your asset folder (defaults to `assets`).
/// 4. Call [`AssetServer::load`] with a path to your asset.
@ -923,8 +924,8 @@ impl AssetServer {
};
let asset_reader = match server.data.mode {
AssetServerMode::Unprocessed { .. } => source.reader(),
AssetServerMode::Processed { .. } => match source.processed_reader() {
AssetServerMode::Unprocessed => source.reader(),
AssetServerMode::Processed => match source.processed_reader() {
Ok(reader) => reader,
Err(_) => {
error!(
@ -1235,8 +1236,8 @@ impl AssetServer {
// Then the meta reader, if meta exists, will correspond to the meta for the current "version" of the asset.
// See ProcessedAssetInfo::file_transaction_lock for more context
let asset_reader = match self.data.mode {
AssetServerMode::Unprocessed { .. } => source.reader(),
AssetServerMode::Processed { .. } => source.processed_reader()?,
AssetServerMode::Unprocessed => source.reader(),
AssetServerMode::Processed => source.processed_reader()?,
};
let reader = asset_reader.read(asset_path.path()).await?;
let read_meta = match &self.data.meta_check {
@ -1584,14 +1585,14 @@ pub fn handle_internal_asset_events(world: &mut World) {
for source in server.data.sources.iter() {
match server.data.mode {
AssetServerMode::Unprocessed { .. } => {
AssetServerMode::Unprocessed => {
if let Some(receiver) = source.event_receiver() {
for event in receiver.try_iter() {
handle_event(source.id(), event);
}
}
}
AssetServerMode::Processed { .. } => {
AssetServerMode::Processed => {
if let Some(receiver) = source.processed_event_receiver() {
for event in receiver.try_iter() {
handle_event(source.id(), event);

View File

@ -1,7 +1,7 @@
[package]
name = "bevy_audio"
version = "0.16.0-dev"
edition = "2021"
edition = "2024"
description = "Provides audio functionality for Bevy Engine"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"

View File

@ -1,13 +1,13 @@
[package]
name = "bevy_color"
version = "0.16.0-dev"
edition = "2021"
edition = "2024"
description = "Types for representing and manipulating color values"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"
license = "MIT OR Apache-2.0"
keywords = ["bevy", "color"]
rust-version = "1.83.0"
rust-version = "1.85.0"
[dependencies]
bevy_math = { path = "../bevy_math", version = "0.16.0-dev", default-features = false, features = [

View File

@ -1,7 +1,7 @@
[package]
name = "gen_tests"
version = "0.1.0"
edition = "2021"
edition = "2024"
publish = false
[workspace]

View File

@ -1,7 +1,7 @@
[package]
name = "bevy_core_pipeline"
version = "0.16.0-dev"
edition = "2021"
edition = "2024"
authors = [
"Bevy Contributors <bevyengine@gmail.com>",
"Carter Anderson <mcanders1@gmail.com>",

View File

@ -152,7 +152,8 @@ impl ViewNode for BloomNode {
render_context.command_encoder().push_debug_group("bloom");
let diagnostics = render_context.diagnostic_recorder();
let time_span = diagnostics.time_span(render_context.command_encoder(), "bloom");
let command_encoder = render_context.command_encoder();
let time_span = diagnostics.time_span(command_encoder, "bloom");
// First downsample pass
{

View File

@ -1,7 +1,7 @@
[package]
name = "bevy_derive"
version = "0.16.0-dev"
edition = "2021"
edition = "2024"
description = "Provides derive implementations for Bevy Engine"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"

View File

@ -1,6 +1,6 @@
[package]
name = "bevy_derive_compile_fail"
edition = "2021"
edition = "2024"
description = "Compile fail tests for Bevy Engine's various macros"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"

View File

@ -10,14 +10,20 @@ pub fn bevy_main(_attr: TokenStream, item: TokenStream) -> TokenStream {
);
TokenStream::from(quote! {
#[no_mangle]
// SAFETY: `#[bevy_main]` should only be placed on a single `main` function
// TODO: Potentially make `bevy_main` and unsafe attribute as there is a safety
// guarantee required from the caller.
#[unsafe(no_mangle)]
#[cfg(target_os = "android")]
fn android_main(android_app: bevy::window::android_activity::AndroidApp) {
let _ = bevy::window::ANDROID_APP.set(android_app);
main();
}
#[no_mangle]
// SAFETY: `#[bevy_main]` should only be placed on a single `main` function
// TODO: Potentially make `bevy_main` and unsafe attribute as there is a safety
// guarantee required from the caller.
#[unsafe(no_mangle)]
#[cfg(target_os = "ios")]
extern "C" fn main_rs() {
main();

View File

@ -1,7 +1,7 @@
[package]
name = "bevy_dev_tools"
version = "0.16.0-dev"
edition = "2021"
edition = "2024"
description = "Collection of developer tools for the Bevy Engine"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"

View File

@ -29,7 +29,7 @@ pub mod states;
/// To enable developer tools, you can either:
///
/// - Create a custom crate feature (e.g "`dev_mode`"), which enables the `bevy_dev_tools` feature
/// along with any other development tools you might be using:
/// along with any other development tools you might be using:
///
/// ```toml
/// [feature]

View File

@ -1,7 +1,7 @@
[package]
name = "bevy_diagnostic"
version = "0.16.0-dev"
edition = "2021"
edition = "2024"
description = "Provides diagnostic functionality for Bevy Engine"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"
@ -54,17 +54,6 @@ critical-section = [
"bevy_tasks?/critical-section",
]
## `portable-atomic` provides additional platform support for atomic types and
## operations, even on targets without native support.
portable-atomic = [
"bevy_ecs/portable-atomic",
"bevy_app/portable-atomic",
"bevy_platform_support/portable-atomic",
"bevy_time/portable-atomic",
"bevy_utils/portable-atomic",
"bevy_tasks?/portable-atomic",
]
[dependencies]
# bevy
bevy_app = { path = "../bevy_app", version = "0.16.0-dev", default-features = false }

View File

@ -1,7 +1,7 @@
[package]
name = "bevy_dylib"
version = "0.16.0-dev"
edition = "2021"
edition = "2024"
description = "Force the Bevy Engine to be dynamically linked for faster linking"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"

View File

@ -1,14 +1,14 @@
[package]
name = "bevy_ecs"
version = "0.16.0-dev"
edition = "2021"
edition = "2024"
description = "Bevy Engine's entity component system"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"
license = "MIT OR Apache-2.0"
keywords = ["ecs", "game", "bevy"]
categories = ["game-engines", "data-structures"]
rust-version = "1.83.0"
rust-version = "1.85.0"
[features]
default = ["std", "bevy_reflect", "async_executor"]
@ -93,15 +93,6 @@ critical-section = [
"bevy_reflect?/critical-section",
]
## `portable-atomic` provides additional platform support for atomic types and
## operations, even on targets without native support.
portable-atomic = [
"bevy_tasks?/portable-atomic",
"bevy_platform_support/portable-atomic",
"concurrent-queue/portable-atomic",
"bevy_reflect?/portable-atomic",
]
[dependencies]
bevy_ptr = { path = "../bevy_ptr", version = "0.16.0-dev" }
bevy_reflect = { path = "../bevy_reflect", version = "0.16.0-dev", features = [
@ -140,6 +131,11 @@ tracing = { version = "0.1", default-features = false, optional = true }
log = { version = "0.4", default-features = false }
bumpalo = "3"
[target.'cfg(not(all(target_has_atomic = "8", target_has_atomic = "16", target_has_atomic = "32", target_has_atomic = "64", target_has_atomic = "ptr")))'.dependencies]
concurrent-queue = { version = "2.5.0", default-features = false, features = [
"portable-atomic",
] }
[dev-dependencies]
rand = "0.8"
static_assertions = "1.1.0"

View File

@ -1,6 +1,6 @@
[package]
name = "bevy_ecs_compile_fail"
edition = "2021"
edition = "2024"
description = "Compile fail tests for Bevy Engine's entity component system"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"

View File

@ -45,7 +45,7 @@ struct MyEvent {
fn sending_system(mut event_writer: EventWriter<MyEvent>) {
let random_value: f32 = rand::random();
if random_value > 0.5 {
event_writer.send(MyEvent {
event_writer.write(MyEvent {
message: "A random event with value > 0.5".to_string(),
random_value,
});

View File

@ -2,7 +2,7 @@
name = "bevy_ecs_macros"
version = "0.16.0-dev"
description = "Bevy ECS Macros"
edition = "2021"
edition = "2024"
license = "MIT OR Apache-2.0"
[lib]

View File

@ -257,7 +257,7 @@ pub fn derive_component(input: TokenStream) -> TokenStream {
fn visit_entities(data: &Data, bevy_ecs_path: &Path, is_relationship: bool) -> TokenStream2 {
match data {
Data::Struct(DataStruct { ref fields, .. }) => {
Data::Struct(DataStruct { fields, .. }) => {
let mut visited_fields = Vec::new();
let mut visited_indices = Vec::new();
match fields {
@ -343,8 +343,8 @@ fn visit_entities(data: &Data, bevy_ecs_path: &Path, is_relationship: bool) -> T
let field_member = ident_or_index(field.ident.as_ref(), index);
let field_ident = format_ident!("field_{}", field_member);
variant_fields.push(quote!(#field_member: ref #field_ident));
variant_fields_mut.push(quote!(#field_member: ref mut #field_ident));
variant_fields.push(quote!(#field_member: #field_ident));
variant_fields_mut.push(quote!(#field_member: #field_ident));
visit_variant_fields.push(quote!(#field_ident.visit_entities(&mut func);));
visit_variant_fields_mut

View File

@ -223,7 +223,7 @@ pub trait DetectChangesMut: DetectChanges {
/// let new_score = 0;
/// if let Some(Score(previous_score)) = score.replace_if_neq(Score(new_score)) {
/// // If `score` change, emit a `ScoreChanged` event.
/// score_changed.send(ScoreChanged {
/// score_changed.write(ScoreChanged {
/// current: new_score,
/// previous: previous_score,
/// });
@ -811,7 +811,7 @@ impl<'w, T: ?Sized> Ref<'w, T> {
/// - `added` - A [`Tick`] that stores the tick when the wrapped value was created.
/// - `changed` - A [`Tick`] that stores the last time the wrapped value was changed.
/// - `last_run` - A [`Tick`], occurring before `this_run`, which is used
/// as a reference to determine whether the wrapped value is newly added or changed.
/// as a reference to determine whether the wrapped value is newly added or changed.
/// - `this_run` - A [`Tick`] corresponding to the current point in time -- "now".
pub fn new(
value: &'w T,

View File

@ -2060,7 +2060,7 @@ impl RequiredComponents {
//
// This would be resolved by https://github.com/rust-lang/rust/issues/123430
#[cfg(feature = "portable-atomic")]
#[cfg(not(target_has_atomic = "ptr"))]
use alloc::boxed::Box;
type Constructor = dyn for<'a, 'b> Fn(
@ -2072,10 +2072,10 @@ impl RequiredComponents {
MaybeLocation,
);
#[cfg(feature = "portable-atomic")]
#[cfg(not(target_has_atomic = "ptr"))]
type Intermediate<T> = Box<T>;
#[cfg(not(feature = "portable-atomic"))]
#[cfg(target_has_atomic = "ptr")]
type Intermediate<T> = Arc<T>;
let boxed: Intermediate<Constructor> = Intermediate::new(

View File

@ -70,6 +70,10 @@ mod unique_slice;
pub use unique_slice::*;
mod unique_array;
pub use unique_array::UniqueEntityArray;
use crate::{
archetype::{ArchetypeId, ArchetypeRow},
change_detection::MaybeLocation,

View File

@ -0,0 +1,543 @@
use core::{
array,
borrow::{Borrow, BorrowMut},
fmt::Debug,
ops::{
Bound, Deref, DerefMut, Index, IndexMut, Range, RangeFrom, RangeFull, RangeInclusive,
RangeTo, RangeToInclusive,
},
ptr,
};
use alloc::{
boxed::Box,
collections::{BTreeSet, BinaryHeap, LinkedList, VecDeque},
rc::Rc,
sync::Arc,
vec::Vec,
};
use super::{unique_slice, TrustedEntityBorrow, UniqueEntityIter, UniqueEntitySlice};
/// An array that contains only unique entities.
///
/// It can be obtained through certain methods on [`UniqueEntitySlice`],
/// and some [`TryFrom`] implementations.
// NOTE(review): several `unsafe` casts in this module treat `Self` as
// layout-compatible with `[T; N]` (e.g. `Box::into_raw(..).cast()`); confirm
// `#[repr(transparent)]` is (or should be) present on this struct.
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct UniqueEntityArray<T: TrustedEntityBorrow, const N: usize>([T; N]);
impl<T: TrustedEntityBorrow, const N: usize> UniqueEntityArray<T, N> {
    /// Constructs a `UniqueEntityArray` from a [`[T; N]`] unsafely.
    ///
    /// # Safety
    ///
    /// `array` must contain only unique elements.
    pub const unsafe fn from_array_unchecked(array: [T; N]) -> Self {
        Self(array)
    }

    /// Constructs a `&UniqueEntityArray` from a [`&[T; N]`] unsafely.
    ///
    /// # Safety
    ///
    /// `array` must contain only unique elements.
    pub const unsafe fn from_array_ref_unchecked(array: &[T; N]) -> &Self {
        // SAFETY: UniqueEntityArray is a transparent wrapper around [T; N].
        unsafe { &*(ptr::from_ref(array).cast()) }
    }

    /// Constructs a `Box<UniqueEntityArray>` from a [`Box<[T; N]>`] unsafely.
    ///
    /// # Safety
    ///
    /// `array` must contain only unique elements.
    pub unsafe fn from_boxed_array_unchecked(array: Box<[T; N]>) -> Box<Self> {
        // SAFETY: UniqueEntityArray is a transparent wrapper around [T; N].
        unsafe { Box::from_raw(Box::into_raw(array).cast()) }
    }

    /// Casts `self` into the inner array.
    pub fn into_boxed_inner(self: Box<Self>) -> Box<[T; N]> {
        // SAFETY: UniqueEntityArray is a transparent wrapper around [T; N].
        unsafe { Box::from_raw(Box::into_raw(self).cast()) }
    }

    /// Constructs a `Arc<UniqueEntityArray>` from a [`Arc<[T; N]>`] unsafely.
    ///
    /// # Safety
    ///
    /// `slice` must contain only unique elements.
    pub unsafe fn from_arc_array_unchecked(slice: Arc<[T; N]>) -> Arc<Self> {
        // SAFETY: UniqueEntityArray is a transparent wrapper around [T; N].
        unsafe { Arc::from_raw(Arc::into_raw(slice).cast()) }
    }

    /// Casts `self` to the inner array.
    pub fn into_arc_inner(self: Arc<Self>) -> Arc<[T; N]> {
        // SAFETY: UniqueEntityArray is a transparent wrapper around [T; N].
        unsafe { Arc::from_raw(Arc::into_raw(self).cast()) }
    }

    /// Constructs a `Rc<UniqueEntityArray>` from a [`Rc<[T; N]>`] unsafely.
    ///
    /// # Safety
    ///
    /// `slice` must contain only unique elements.
    pub unsafe fn from_rc_array_unchecked(slice: Rc<[T; N]>) -> Rc<Self> {
        // SAFETY: UniqueEntityArray is a transparent wrapper around [T; N].
        unsafe { Rc::from_raw(Rc::into_raw(slice).cast()) }
    }

    /// Casts `self` to the inner array.
    pub fn into_rc_inner(self: Rc<Self>) -> Rc<[T; N]> {
        // SAFETY: UniqueEntityArray is a transparent wrapper around [T; N].
        unsafe { Rc::from_raw(Rc::into_raw(self).cast()) }
    }

    /// Return the inner array.
    pub fn into_inner(self) -> [T; N] {
        self.0
    }

    /// Returns a reference to the inner array.
    pub fn as_inner(&self) -> &[T; N] {
        &self.0
    }

    /// Returns a slice containing the entire array. Equivalent to `&s[..]`.
    pub const fn as_slice(&self) -> &UniqueEntitySlice<T> {
        // SAFETY: All elements in the original array are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked(self.0.as_slice()) }
    }

    /// Returns a mutable slice containing the entire array. Equivalent to
    /// `&mut s[..]`.
    pub fn as_mut_slice(&mut self) -> &mut UniqueEntitySlice<T> {
        // SAFETY: All elements in the original array are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked_mut(self.0.as_mut_slice()) }
    }

    /// Borrows each element and returns an array of references with the same
    /// size as `self`.
    ///
    /// Equivalent to [`[T; N]::each_ref`](array::each_ref).
    pub fn each_ref(&self) -> UniqueEntityArray<&T, N> {
        UniqueEntityArray(self.0.each_ref())
    }
}
// Deref to `UniqueEntitySlice` so all read-only slice methods are available
// directly on the array, mirroring how `[T; N]` derefs to `[T]` via coercion.
impl<T: TrustedEntityBorrow, const N: usize> Deref for UniqueEntityArray<T, N> {
    type Target = UniqueEntitySlice<T>;

    fn deref(&self) -> &Self::Target {
        // SAFETY: All elements in the original array are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked(&self.0) }
    }
}
// Mutable deref still hands out a `UniqueEntitySlice`, whose API is
// restricted so the uniqueness invariant cannot be broken through it.
impl<T: TrustedEntityBorrow, const N: usize> DerefMut for UniqueEntityArray<T, N> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: All elements in the original array are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked_mut(&mut self.0) }
    }
}
// `Default` is only provided for `N = 0`: an empty array trivially upholds
// the uniqueness invariant, with no `T: Default` requirement needed.
impl<T: TrustedEntityBorrow> Default for UniqueEntityArray<T, 0> {
    fn default() -> Self {
        // An empty array can contain no duplicates.
        Self([])
    }
}
// Iteration: both the by-reference and by-value iterators are wrapped in
// `UniqueEntityIter`, preserving the uniqueness invariant for downstream
// set-like iterator adapters.
impl<'a, T: TrustedEntityBorrow, const N: usize> IntoIterator for &'a UniqueEntityArray<T, N> {
    type Item = &'a T;
    type IntoIter = unique_slice::Iter<'a, T>;

    fn into_iter(self) -> Self::IntoIter {
        // SAFETY: All elements in the original array are unique.
        unsafe { UniqueEntityIter::from_iterator_unchecked(self.0.iter()) }
    }
}

impl<T: TrustedEntityBorrow, const N: usize> IntoIterator for UniqueEntityArray<T, N> {
    type Item = T;
    type IntoIter = IntoIter<T, N>;

    fn into_iter(self) -> Self::IntoIter {
        // SAFETY: All elements in the original array are unique.
        unsafe { UniqueEntityIter::from_iterator_unchecked(self.0.into_iter()) }
    }
}
// Conversion traits to `UniqueEntitySlice`, all delegating to the
// `Deref`/`DerefMut` coercion above (the body is just `self`).
impl<T: TrustedEntityBorrow, const N: usize> AsRef<UniqueEntitySlice<T>>
    for UniqueEntityArray<T, N>
{
    fn as_ref(&self) -> &UniqueEntitySlice<T> {
        self
    }
}

impl<T: TrustedEntityBorrow, const N: usize> AsMut<UniqueEntitySlice<T>>
    for UniqueEntityArray<T, N>
{
    fn as_mut(&mut self) -> &mut UniqueEntitySlice<T> {
        self
    }
}

impl<T: TrustedEntityBorrow, const N: usize> Borrow<UniqueEntitySlice<T>>
    for UniqueEntityArray<T, N>
{
    fn borrow(&self) -> &UniqueEntitySlice<T> {
        self
    }
}

impl<T: TrustedEntityBorrow, const N: usize> BorrowMut<UniqueEntitySlice<T>>
    for UniqueEntityArray<T, N>
{
    fn borrow_mut(&mut self) -> &mut UniqueEntitySlice<T> {
        self
    }
}
// Range-based indexing returns a `UniqueEntitySlice`: any subslice of a
// unique collection is itself unique. Indexing by a single `usize` returns
// the element directly.
impl<T: TrustedEntityBorrow, const N: usize> Index<(Bound<usize>, Bound<usize>)>
    for UniqueEntityArray<T, N>
{
    type Output = UniqueEntitySlice<T>;

    fn index(&self, key: (Bound<usize>, Bound<usize>)) -> &Self::Output {
        // SAFETY: All elements in the original slice are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked(self.0.index(key)) }
    }
}

impl<T: TrustedEntityBorrow, const N: usize> Index<Range<usize>> for UniqueEntityArray<T, N> {
    type Output = UniqueEntitySlice<T>;

    fn index(&self, key: Range<usize>) -> &Self::Output {
        // SAFETY: All elements in the original slice are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked(self.0.index(key)) }
    }
}

impl<T: TrustedEntityBorrow, const N: usize> Index<RangeFrom<usize>> for UniqueEntityArray<T, N> {
    type Output = UniqueEntitySlice<T>;

    fn index(&self, key: RangeFrom<usize>) -> &Self::Output {
        // SAFETY: All elements in the original slice are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked(self.0.index(key)) }
    }
}

impl<T: TrustedEntityBorrow, const N: usize> Index<RangeFull> for UniqueEntityArray<T, N> {
    type Output = UniqueEntitySlice<T>;

    fn index(&self, key: RangeFull) -> &Self::Output {
        // SAFETY: All elements in the original slice are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked(self.0.index(key)) }
    }
}

impl<T: TrustedEntityBorrow, const N: usize> Index<RangeInclusive<usize>>
    for UniqueEntityArray<T, N>
{
    type Output = UniqueEntitySlice<T>;

    fn index(&self, key: RangeInclusive<usize>) -> &Self::Output {
        // SAFETY: All elements in the original slice are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked(self.0.index(key)) }
    }
}

impl<T: TrustedEntityBorrow, const N: usize> Index<RangeTo<usize>> for UniqueEntityArray<T, N> {
    type Output = UniqueEntitySlice<T>;

    fn index(&self, key: RangeTo<usize>) -> &Self::Output {
        // SAFETY: All elements in the original slice are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked(self.0.index(key)) }
    }
}

impl<T: TrustedEntityBorrow, const N: usize> Index<RangeToInclusive<usize>>
    for UniqueEntityArray<T, N>
{
    type Output = UniqueEntitySlice<T>;

    fn index(&self, key: RangeToInclusive<usize>) -> &Self::Output {
        // SAFETY: All elements in the original slice are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked(self.0.index(key)) }
    }
}

impl<T: TrustedEntityBorrow, const N: usize> Index<usize> for UniqueEntityArray<T, N> {
    type Output = T;

    fn index(&self, key: usize) -> &T {
        self.0.index(key)
    }
}
// Mutable range indexing also yields `UniqueEntitySlice`, whose restricted
// API keeps the invariant intact. There is no `IndexMut<usize>` impl here —
// presumably omitted because handing out a bare `&mut T` would let callers
// write a duplicate element and break uniqueness (confirm intent upstream).
impl<T: TrustedEntityBorrow, const N: usize> IndexMut<(Bound<usize>, Bound<usize>)>
    for UniqueEntityArray<T, N>
{
    fn index_mut(&mut self, key: (Bound<usize>, Bound<usize>)) -> &mut Self::Output {
        // SAFETY: All elements in the original slice are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked_mut(self.0.index_mut(key)) }
    }
}

impl<T: TrustedEntityBorrow, const N: usize> IndexMut<Range<usize>> for UniqueEntityArray<T, N> {
    fn index_mut(&mut self, key: Range<usize>) -> &mut Self::Output {
        // SAFETY: All elements in the original slice are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked_mut(self.0.index_mut(key)) }
    }
}

impl<T: TrustedEntityBorrow, const N: usize> IndexMut<RangeFrom<usize>>
    for UniqueEntityArray<T, N>
{
    fn index_mut(&mut self, key: RangeFrom<usize>) -> &mut Self::Output {
        // SAFETY: All elements in the original slice are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked_mut(self.0.index_mut(key)) }
    }
}

impl<T: TrustedEntityBorrow, const N: usize> IndexMut<RangeFull> for UniqueEntityArray<T, N> {
    fn index_mut(&mut self, key: RangeFull) -> &mut Self::Output {
        // SAFETY: All elements in the original slice are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked_mut(self.0.index_mut(key)) }
    }
}

impl<T: TrustedEntityBorrow, const N: usize> IndexMut<RangeInclusive<usize>>
    for UniqueEntityArray<T, N>
{
    fn index_mut(&mut self, key: RangeInclusive<usize>) -> &mut Self::Output {
        // SAFETY: All elements in the original slice are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked_mut(self.0.index_mut(key)) }
    }
}

impl<T: TrustedEntityBorrow, const N: usize> IndexMut<RangeTo<usize>> for UniqueEntityArray<T, N> {
    fn index_mut(&mut self, key: RangeTo<usize>) -> &mut Self::Output {
        // SAFETY: All elements in the original slice are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked_mut(self.0.index_mut(key)) }
    }
}

impl<T: TrustedEntityBorrow, const N: usize> IndexMut<RangeToInclusive<usize>>
    for UniqueEntityArray<T, N>
{
    fn index_mut(&mut self, key: RangeToInclusive<usize>) -> &mut Self::Output {
        // SAFETY: All elements in the original slice are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked_mut(self.0.index_mut(key)) }
    }
}
// Infallible `From` conversions exist only for arrays of length 0 and 1:
// such arrays cannot contain duplicates, so no uniqueness check is needed.
// Longer arrays must go through the unsafe/`TryFrom` constructors instead.
impl<T: TrustedEntityBorrow + Clone> From<&[T; 1]> for UniqueEntityArray<T, 1> {
    fn from(value: &[T; 1]) -> Self {
        Self(value.clone())
    }
}

impl<T: TrustedEntityBorrow + Clone> From<&[T; 0]> for UniqueEntityArray<T, 0> {
    fn from(value: &[T; 0]) -> Self {
        Self(value.clone())
    }
}

impl<T: TrustedEntityBorrow + Clone> From<&mut [T; 1]> for UniqueEntityArray<T, 1> {
    fn from(value: &mut [T; 1]) -> Self {
        Self(value.clone())
    }
}

impl<T: TrustedEntityBorrow + Clone> From<&mut [T; 0]> for UniqueEntityArray<T, 0> {
    fn from(value: &mut [T; 0]) -> Self {
        Self(value.clone())
    }
}

impl<T: TrustedEntityBorrow> From<[T; 1]> for UniqueEntityArray<T, 1> {
    fn from(value: [T; 1]) -> Self {
        Self(value)
    }
}

impl<T: TrustedEntityBorrow> From<[T; 0]> for UniqueEntityArray<T, 0> {
    fn from(value: [T; 0]) -> Self {
        Self(value)
    }
}
// Tuple conversions, mirroring the standard library's `From<[T; N]>` impls
// for homogeneous tuples up to arity 12; each delegates to that std impl
// via `into_inner`.
impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 1>> for (T,) {
    fn from(array: UniqueEntityArray<T, 1>) -> Self {
        Self::from(array.into_inner())
    }
}

impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 2>> for (T, T) {
    fn from(array: UniqueEntityArray<T, 2>) -> Self {
        Self::from(array.into_inner())
    }
}

impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 3>> for (T, T, T) {
    fn from(array: UniqueEntityArray<T, 3>) -> Self {
        Self::from(array.into_inner())
    }
}

impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 4>> for (T, T, T, T) {
    fn from(array: UniqueEntityArray<T, 4>) -> Self {
        Self::from(array.into_inner())
    }
}

impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 5>> for (T, T, T, T, T) {
    fn from(array: UniqueEntityArray<T, 5>) -> Self {
        Self::from(array.into_inner())
    }
}

impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 6>> for (T, T, T, T, T, T) {
    fn from(array: UniqueEntityArray<T, 6>) -> Self {
        Self::from(array.into_inner())
    }
}

impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 7>> for (T, T, T, T, T, T, T) {
    fn from(array: UniqueEntityArray<T, 7>) -> Self {
        Self::from(array.into_inner())
    }
}

impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 8>> for (T, T, T, T, T, T, T, T) {
    fn from(array: UniqueEntityArray<T, 8>) -> Self {
        Self::from(array.into_inner())
    }
}

impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 9>> for (T, T, T, T, T, T, T, T, T) {
    fn from(array: UniqueEntityArray<T, 9>) -> Self {
        Self::from(array.into_inner())
    }
}

impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 10>> for (T, T, T, T, T, T, T, T, T, T) {
    fn from(array: UniqueEntityArray<T, 10>) -> Self {
        Self::from(array.into_inner())
    }
}

impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 11>> for (T, T, T, T, T, T, T, T, T, T, T) {
    fn from(array: UniqueEntityArray<T, 11>) -> Self {
        Self::from(array.into_inner())
    }
}

impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 12>>
    for (T, T, T, T, T, T, T, T, T, T, T, T)
{
    fn from(array: UniqueEntityArray<T, 12>) -> Self {
        Self::from(array.into_inner())
    }
}
// Conversions into standard collections, delegating to each collection's
// own `From<[T; N]>` impl. Converting *out* of a unique collection is
// always safe; only conversions *into* `UniqueEntityArray` need checks.
impl<T: TrustedEntityBorrow + Ord, const N: usize> From<UniqueEntityArray<T, N>> for BTreeSet<T> {
    fn from(value: UniqueEntityArray<T, N>) -> Self {
        BTreeSet::from(value.0)
    }
}

impl<T: TrustedEntityBorrow + Ord, const N: usize> From<UniqueEntityArray<T, N>> for BinaryHeap<T> {
    fn from(value: UniqueEntityArray<T, N>) -> Self {
        BinaryHeap::from(value.0)
    }
}

impl<T: TrustedEntityBorrow, const N: usize> From<UniqueEntityArray<T, N>> for LinkedList<T> {
    fn from(value: UniqueEntityArray<T, N>) -> Self {
        LinkedList::from(value.0)
    }
}

impl<T: TrustedEntityBorrow, const N: usize> From<UniqueEntityArray<T, N>> for Vec<T> {
    fn from(value: UniqueEntityArray<T, N>) -> Self {
        Vec::from(value.0)
    }
}

impl<T: TrustedEntityBorrow, const N: usize> From<UniqueEntityArray<T, N>> for VecDeque<T> {
    fn from(value: UniqueEntityArray<T, N>) -> Self {
        VecDeque::from(value.0)
    }
}
// Cross-type equality: comparisons delegate to the wrapped array/collection,
// so they behave exactly like comparing the underlying `[T; N]`.
// NOTE(review): `VecDeque` gets a `&mut UniqueEntityArray` impl but `Vec`
// does not — this asymmetry mirrors std's own `PartialEq` surface; confirm.
impl<T: TrustedEntityBorrow + PartialEq<U>, U: TrustedEntityBorrow, const N: usize>
    PartialEq<&UniqueEntitySlice<U>> for UniqueEntityArray<T, N>
{
    fn eq(&self, other: &&UniqueEntitySlice<U>) -> bool {
        self.0.eq(&other.as_inner())
    }
}

impl<T: TrustedEntityBorrow + PartialEq<U>, U: TrustedEntityBorrow, const N: usize>
    PartialEq<UniqueEntitySlice<U>> for UniqueEntityArray<T, N>
{
    fn eq(&self, other: &UniqueEntitySlice<U>) -> bool {
        self.0.eq(other.as_inner())
    }
}

impl<T: PartialEq<U>, U: TrustedEntityBorrow, const N: usize> PartialEq<&UniqueEntityArray<U, N>>
    for Vec<T>
{
    fn eq(&self, other: &&UniqueEntityArray<U, N>) -> bool {
        self.eq(&other.0)
    }
}

impl<T: PartialEq<U>, U: TrustedEntityBorrow, const N: usize> PartialEq<&UniqueEntityArray<U, N>>
    for VecDeque<T>
{
    fn eq(&self, other: &&UniqueEntityArray<U, N>) -> bool {
        self.eq(&other.0)
    }
}

impl<T: PartialEq<U>, U: TrustedEntityBorrow, const N: usize>
    PartialEq<&mut UniqueEntityArray<U, N>> for VecDeque<T>
{
    fn eq(&self, other: &&mut UniqueEntityArray<U, N>) -> bool {
        self.eq(&other.0)
    }
}

impl<T: PartialEq<U>, U: TrustedEntityBorrow, const N: usize> PartialEq<UniqueEntityArray<U, N>>
    for Vec<T>
{
    fn eq(&self, other: &UniqueEntityArray<U, N>) -> bool {
        self.eq(&other.0)
    }
}

impl<T: PartialEq<U>, U: TrustedEntityBorrow, const N: usize> PartialEq<UniqueEntityArray<U, N>>
    for VecDeque<T>
{
    fn eq(&self, other: &UniqueEntityArray<U, N>) -> bool {
        self.eq(&other.0)
    }
}
/// A by-value array iterator over unique entities.
///
/// This is the [`IntoIterator::IntoIter`] type for [`UniqueEntityArray`].
pub type IntoIter<T, const N: usize> = UniqueEntityIter<array::IntoIter<T, N>>;

impl<T: TrustedEntityBorrow, const N: usize> UniqueEntityIter<array::IntoIter<T, N>> {
    /// Returns an immutable slice of all elements that have not been yielded
    /// yet.
    ///
    /// Equivalent to [`array::IntoIter::as_slice`].
    pub fn as_slice(&self) -> &UniqueEntitySlice<T> {
        // SAFETY: All elements in the original slice are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked(self.as_inner().as_slice()) }
    }

    /// Returns a mutable slice of all elements that have not been yielded yet.
    ///
    /// Equivalent to [`array::IntoIter::as_mut_slice`].
    pub fn as_mut_slice(&mut self) -> &mut UniqueEntitySlice<T> {
        // SAFETY: All elements in the original slice are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked_mut(self.as_mut_inner().as_mut_slice()) }
    }
}

View File

@ -1,4 +1,5 @@
use core::{
array::TryFromSliceError,
borrow::Borrow,
cmp::Ordering,
fmt::Debug,
@ -22,7 +23,7 @@ use alloc::{
use super::{
unique_vec, EntitySet, EntitySetIterator, FromEntitySetIterator, TrustedEntityBorrow,
UniqueEntityIter, UniqueEntityVec,
UniqueEntityArray, UniqueEntityIter, UniqueEntityVec,
};
/// A slice that contains only unique entities.
@ -128,6 +129,64 @@ impl<T: TrustedEntityBorrow> UniqueEntitySlice<T> {
Some((last, unsafe { Self::from_slice_unchecked(rest) }))
}
/// Returns an array reference to the first `N` items in the slice.
///
/// Equivalent to [`[T]::first_chunk`](slice::first_chunk).
pub const fn first_chunk<const N: usize>(&self) -> Option<&UniqueEntityArray<T, N>> {
let Some(chunk) = self.0.first_chunk() else {
return None;
};
// SAFETY: All elements in the original slice are unique.
Some(unsafe { UniqueEntityArray::from_array_ref_unchecked(chunk) })
}
/// Returns an array reference to the first `N` items in the slice and the remaining slice.
///
/// Equivalent to [`[T]::split_first_chunk`](slice::split_first_chunk).
pub const fn split_first_chunk<const N: usize>(
&self,
) -> Option<(&UniqueEntityArray<T, N>, &UniqueEntitySlice<T>)> {
let Some((chunk, rest)) = self.0.split_first_chunk() else {
return None;
};
// SAFETY: All elements in the original slice are unique.
unsafe {
Some((
UniqueEntityArray::from_array_ref_unchecked(chunk),
Self::from_slice_unchecked(rest),
))
}
}
/// Returns an array reference to the last `N` items in the slice and the remaining slice.
///
/// Equivalent to [`[T]::split_last_chunk`](slice::split_last_chunk).
pub const fn split_last_chunk<const N: usize>(
&self,
) -> Option<(&UniqueEntitySlice<T>, &UniqueEntityArray<T, N>)> {
let Some((rest, chunk)) = self.0.split_last_chunk() else {
return None;
};
// SAFETY: All elements in the original slice are unique.
unsafe {
Some((
Self::from_slice_unchecked(rest),
UniqueEntityArray::from_array_ref_unchecked(chunk),
))
}
}
/// Returns an array reference to the last `N` items in the slice.
///
/// Equivalent to [`[T]::last_chunk`](slice::last_chunk).
pub const fn last_chunk<const N: usize>(&self) -> Option<&UniqueEntityArray<T, N>> {
let Some(chunk) = self.0.last_chunk() else {
return None;
};
// SAFETY: All elements in the original slice are unique.
Some(unsafe { UniqueEntityArray::from_array_ref_unchecked(chunk) })
}
/// Returns a reference to a subslice.
///
/// Equivalent to the range functionality of [`[T]::get`].
@ -949,6 +1008,15 @@ impl<'a, T: TrustedEntityBorrow + Clone> From<&'a UniqueEntitySlice<T>>
}
}
impl<T: TrustedEntityBorrow + Clone, const N: usize> From<UniqueEntityArray<T, N>>
for Box<UniqueEntitySlice<T>>
{
fn from(value: UniqueEntityArray<T, N>) -> Self {
// SAFETY: All elements in the original slice are unique.
unsafe { UniqueEntitySlice::from_boxed_slice_unchecked(Box::new(value.into_inner())) }
}
}
impl<'a, T: TrustedEntityBorrow + Clone> From<Cow<'a, UniqueEntitySlice<T>>>
for Box<UniqueEntitySlice<T>>
{
@ -1134,6 +1202,30 @@ impl<T: TrustedEntityBorrow + PartialEq<U>, U, const N: usize> PartialEq<[U; N]>
}
}
impl<T: TrustedEntityBorrow + PartialEq<U>, U: TrustedEntityBorrow, const N: usize>
PartialEq<UniqueEntityArray<U, N>> for &UniqueEntitySlice<T>
{
fn eq(&self, other: &UniqueEntityArray<U, N>) -> bool {
self.0.eq(&other.0)
}
}
impl<T: TrustedEntityBorrow + PartialEq<U>, U: TrustedEntityBorrow, const N: usize>
PartialEq<UniqueEntityArray<U, N>> for &mut UniqueEntitySlice<T>
{
fn eq(&self, other: &UniqueEntityArray<U, N>) -> bool {
self.0.eq(&other.0)
}
}
impl<T: TrustedEntityBorrow + PartialEq<U>, U: TrustedEntityBorrow, const N: usize>
PartialEq<UniqueEntityArray<U, N>> for UniqueEntitySlice<T>
{
fn eq(&self, other: &UniqueEntityArray<U, N>) -> bool {
self.0.eq(&other.0)
}
}
impl<T: TrustedEntityBorrow + PartialEq<U>, U> PartialEq<Vec<U>> for &UniqueEntitySlice<T> {
fn eq(&self, other: &Vec<U>) -> bool {
self.0.eq(other)
@ -1161,6 +1253,38 @@ impl<T: TrustedEntityBorrow + Clone> ToOwned for UniqueEntitySlice<T> {
}
}
impl<'a, T: TrustedEntityBorrow + Copy, const N: usize> TryFrom<&'a UniqueEntitySlice<T>>
for &'a UniqueEntityArray<T, N>
{
type Error = TryFromSliceError;
fn try_from(value: &'a UniqueEntitySlice<T>) -> Result<Self, Self::Error> {
<&[T; N]>::try_from(&value.0).map(|array|
// SAFETY: All elements in the original slice are unique.
unsafe { UniqueEntityArray::from_array_ref_unchecked(array) })
}
}
impl<T: TrustedEntityBorrow + Copy, const N: usize> TryFrom<&UniqueEntitySlice<T>>
for UniqueEntityArray<T, N>
{
type Error = TryFromSliceError;
fn try_from(value: &UniqueEntitySlice<T>) -> Result<Self, Self::Error> {
<&Self>::try_from(value).copied()
}
}
impl<T: TrustedEntityBorrow + Copy, const N: usize> TryFrom<&mut UniqueEntitySlice<T>>
for UniqueEntityArray<T, N>
{
type Error = TryFromSliceError;
fn try_from(value: &mut UniqueEntitySlice<T>) -> Result<Self, Self::Error> {
<Self>::try_from(&*value)
}
}
impl<T: TrustedEntityBorrow> Index<(Bound<usize>, Bound<usize>)> for UniqueEntitySlice<T> {
type Output = Self;
fn index(&self, key: (Bound<usize>, Bound<usize>)) -> &Self {
@ -1280,7 +1404,6 @@ impl<T: TrustedEntityBorrow> IndexMut<RangeToInclusive<usize>> for UniqueEntityS
/// the [`IntoIterator`] impls on it and [`UniqueEntityVec`].
///
/// [`iter`]: `UniqueEntitySlice::iter`
/// [`into_iter`]: UniqueEntitySlice::into_iter
pub type Iter<'a, T> = UniqueEntityIter<slice::Iter<'a, T>>;
impl<'a, T: TrustedEntityBorrow> UniqueEntityIter<slice::Iter<'a, T>> {

View File

@ -17,8 +17,8 @@ use alloc::{
};
use super::{
unique_slice, EntitySet, FromEntitySetIterator, TrustedEntityBorrow, UniqueEntityIter,
UniqueEntitySlice,
unique_slice, EntitySet, FromEntitySetIterator, TrustedEntityBorrow, UniqueEntityArray,
UniqueEntityIter, UniqueEntitySlice,
};
/// A `Vec` that contains only unique entities.
@ -550,6 +550,14 @@ impl<T: TrustedEntityBorrow + PartialEq<U>, U, const N: usize> PartialEq<&[U; N]
}
}
impl<T: TrustedEntityBorrow + PartialEq<U>, U: TrustedEntityBorrow, const N: usize>
PartialEq<&UniqueEntityArray<U, N>> for UniqueEntityVec<T>
{
fn eq(&self, other: &&UniqueEntityArray<U, N>) -> bool {
self.0.eq(&other.as_inner())
}
}
impl<T: TrustedEntityBorrow + PartialEq<U>, U, const N: usize> PartialEq<&mut [U; N]>
for UniqueEntityVec<T>
{
@ -558,6 +566,14 @@ impl<T: TrustedEntityBorrow + PartialEq<U>, U, const N: usize> PartialEq<&mut [U
}
}
impl<T: TrustedEntityBorrow + PartialEq<U>, U: TrustedEntityBorrow, const N: usize>
PartialEq<&mut UniqueEntityArray<U, N>> for UniqueEntityVec<T>
{
fn eq(&self, other: &&mut UniqueEntityArray<U, N>) -> bool {
self.0.eq(other.as_inner())
}
}
impl<T: TrustedEntityBorrow + PartialEq<U>, U> PartialEq<[U]> for UniqueEntityVec<T> {
fn eq(&self, other: &[U]) -> bool {
self.0.eq(other)
@ -580,6 +596,14 @@ impl<T: TrustedEntityBorrow + PartialEq<U>, U, const N: usize> PartialEq<[U; N]>
}
}
impl<T: TrustedEntityBorrow + PartialEq<U>, U: TrustedEntityBorrow, const N: usize>
PartialEq<UniqueEntityArray<U, N>> for UniqueEntityVec<T>
{
fn eq(&self, other: &UniqueEntityArray<U, N>) -> bool {
self.0.eq(other.as_inner())
}
}
impl<T: PartialEq<U>, U: TrustedEntityBorrow> PartialEq<UniqueEntityVec<U>> for Vec<T> {
fn eq(&self, other: &UniqueEntityVec<U>) -> bool {
self.eq(&other.0)
@ -683,6 +707,28 @@ impl<T: TrustedEntityBorrow> From<[T; 0]> for UniqueEntityVec<T> {
}
}
impl<T: TrustedEntityBorrow + Clone, const N: usize> From<&UniqueEntityArray<T, N>>
for UniqueEntityVec<T>
{
fn from(value: &UniqueEntityArray<T, N>) -> Self {
Self(Vec::from(value.as_inner().clone()))
}
}
impl<T: TrustedEntityBorrow + Clone, const N: usize> From<&mut UniqueEntityArray<T, N>>
for UniqueEntityVec<T>
{
fn from(value: &mut UniqueEntityArray<T, N>) -> Self {
Self(Vec::from(value.as_inner().clone()))
}
}
impl<T: TrustedEntityBorrow, const N: usize> From<UniqueEntityArray<T, N>> for UniqueEntityVec<T> {
fn from(value: UniqueEntityArray<T, N>) -> Self {
Self(Vec::from(value.into_inner()))
}
}
impl<T: TrustedEntityBorrow> From<UniqueEntityVec<T>> for Vec<T> {
fn from(value: UniqueEntityVec<T>) -> Self {
value.0
@ -755,6 +801,20 @@ impl<T: TrustedEntityBorrow, const N: usize> TryFrom<UniqueEntityVec<T>> for Box
}
}
/// Attempts to convert a [`UniqueEntityVec`] into a boxed [`UniqueEntityArray`]
/// of length `N`.
///
/// If the vector's length does not match `N`, the vector is returned unchanged
/// as the error.
impl<T: TrustedEntityBorrow, const N: usize> TryFrom<UniqueEntityVec<T>>
    for Box<UniqueEntityArray<T, N>>
{
    type Error = UniqueEntityVec<T>;

    fn try_from(value: UniqueEntityVec<T>) -> Result<Self, Self::Error> {
        Box::try_from(value.0)
            .map(|v|
                // SAFETY: All elements in the original Vec are unique.
                unsafe { UniqueEntityArray::from_boxed_array_unchecked(v) })
            // On length mismatch, re-wrap the (still unique) elements for the caller.
            .map_err(UniqueEntityVec)
    }
}
impl<T: TrustedEntityBorrow, const N: usize> TryFrom<UniqueEntityVec<T>> for [T; N] {
type Error = UniqueEntityVec<T>;
@ -763,6 +823,20 @@ impl<T: TrustedEntityBorrow, const N: usize> TryFrom<UniqueEntityVec<T>> for [T;
}
}
/// Attempts to convert a [`UniqueEntityVec`] into a [`UniqueEntityArray`] of
/// length `N`.
///
/// If the vector's length does not match `N`, the vector is returned unchanged
/// as the error.
impl<T: TrustedEntityBorrow, const N: usize> TryFrom<UniqueEntityVec<T>>
    for UniqueEntityArray<T, N>
{
    type Error = UniqueEntityVec<T>;

    fn try_from(value: UniqueEntityVec<T>) -> Result<Self, Self::Error> {
        <[T; N] as TryFrom<Vec<T>>>::try_from(value.0)
            .map(|v|
                // SAFETY: All elements in the original Vec are unique.
                unsafe { UniqueEntityArray::from_array_unchecked(v) })
            // On length mismatch, re-wrap the (still unique) elements for the caller.
            .map_err(UniqueEntityVec)
    }
}
impl<T: TrustedEntityBorrow> From<BTreeSet<T>> for UniqueEntityVec<T> {
fn from(value: BTreeSet<T>) -> Self {
Self(value.into_iter().collect::<Vec<T>>())
@ -809,7 +883,7 @@ impl<T: TrustedEntityBorrow> Extend<T> for UniqueEntityVec<T> {
let reserve = if self.is_empty() {
iter.size_hint().0
} else {
(iter.size_hint().0 + 1) / 2
iter.size_hint().0.div_ceil(2)
};
self.reserve(reserve);
// Internal iteration (fold/for_each) is known to result in better code generation
@ -836,7 +910,7 @@ impl<'a, T: TrustedEntityBorrow + Copy + 'a> Extend<&'a T> for UniqueEntityVec<T
let reserve = if self.is_empty() {
iter.size_hint().0
} else {
(iter.size_hint().0 + 1) / 2
iter.size_hint().0.div_ceil(2)
};
self.reserve(reserve);
// Internal iteration (fold/for_each) is known to result in better code generation

View File

@ -73,7 +73,7 @@ use {
/// - [`EventReader`]s that read at least once per update will never drop events.
/// - [`EventReader`]s that read once within two updates might still receive some events
/// - [`EventReader`]s that read after two updates are guaranteed to drop all events that occurred
/// before those updates.
/// before those updates.
///
/// The buffers in [`Events`] will grow indefinitely if [`update`](Events::update) is never called.
///

View File

@ -14,14 +14,14 @@ use bevy_ecs::{
/// #[derive(Event)]
/// pub struct MyEvent; // Custom event type.
/// fn my_system(mut writer: EventWriter<MyEvent>) {
/// writer.send(MyEvent);
/// writer.write(MyEvent);
/// }
///
/// # bevy_ecs::system::assert_is_system(my_system);
/// ```
/// # Observers
///
/// "Buffered" Events, such as those sent directly in [`Events`] or sent using [`EventWriter`], do _not_ automatically
/// "Buffered" Events, such as those sent directly in [`Events`] or written using [`EventWriter`], do _not_ automatically
/// trigger any [`Observer`]s watching for that event, as each [`Event`] has different requirements regarding _if_ it will
/// be triggered, and if so, _when_ it will be triggered in the schedule.
///
@ -32,7 +32,7 @@ use bevy_ecs::{
///
/// # Untyped events
///
/// `EventWriter` can only send events of one specific type, which must be known at compile-time.
/// `EventWriter` can only write events of one specific type, which must be known at compile-time.
/// This is not a problem most of the time, but you may find a situation where you cannot know
/// ahead of time every kind of event you'll need to send. In this case, you can use the "type-erased event" pattern.
///
@ -64,13 +64,48 @@ pub struct EventWriter<'w, E: Event> {
}
impl<'w, E: Event> EventWriter<'w, E> {
/// Writes an `event`, which can later be read by [`EventReader`](super::EventReader)s.
/// This method returns the [ID](`EventId`) of the written `event`.
///
/// See [`Events`] for details.
#[doc(alias = "send")]
#[track_caller]
pub fn write(&mut self, event: E) -> EventId<E> {
    // Delegates to the underlying `Events` buffer.
    self.events.send(event)
}
/// Writes a list of `events` all at once, which can later be read by [`EventReader`](super::EventReader)s.
/// This is more efficient than writing each event individually.
/// This method returns the [IDs](`EventId`) of the written `events`.
///
/// See [`Events`] for details.
#[doc(alias = "send_batch")]
#[track_caller]
pub fn write_batch(&mut self, events: impl IntoIterator<Item = E>) -> SendBatchIds<E> {
    // Delegates to the underlying `Events` buffer.
    self.events.send_batch(events)
}
/// Writes the default value of the event. Useful when the event is an empty struct.
/// This method returns the [ID](`EventId`) of the written `event`.
///
/// See [`Events`] for details.
#[doc(alias = "send_default")]
#[track_caller]
pub fn write_default(&mut self) -> EventId<E>
where
    E: Default,
{
    // Delegates to the underlying `Events` buffer.
    self.events.send_default()
}
/// Sends an `event`, which can later be read by [`EventReader`](super::EventReader)s.
/// This method returns the [ID](`EventId`) of the sent `event`.
///
/// See [`Events`] for details.
#[deprecated(since = "0.16.0", note = "Use `EventWriter::write` instead.")]
#[track_caller]
pub fn send(&mut self, event: E) -> EventId<E> {
self.events.send(event)
self.write(event)
}
/// Sends a list of `events` all at once, which can later be read by [`EventReader`](super::EventReader)s.
@ -78,20 +113,22 @@ impl<'w, E: Event> EventWriter<'w, E> {
/// This method returns the [IDs](`EventId`) of the sent `events`.
///
/// See [`Events`] for details.
#[deprecated(since = "0.16.0", note = "Use `EventWriter::write_batch` instead.")]
#[track_caller]
pub fn send_batch(&mut self, events: impl IntoIterator<Item = E>) -> SendBatchIds<E> {
self.events.send_batch(events)
self.write_batch(events)
}
/// Sends the default value of the event. Useful when the event is an empty struct.
/// This method returns the [ID](`EventId`) of the sent `event`.
///
/// See [`Events`] for details.
#[deprecated(since = "0.16.0", note = "Use `EventWriter::write_default` instead.")]
#[track_caller]
pub fn send_default(&mut self) -> EventId<E>
where
E: Default,
{
self.events.send_default()
self.write_default()
}
}

View File

@ -2,13 +2,6 @@
unsafe_op_in_unsafe_fn,
reason = "See #11590. To be removed once all applicable unsafe code has an unsafe block with a safety comment."
)]
#![cfg_attr(
test,
expect(
dependency_on_unit_never_type_fallback,
reason = "See #17340. To be removed once Edition 2024 is released"
)
)]
#![doc = include_str!("../README.md")]
#![cfg_attr(
any(docsrs, docsrs_dep),

View File

@ -5,6 +5,7 @@ mod runner;
pub use entity_observer::ObservedBy;
pub use runner::*;
use variadics_please::all_tuples;
use crate::{
archetype::ArchetypeFlags,
@ -177,92 +178,108 @@ impl<'w, E, B: Bundle> DerefMut for Trigger<'w, E, B> {
/// will run.
pub trait TriggerTargets {
/// The components the trigger should target.
fn components(&self) -> &[ComponentId];
fn components(&self) -> impl Iterator<Item = ComponentId> + Clone + '_;
/// The entities the trigger should target.
fn entities(&self) -> &[Entity];
fn entities(&self) -> impl Iterator<Item = Entity> + Clone + '_;
}
impl TriggerTargets for () {
fn components(&self) -> &[ComponentId] {
&[]
impl<T: TriggerTargets + ?Sized> TriggerTargets for &T {
fn components(&self) -> impl Iterator<Item = ComponentId> + Clone + '_ {
(**self).components()
}
fn entities(&self) -> &[Entity] {
&[]
fn entities(&self) -> impl Iterator<Item = Entity> + Clone + '_ {
(**self).entities()
}
}
impl TriggerTargets for Entity {
fn components(&self) -> &[ComponentId] {
&[]
fn components(&self) -> impl Iterator<Item = ComponentId> + Clone + '_ {
[].into_iter()
}
fn entities(&self) -> &[Entity] {
core::slice::from_ref(self)
}
}
impl TriggerTargets for Vec<Entity> {
fn components(&self) -> &[ComponentId] {
&[]
}
fn entities(&self) -> &[Entity] {
self.as_slice()
}
}
impl<const N: usize> TriggerTargets for [Entity; N] {
fn components(&self) -> &[ComponentId] {
&[]
}
fn entities(&self) -> &[Entity] {
self.as_slice()
fn entities(&self) -> impl Iterator<Item = Entity> + Clone + '_ {
core::iter::once(*self)
}
}
impl TriggerTargets for ComponentId {
fn components(&self) -> &[ComponentId] {
core::slice::from_ref(self)
fn components(&self) -> impl Iterator<Item = ComponentId> + Clone + '_ {
core::iter::once(*self)
}
fn entities(&self) -> &[Entity] {
&[]
fn entities(&self) -> impl Iterator<Item = Entity> + Clone + '_ {
[].into_iter()
}
}
impl TriggerTargets for Vec<ComponentId> {
fn components(&self) -> &[ComponentId] {
self.as_slice()
impl<T: TriggerTargets> TriggerTargets for Vec<T> {
fn components(&self) -> impl Iterator<Item = ComponentId> + Clone + '_ {
self.iter().flat_map(T::components)
}
fn entities(&self) -> &[Entity] {
&[]
fn entities(&self) -> impl Iterator<Item = Entity> + Clone + '_ {
self.iter().flat_map(T::entities)
}
}
impl<const N: usize> TriggerTargets for [ComponentId; N] {
fn components(&self) -> &[ComponentId] {
self.as_slice()
impl<const N: usize, T: TriggerTargets> TriggerTargets for [T; N] {
fn components(&self) -> impl Iterator<Item = ComponentId> + Clone + '_ {
self.iter().flat_map(T::components)
}
fn entities(&self) -> &[Entity] {
&[]
fn entities(&self) -> impl Iterator<Item = Entity> + Clone + '_ {
self.iter().flat_map(T::entities)
}
}
impl TriggerTargets for &Vec<Entity> {
fn components(&self) -> &[ComponentId] {
&[]
impl<T: TriggerTargets> TriggerTargets for [T] {
fn components(&self) -> impl Iterator<Item = ComponentId> + Clone + '_ {
self.iter().flat_map(T::components)
}
fn entities(&self) -> &[Entity] {
self.as_slice()
fn entities(&self) -> impl Iterator<Item = Entity> + Clone + '_ {
self.iter().flat_map(T::entities)
}
}
// Implements `TriggerTargets` for tuples of `TriggerTargets` by chaining the
// component/entity iterators of every tuple element, in order.
macro_rules! impl_trigger_targets_tuples {
    ($(#[$meta:meta])* $($trigger_targets: ident),*) => {
        #[expect(clippy::allow_attributes, reason = "can't guarantee violation of non_snake_case")]
        #[allow(non_snake_case, reason = "`all_tuples!()` generates non-snake-case variable names.")]
        $(#[$meta])*
        impl<$($trigger_targets: TriggerTargets),*> TriggerTargets for ($($trigger_targets,)*)
        {
            fn components(&self) -> impl Iterator<Item = ComponentId> + Clone + '_ {
                // Start from an empty iterator, then chain each element's components.
                let iter = [].into_iter();
                let ($($trigger_targets,)*) = self;
                $(
                    let iter = iter.chain($trigger_targets.components());
                )*
                iter
            }

            fn entities(&self) -> impl Iterator<Item = Entity> + Clone + '_ {
                // Start from an empty iterator, then chain each element's entities.
                let iter = [].into_iter();
                let ($($trigger_targets,)*) = self;
                $(
                    let iter = iter.chain($trigger_targets.entities());
                )*
                iter
            }
        }
    }
}

// Generate the impls for tuples of arity 0 through 15.
all_tuples!(
    #[doc(fake_variadic)]
    impl_trigger_targets_tuples,
    0,
    15,
    T
);
/// A description of what an [`Observer`] observes.
#[derive(Default, Clone)]
pub struct ObserverDescriptor {
@ -673,7 +690,8 @@ impl World {
caller: MaybeLocation,
) {
let mut world = DeferredWorld::from(self);
if targets.entities().is_empty() {
let mut entity_targets = targets.entities().peekable();
if entity_targets.peek().is_none() {
// SAFETY: `event_data` is accessible as the type represented by `event_id`
unsafe {
world.trigger_observers_with_data::<_, E::Traversal>(
@ -686,12 +704,12 @@ impl World {
);
};
} else {
for target in targets.entities() {
for target_entity in entity_targets {
// SAFETY: `event_data` is accessible as the type represented by `event_id`
unsafe {
world.trigger_observers_with_data::<_, E::Traversal>(
event_id,
*target,
target_entity,
targets.components(),
event_data,
E::AUTO_PROPAGATE,
@ -1115,11 +1133,10 @@ mod tests {
fn observer_despawn() {
let mut world = World::new();
let observer = world
.add_observer(|_: Trigger<OnAdd, A>| {
panic!("Observer triggered after being despawned.")
})
.id();
let system: fn(Trigger<OnAdd, A>) = |_| {
panic!("Observer triggered after being despawned.");
};
let observer = world.add_observer(system).id();
world.despawn(observer);
world.spawn(A).flush();
}
@ -1136,11 +1153,11 @@ mod tests {
res.observed("remove_a");
});
let observer = world
.add_observer(|_: Trigger<OnRemove, B>| {
panic!("Observer triggered after being despawned.")
})
.flush();
let system: fn(Trigger<OnRemove, B>) = |_: Trigger<OnRemove, B>| {
panic!("Observer triggered after being despawned.");
};
let observer = world.add_observer(system).flush();
world.despawn(observer);
world.despawn(entity);
@ -1166,9 +1183,10 @@ mod tests {
let mut world = World::new();
world.init_resource::<Order>();
world
.spawn_empty()
.observe(|_: Trigger<EventA>| panic!("Trigger routed to non-targeted entity."));
let system: fn(Trigger<EventA>) = |_| {
panic!("Trigger routed to non-targeted entity.");
};
world.spawn_empty().observe(system);
world.add_observer(move |obs: Trigger<EventA>, mut res: ResMut<Order>| {
assert_eq!(obs.target(), Entity::PLACEHOLDER);
res.observed("event_a");
@ -1187,9 +1205,11 @@ mod tests {
let mut world = World::new();
world.init_resource::<Order>();
world
.spawn_empty()
.observe(|_: Trigger<EventA>| panic!("Trigger routed to non-targeted entity."));
let system: fn(Trigger<EventA>) = |_| {
panic!("Trigger routed to non-targeted entity.");
};
world.spawn_empty().observe(system);
let entity = world
.spawn_empty()
.observe(|_: Trigger<EventA>, mut res: ResMut<Order>| res.observed("a_1"))
@ -1207,6 +1227,119 @@ mod tests {
assert_eq!(vec!["a_2", "a_1"], world.resource::<Order>().0);
}
#[test]
fn observer_multiple_targets() {
    // Verifies that triggering with tuple targets (entities, components, and
    // nested tuples thereof) fires each matching observer the expected number
    // of times; each observer adds a distinct power of ten to `R` so the final
    // sum encodes exactly which observers ran.
    #[derive(Resource, Default)]
    struct R(i32);

    let mut world = World::new();
    let component_a = world.register_component::<A>();
    let component_b = world.register_component::<B>();
    world.init_resource::<R>();

    // targets (entity_1, A)
    let entity_1 = world
        .spawn_empty()
        .observe(|_: Trigger<EventA, A>, mut res: ResMut<R>| res.0 += 1)
        .id();
    // targets (entity_2, B)
    let entity_2 = world
        .spawn_empty()
        .observe(|_: Trigger<EventA, B>, mut res: ResMut<R>| res.0 += 10)
        .id();
    // targets any entity or component
    world.add_observer(|_: Trigger<EventA>, mut res: ResMut<R>| res.0 += 100);
    // targets any entity, and components A or B
    world.add_observer(|_: Trigger<EventA, (A, B)>, mut res: ResMut<R>| res.0 += 1000);
    // test all tuples
    world.add_observer(|_: Trigger<EventA, (A, B, (A, B))>, mut res: ResMut<R>| res.0 += 10000);
    world.add_observer(
        |_: Trigger<EventA, (A, B, (A, B), ((A, B), (A, B)))>, mut res: ResMut<R>| {
            res.0 += 100000;
        },
    );
    world.add_observer(
        |_: Trigger<EventA, (A, B, (A, B), (B, A), (A, B, ((A, B), (B, A))))>,
         mut res: ResMut<R>| res.0 += 1000000,
    );

    // WorldEntityMut does not automatically flush.
    world.flush();

    // trigger for an entity and a component
    world.trigger_targets(EventA, (entity_1, component_a));
    world.flush();
    // only observer that doesn't trigger is the one only watching entity_2
    assert_eq!(1111101, world.resource::<R>().0);
    world.resource_mut::<R>().0 = 0;

    // trigger for both entities, but no components: trigger once per entity target
    world.trigger_targets(EventA, (entity_1, entity_2));
    world.flush();
    // only the observer that doesn't require components triggers - once per entity
    assert_eq!(200, world.resource::<R>().0);
    world.resource_mut::<R>().0 = 0;

    // trigger for both components, but no entities: trigger once
    world.trigger_targets(EventA, (component_a, component_b));
    world.flush();
    // all component observers trigger, entities are not observed
    assert_eq!(1111100, world.resource::<R>().0);
    world.resource_mut::<R>().0 = 0;

    // trigger for both entities and both components: trigger once per entity target
    // we only get 2222211 because a given observer can trigger only once per entity target
    world.trigger_targets(EventA, ((component_a, component_b), (entity_1, entity_2)));
    world.flush();
    assert_eq!(2222211, world.resource::<R>().0);
    world.resource_mut::<R>().0 = 0;

    // trigger to test complex tuples: (A, B, (A, B))
    world.trigger_targets(
        EventA,
        (component_a, component_b, (component_a, component_b)),
    );
    world.flush();
    // the duplicate components in the tuple don't cause multiple triggers
    assert_eq!(1111100, world.resource::<R>().0);
    world.resource_mut::<R>().0 = 0;

    // trigger to test complex tuples: (A, B, (A, B), ((A, B), (A, B)))
    world.trigger_targets(
        EventA,
        (
            component_a,
            component_b,
            (component_a, component_b),
            ((component_a, component_b), (component_a, component_b)),
        ),
    );
    world.flush();
    // the duplicate components in the tuple don't cause multiple triggers
    assert_eq!(1111100, world.resource::<R>().0);
    world.resource_mut::<R>().0 = 0;

    // trigger to test the most complex tuple: (A, B, (A, B), (B, A), (A, B, ((A, B), (B, A))))
    world.trigger_targets(
        EventA,
        (
            component_a,
            component_b,
            (component_a, component_b),
            (component_b, component_a),
            (
                component_a,
                component_b,
                ((component_a, component_b), (component_b, component_a)),
            ),
        ),
    );
    world.flush();
    // the duplicate components in the tuple don't cause multiple triggers
    assert_eq!(1111100, world.resource::<R>().0);
    world.resource_mut::<R>().0 = 0;
}
#[test]
fn observer_dynamic_component() {
let mut world = World::new();

View File

@ -819,7 +819,7 @@ impl<T: SparseSetIndex> Access<T> {
/// otherwise would allow for queries to be considered disjoint when they shouldn't:
/// - `Query<(&mut T, Option<&U>)>` read/write `T`, read `U`, with `U`
/// - `Query<&mut T, Without<U>>` read/write `T`, without `U`
/// from this we could reasonably conclude that the queries are disjoint but they aren't.
/// from this we could reasonably conclude that the queries are disjoint but they aren't.
///
/// In order to solve this the actual access that `Query<(&mut T, Option<&U>)>` has
/// is read/write `T`, read `U`. It must still have a read `U` access otherwise the following

View File

@ -66,9 +66,7 @@ use variadics_please::all_tuples;
/// # bevy_ecs::system::assert_is_system(my_system);
/// ```
///
/// [`matches_component_set`]: Self::matches_component_set
/// [`Query`]: crate::system::Query
/// [`State`]: Self::State
///
/// # Safety
///

View File

@ -51,9 +51,9 @@ pub(super) union StorageId {
///
/// This data is cached between system runs, and is used to:
/// - store metadata about which [`Table`] or [`Archetype`] are matched by the query. "Matched" means
/// that the query will iterate over the data in the matched table/archetype.
/// that the query will iterate over the data in the matched table/archetype.
/// - cache the [`State`] needed to compute the [`Fetch`] struct used to retrieve data
/// from a specific [`Table`] or [`Archetype`]
/// from a specific [`Table`] or [`Archetype`]
/// - build iterators that can iterate over the query results
///
/// [`State`]: crate::query::world_query::WorldQuery::State

View File

@ -20,7 +20,7 @@ pub trait ReflectCommandExt {
///
/// - If the entity doesn't exist.
/// - If [`AppTypeRegistry`] does not have the reflection data for the given
/// [`Component`](crate::component::Component) or [`Bundle`](crate::bundle::Bundle).
/// [`Component`](crate::component::Component) or [`Bundle`](crate::bundle::Bundle).
/// - If the component or bundle data is invalid. See [`PartialReflect::apply`] for further details.
/// - If [`AppTypeRegistry`] is not present in the [`World`].
///
@ -212,7 +212,7 @@ impl<'w> EntityWorldMut<'w> {
///
/// - If the entity has been despawned while this `EntityWorldMut` is still alive.
/// - If [`AppTypeRegistry`] does not have the reflection data for the given
/// [`Component`](crate::component::Component) or [`Bundle`](crate::bundle::Bundle).
/// [`Component`](crate::component::Component) or [`Bundle`](crate::bundle::Bundle).
/// - If the component or bundle data is invalid. See [`PartialReflect::apply`] for further details.
/// - If [`AppTypeRegistry`] is not present in the [`World`].
///
@ -243,7 +243,7 @@ impl<'w> EntityWorldMut<'w> {
///
/// - If the entity has been despawned while this `EntityWorldMut` is still alive.
/// - If the given [`Resource`] does not have the reflection data for the given
/// [`Component`](crate::component::Component) or [`Bundle`](crate::bundle::Bundle).
/// [`Component`](crate::component::Component) or [`Bundle`](crate::bundle::Bundle).
/// - If the component or bundle data is invalid. See [`PartialReflect::apply`] for further details.
/// - If the given [`Resource`] is not present in the [`World`].
pub fn insert_reflect_with_registry<T: Resource + AsRef<TypeRegistry>>(

View File

@ -80,41 +80,109 @@ impl ScheduleBuildPass for AutoInsertApplyDeferredPass {
let mut sync_point_graph = dependency_flattened.clone();
let topo = graph.topsort_graph(dependency_flattened, ReportCycles::Dependency)?;
fn set_has_conditions(graph: &ScheduleGraph, node: NodeId) -> bool {
!graph.set_conditions_at(node).is_empty()
|| graph
.hierarchy()
.graph()
.edges_directed(node, Direction::Incoming)
.any(|(parent, _)| set_has_conditions(graph, parent))
}
fn system_has_conditions(graph: &ScheduleGraph, node: NodeId) -> bool {
assert!(node.is_system());
!graph.system_conditions[node.index()].is_empty()
|| graph
.hierarchy()
.graph()
.edges_directed(node, Direction::Incoming)
.any(|(parent, _)| set_has_conditions(graph, parent))
}
let mut system_has_conditions_cache = HashMap::default();
fn is_valid_explicit_sync_point(
graph: &ScheduleGraph,
system: NodeId,
system_has_conditions_cache: &mut HashMap<usize, bool>,
) -> bool {
let index = system.index();
is_apply_deferred(graph.systems[index].get().unwrap())
&& !*system_has_conditions_cache
.entry(index)
.or_insert_with(|| system_has_conditions(graph, system))
}
// calculate the number of sync points each sync point is from the beginning of the graph
// use the same sync point if the distance is the same
let mut distances: HashMap<usize, Option<u32>> =
let mut distances: HashMap<usize, u32> =
HashMap::with_capacity_and_hasher(topo.len(), Default::default());
// Keep track of any explicit sync nodes for a specific distance.
let mut distance_to_explicit_sync_node: HashMap<u32, NodeId> = HashMap::default();
for node in &topo {
let add_sync_after = graph.systems[node.index()].get().unwrap().has_deferred();
let node_system = graph.systems[node.index()].get().unwrap();
let node_needs_sync =
if is_valid_explicit_sync_point(graph, *node, &mut system_has_conditions_cache) {
distance_to_explicit_sync_node.insert(
distances.get(&node.index()).copied().unwrap_or_default(),
*node,
);
// This node just did a sync, so the only reason to do another sync is if one was
// explicitly scheduled afterwards.
false
} else {
node_system.has_deferred()
};
for target in dependency_flattened.neighbors_directed(*node, Direction::Outgoing) {
let add_sync_on_edge = add_sync_after
&& !is_apply_deferred(graph.systems[target.index()].get().unwrap())
&& !self.no_sync_edges.contains(&(*node, target));
let edge_needs_sync = node_needs_sync
&& !self.no_sync_edges.contains(&(*node, target))
|| is_valid_explicit_sync_point(
graph,
target,
&mut system_has_conditions_cache,
);
let weight = if add_sync_on_edge { 1 } else { 0 };
let weight = if edge_needs_sync { 1 } else { 0 };
// Use whichever distance is larger, either the current distance, or the distance to
// the parent plus the weight.
let distance = distances
.get(&target.index())
.unwrap_or(&None)
.or(Some(0))
.map(|distance| {
distance.max(
distances.get(&node.index()).unwrap_or(&None).unwrap_or(0) + weight,
)
});
.copied()
.unwrap_or_default()
.max(distances.get(&node.index()).copied().unwrap_or_default() + weight);
distances.insert(target.index(), distance);
}
}
if add_sync_on_edge {
let sync_point =
self.get_sync_point(graph, distances[&target.index()].unwrap());
sync_point_graph.add_edge(*node, sync_point);
sync_point_graph.add_edge(sync_point, target);
// edge is now redundant
sync_point_graph.remove_edge(*node, target);
// Find any edges which have a different number of sync points between them and make sure
// there is a sync point between them.
for node in &topo {
let node_distance = distances.get(&node.index()).copied().unwrap_or_default();
for target in dependency_flattened.neighbors_directed(*node, Direction::Outgoing) {
let target_distance = distances.get(&target.index()).copied().unwrap_or_default();
if node_distance == target_distance {
// These nodes are the same distance, so they don't need an edge between them.
continue;
}
if is_apply_deferred(graph.systems[target.index()].get().unwrap()) {
// We don't need to insert a sync point since ApplyDeferred is a sync point
// already!
continue;
}
let sync_point = distance_to_explicit_sync_node
.get(&target_distance)
.copied()
.unwrap_or_else(|| self.get_sync_point(graph, target_distance));
sync_point_graph.add_edge(*node, sync_point);
sync_point_graph.add_edge(sync_point, target);
sync_point_graph.remove_edge(*node, target);
}
}

View File

@ -276,7 +276,7 @@ pub fn simple_cycles_in_component(graph: &DiGraph, scc: &[NodeId]) -> Vec<Vec<No
stack.clear();
stack.push((root, subgraph.neighbors(root)));
while !stack.is_empty() {
let (ref node, successors) = stack.last_mut().unwrap();
let &mut (ref node, ref mut successors) = stack.last_mut().unwrap();
if let Some(next) = successors.next() {
if next == root {
// found a cycle

View File

@ -1192,7 +1192,7 @@ mod tests {
let mut schedule = Schedule::new(TestSchedule);
schedule
.set_executor_kind($executor)
.add_systems(|| panic!("Executor ignored Stepping"));
.add_systems(|| -> () { panic!("Executor ignored Stepping") });
// Add our schedule to stepping & and enable stepping; this should
// prevent any systems in the schedule from running

View File

@ -758,6 +758,26 @@ impl ScheduleGraph {
.unwrap()
}
/// Returns the conditions for the set at the given [`NodeId`], if it exists.
pub fn get_set_conditions_at(&self, id: NodeId) -> Option<&[BoxedCondition]> {
    if id.is_set() {
        // Look up the conditions by index and expose them as a slice.
        let conditions = self.system_set_conditions.get(id.index())?;
        Some(conditions.as_slice())
    } else {
        // Not a set id (e.g. a system id), so there are no set conditions.
        None
    }
}
/// Returns the conditions for the set at the given [`NodeId`].
///
/// Panics if it doesn't exist.
#[track_caller]
pub fn set_conditions_at(&self, id: NodeId) -> &[BoxedCondition] {
    // Panicking wrapper around `get_set_conditions_at`; the `Err(String)`
    // carries the panic message surfaced by `unwrap`.
    self.get_set_conditions_at(id)
        .ok_or_else(|| format!("set with id {id:?} does not exist in this Schedule"))
        .unwrap()
}
/// Returns an iterator over all systems in this schedule, along with the conditions for each system.
pub fn systems(&self) -> impl Iterator<Item = (NodeId, &ScheduleSystem, &[BoxedCondition])> {
self.systems
@ -2036,7 +2056,7 @@ mod tests {
use bevy_ecs_macros::ScheduleLabel;
use crate::{
prelude::{Res, Resource},
prelude::{ApplyDeferred, Res, Resource},
schedule::{
tests::ResMut, IntoSystemConfigs, IntoSystemSetConfigs, Schedule,
ScheduleBuildSettings, SystemSet,
@ -2062,12 +2082,12 @@ mod tests {
let mut world = World::new();
let mut schedule = Schedule::default();
let system: fn() = || {
panic!("This system must not run");
};
schedule.configure_sets(Set.run_if(|| false));
schedule.add_systems(
(|| panic!("This system must not run"))
.ambiguous_with(|| ())
.in_set(Set),
);
schedule.add_systems(system.ambiguous_with(|| ()).in_set(Set));
schedule.run(&mut world);
}
@ -2088,6 +2108,108 @@ mod tests {
assert_eq!(schedule.executable.systems.len(), 3);
}
#[test]
fn explicit_sync_point_used_as_auto_sync_point() {
    let mut schedule = Schedule::default();
    let mut world = World::default();
    // A deferred resource insertion chained before a reader would normally
    // force an auto-inserted sync point between the two systems.
    schedule.add_systems(
        (
            |mut commands: Commands| commands.insert_resource(Resource1),
            |_: Res<Resource1>| {},
        )
            .chain(),
    );
    // An unconditional explicit `ApplyDeferred` elsewhere in the schedule.
    schedule.add_systems((|| {}, ApplyDeferred, || {}).chain());
    schedule.run(&mut world);

    // No sync point was inserted, since we can reuse the explicit sync point.
    assert_eq!(schedule.executable.systems.len(), 5);
}
#[test]
fn conditional_explicit_sync_point_not_used_as_auto_sync_point() {
    let mut schedule = Schedule::default();
    let mut world = World::default();
    // A deferred resource insertion chained before a reader requires a sync
    // point between the two systems.
    schedule.add_systems(
        (
            |mut commands: Commands| commands.insert_resource(Resource1),
            |_: Res<Resource1>| {},
        )
            .chain(),
    );
    // The explicit `ApplyDeferred` has a run condition directly on it, so it
    // cannot be relied upon to run.
    schedule.add_systems((|| {}, ApplyDeferred.run_if(|| false), || {}).chain());
    schedule.run(&mut world);

    // A sync point was inserted, since the explicit sync point is not always run.
    assert_eq!(schedule.executable.systems.len(), 6);
}
#[test]
fn conditional_explicit_sync_point_not_used_as_auto_sync_point_condition_on_chain() {
    let mut schedule = Schedule::default();
    let mut world = World::default();
    // A deferred resource insertion chained before a reader requires a sync
    // point between the two systems.
    schedule.add_systems(
        (
            |mut commands: Commands| commands.insert_resource(Resource1),
            |_: Res<Resource1>| {},
        )
            .chain(),
    );
    // The run condition sits on the whole chain containing the explicit
    // `ApplyDeferred`, so the sync point is not guaranteed to run.
    schedule.add_systems((|| {}, ApplyDeferred, || {}).chain().run_if(|| false));
    schedule.run(&mut world);

    // A sync point was inserted, since the explicit sync point is not always run.
    assert_eq!(schedule.executable.systems.len(), 6);
}
#[test]
fn conditional_explicit_sync_point_not_used_as_auto_sync_point_condition_on_system_set() {
    #[derive(SystemSet, Debug, Clone, PartialEq, Eq, Hash)]
    struct Set;

    let mut schedule = Schedule::default();
    let mut world = World::default();
    // The set that will contain the explicit `ApplyDeferred` is conditional.
    schedule.configure_sets(Set.run_if(|| false));
    // A deferred resource insertion chained before a reader requires a sync
    // point between the two systems.
    schedule.add_systems(
        (
            |mut commands: Commands| commands.insert_resource(Resource1),
            |_: Res<Resource1>| {},
        )
            .chain(),
    );
    // The explicit `ApplyDeferred` inherits the set's run condition, so it is
    // not guaranteed to run.
    schedule.add_systems((|| {}, ApplyDeferred.in_set(Set), || {}).chain());
    schedule.run(&mut world);

    // A sync point was inserted, since the explicit sync point is not always run.
    assert_eq!(schedule.executable.systems.len(), 6);
}
#[test]
fn conditional_explicit_sync_point_not_used_as_auto_sync_point_condition_on_nested_system_set()
{
    #[derive(SystemSet, Debug, Clone, PartialEq, Eq, Hash)]
    struct Set1;
    #[derive(SystemSet, Debug, Clone, PartialEq, Eq, Hash)]
    struct Set2;

    let mut schedule = Schedule::default();
    let mut world = World::default();
    // The run condition sits on the outer set; the inner set (containing the
    // explicit `ApplyDeferred`) inherits it transitively.
    schedule.configure_sets(Set2.run_if(|| false));
    schedule.configure_sets(Set1.in_set(Set2));
    // A deferred resource insertion chained before a reader requires a sync
    // point between the two systems.
    schedule.add_systems(
        (
            |mut commands: Commands| commands.insert_resource(Resource1),
            |_: Res<Resource1>| {},
        )
            .chain(),
    );
    schedule.add_systems((|| {}, ApplyDeferred, || {}).chain().in_set(Set1));
    schedule.run(&mut world);

    // A sync point was inserted, since the explicit sync point is not always run.
    assert_eq!(schedule.executable.systems.len(), 6);
}
#[test]
fn merges_sync_points_into_one() {
let mut schedule = Schedule::default();

View File

@ -1348,7 +1348,9 @@ mod tests {
//
// first system will be configured as `run_if(|| false)`, so it can
// just panic if called
let first_system = move || panic!("first_system should not be run");
let first_system: fn() = move || {
panic!("first_system should not be run");
};
// The second system, we need to know when it has been called, so we'll
// add a resource for tracking if it has been run. The system will

View File

@ -76,7 +76,7 @@ impl BlobArray {
///
/// # Safety
/// - The element at index `index` is safe to access.
/// (If the safety requirements of every method that has been used on `Self` have been fulfilled, the caller just needs to ensure that `index` < `len`)
/// (If the safety requirements of every method that has been used on `Self` have been fulfilled, the caller just needs to ensure that `index` < `len`)
///
/// [`Vec::len`]: alloc::vec::Vec::len
#[inline]
@ -99,7 +99,7 @@ impl BlobArray {
///
/// # Safety
/// - The element with at index `index` is safe to access.
/// (If the safety requirements of every method that has been used on `Self` have been fulfilled, the caller just needs to ensure that `index` < `len`)
/// (If the safety requirements of every method that has been used on `Self` have been fulfilled, the caller just needs to ensure that `index` < `len`)
///
/// [`Vec::len`]: alloc::vec::Vec::len
#[inline]
@ -156,7 +156,7 @@ impl BlobArray {
///
/// # Safety
/// - For every element with index `i`, if `i` < `len`: It must be safe to call [`Self::get_unchecked_mut`] with `i`.
/// (If the safety requirements of every method that has been used on `Self` have been fulfilled, the caller just needs to ensure that `len` is correct.)
/// (If the safety requirements of every method that has been used on `Self` have been fulfilled, the caller just needs to ensure that `len` is correct.)
///
/// [`Vec::clear`]: alloc::vec::Vec::clear
pub unsafe fn clear(&mut self, len: usize) {
@ -256,7 +256,7 @@ impl BlobArray {
new_capacity: NonZeroUsize,
) {
#[cfg(debug_assertions)]
debug_assert_eq!(self.capacity, current_capacity.into());
debug_assert_eq!(self.capacity, current_capacity.get());
if !self.is_zst() {
// SAFETY: `new_capacity` can't overflow usize
let new_layout =
@ -289,7 +289,7 @@ impl BlobArray {
/// # Safety
/// - `index` must be in bounds (`index` < capacity)
/// - The [`Layout`] of the value must match the layout of the blobs stored in this array,
/// and it must be safe to use the `drop` function of this [`BlobArray`] to drop `value`.
/// and it must be safe to use the `drop` function of this [`BlobArray`] to drop `value`.
/// - `value` must not point to the same value that is being initialized.
#[inline]
pub unsafe fn initialize_unchecked(&mut self, index: usize, value: OwningPtr<'_>) {
@ -305,7 +305,7 @@ impl BlobArray {
/// # Safety
/// - Index must be in-bounds (`index` < `len`)
/// - `value`'s [`Layout`] must match this [`BlobArray`]'s `item_layout`,
/// and it must be safe to use the `drop` function of this [`BlobArray`] to drop `value`.
/// and it must be safe to use the `drop` function of this [`BlobArray`] to drop `value`.
/// - `value` must not point to the same value that is being replaced.
pub unsafe fn replace_unchecked(&mut self, index: usize, value: OwningPtr<'_>) {
#[cfg(debug_assertions)]

View File

@ -176,7 +176,7 @@ impl BlobVec {
/// # Safety
/// - index must be in bounds
/// - the memory in the [`BlobVec`] starting at index `index`, of a size matching this [`BlobVec`]'s
/// `item_layout`, must have been previously allocated.
/// `item_layout`, must have been previously allocated.
#[inline]
pub unsafe fn initialize_unchecked(&mut self, index: usize, value: OwningPtr<'_>) {
debug_assert!(index < self.len());
@ -189,10 +189,10 @@ impl BlobVec {
/// # Safety
/// - index must be in-bounds
/// - the memory in the [`BlobVec`] starting at index `index`, of a size matching this
/// [`BlobVec`]'s `item_layout`, must have been previously initialized with an item matching
/// this [`BlobVec`]'s `item_layout`
/// [`BlobVec`]'s `item_layout`, must have been previously initialized with an item matching
/// this [`BlobVec`]'s `item_layout`
/// - the memory at `*value` must also be previously initialized with an item matching this
/// [`BlobVec`]'s `item_layout`
/// [`BlobVec`]'s `item_layout`
pub unsafe fn replace_unchecked(&mut self, index: usize, value: OwningPtr<'_>) {
debug_assert!(index < self.len());

View File

@ -722,10 +722,10 @@ mod tests {
assert_eq!(sets.len(), 0);
assert!(sets.is_empty());
init_component::<TestComponent1>(&mut sets, 1);
register_component::<TestComponent1>(&mut sets, 1);
assert_eq!(sets.len(), 1);
init_component::<TestComponent2>(&mut sets, 2);
register_component::<TestComponent2>(&mut sets, 2);
assert_eq!(sets.len(), 2);
// check its shape by iter
@ -739,7 +739,7 @@ mod tests {
vec![(ComponentId::new(1), 0), (ComponentId::new(2), 0),]
);
fn init_component<T: Component>(sets: &mut SparseSets, id: usize) {
fn register_component<T: Component>(sets: &mut SparseSets, id: usize) {
let descriptor = ComponentDescriptor::new::<T>();
let id = ComponentId::new(id);
let info = ComponentInfo::new(id, descriptor);

View File

@ -87,7 +87,7 @@ impl<T> ThinArrayPtr<T> {
/// - The caller should update their saved `capacity` value to reflect the fact that it was changed
pub unsafe fn realloc(&mut self, current_capacity: NonZeroUsize, new_capacity: NonZeroUsize) {
#[cfg(debug_assertions)]
assert_eq!(self.capacity, current_capacity.into());
assert_eq!(self.capacity, current_capacity.get());
self.set_capacity(new_capacity.get());
if size_of::<T>() != 0 {
let new_layout =

View File

@ -79,7 +79,11 @@ pub trait HandleError<Out = ()> {
}
}
impl<C: Command<Result<T, E>>, T, E: Into<Error>> HandleError<Result<T, E>> for C {
impl<C, T, E> HandleError<Result<T, E>> for C
where
C: Command<Result<T, E>>,
E: Into<Error>,
{
fn handle_error_with(self, error_handler: fn(&mut World, Error)) -> impl Command {
move |world: &mut World| match self.apply(world) {
Ok(_) => {}
@ -88,7 +92,10 @@ impl<C: Command<Result<T, E>>, T, E: Into<Error>> HandleError<Result<T, E>> for
}
}
impl<C: Command> HandleError for C {
impl<C> HandleError for C
where
C: Command,
{
#[inline]
fn handle_error_with(self, _error_handler: fn(&mut World, Error)) -> impl Command {
self

View File

@ -5,6 +5,7 @@
//! [`EntityCommands`](crate::system::EntityCommands).
use alloc::vec::Vec;
use core::fmt;
use log::info;
use crate::{
@ -79,8 +80,7 @@ use bevy_ptr::OwningPtr;
/// }
/// ```
pub trait EntityCommand<Out = ()>: Send + 'static {
/// Executes this command for the given [`Entity`] and
/// returns a [`Result`] for error handling.
/// Executes this command for the given [`Entity`].
fn apply(self, entity: EntityWorldMut) -> Out;
}
/// Passes in a specific entity to an [`EntityCommand`], resulting in a [`Command`] that
@ -96,7 +96,10 @@ pub trait CommandWithEntity<Out> {
fn with_entity(self, entity: Entity) -> impl Command<Out> + HandleError<Out>;
}
impl<C: EntityCommand> CommandWithEntity<Result<(), EntityMutableFetchError>> for C {
impl<C> CommandWithEntity<Result<(), EntityMutableFetchError>> for C
where
C: EntityCommand,
{
fn with_entity(
self,
entity: Entity,
@ -110,11 +113,10 @@ impl<C: EntityCommand> CommandWithEntity<Result<(), EntityMutableFetchError>> fo
}
}
impl<
C: EntityCommand<Result<T, Err>>,
T,
Err: core::fmt::Debug + core::fmt::Display + Send + Sync + 'static,
> CommandWithEntity<Result<T, EntityCommandError<Err>>> for C
impl<C, T, Err> CommandWithEntity<Result<T, EntityCommandError<Err>>> for C
where
C: EntityCommand<Result<T, Err>>,
Err: fmt::Debug + fmt::Display + Send + Sync + 'static,
{
fn with_entity(
self,
@ -245,8 +247,9 @@ pub fn retain<T: Bundle>() -> impl EntityCommand {
///
/// # Note
///
/// This will also despawn any [`Children`](crate::hierarchy::Children) entities, and any other [`RelationshipTarget`](crate::relationship::RelationshipTarget) that is configured
/// to despawn descendants. This results in "recursive despawn" behavior.
/// This will also despawn any [`Children`](crate::hierarchy::Children) entities,
/// and any other [`RelationshipTarget`](crate::relationship::RelationshipTarget) that is configured to despawn descendants.
/// This results in "recursive despawn" behavior.
#[track_caller]
pub fn despawn() -> impl EntityCommand {
let caller = MaybeLocation::caller();

View File

@ -82,21 +82,25 @@ use crate::{
/// // NOTE: type inference fails here, so annotations are required on the closure.
/// commands.queue(|w: &mut World| {
/// // Mutate the world however you want...
/// # todo!();
/// });
/// # }
/// ```
///
/// # Error handling
///
/// Commands can return a [`Result`](crate::result::Result), which can be passed to
/// an error handler. Error handlers are functions/closures of the form
/// `fn(&mut World, CommandError)`.
/// A [`Command`] can return a [`Result`](crate::result::Result),
/// which will be passed to an error handler if the `Result` is an error.
///
/// The default error handler panics. It can be configured by enabling the `configurable_error_handler`
/// cargo feature, then setting the `GLOBAL_ERROR_HANDLER`.
/// Error handlers are functions/closures of the form `fn(&mut World, Error)`.
/// They are granted exclusive access to the [`World`], which enables them to
/// respond to the error in whatever way is necessary.
///
/// Alternatively, you can customize the error handler for a specific command by calling [`Commands::queue_handled`].
/// The [default error handler](error_handler::default) panics.
/// It can be configured by enabling the `configurable_error_handler` cargo feature,
/// then setting the `GLOBAL_ERROR_HANDLER`.
///
/// Alternatively, you can customize the error handler for a specific command
/// by calling [`Commands::queue_handled`].
///
/// The [`error_handler`] module provides some simple error handlers for convenience.
///
@ -546,7 +550,8 @@ impl<'w, 's> Commands<'w, 's> {
/// Pushes a generic [`Command`] to the command queue.
///
/// If the [`Command`] returns a [`Result`], it will be handled using the [default error handler](error_handler::default).
/// If the [`Command`] returns a [`Result`],
/// it will be handled using the [default error handler](error_handler::default).
///
/// To use a custom error handler, see [`Commands::queue_handled`].
///
@ -589,8 +594,11 @@ impl<'w, 's> Commands<'w, 's> {
pub fn queue<C: Command<T> + HandleError<T>, T>(&mut self, command: C) {
self.queue_internal(command.handle_error());
}
/// Pushes a generic [`Command`] to the command queue. If the command returns a [`Result`] the given
/// `error_handler` will be used to handle error cases.
/// Pushes a generic [`Command`] to the command queue.
///
/// If the [`Command`] returns a [`Result`],
/// the given `error_handler` will be used to handle error cases.
///
/// To implicitly use the default error handler, see [`Commands::queue`].
///
@ -1137,7 +1145,7 @@ impl<'w, 's> Commands<'w, 's> {
/// Most [`Commands`] (and thereby [`EntityCommands`]) are deferred: when you call the command,
/// if it requires mutable access to the [`World`] (that is, if it removes, adds, or changes something),
/// it's not executed immediately. Instead, the command is added to a "command queue."
/// The command queue is applied between [`Schedules`](bevy_ecs::schedule::Schedule), one by one,
/// The command queue is applied between [`Schedules`](crate::schedule::Schedule), one by one,
/// so that each command can have exclusive access to the World.
///
/// # Fallible
@ -1148,14 +1156,19 @@ impl<'w, 's> Commands<'w, 's> {
///
/// # Error handling
///
/// [`EntityCommands`] can return a [`Result`](crate::result::Result), which can be passed to
/// an error handler. Error handlers are functions/closures of the form
/// `fn(&mut World, CommandError)`.
/// An [`EntityCommand`] can return a [`Result`](crate::result::Result),
/// which will be passed to an error handler if the `Result` is an error.
///
/// The default error handler panics. It can be configured by enabling the `configurable_error_handler`
/// cargo feature, then setting the `GLOBAL_ERROR_HANDLER`.
/// Error handlers are functions/closures of the form `fn(&mut World, Error)`.
/// They are granted exclusive access to the [`World`], which enables them to
/// respond to the error in whatever way is necessary.
///
/// Alternatively, you can customize the error handler for a specific command by calling [`EntityCommands::queue_handled`].
/// The [default error handler](error_handler::default) panics.
/// It can be configured by enabling the `configurable_error_handler` cargo feature,
/// then setting the `GLOBAL_ERROR_HANDLER`.
///
/// Alternatively, you can customize the error handler for a specific command
/// by calling [`EntityCommands::queue_handled`].
///
/// The [`error_handler`] module provides some simple error handlers for convenience.
pub struct EntityCommands<'a> {
@ -1754,7 +1767,8 @@ impl<'a> EntityCommands<'a> {
/// Pushes an [`EntityCommand`] to the queue, which will get executed for the current [`Entity`].
///
/// If the [`EntityCommand`] returns a [`Result`], it will be handled using the [default error handler](error_handler::default).
/// If the [`EntityCommand`] returns a [`Result`],
/// it will be handled using the [default error handler](error_handler::default).
///
/// To use a custom error handler, see [`EntityCommands::queue_handled`].
///
@ -1788,7 +1802,9 @@ impl<'a> EntityCommands<'a> {
}
/// Pushes an [`EntityCommand`] to the queue, which will get executed for the current [`Entity`].
/// If the command returns a [`Result`] the given `error_handler` will be used to handle error cases.
///
/// If the [`EntityCommand`] returns a [`Result`],
/// the given `error_handler` will be used to handle error cases.
///
/// To implicitly use the default error handler, see [`EntityCommands::queue`].
///

View File

@ -1648,7 +1648,10 @@ mod tests {
#[should_panic]
fn panic_inside_system() {
let mut world = World::new();
run_system(&mut world, || panic!("this system panics"));
let system: fn() = || {
panic!("this system panics");
};
run_system(&mut world, system);
}
#[test]

View File

@ -685,7 +685,7 @@ unsafe impl<'w, 's, D: ReadOnlyQueryData + 'static, F: QueryFilter + 'static> Re
/// // ...
/// # let _event = event;
/// }
/// set.p1().send(MyEvent::new());
/// set.p1().write(MyEvent::new());
///
/// let entities = set.p2().entities();
/// // ...

View File

@ -674,7 +674,7 @@ impl<'w> DeferredWorld<'w> {
&mut self,
event: ComponentId,
mut target: Entity,
components: &[ComponentId],
components: impl Iterator<Item = ComponentId> + Clone,
data: &mut E,
mut propagate: bool,
caller: MaybeLocation,
@ -686,7 +686,7 @@ impl<'w> DeferredWorld<'w> {
self.reborrow(),
event,
target,
components.iter().copied(),
components.clone(),
data,
&mut propagate,
caller,

View File

@ -105,7 +105,7 @@ impl<'w> EntityRef<'w> {
///
/// - If you know the concrete type of the component, you should prefer [`Self::contains`].
/// - If you know the component's [`TypeId`] but not its [`ComponentId`], consider using
/// [`Self::contains_type_id`].
/// [`Self::contains_type_id`].
#[inline]
pub fn contains_id(&self, component_id: ComponentId) -> bool {
self.cell.contains_id(component_id)
@ -510,7 +510,7 @@ impl<'w> EntityMut<'w> {
///
/// - If you know the concrete type of the component, you should prefer [`Self::contains`].
/// - If you know the component's [`TypeId`] but not its [`ComponentId`], consider using
/// [`Self::contains_type_id`].
/// [`Self::contains_type_id`].
#[inline]
pub fn contains_id(&self, component_id: ComponentId) -> bool {
self.cell.contains_id(component_id)
@ -1134,7 +1134,7 @@ impl<'w> EntityWorldMut<'w> {
///
/// - If you know the concrete type of the component, you should prefer [`Self::contains`].
/// - If you know the component's [`TypeId`] but not its [`ComponentId`], consider using
/// [`Self::contains_type_id`].
/// [`Self::contains_type_id`].
///
/// # Panics
///
@ -2653,34 +2653,29 @@ unsafe fn trigger_on_replace_and_on_remove_hooks_and_observers(
bundle_info: &BundleInfo,
caller: MaybeLocation,
) {
let bundle_components_in_archetype = || {
bundle_info
.iter_explicit_components()
.filter(|component_id| archetype.contains(*component_id))
};
if archetype.has_replace_observer() {
deferred_world.trigger_observers(
ON_REPLACE,
entity,
bundle_info.iter_explicit_components(),
bundle_components_in_archetype(),
caller,
);
}
deferred_world.trigger_on_replace(
archetype,
entity,
bundle_info.iter_explicit_components(),
caller,
);
deferred_world.trigger_on_replace(archetype, entity, bundle_components_in_archetype(), caller);
if archetype.has_remove_observer() {
deferred_world.trigger_observers(
ON_REMOVE,
entity,
bundle_info.iter_explicit_components(),
bundle_components_in_archetype(),
caller,
);
}
deferred_world.trigger_on_remove(
archetype,
entity,
bundle_info.iter_explicit_components(),
caller,
);
deferred_world.trigger_on_remove(archetype, entity, bundle_components_in_archetype(), caller);
}
/// A view into a single entity and component in a world, which may either be vacant or occupied.
@ -3052,7 +3047,7 @@ impl<'w> FilteredEntityRef<'w> {
/// # Safety
/// - No `&mut World` can exist from the underlying `UnsafeWorldCell`
/// - If `access` takes read access to a component no mutable reference to that
/// component can exist at the same time as the returned [`FilteredEntityMut`]
/// component can exist at the same time as the returned [`FilteredEntityMut`]
/// - If `access` takes any access for a component `entity` must have that component.
#[inline]
pub(crate) unsafe fn new(entity: UnsafeEntityCell<'w>, access: Access<ComponentId>) -> Self {
@ -3103,7 +3098,7 @@ impl<'w> FilteredEntityRef<'w> {
///
/// - If you know the concrete type of the component, you should prefer [`Self::contains`].
/// - If you know the component's [`TypeId`] but not its [`ComponentId`], consider using
/// [`Self::contains_type_id`].
/// [`Self::contains_type_id`].
#[inline]
pub fn contains_id(&self, component_id: ComponentId) -> bool {
self.entity.contains_id(component_id)
@ -3382,9 +3377,9 @@ impl<'w> FilteredEntityMut<'w> {
/// # Safety
/// - No `&mut World` can exist from the underlying `UnsafeWorldCell`
/// - If `access` takes read access to a component no mutable reference to that
/// component can exist at the same time as the returned [`FilteredEntityMut`]
/// component can exist at the same time as the returned [`FilteredEntityMut`]
/// - If `access` takes write access to a component, no reference to that component
/// may exist at the same time as the returned [`FilteredEntityMut`]
/// may exist at the same time as the returned [`FilteredEntityMut`]
/// - If `access` takes any access for a component `entity` must have that component.
#[inline]
pub(crate) unsafe fn new(entity: UnsafeEntityCell<'w>, access: Access<ComponentId>) -> Self {
@ -3448,7 +3443,7 @@ impl<'w> FilteredEntityMut<'w> {
///
/// - If you know the concrete type of the component, you should prefer [`Self::contains`].
/// - If you know the component's [`TypeId`] but not its [`ComponentId`], consider using
/// [`Self::contains_type_id`].
/// [`Self::contains_type_id`].
#[inline]
pub fn contains_id(&self, component_id: ComponentId) -> bool {
self.entity.contains_id(component_id)
@ -3812,7 +3807,7 @@ where
///
/// - If you know the concrete type of the component, you should prefer [`Self::contains`].
/// - If you know the component's [`TypeId`] but not its [`ComponentId`], consider using
/// [`Self::contains_type_id`].
/// [`Self::contains_type_id`].
#[inline]
pub fn contains_id(&self, component_id: ComponentId) -> bool {
self.entity.contains_id(component_id)
@ -4037,7 +4032,7 @@ where
///
/// - If you know the concrete type of the component, you should prefer [`Self::contains`].
/// - If you know the component's [`TypeId`] but not its [`ComponentId`], consider using
/// [`Self::contains_type_id`].
/// [`Self::contains_type_id`].
#[inline]
pub fn contains_id(&self, component_id: ComponentId) -> bool {
self.entity.contains_id(component_id)
@ -4146,7 +4141,7 @@ where
/// # Safety
///
/// - [`OwningPtr`] and [`StorageType`] iterators must correspond to the
/// [`BundleInfo`] used to construct [`BundleInserter`]
/// [`BundleInfo`] used to construct [`BundleInserter`]
/// - [`Entity`] must correspond to [`EntityLocation`]
unsafe fn insert_dynamic_bundle<
'a,
@ -5900,4 +5895,42 @@ mod tests {
assert_eq!(archetype_pointer_before, archetype_pointer_after);
}
#[test]
fn bundle_remove_only_triggers_for_present_components() {
let mut world = World::default();
#[derive(Component)]
struct A;
#[derive(Component)]
struct B;
#[derive(Resource, PartialEq, Eq, Debug)]
struct Tracker {
a: bool,
b: bool,
}
world.insert_resource(Tracker { a: false, b: false });
let entity = world.spawn(A).id();
world.add_observer(|_: Trigger<OnRemove, A>, mut tracker: ResMut<Tracker>| {
tracker.a = true;
});
world.add_observer(|_: Trigger<OnRemove, B>, mut tracker: ResMut<Tracker>| {
tracker.b = true;
});
world.entity_mut(entity).remove::<(A, B)>();
assert_eq!(
world.resource::<Tracker>(),
&Tracker {
a: true,
// The entity didn't have a B component, so it should not have been triggered.
b: false,
}
);
}
}

View File

@ -247,6 +247,10 @@ impl World {
}
/// Registers a new [`Component`] type and returns the [`ComponentId`] created for it.
///
/// # Usage Notes
/// In most cases, you don't need to call this method directly since component registration
/// happens automatically during system initialization.
pub fn register_component<T: Component>(&mut self) -> ComponentId {
self.components.register_component::<T>()
}

View File

@ -748,7 +748,7 @@ impl<'w> UnsafeEntityCell<'w> {
///
/// - If you know the concrete type of the component, you should prefer [`Self::contains`].
/// - If you know the component's [`TypeId`] but not its [`ComponentId`], consider using
/// [`Self::contains_type_id`].
/// [`Self::contains_type_id`].
#[inline]
pub fn contains_id(self, component_id: ComponentId) -> bool {
self.archetype().contains(component_id)
@ -1125,7 +1125,7 @@ impl<'w> UnsafeWorldCell<'w> {
///
/// # Safety
/// - `location` must refer to an archetype that contains `entity`
/// the archetype
/// the archetype
/// - `component_id` must be valid
/// - `storage_type` must accurately reflect where the components for `component_id` are stored.
/// - the caller must ensure that no aliasing rules are violated
@ -1195,7 +1195,7 @@ unsafe fn get_component_and_ticks(
///
/// # Safety
/// - `location` must refer to an archetype that contains `entity`
/// the archetype
/// the archetype
/// - `component_id` must be valid
/// - `storage_type` must accurately reflect where the components for `component_id` are stored.
/// - the caller must ensure that no aliasing rules are violated

View File

@ -1,7 +1,7 @@
[package]
name = "bevy_encase_derive"
version = "0.16.0-dev"
edition = "2021"
edition = "2024"
description = "Bevy derive macro for encase"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"

View File

@ -1,7 +1,7 @@
[package]
name = "bevy_gilrs"
version = "0.16.0-dev"
edition = "2021"
edition = "2024"
description = "Gamepad system made using Gilrs for Bevy Engine"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"

View File

@ -26,7 +26,7 @@ pub fn gilrs_event_startup_system(
gamepads.id_to_entity.insert(id, entity);
gamepads.entity_to_id.insert(entity, id);
events.send(GamepadConnectionEvent {
events.write(GamepadConnectionEvent {
gamepad: entity,
connection: GamepadConnection::Connected {
name: gamepad.name().to_string(),
@ -69,8 +69,8 @@ pub fn gilrs_event_system(
},
);
events.send(event.clone().into());
connection_events.send(event);
events.write(event.clone().into());
connection_events.write(event);
}
EventType::Disconnected => {
let gamepad = gamepads
@ -79,8 +79,8 @@ pub fn gilrs_event_system(
.copied()
.expect("mapping should exist from connection");
let event = GamepadConnectionEvent::new(gamepad, GamepadConnection::Disconnected);
events.send(event.clone().into());
connection_events.send(event);
events.write(event.clone().into());
connection_events.write(event);
}
EventType::ButtonChanged(gilrs_button, raw_value, _) => {
let Some(button) = convert_button(gilrs_button) else {
@ -91,8 +91,8 @@ pub fn gilrs_event_system(
.get(&gilrs_event.id)
.copied()
.expect("mapping should exist from connection");
events.send(RawGamepadButtonChangedEvent::new(gamepad, button, raw_value).into());
button_events.send(RawGamepadButtonChangedEvent::new(
events.write(RawGamepadButtonChangedEvent::new(gamepad, button, raw_value).into());
button_events.write(RawGamepadButtonChangedEvent::new(
gamepad, button, raw_value,
));
}
@ -105,8 +105,8 @@ pub fn gilrs_event_system(
.get(&gilrs_event.id)
.copied()
.expect("mapping should exist from connection");
events.send(RawGamepadAxisChangedEvent::new(gamepad, axis, raw_value).into());
axis_event.send(RawGamepadAxisChangedEvent::new(gamepad, axis, raw_value));
events.write(RawGamepadAxisChangedEvent::new(gamepad, axis, raw_value).into());
axis_event.write(RawGamepadAxisChangedEvent::new(gamepad, axis, raw_value));
}
_ => (),
};

View File

@ -1,7 +1,7 @@
[package]
name = "bevy_gizmos"
version = "0.16.0-dev"
edition = "2021"
edition = "2024"
description = "Provides gizmos for Bevy Engine"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"

View File

@ -1,7 +1,7 @@
[package]
name = "bevy_gizmos_macros"
version = "0.16.0-dev"
edition = "2021"
edition = "2024"
description = "Derive implementations for bevy_gizmos"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"

View File

@ -136,11 +136,11 @@ where
///
/// # Arguments
/// - `angle`: sets how much of a circle circumference is passed, e.g. PI is half a circle. This
/// value should be in the range (-2 * PI..=2 * PI)
/// value should be in the range (-2 * PI..=2 * PI)
/// - `radius`: distance between the arc and its center point
/// - `isometry` defines the translation and rotation of the arc.
/// - the translation specifies the center of the arc
/// - the rotation is counter-clockwise starting from `Vec3::Y`
/// - the translation specifies the center of the arc
/// - the rotation is counter-clockwise starting from `Vec3::Y`
/// - `color`: color of the arc
///
/// # Builder methods
@ -219,10 +219,10 @@ where
///
/// # Notes
/// - This method assumes that the points `from` and `to` are distinct from `center`. If one of
/// the points is coincident with `center`, nothing is rendered.
/// the points is coincident with `center`, nothing is rendered.
/// - The arc is drawn as a portion of a circle with a radius equal to the distance from the
/// `center` to `from`. If the distance from `center` to `to` is not equal to the radius, then
/// the results will behave as if this were the case
/// `center` to `from`. If the distance from `center` to `to` is not equal to the radius, then
/// the results will behave as if this were the case
#[inline]
pub fn short_arc_3d_between(
&mut self,
@ -265,10 +265,10 @@ where
///
/// # Notes
/// - This method assumes that the points `from` and `to` are distinct from `center`. If one of
/// the points is coincident with `center`, nothing is rendered.
/// the points is coincident with `center`, nothing is rendered.
/// - The arc is drawn as a portion of a circle with a radius equal to the distance from the
/// `center` to `from`. If the distance from `center` to `to` is not equal to the radius, then
/// the results will behave as if this were the case.
/// `center` to `from`. If the distance from `center` to `to` is not equal to the radius, then
/// the results will behave as if this were the case.
#[inline]
pub fn long_arc_3d_between(
&mut self,
@ -352,10 +352,10 @@ where
///
/// # Notes
/// - This method assumes that the points `from` and `to` are distinct from `center`. If one of
/// the points is coincident with `center`, nothing is rendered.
/// the points is coincident with `center`, nothing is rendered.
/// - The arc is drawn as a portion of a circle with a radius equal to the distance from the
/// `center` to `from`. If the distance from `center` to `to` is not equal to the radius, then
/// the results will behave as if this were the case
/// `center` to `from`. If the distance from `center` to `to` is not equal to the radius, then
/// the results will behave as if this were the case
#[inline]
pub fn short_arc_2d_between(
&mut self,
@ -398,10 +398,10 @@ where
///
/// # Notes
/// - This method assumes that the points `from` and `to` are distinct from `center`. If one of
/// the points is coincident with `center`, nothing is rendered.
/// the points is coincident with `center`, nothing is rendered.
/// - The arc is drawn as a portion of a circle with a radius equal to the distance from the
/// `center` to `from`. If the distance from `center` to `to` is not equal to the radius, then
/// the results will behave as if this were the case.
/// `center` to `from`. If the distance from `center` to `to` is not equal to the radius, then
/// the results will behave as if this were the case.
#[inline]
pub fn long_arc_2d_between(
&mut self,

View File

@ -820,8 +820,7 @@ where
let polymorphic_color: Color = color.into();
let linear_color = LinearRgba::from(polymorphic_color);
self.list_colors
.extend(iter::repeat(linear_color).take(count));
self.list_colors.extend(iter::repeat_n(linear_color, count));
}
#[inline]

View File

@ -186,10 +186,9 @@ where
/// # Arguments
///
/// - `isometry` defines the translation and rotation of the grid.
/// - the translation specifies the center of the grid
/// - defines the orientation of the grid, by default
/// we assume the grid is contained in a plane parallel
/// to the XY plane
/// - the translation specifies the center of the grid
/// - defines the orientation of the grid, by default we assume the grid is contained in a
/// plane parallel to the XY plane
/// - `cell_count`: defines the amount of cells in the x and y axes
/// - `spacing`: defines the distance between cells along the x and y axes
/// - `color`: color of the grid
@ -241,9 +240,8 @@ where
/// # Arguments
///
/// - `isometry` defines the translation and rotation of the grid.
/// - the translation specifies the center of the grid
/// - defines the orientation of the grid, by default
/// we assume the grid is aligned with all axes
/// - the translation specifies the center of the grid
/// - defines the orientation of the grid, by default we assume the grid is aligned with all axes
/// - `cell_count`: defines the amount of cells in the x, y and z axes
/// - `spacing`: defines the distance between cells along the x, y and z axes
/// - `color`: color of the grid
@ -295,9 +293,8 @@ where
/// # Arguments
///
/// - `isometry` defines the translation and rotation of the grid.
/// - the translation specifies the center of the grid
/// - defines the orientation of the grid, by default
/// we assume the grid is aligned with all axes
/// - the translation specifies the center of the grid
/// - defines the orientation of the grid, by default we assume the grid is aligned with all axes
/// - `cell_count`: defines the amount of cells in the x and y axes
/// - `spacing`: defines the distance between cells along the x and y axes
/// - `color`: color of the grid

View File

@ -7,6 +7,7 @@ use crate::{
use bevy_app::{App, Plugin};
use bevy_core_pipeline::{
core_3d::{Transparent3d, CORE_3D_DEPTH_FORMAT},
oit::OrderIndependentTransparencySettings,
prepass::{DeferredPrepass, DepthPrepass, MotionVectorPrepass, NormalPrepass},
};
@ -301,6 +302,7 @@ fn queue_line_gizmos_3d(
Has<DepthPrepass>,
Has<MotionVectorPrepass>,
Has<DeferredPrepass>,
Has<OrderIndependentTransparencySettings>,
),
)>,
) {
@ -314,7 +316,7 @@ fn queue_line_gizmos_3d(
view,
msaa,
render_layers,
(normal_prepass, depth_prepass, motion_vector_prepass, deferred_prepass),
(normal_prepass, depth_prepass, motion_vector_prepass, deferred_prepass, oit),
) in &views
{
let Some(transparent_phase) = transparent_render_phases.get_mut(&view.retained_view_entity)
@ -343,6 +345,10 @@ fn queue_line_gizmos_3d(
view_key |= MeshPipelineKey::DEFERRED_PREPASS;
}
if oit {
view_key |= MeshPipelineKey::OIT_ENABLED;
}
for (entity, main_entity, config) in &line_gizmos {
if !config.render_layers.intersects(render_layers) {
continue;

View File

@ -238,10 +238,9 @@ where
/// # Arguments
///
/// - `isometry` defines the translation and rotation of the rectangle.
/// - the translation specifies the center of the rectangle
/// - defines orientation of the rectangle, by default we
/// assume the rectangle is contained in a plane parallel
/// to the XY plane.
/// - the translation specifies the center of the rectangle
/// - defines orientation of the rectangle, by default we assume the rectangle is contained in
/// a plane parallel to the XY plane.
/// - `size`: defines the size of the rectangle. This refers to the 'outer size', similar to a bounding box.
/// - `color`: color of the rectangle
///
@ -249,7 +248,7 @@ where
///
/// - The corner radius can be adjusted with the `.corner_radius(...)` method.
/// - The resolution of the arcs at each corner (i.e. the level of detail) can be adjusted with the
/// `.arc_resolution(...)` method.
/// `.arc_resolution(...)` method.
///
/// # Example
/// ```
@ -293,9 +292,8 @@ where
/// # Arguments
///
/// - `isometry` defines the translation and rotation of the rectangle.
/// - the translation specifies the center of the rectangle
/// - defines orientation of the rectangle, by default we
/// assume the rectangle aligned with all axes.
/// - the translation specifies the center of the rectangle
/// - defines orientation of the rectangle, by default we assume the rectangle aligned with all axes.
/// - `size`: defines the size of the rectangle. This refers to the 'outer size', similar to a bounding box.
/// - `color`: color of the rectangle
///
@ -303,7 +301,7 @@ where
///
/// - The corner radius can be adjusted with the `.corner_radius(...)` method.
/// - The resolution of the arcs at each corner (i.e. the level of detail) can be adjusted with the
/// `.arc_resolution(...)` method.
/// `.arc_resolution(...)` method.
///
/// # Example
/// ```
@ -351,9 +349,8 @@ where
/// # Arguments
///
/// - `isometry` defines the translation and rotation of the cuboid.
/// - the translation specifies the center of the cuboid
/// - defines orientation of the cuboid, by default we
/// assume the cuboid aligned with all axes.
/// - the translation specifies the center of the cuboid
/// - defines orientation of the cuboid, by default we assume the cuboid aligned with all axes.
/// - `size`: defines the size of the cuboid. This refers to the 'outer size', similar to a bounding box.
/// - `color`: color of the cuboid
///
@ -361,7 +358,7 @@ where
///
/// - The edge radius can be adjusted with the `.edge_radius(...)` method.
/// - The resolution of the arcs at each edge (i.e. the level of detail) can be adjusted with the
/// `.arc_resolution(...)` method.
/// `.arc_resolution(...)` method.
///
/// # Example
/// ```

View File

@ -1,7 +1,7 @@
[package]
name = "bevy_gltf"
version = "0.16.0-dev"
edition = "2021"
edition = "2024"
description = "Bevy Engine GLTF loading"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"

View File

@ -517,10 +517,12 @@ async fn load_gltf<'a, 'b, 'c>(
);
}
}
let handle = load_context.add_labeled_asset(
GltfAssetLabel::Animation(animation.index()).to_string(),
animation_clip,
);
let handle = load_context
.add_labeled_asset(
GltfAssetLabel::Animation(animation.index()).to_string(),
animation_clip,
)
.expect("animation indices are unique, so the label is unique");
if let Some(name) = animation.name() {
named_animations.insert(name.into(), handle.clone());
}
@ -540,9 +542,9 @@ async fn load_gltf<'a, 'b, 'c>(
texture: ImageOrPath,
) {
let handle = match texture {
ImageOrPath::Image { label, image } => {
load_context.add_labeled_asset(label.to_string(), image)
}
ImageOrPath::Image { label, image } => load_context
.add_labeled_asset(label.to_string(), image)
.expect("texture indices are unique, so the label is unique"),
ImageOrPath::Path {
path,
is_srgb,
@ -696,7 +698,8 @@ async fn load_gltf<'a, 'b, 'c>(
RenderAssetUsages::default(),
)?;
let handle = load_context
.add_labeled_asset(morph_targets_label.to_string(), morph_target_image.0);
.add_labeled_asset(morph_targets_label.to_string(), morph_target_image.0)
.expect("morph target indices are unique, so the label is unique");
mesh.set_morph_targets(handle);
let extras = gltf_mesh.extras().as_ref();
@ -749,7 +752,9 @@ async fn load_gltf<'a, 'b, 'c>(
});
}
let mesh_handle = load_context.add_labeled_asset(primitive_label.to_string(), mesh);
let mesh_handle = load_context
.add_labeled_asset(primitive_label.to_string(), mesh)
.expect("primitive indices are unique, so the label is unique");
primitives.push(super::GltfPrimitive::new(
&gltf_mesh,
&primitive,
@ -766,7 +771,9 @@ async fn load_gltf<'a, 'b, 'c>(
let mesh =
super::GltfMesh::new(&gltf_mesh, primitives, get_gltf_extras(gltf_mesh.extras()));
let handle = load_context.add_labeled_asset(mesh.asset_label().to_string(), mesh);
let handle = load_context
.add_labeled_asset(mesh.asset_label().to_string(), mesh)
.expect("mesh indices are unique, so the label is unique");
if let Some(name) = gltf_mesh.name() {
named_meshes.insert(name.into(), handle.clone());
}
@ -783,16 +790,18 @@ async fn load_gltf<'a, 'b, 'c>(
.map(|mat| Mat4::from_cols_array_2d(&mat))
.collect();
load_context.add_labeled_asset(
inverse_bind_matrices_label(&gltf_skin),
SkinnedMeshInverseBindposes::from(local_to_bone_bind_matrices),
)
load_context
.add_labeled_asset(
inverse_bind_matrices_label(&gltf_skin),
SkinnedMeshInverseBindposes::from(local_to_bone_bind_matrices),
)
.expect("inverse bind matrix indices are unique, so the label is unique")
})
.collect();
let mut nodes = HashMap::<usize, Handle<GltfNode>>::default();
let mut named_nodes = <HashMap<_, _>>::default();
let mut skins = vec![];
let mut skins = <HashMap<_, _>>::default();
let mut named_skins = <HashMap<_, _>>::default();
// First, create the node handles.
@ -808,37 +817,43 @@ async fn load_gltf<'a, 'b, 'c>(
// Now populate the nodes.
for node in gltf.nodes() {
let skin = node.skin().map(|skin| {
let joints: Vec<_> = skin
.joints()
.map(|joint| nodes.get(&joint.index()).unwrap().clone())
.collect();
skins
.entry(skin.index())
.or_insert_with(|| {
let joints: Vec<_> = skin
.joints()
.map(|joint| nodes.get(&joint.index()).unwrap().clone())
.collect();
if joints.len() > MAX_JOINTS {
warn!(
"The glTF skin {} has {} joints, but the maximum supported is {}",
skin.name()
.map(ToString::to_string)
.unwrap_or_else(|| skin.index().to_string()),
joints.len(),
MAX_JOINTS
);
}
if joints.len() > MAX_JOINTS {
warn!(
"The glTF skin {} has {} joints, but the maximum supported is {}",
skin.name()
.map(ToString::to_string)
.unwrap_or_else(|| skin.index().to_string()),
joints.len(),
MAX_JOINTS
);
}
let gltf_skin = GltfSkin::new(
&skin,
joints,
skinned_mesh_inverse_bindposes[skin.index()].clone(),
get_gltf_extras(skin.extras()),
);
let gltf_skin = GltfSkin::new(
&skin,
joints,
skinned_mesh_inverse_bindposes[skin.index()].clone(),
get_gltf_extras(skin.extras()),
);
let handle = load_context.add_labeled_asset(skin_label(&skin), gltf_skin);
let handle = load_context
.add_labeled_asset(skin_label(&skin), gltf_skin)
.expect("skin indices are unique, so the label is unique");
skins.push(handle.clone());
if let Some(name) = skin.name() {
named_skins.insert(name.into(), handle.clone());
}
if let Some(name) = skin.name() {
named_skins.insert(name.into(), handle.clone());
}
handle
handle
})
.clone()
});
let children = node
@ -863,7 +878,9 @@ async fn load_gltf<'a, 'b, 'c>(
#[cfg(feature = "bevy_animation")]
let gltf_node = gltf_node.with_animation_root(animation_roots.contains(&node.index()));
let handle = load_context.add_labeled_asset(gltf_node.asset_label().to_string(), gltf_node);
let handle = load_context
.add_labeled_asset(gltf_node.asset_label().to_string(), gltf_node)
.expect("node indices are unique, so the label is unique");
nodes.insert(node.index(), handle.clone());
if let Some(name) = node.name() {
named_nodes.insert(name.into(), handle);
@ -952,7 +969,9 @@ async fn load_gltf<'a, 'b, 'c>(
});
}
let loaded_scene = scene_load_context.finish(Scene::new(world));
let scene_handle = load_context.add_loaded_labeled_asset(scene_label(&scene), loaded_scene);
let scene_handle = load_context
.add_loaded_labeled_asset(scene_label(&scene), loaded_scene)
.expect("scene indices are unique, so the label is unique");
if let Some(name) = scene.name() {
named_scenes.insert(name.into(), scene_handle.clone());
@ -969,7 +988,7 @@ async fn load_gltf<'a, 'b, 'c>(
named_scenes,
meshes,
named_meshes,
skins,
skins: skins.into_values().collect(),
named_skins,
materials,
named_materials,
@ -1119,74 +1138,78 @@ fn load_material(
is_scale_inverted: bool,
) -> Handle<StandardMaterial> {
let material_label = material_label(material, is_scale_inverted);
load_context.labeled_asset_scope(material_label, |load_context| {
let pbr = material.pbr_metallic_roughness();
load_context
.labeled_asset_scope(material_label, |load_context| {
let pbr = material.pbr_metallic_roughness();
// TODO: handle missing label handle errors here?
let color = pbr.base_color_factor();
let base_color_channel = pbr
.base_color_texture()
.map(|info| get_uv_channel(material, "base color", info.tex_coord()))
.unwrap_or_default();
let base_color_texture = pbr
.base_color_texture()
.map(|info| texture_handle(load_context, &info.texture()));
// TODO: handle missing label handle errors here?
let color = pbr.base_color_factor();
let base_color_channel = pbr
.base_color_texture()
.map(|info| get_uv_channel(material, "base color", info.tex_coord()))
.unwrap_or_default();
let base_color_texture = pbr
.base_color_texture()
.map(|info| texture_handle(load_context, &info.texture()));
let uv_transform = pbr
.base_color_texture()
.and_then(|info| {
info.texture_transform()
.map(convert_texture_transform_to_affine2)
})
.unwrap_or_default();
let uv_transform = pbr
.base_color_texture()
.and_then(|info| {
info.texture_transform()
.map(convert_texture_transform_to_affine2)
})
.unwrap_or_default();
let normal_map_channel = material
.normal_texture()
.map(|info| get_uv_channel(material, "normal map", info.tex_coord()))
.unwrap_or_default();
let normal_map_texture: Option<Handle<Image>> =
material.normal_texture().map(|normal_texture| {
// TODO: handle normal_texture.scale
texture_handle(load_context, &normal_texture.texture())
let normal_map_channel = material
.normal_texture()
.map(|info| get_uv_channel(material, "normal map", info.tex_coord()))
.unwrap_or_default();
let normal_map_texture: Option<Handle<Image>> =
material.normal_texture().map(|normal_texture| {
// TODO: handle normal_texture.scale
texture_handle(load_context, &normal_texture.texture())
});
let metallic_roughness_channel = pbr
.metallic_roughness_texture()
.map(|info| get_uv_channel(material, "metallic/roughness", info.tex_coord()))
.unwrap_or_default();
let metallic_roughness_texture = pbr.metallic_roughness_texture().map(|info| {
warn_on_differing_texture_transforms(
material,
&info,
uv_transform,
"metallic/roughness",
);
texture_handle(load_context, &info.texture())
});
let metallic_roughness_channel = pbr
.metallic_roughness_texture()
.map(|info| get_uv_channel(material, "metallic/roughness", info.tex_coord()))
.unwrap_or_default();
let metallic_roughness_texture = pbr.metallic_roughness_texture().map(|info| {
warn_on_differing_texture_transforms(
material,
&info,
uv_transform,
"metallic/roughness",
);
texture_handle(load_context, &info.texture())
});
let occlusion_channel = material
.occlusion_texture()
.map(|info| get_uv_channel(material, "occlusion", info.tex_coord()))
.unwrap_or_default();
let occlusion_texture = material.occlusion_texture().map(|occlusion_texture| {
// TODO: handle occlusion_texture.strength() (a scalar multiplier for occlusion strength)
texture_handle(load_context, &occlusion_texture.texture())
});
let occlusion_channel = material
.occlusion_texture()
.map(|info| get_uv_channel(material, "occlusion", info.tex_coord()))
.unwrap_or_default();
let occlusion_texture = material.occlusion_texture().map(|occlusion_texture| {
// TODO: handle occlusion_texture.strength() (a scalar multiplier for occlusion strength)
texture_handle(load_context, &occlusion_texture.texture())
});
let emissive = material.emissive_factor();
let emissive_channel = material
.emissive_texture()
.map(|info| get_uv_channel(material, "emissive", info.tex_coord()))
.unwrap_or_default();
let emissive_texture = material.emissive_texture().map(|info| {
// TODO: handle occlusion_texture.strength() (a scalar multiplier for occlusion strength)
warn_on_differing_texture_transforms(material, &info, uv_transform, "emissive");
texture_handle(load_context, &info.texture())
});
let emissive = material.emissive_factor();
let emissive_channel = material
.emissive_texture()
.map(|info| get_uv_channel(material, "emissive", info.tex_coord()))
.unwrap_or_default();
let emissive_texture = material.emissive_texture().map(|info| {
// TODO: handle occlusion_texture.strength() (a scalar multiplier for occlusion strength)
warn_on_differing_texture_transforms(material, &info, uv_transform, "emissive");
texture_handle(load_context, &info.texture())
});
#[cfg(feature = "pbr_transmission_textures")]
let (specular_transmission, specular_transmission_channel, specular_transmission_texture) =
material
#[cfg(feature = "pbr_transmission_textures")]
let (
specular_transmission,
specular_transmission_channel,
specular_transmission_texture,
) = material
.transmission()
.map_or((0.0, UvChannel::Uv0, None), |transmission| {
let specular_transmission_channel = transmission
@ -1208,152 +1231,156 @@ fn load_material(
)
});
#[cfg(not(feature = "pbr_transmission_textures"))]
let specular_transmission = material
.transmission()
.map_or(0.0, |transmission| transmission.transmission_factor());
#[cfg(not(feature = "pbr_transmission_textures"))]
let specular_transmission = material
.transmission()
.map_or(0.0, |transmission| transmission.transmission_factor());
#[cfg(feature = "pbr_transmission_textures")]
let (
thickness,
thickness_channel,
thickness_texture,
attenuation_distance,
attenuation_color,
) = material.volume().map_or(
(0.0, UvChannel::Uv0, None, f32::INFINITY, [1.0, 1.0, 1.0]),
|volume| {
let thickness_channel = volume
.thickness_texture()
.map(|info| get_uv_channel(material, "thickness", info.tex_coord()))
.unwrap_or_default();
let thickness_texture: Option<Handle<Image>> =
volume.thickness_texture().map(|thickness_texture| {
texture_handle(load_context, &thickness_texture.texture())
});
#[cfg(feature = "pbr_transmission_textures")]
let (
thickness,
thickness_channel,
thickness_texture,
attenuation_distance,
attenuation_color,
) = material.volume().map_or(
(0.0, UvChannel::Uv0, None, f32::INFINITY, [1.0, 1.0, 1.0]),
|volume| {
let thickness_channel = volume
.thickness_texture()
.map(|info| get_uv_channel(material, "thickness", info.tex_coord()))
.unwrap_or_default();
let thickness_texture: Option<Handle<Image>> =
volume.thickness_texture().map(|thickness_texture| {
texture_handle(load_context, &thickness_texture.texture())
});
(
volume.thickness_factor(),
thickness_channel,
thickness_texture,
volume.attenuation_distance(),
volume.attenuation_color(),
)
},
);
#[cfg(not(feature = "pbr_transmission_textures"))]
let (thickness, attenuation_distance, attenuation_color) =
material
.volume()
.map_or((0.0, f32::INFINITY, [1.0, 1.0, 1.0]), |volume| {
(
volume.thickness_factor(),
thickness_channel,
thickness_texture,
volume.attenuation_distance(),
volume.attenuation_color(),
)
});
},
);
let ior = material.ior().unwrap_or(1.5);
#[cfg(not(feature = "pbr_transmission_textures"))]
let (thickness, attenuation_distance, attenuation_color) =
material
.volume()
.map_or((0.0, f32::INFINITY, [1.0, 1.0, 1.0]), |volume| {
(
volume.thickness_factor(),
volume.attenuation_distance(),
volume.attenuation_color(),
)
});
// Parse the `KHR_materials_clearcoat` extension data if necessary.
let clearcoat =
ClearcoatExtension::parse(load_context, document, material).unwrap_or_default();
let ior = material.ior().unwrap_or(1.5);
// Parse the `KHR_materials_anisotropy` extension data if necessary.
let anisotropy =
AnisotropyExtension::parse(load_context, document, material).unwrap_or_default();
// Parse the `KHR_materials_clearcoat` extension data if necessary.
let clearcoat =
ClearcoatExtension::parse(load_context, document, material).unwrap_or_default();
// Parse the `KHR_materials_specular` extension data if necessary.
let specular =
SpecularExtension::parse(load_context, document, material).unwrap_or_default();
// Parse the `KHR_materials_anisotropy` extension data if necessary.
let anisotropy =
AnisotropyExtension::parse(load_context, document, material).unwrap_or_default();
// We need to operate in the Linear color space and be willing to exceed 1.0 in our channels
let base_emissive = LinearRgba::rgb(emissive[0], emissive[1], emissive[2]);
let emissive = base_emissive * material.emissive_strength().unwrap_or(1.0);
// Parse the `KHR_materials_specular` extension data if necessary.
let specular =
SpecularExtension::parse(load_context, document, material).unwrap_or_default();
StandardMaterial {
base_color: Color::linear_rgba(color[0], color[1], color[2], color[3]),
base_color_channel,
base_color_texture,
perceptual_roughness: pbr.roughness_factor(),
metallic: pbr.metallic_factor(),
metallic_roughness_channel,
metallic_roughness_texture,
normal_map_channel,
normal_map_texture,
double_sided: material.double_sided(),
cull_mode: if material.double_sided() {
None
} else if is_scale_inverted {
Some(Face::Front)
} else {
Some(Face::Back)
},
occlusion_channel,
occlusion_texture,
emissive,
emissive_channel,
emissive_texture,
specular_transmission,
#[cfg(feature = "pbr_transmission_textures")]
specular_transmission_channel,
#[cfg(feature = "pbr_transmission_textures")]
specular_transmission_texture,
thickness,
#[cfg(feature = "pbr_transmission_textures")]
thickness_channel,
#[cfg(feature = "pbr_transmission_textures")]
thickness_texture,
ior,
attenuation_distance,
attenuation_color: Color::linear_rgb(
attenuation_color[0],
attenuation_color[1],
attenuation_color[2],
),
unlit: material.unlit(),
alpha_mode: alpha_mode(material),
uv_transform,
clearcoat: clearcoat.clearcoat_factor.unwrap_or_default() as f32,
clearcoat_perceptual_roughness: clearcoat.clearcoat_roughness_factor.unwrap_or_default()
as f32,
#[cfg(feature = "pbr_multi_layer_material_textures")]
clearcoat_channel: clearcoat.clearcoat_channel,
#[cfg(feature = "pbr_multi_layer_material_textures")]
clearcoat_texture: clearcoat.clearcoat_texture,
#[cfg(feature = "pbr_multi_layer_material_textures")]
clearcoat_roughness_channel: clearcoat.clearcoat_roughness_channel,
#[cfg(feature = "pbr_multi_layer_material_textures")]
clearcoat_roughness_texture: clearcoat.clearcoat_roughness_texture,
#[cfg(feature = "pbr_multi_layer_material_textures")]
clearcoat_normal_channel: clearcoat.clearcoat_normal_channel,
#[cfg(feature = "pbr_multi_layer_material_textures")]
clearcoat_normal_texture: clearcoat.clearcoat_normal_texture,
anisotropy_strength: anisotropy.anisotropy_strength.unwrap_or_default() as f32,
anisotropy_rotation: anisotropy.anisotropy_rotation.unwrap_or_default() as f32,
#[cfg(feature = "pbr_anisotropy_texture")]
anisotropy_channel: anisotropy.anisotropy_channel,
#[cfg(feature = "pbr_anisotropy_texture")]
anisotropy_texture: anisotropy.anisotropy_texture,
// From the `KHR_materials_specular` spec:
// <https://github.com/KhronosGroup/glTF/tree/main/extensions/2.0/Khronos/KHR_materials_specular#materials-with-reflectance-parameter>
reflectance: specular.specular_factor.unwrap_or(1.0) as f32 * 0.5,
#[cfg(feature = "pbr_specular_textures")]
specular_channel: specular.specular_channel,
#[cfg(feature = "pbr_specular_textures")]
specular_texture: specular.specular_texture,
specular_tint: match specular.specular_color_factor {
Some(color) => Color::linear_rgb(color[0] as f32, color[1] as f32, color[2] as f32),
None => Color::WHITE,
},
#[cfg(feature = "pbr_specular_textures")]
specular_tint_channel: specular.specular_color_channel,
#[cfg(feature = "pbr_specular_textures")]
specular_tint_texture: specular.specular_color_texture,
..Default::default()
}
})
// We need to operate in the Linear color space and be willing to exceed 1.0 in our channels
let base_emissive = LinearRgba::rgb(emissive[0], emissive[1], emissive[2]);
let emissive = base_emissive * material.emissive_strength().unwrap_or(1.0);
StandardMaterial {
base_color: Color::linear_rgba(color[0], color[1], color[2], color[3]),
base_color_channel,
base_color_texture,
perceptual_roughness: pbr.roughness_factor(),
metallic: pbr.metallic_factor(),
metallic_roughness_channel,
metallic_roughness_texture,
normal_map_channel,
normal_map_texture,
double_sided: material.double_sided(),
cull_mode: if material.double_sided() {
None
} else if is_scale_inverted {
Some(Face::Front)
} else {
Some(Face::Back)
},
occlusion_channel,
occlusion_texture,
emissive,
emissive_channel,
emissive_texture,
specular_transmission,
#[cfg(feature = "pbr_transmission_textures")]
specular_transmission_channel,
#[cfg(feature = "pbr_transmission_textures")]
specular_transmission_texture,
thickness,
#[cfg(feature = "pbr_transmission_textures")]
thickness_channel,
#[cfg(feature = "pbr_transmission_textures")]
thickness_texture,
ior,
attenuation_distance,
attenuation_color: Color::linear_rgb(
attenuation_color[0],
attenuation_color[1],
attenuation_color[2],
),
unlit: material.unlit(),
alpha_mode: alpha_mode(material),
uv_transform,
clearcoat: clearcoat.clearcoat_factor.unwrap_or_default() as f32,
clearcoat_perceptual_roughness: clearcoat
.clearcoat_roughness_factor
.unwrap_or_default() as f32,
#[cfg(feature = "pbr_multi_layer_material_textures")]
clearcoat_channel: clearcoat.clearcoat_channel,
#[cfg(feature = "pbr_multi_layer_material_textures")]
clearcoat_texture: clearcoat.clearcoat_texture,
#[cfg(feature = "pbr_multi_layer_material_textures")]
clearcoat_roughness_channel: clearcoat.clearcoat_roughness_channel,
#[cfg(feature = "pbr_multi_layer_material_textures")]
clearcoat_roughness_texture: clearcoat.clearcoat_roughness_texture,
#[cfg(feature = "pbr_multi_layer_material_textures")]
clearcoat_normal_channel: clearcoat.clearcoat_normal_channel,
#[cfg(feature = "pbr_multi_layer_material_textures")]
clearcoat_normal_texture: clearcoat.clearcoat_normal_texture,
anisotropy_strength: anisotropy.anisotropy_strength.unwrap_or_default() as f32,
anisotropy_rotation: anisotropy.anisotropy_rotation.unwrap_or_default() as f32,
#[cfg(feature = "pbr_anisotropy_texture")]
anisotropy_channel: anisotropy.anisotropy_channel,
#[cfg(feature = "pbr_anisotropy_texture")]
anisotropy_texture: anisotropy.anisotropy_texture,
// From the `KHR_materials_specular` spec:
// <https://github.com/KhronosGroup/glTF/tree/main/extensions/2.0/Khronos/KHR_materials_specular#materials-with-reflectance-parameter>
reflectance: specular.specular_factor.unwrap_or(1.0) as f32 * 0.5,
#[cfg(feature = "pbr_specular_textures")]
specular_channel: specular.specular_channel,
#[cfg(feature = "pbr_specular_textures")]
specular_texture: specular.specular_texture,
specular_tint: match specular.specular_color_factor {
Some(color) => {
Color::linear_rgb(color[0] as f32, color[1] as f32, color[2] as f32)
}
None => Color::WHITE,
},
#[cfg(feature = "pbr_specular_textures")]
specular_tint_channel: specular.specular_color_channel,
#[cfg(feature = "pbr_specular_textures")]
specular_tint_texture: specular.specular_color_texture,
..Default::default()
}
})
.expect("material indices are unique, so the label is unique")
}
fn get_uv_channel(material: &Material, texture_kind: &str, tex_coord: u32) -> UvChannel {

View File

@ -1,7 +1,7 @@
[package]
name = "bevy_image"
version = "0.16.0-dev"
edition = "2021"
edition = "2024"
description = "Provides image types for Bevy Engine"
homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy"

Some files were not shown because too many files have changed in this diff Show More