Merge branch 'main' of https://github.com/bevyengine/bevy into reflect-auto-registration

This commit is contained in:
eugineerd 2025-02-25 17:10:01 +00:00
commit c6be4fa012
277 changed files with 4097 additions and 1691 deletions

View File

@ -244,7 +244,7 @@ jobs:
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Check for typos - name: Check for typos
uses: crate-ci/typos@v1.29.7 uses: crate-ci/typos@v1.29.9
- name: Typos info - name: Typos info
if: failure() if: failure()
run: | run: |
@ -335,6 +335,7 @@ jobs:
timeout-minutes: 30 timeout-minutes: 30
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- name: check for missing metadata - name: check for missing metadata
id: missing-metadata id: missing-metadata
run: cargo run -p build-templated-pages -- check-missing examples run: cargo run -p build-templated-pages -- check-missing examples
@ -369,6 +370,7 @@ jobs:
needs: check-missing-examples-in-docs needs: check-missing-examples-in-docs
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- name: check for missing features - name: check for missing features
id: missing-features id: missing-features
run: cargo run -p build-templated-pages -- check-missing features run: cargo run -p build-templated-pages -- check-missing features
@ -412,6 +414,7 @@ jobs:
~/.cargo/git/db/ ~/.cargo/git/db/
target/ target/
key: ${{ runner.os }}-cargo-msrv-${{ hashFiles('**/Cargo.toml') }} key: ${{ runner.os }}-cargo-msrv-${{ hashFiles('**/Cargo.toml') }}
- uses: dtolnay/rust-toolchain@stable
- name: get MSRV - name: get MSRV
id: msrv id: msrv
run: | run: |

View File

@ -219,13 +219,6 @@ jobs:
target/ target/
key: ${{ runner.os }}-wasm-run-examples-${{ hashFiles('**/Cargo.toml') }} key: ${{ runner.os }}-wasm-run-examples-${{ hashFiles('**/Cargo.toml') }}
- name: install xvfb, llvmpipe and lavapipe
run: |
sudo apt-get update -y -qq
sudo add-apt-repository ppa:kisak/turtle -y
sudo apt-get update
sudo apt install -y xvfb libgl1-mesa-dri libxcb-xfixes0-dev mesa-vulkan-drivers
- name: Install wasm-bindgen - name: Install wasm-bindgen
run: cargo install --force wasm-bindgen-cli run: cargo install --force wasm-bindgen-cli

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy" name = "bevy"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
categories = ["game-engines", "graphics", "gui", "rendering"] categories = ["game-engines", "graphics", "gui", "rendering"]
description = "A refreshingly simple data-driven game engine and app framework" description = "A refreshingly simple data-driven game engine and app framework"
exclude = ["assets/", "tools/", ".github/", "crates/", "examples/wasm/assets/"] exclude = ["assets/", "tools/", ".github/", "crates/", "examples/wasm/assets/"]
@ -10,7 +10,7 @@ keywords = ["game", "engine", "gamedev", "graphics", "bevy"]
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"
documentation = "https://docs.rs/bevy" documentation = "https://docs.rs/bevy"
rust-version = "1.83.0" rust-version = "1.85.0"
[workspace] [workspace]
resolver = "2" resolver = "2"
@ -46,6 +46,7 @@ undocumented_unsafe_blocks = "warn"
unwrap_or_default = "warn" unwrap_or_default = "warn"
needless_lifetimes = "allow" needless_lifetimes = "allow"
too_many_arguments = "allow" too_many_arguments = "allow"
nonstandard_macro_braces = "warn"
ptr_as_ptr = "warn" ptr_as_ptr = "warn"
ptr_cast_constness = "warn" ptr_cast_constness = "warn"
@ -91,6 +92,7 @@ undocumented_unsafe_blocks = "warn"
unwrap_or_default = "warn" unwrap_or_default = "warn"
needless_lifetimes = "allow" needless_lifetimes = "allow"
too_many_arguments = "allow" too_many_arguments = "allow"
nonstandard_macro_braces = "warn"
ptr_as_ptr = "warn" ptr_as_ptr = "warn"
ptr_cast_constness = "warn" ptr_cast_constness = "warn"
@ -474,6 +476,9 @@ track_location = ["bevy_internal/track_location"]
# Enable function reflection # Enable function reflection
reflect_functions = ["bevy_internal/reflect_functions"] reflect_functions = ["bevy_internal/reflect_functions"]
# Enable documentation reflection
reflect_documentation = ["bevy_internal/reflect_documentation"]
# Enable automatic reflect registration # Enable automatic reflect registration
reflect_auto_register = ["bevy_internal/reflect_auto_register"] reflect_auto_register = ["bevy_internal/reflect_auto_register"]
@ -782,6 +787,17 @@ description = "Used to test alpha modes with mesh2d"
category = "2D Rendering" category = "2D Rendering"
wasm = true wasm = true
[[example]]
name = "mesh2d_repeated_texture"
path = "examples/2d/mesh2d_repeated_texture.rs"
doc-scrape-examples = true
[package.metadata.example.mesh2d_repeated_texture]
name = "Mesh2d Repeated Texture"
description = "Showcase of using `uv_transform` on the `ColorMaterial` of a `Mesh2d`"
category = "2D Rendering"
wasm = true
[[example]] [[example]]
name = "pixel_grid_snap" name = "pixel_grid_snap"
path = "examples/2d/pixel_grid_snap.rs" path = "examples/2d/pixel_grid_snap.rs"

View File

@ -1,6 +1,6 @@
[package] [package]
name = "benches" name = "benches"
edition = "2021" edition = "2024"
description = "Benchmarks that test Bevy's performance" description = "Benchmarks that test Bevy's performance"
publish = false publish = false
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
@ -50,6 +50,7 @@ undocumented_unsafe_blocks = "warn"
unwrap_or_default = "warn" unwrap_or_default = "warn"
needless_lifetimes = "allow" needless_lifetimes = "allow"
too_many_arguments = "allow" too_many_arguments = "allow"
nonstandard_macro_braces = "warn"
ptr_as_ptr = "warn" ptr_as_ptr = "warn"
ptr_cast_constness = "warn" ptr_cast_constness = "warn"

View File

@ -95,7 +95,7 @@ fn all_added_detection_generic<T: Component + Default>(group: &mut BenchGroup, e
let query = generic_filter_query::<Added<T>>(&mut world); let query = generic_filter_query::<Added<T>>(&mut world);
(world, query) (world, query)
}, },
|(ref mut world, ref mut query)| { |(world, query)| {
let mut count = 0; let mut count = 0;
for entity in query.iter(world) { for entity in query.iter(world) {
black_box(entity); black_box(entity);
@ -143,7 +143,7 @@ fn all_changed_detection_generic<T: Component<Mutability = Mutable> + Default +
let query = generic_filter_query::<Changed<T>>(&mut world); let query = generic_filter_query::<Changed<T>>(&mut world);
(world, query) (world, query)
}, },
|(ref mut world, ref mut query)| { |(world, query)| {
let mut count = 0; let mut count = 0;
for entity in query.iter(world) { for entity in query.iter(world) {
black_box(entity); black_box(entity);
@ -196,7 +196,7 @@ fn few_changed_detection_generic<T: Component<Mutability = Mutable> + Default +
let query = generic_filter_query::<Changed<T>>(&mut world); let query = generic_filter_query::<Changed<T>>(&mut world);
(world, query) (world, query)
}, },
|(ref mut world, ref mut query)| { |(world, query)| {
for entity in query.iter(world) { for entity in query.iter(world) {
black_box(entity); black_box(entity);
} }
@ -237,7 +237,7 @@ fn none_changed_detection_generic<T: Component<Mutability = Mutable> + Default>(
let query = generic_filter_query::<Changed<T>>(&mut world); let query = generic_filter_query::<Changed<T>>(&mut world);
(world, query) (world, query)
}, },
|(ref mut world, ref mut query)| { |(world, query)| {
let mut count = 0; let mut count = 0;
for entity in query.iter(world) { for entity in query.iter(world) {
black_box(entity); black_box(entity);
@ -343,7 +343,7 @@ fn multiple_archetype_none_changed_detection_generic<
let query = generic_filter_query::<Changed<T>>(&mut world); let query = generic_filter_query::<Changed<T>>(&mut world);
(world, query) (world, query)
}, },
|(ref mut world, ref mut query)| { |(world, query)| {
let mut count = 0; let mut count = 0;
for entity in query.iter(world) { for entity in query.iter(world) {
black_box(entity); black_box(entity);

View File

@ -12,7 +12,7 @@ impl Benchmark {
let mut world = World::default(); let mut world = World::default();
let entities = world let entities = world
.spawn_batch(core::iter::repeat(A(0.)).take(10000)) .spawn_batch(core::iter::repeat_n(A(0.), 10_000))
.collect(); .collect();
Self(world, entities) Self(world, entities)
} }

View File

@ -19,15 +19,15 @@ impl<'w> Benchmark<'w> {
pub fn new() -> Self { pub fn new() -> Self {
let mut world = World::new(); let mut world = World::new();
world.spawn_batch( world.spawn_batch(core::iter::repeat_n(
core::iter::repeat(( (
Transform(Mat4::from_scale(Vec3::ONE)), Transform(Mat4::from_scale(Vec3::ONE)),
Position(Vec3::X), Position(Vec3::X),
Rotation(Vec3::X), Rotation(Vec3::X),
Velocity(Vec3::X), Velocity(Vec3::X),
)) ),
.take(10_000), 10_000,
); ));
let query = world.query::<(&Velocity, &mut Position)>(); let query = world.query::<(&Velocity, &mut Position)>();
Self(world, query) Self(world, query)

View File

@ -19,15 +19,15 @@ impl<'w> Benchmark<'w> {
pub fn new() -> Self { pub fn new() -> Self {
let mut world = World::new(); let mut world = World::new();
world.spawn_batch( world.spawn_batch(core::iter::repeat_n(
core::iter::repeat(( (
Transform(Mat4::from_scale(Vec3::ONE)), Transform(Mat4::from_scale(Vec3::ONE)),
Position(Vec3::X), Position(Vec3::X),
Rotation(Vec3::X), Rotation(Vec3::X),
Velocity(Vec3::X), Velocity(Vec3::X),
)) ),
.take(10_000), 10_000,
); ));
let query = world.query::<(&Velocity, &mut Position)>(); let query = world.query::<(&Velocity, &mut Position)>();
Self(world, query) Self(world, query)

View File

@ -21,15 +21,15 @@ impl<'w> Benchmark<'w> {
pub fn new() -> Self { pub fn new() -> Self {
let mut world = World::new(); let mut world = World::new();
world.spawn_batch( world.spawn_batch(core::iter::repeat_n(
core::iter::repeat(( (
Transform(Mat4::from_scale(Vec3::ONE)), Transform(Mat4::from_scale(Vec3::ONE)),
Position(Vec3::X), Position(Vec3::X),
Rotation(Vec3::X), Rotation(Vec3::X),
Velocity(Vec3::X), Velocity(Vec3::X),
)) ),
.take(10_000), 10_000,
); ));
let query = world.query::<(&Velocity, &mut Position)>(); let query = world.query::<(&Velocity, &mut Position)>();
Self(world, query) Self(world, query)

View File

@ -33,8 +33,8 @@ impl<'w> Benchmark<'w> {
pub fn new() -> Self { pub fn new() -> Self {
let mut world = World::new(); let mut world = World::new();
world.spawn_batch( world.spawn_batch(core::iter::repeat_n(
core::iter::repeat(( (
Transform(Mat4::from_scale(Vec3::ONE)), Transform(Mat4::from_scale(Vec3::ONE)),
Rotation(Vec3::X), Rotation(Vec3::X),
Position::<0>(Vec3::X), Position::<0>(Vec3::X),
@ -47,9 +47,9 @@ impl<'w> Benchmark<'w> {
Velocity::<3>(Vec3::X), Velocity::<3>(Vec3::X),
Position::<4>(Vec3::X), Position::<4>(Vec3::X),
Velocity::<4>(Vec3::X), Velocity::<4>(Vec3::X),
)) ),
.take(10_000), 10_000,
); ));
let query = world.query(); let query = world.query();
Self(world, query) Self(world, query)

View File

@ -35,8 +35,8 @@ impl<'w> Benchmark<'w> {
pub fn new() -> Self { pub fn new() -> Self {
let mut world = World::new(); let mut world = World::new();
world.spawn_batch( world.spawn_batch(core::iter::repeat_n(
core::iter::repeat(( (
Transform(Mat4::from_scale(Vec3::ONE)), Transform(Mat4::from_scale(Vec3::ONE)),
Rotation(Vec3::X), Rotation(Vec3::X),
Position::<0>(Vec3::X), Position::<0>(Vec3::X),
@ -49,9 +49,9 @@ impl<'w> Benchmark<'w> {
Velocity::<3>(Vec3::X), Velocity::<3>(Vec3::X),
Position::<4>(Vec3::X), Position::<4>(Vec3::X),
Velocity::<4>(Vec3::X), Velocity::<4>(Vec3::X),
)) ),
.take(10_000), 10_000,
); ));
let query = world.query(); let query = world.query();
Self(world, query) Self(world, query)

View File

@ -21,15 +21,15 @@ impl<'w> Benchmark<'w> {
pub fn new() -> Self { pub fn new() -> Self {
let mut world = World::new(); let mut world = World::new();
world.spawn_batch( world.spawn_batch(core::iter::repeat_n(
core::iter::repeat(( (
Transform(Mat4::from_scale(Vec3::ONE)), Transform(Mat4::from_scale(Vec3::ONE)),
Position(Vec3::X), Position(Vec3::X),
Rotation(Vec3::X), Rotation(Vec3::X),
Velocity(Vec3::X), Velocity(Vec3::X),
)) ),
.take(10_000), 10_000,
); ));
let query = world.query::<(&Velocity, &mut Position)>(); let query = world.query::<(&Velocity, &mut Position)>();
Self(world, query) Self(world, query)

View File

@ -19,15 +19,15 @@ impl Benchmark {
pub fn new() -> Self { pub fn new() -> Self {
let mut world = World::new(); let mut world = World::new();
world.spawn_batch( world.spawn_batch(core::iter::repeat_n(
core::iter::repeat(( (
Transform(Mat4::from_scale(Vec3::ONE)), Transform(Mat4::from_scale(Vec3::ONE)),
Position(Vec3::X), Position(Vec3::X),
Rotation(Vec3::X), Rotation(Vec3::X),
Velocity(Vec3::X), Velocity(Vec3::X),
)) ),
.take(10_000), 10_000,
); ));
fn query_system(mut query: Query<(&Velocity, &mut Position)>) { fn query_system(mut query: Query<(&Velocity, &mut Position)>) {
for (velocity, mut position) in &mut query { for (velocity, mut position) in &mut query {

View File

@ -33,8 +33,8 @@ impl<'w> Benchmark<'w> {
pub fn new() -> Self { pub fn new() -> Self {
let mut world = World::new(); let mut world = World::new();
world.spawn_batch( world.spawn_batch(core::iter::repeat_n(
core::iter::repeat(( (
Transform(Mat4::from_scale(Vec3::ONE)), Transform(Mat4::from_scale(Vec3::ONE)),
Rotation(Vec3::X), Rotation(Vec3::X),
Position::<0>(Vec3::X), Position::<0>(Vec3::X),
@ -47,9 +47,9 @@ impl<'w> Benchmark<'w> {
Velocity::<3>(Vec3::X), Velocity::<3>(Vec3::X),
Position::<4>(Vec3::X), Position::<4>(Vec3::X),
Velocity::<4>(Vec3::X), Velocity::<4>(Vec3::X),
)) ),
.take(10_000), 10_000,
); ));
let query = world.query(); let query = world.query();
Self(world, query) Self(world, query)

View File

@ -35,8 +35,8 @@ impl<'w> Benchmark<'w> {
pub fn new() -> Self { pub fn new() -> Self {
let mut world = World::new(); let mut world = World::new();
world.spawn_batch( world.spawn_batch(core::iter::repeat_n(
core::iter::repeat(( (
Transform(Mat4::from_scale(Vec3::ONE)), Transform(Mat4::from_scale(Vec3::ONE)),
Rotation(Vec3::X), Rotation(Vec3::X),
Position::<0>(Vec3::X), Position::<0>(Vec3::X),
@ -49,9 +49,9 @@ impl<'w> Benchmark<'w> {
Velocity::<3>(Vec3::X), Velocity::<3>(Vec3::X),
Position::<4>(Vec3::X), Position::<4>(Vec3::X),
Velocity::<4>(Vec3::X), Velocity::<4>(Vec3::X),
)) ),
.take(10_000), 10_000,
); ));
let query = world.query(); let query = world.query();
Self(world, query) Self(world, query)

View File

@ -30,15 +30,15 @@ impl<'w> Benchmark<'w> {
let mut world = World::new(); let mut world = World::new();
let iter = world.spawn_batch( let iter = world.spawn_batch(core::iter::repeat_n(
core::iter::repeat(( (
Transform(Mat4::from_scale(Vec3::ONE)), Transform(Mat4::from_scale(Vec3::ONE)),
Position(Vec3::X), Position(Vec3::X),
Rotation(Vec3::X), Rotation(Vec3::X),
Velocity(Vec3::X), Velocity(Vec3::X),
)) ),
.take(100_000), 100_000,
); ));
let entities = iter.into_iter().collect::<Vec<Entity>>(); let entities = iter.into_iter().collect::<Vec<Entity>>();
for i in 0..fragment { for i in 0..fragment {
let mut e = world.entity_mut(entities[i as usize]); let mut e = world.entity_mut(entities[i as usize]);

View File

@ -1,6 +1,10 @@
use core::hint::black_box; use core::hint::black_box;
use bevy_ecs::{entity::Entity, event::Event, observer::Trigger, world::World}; use bevy_ecs::{
event::Event,
observer::{Trigger, TriggerTargets},
world::World,
};
use criterion::Criterion; use criterion::Criterion;
use rand::{prelude::SliceRandom, SeedableRng}; use rand::{prelude::SliceRandom, SeedableRng};
@ -46,6 +50,6 @@ fn empty_listener_base(trigger: Trigger<EventBase>) {
black_box(trigger); black_box(trigger);
} }
fn send_base_event(world: &mut World, entities: &Vec<Entity>) { fn send_base_event(world: &mut World, entities: impl TriggerTargets) {
world.trigger_targets(EventBase, entities); world.trigger_targets(EventBase, entities);
} }

View File

@ -11,16 +11,16 @@ fn make_entity(rng: &mut impl Rng, size: usize) -> Entity {
// * For ids, half are in [0, size), half are unboundedly larger. // * For ids, half are in [0, size), half are unboundedly larger.
// * For generations, half are in [1, 3), half are unboundedly larger. // * For generations, half are in [1, 3), half are unboundedly larger.
let x: f64 = rng.gen(); let x: f64 = rng.r#gen();
let id = -(1.0 - x).log2() * (size as f64); let id = -(1.0 - x).log2() * (size as f64);
let x: f64 = rng.gen(); let x: f64 = rng.r#gen();
let gen = 1.0 + -(1.0 - x).log2() * 2.0; let generation = 1.0 + -(1.0 - x).log2() * 2.0;
// this is not reliable, but we're internal so a hack is ok // this is not reliable, but we're internal so a hack is ok
let bits = ((gen as u64) << 32) | (id as u64); let bits = ((generation as u64) << 32) | (id as u64);
let e = Entity::from_bits(bits); let e = Entity::from_bits(bits);
assert_eq!(e.index(), id as u32); assert_eq!(e.index(), id as u32);
assert_eq!(e.generation(), gen as u32); assert_eq!(e.generation(), generation as u32);
e e
} }

View File

@ -75,8 +75,8 @@ fn concrete_list_apply(criterion: &mut Criterion) {
let mut group = create_group(criterion, bench!("concrete_list_apply")); let mut group = create_group(criterion, bench!("concrete_list_apply"));
let empty_base = |_: usize| Vec::<u64>::new; let empty_base = |_: usize| Vec::<u64>::new;
let full_base = |size: usize| move || iter::repeat(0).take(size).collect::<Vec<u64>>(); let full_base = |size: usize| move || iter::repeat_n(0, size).collect::<Vec<u64>>();
let patch = |size: usize| iter::repeat(1).take(size).collect::<Vec<u64>>(); let patch = |size: usize| iter::repeat_n(1, size).collect::<Vec<u64>>();
list_apply(&mut group, "empty_base_concrete_patch", empty_base, patch); list_apply(&mut group, "empty_base_concrete_patch", empty_base, patch);
@ -103,7 +103,7 @@ fn concrete_list_clone_dynamic(criterion: &mut Criterion) {
BenchmarkId::from_parameter(size), BenchmarkId::from_parameter(size),
&size, &size,
|bencher, &size| { |bencher, &size| {
let v = iter::repeat(0).take(size).collect::<Vec<_>>(); let v = iter::repeat_n(0, size).collect::<Vec<_>>();
bencher.iter(|| black_box(&v).clone_dynamic()); bencher.iter(|| black_box(&v).clone_dynamic());
}, },
@ -123,7 +123,7 @@ fn dynamic_list_push(criterion: &mut Criterion) {
BenchmarkId::from_parameter(size), BenchmarkId::from_parameter(size),
&size, &size,
|bencher, &size| { |bencher, &size| {
let src = iter::repeat(()).take(size).collect::<Vec<_>>(); let src = iter::repeat_n((), size).collect::<Vec<_>>();
let dst = DynamicList::default(); let dst = DynamicList::default();
bencher.iter_batched( bencher.iter_batched(
@ -146,8 +146,8 @@ fn dynamic_list_apply(criterion: &mut Criterion) {
let mut group = create_group(criterion, bench!("dynamic_list_apply")); let mut group = create_group(criterion, bench!("dynamic_list_apply"));
let empty_base = |_: usize| || Vec::<u64>::new().clone_dynamic(); let empty_base = |_: usize| || Vec::<u64>::new().clone_dynamic();
let full_base = |size: usize| move || iter::repeat(0).take(size).collect::<Vec<u64>>(); let full_base = |size: usize| move || iter::repeat_n(0, size).collect::<Vec<u64>>();
let patch = |size: usize| iter::repeat(1).take(size).collect::<Vec<u64>>(); let patch = |size: usize| iter::repeat_n(1, size).collect::<Vec<u64>>();
list_apply(&mut group, "empty_base_concrete_patch", empty_base, patch); list_apply(&mut group, "empty_base_concrete_patch", empty_base, patch);

View File

@ -145,7 +145,7 @@ fn u64_to_n_byte_key(k: u64, n: usize) -> String {
write!(&mut key, "{}", k).unwrap(); write!(&mut key, "{}", k).unwrap();
// Pad key to n bytes. // Pad key to n bytes.
key.extend(iter::repeat('\0').take(n - key.len())); key.extend(iter::repeat_n('\0', n - key.len()));
key key
} }

View File

@ -43,3 +43,6 @@ disallowed-methods = [
{ path = "f32::atanh", reason = "use bevy_math::ops::atanh instead for libm determinism" }, { path = "f32::atanh", reason = "use bevy_math::ops::atanh instead for libm determinism" },
{ path = "criterion::black_box", reason = "use core::hint::black_box instead" }, { path = "criterion::black_box", reason = "use core::hint::black_box instead" },
] ]
# Require `bevy_ecs::children!` to use `[]` braces, instead of `()` or `{}`.
standard-macro-braces = [{ name = "children", brace = "[" }]

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_a11y" name = "bevy_a11y"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Provides accessibility support for Bevy Engine" description = "Provides accessibility support for Bevy Engine"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"
@ -50,15 +50,6 @@ critical-section = [
"bevy_input_focus/critical-section", "bevy_input_focus/critical-section",
] ]
## `portable-atomic` provides additional platform support for atomic types and
## operations, even on targets without native support.
portable-atomic = [
"bevy_app/portable-atomic",
"bevy_ecs/portable-atomic",
"bevy_reflect?/portable-atomic",
"bevy_input_focus/portable-atomic",
]
## Uses the `libm` maths library instead of the one provided in `std` and `core`. ## Uses the `libm` maths library instead of the one provided in `std` and `core`.
libm = ["bevy_input_focus/libm"] libm = ["bevy_input_focus/libm"]

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_animation" name = "bevy_animation"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Provides animation functionality for Bevy Engine" description = "Provides animation functionality for Bevy Engine"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"

View File

@ -884,10 +884,10 @@ impl ThreadedAnimationGraph {
self.sorted_edge_ranges.clear(); self.sorted_edge_ranges.clear();
self.sorted_edge_ranges self.sorted_edge_ranges
.extend(iter::repeat(0..0).take(node_count)); .extend(iter::repeat_n(0..0, node_count));
self.computed_masks.clear(); self.computed_masks.clear();
self.computed_masks.extend(iter::repeat(0).take(node_count)); self.computed_masks.extend(iter::repeat_n(0, node_count));
} }
/// Recursively constructs the [`ThreadedAnimationGraph`] for the subtree /// Recursively constructs the [`ThreadedAnimationGraph`] for the subtree

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_app" name = "bevy_app"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Provides core App functionality for Bevy Engine" description = "Provides core App functionality for Bevy Engine"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"
@ -65,15 +65,6 @@ critical-section = [
"bevy_reflect?/critical-section", "bevy_reflect?/critical-section",
] ]
## `portable-atomic` provides additional platform support for atomic types and
## operations, even on targets without native support.
portable-atomic = [
"bevy_tasks?/portable-atomic",
"bevy_ecs/portable-atomic",
"bevy_platform_support/portable-atomic",
"bevy_reflect?/portable-atomic",
]
[dependencies] [dependencies]
# bevy # bevy
bevy_derive = { path = "../bevy_derive", version = "0.16.0-dev" } bevy_derive = { path = "../bevy_derive", version = "0.16.0-dev" }

View File

@ -1440,7 +1440,7 @@ impl Termination for AppExit {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use core::{iter, marker::PhantomData}; use core::marker::PhantomData;
use std::sync::Mutex; use std::sync::Mutex;
use bevy_ecs::{ use bevy_ecs::{
@ -1664,7 +1664,7 @@ mod tests {
struct Foo; struct Foo;
let mut app = App::new(); let mut app = App::new();
app.world_mut().spawn_batch(iter::repeat(Foo).take(5)); app.world_mut().spawn_batch(core::iter::repeat_n(Foo, 5));
fn despawn_one_foo(mut commands: Commands, foos: Query<Entity, With<Foo>>) { fn despawn_one_foo(mut commands: Commands, foos: Query<Entity, With<Foo>>) {
if let Some(e) = foos.iter().next() { if let Some(e) = foos.iter().next() {
@ -1718,9 +1718,9 @@ mod tests {
fn raise_exits(mut exits: EventWriter<AppExit>) { fn raise_exits(mut exits: EventWriter<AppExit>) {
// Exit codes chosen by a fair dice roll. // Exit codes chosen by a fair dice roll.
// Unlikely to overlap with default values. // Unlikely to overlap with default values.
exits.send(AppExit::Success); exits.write(AppExit::Success);
exits.send(AppExit::from_code(4)); exits.write(AppExit::from_code(4));
exits.send(AppExit::from_code(73)); exits.write(AppExit::from_code(73));
} }
let exit = App::new().add_systems(Update, raise_exits).run(); let exit = App::new().add_systems(Update, raise_exits).run();

View File

@ -1,11 +1,3 @@
#![cfg_attr(
feature = "portable-atomic",
expect(
clippy::redundant_closure,
reason = "bevy_platform_support::sync::Arc has subtly different implicit behavior"
)
)]
use crate::{App, Plugin}; use crate::{App, Plugin};
use alloc::string::ToString; use alloc::string::ToString;

View File

@ -50,7 +50,7 @@ impl TerminalCtrlCHandlerPlugin {
/// Sends a [`AppExit`] event when the user presses `Ctrl+C` on the terminal. /// Sends a [`AppExit`] event when the user presses `Ctrl+C` on the terminal.
pub fn exit_on_flag(mut events: EventWriter<AppExit>) { pub fn exit_on_flag(mut events: EventWriter<AppExit>) {
if SHOULD_EXIT.load(Ordering::Relaxed) { if SHOULD_EXIT.load(Ordering::Relaxed) {
events.send(AppExit::from_code(130)); events.write(AppExit::from_code(130));
} }
} }
} }

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_asset" name = "bevy_asset"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Provides asset functionality for Bevy Engine" description = "Provides asset functionality for Bevy Engine"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_asset_macros" name = "bevy_asset_macros"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Derive implementations for bevy_asset" description = "Derive implementations for bevy_asset"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"

View File

@ -330,7 +330,7 @@ mod tests {
_query: Query<&mut MyComponent, AssetChanged<MyComponent>>, _query: Query<&mut MyComponent, AssetChanged<MyComponent>>,
mut exit: EventWriter<AppExit>, mut exit: EventWriter<AppExit>,
) { ) {
exit.send(AppExit::Error(NonZero::<u8>::MIN)); exit.write(AppExit::Error(NonZero::<u8>::MIN));
} }
run_app(compatible_filter); run_app(compatible_filter);
} }

View File

@ -579,7 +579,7 @@ impl<A: Asset> Assets<A> {
}; };
} }
} }
events.send_batch(assets.queued_events.drain(..)); events.write_batch(assets.queued_events.drain(..));
} }
/// A run condition for [`asset_events`]. The system will not run if there are no events to /// A run condition for [`asset_events`]. The system will not run if there are no events to

View File

@ -639,7 +639,7 @@ mod tests {
}, },
loader::{AssetLoader, LoadContext}, loader::{AssetLoader, LoadContext},
Asset, AssetApp, AssetEvent, AssetId, AssetLoadError, AssetLoadFailedEvent, AssetPath, Asset, AssetApp, AssetEvent, AssetId, AssetLoadError, AssetLoadFailedEvent, AssetPath,
AssetPlugin, AssetServer, Assets, AssetPlugin, AssetServer, Assets, DuplicateLabelAssetError, LoadState,
}; };
use alloc::{ use alloc::{
boxed::Box, boxed::Box,
@ -695,6 +695,8 @@ mod tests {
CannotLoadDependency { dependency: AssetPath<'static> }, CannotLoadDependency { dependency: AssetPath<'static> },
#[error("A RON error occurred during loading")] #[error("A RON error occurred during loading")]
RonSpannedError(#[from] ron::error::SpannedError), RonSpannedError(#[from] ron::error::SpannedError),
#[error(transparent)]
DuplicateLabelAssetError(#[from] DuplicateLabelAssetError),
#[error("An IO error occurred during loading")] #[error("An IO error occurred during loading")]
Io(#[from] std::io::Error), Io(#[from] std::io::Error),
} }
@ -740,7 +742,7 @@ mod tests {
.sub_texts .sub_texts
.drain(..) .drain(..)
.map(|text| load_context.add_labeled_asset(text.clone(), SubText { text })) .map(|text| load_context.add_labeled_asset(text.clone(), SubText { text }))
.collect(), .collect::<Result<Vec<_>, _>>()?,
}) })
} }
@ -1778,6 +1780,49 @@ mod tests {
app.world_mut().run_schedule(Update); app.world_mut().run_schedule(Update);
} }
#[test]
fn fails_to_load_for_duplicate_subasset_labels() {
let mut app = App::new();
let dir = Dir::default();
dir.insert_asset_text(
Path::new("a.ron"),
r#"(
text: "b",
dependencies: [],
embedded_dependencies: [],
sub_texts: ["A", "A"],
)"#,
);
app.register_asset_source(
AssetSourceId::Default,
AssetSource::build()
.with_reader(move || Box::new(MemoryAssetReader { root: dir.clone() })),
)
.add_plugins((
TaskPoolPlugin::default(),
LogPlugin::default(),
AssetPlugin::default(),
));
app.init_asset::<CoolText>()
.init_asset::<SubText>()
.register_asset_loader(CoolTextLoader);
let asset_server = app.world().resource::<AssetServer>().clone();
let handle = asset_server.load::<CoolText>("a.ron");
run_app_until(&mut app, |_world| match asset_server.load_state(&handle) {
LoadState::Loading => None,
LoadState::Failed(err) => {
assert!(matches!(*err, AssetLoadError::AssetLoaderError(_)));
Some(())
}
state => panic!("Unexpected asset state: {state:?}"),
});
}
// validate the Asset derive macro for various asset types // validate the Asset derive macro for various asset types
#[derive(Asset, TypePath)] #[derive(Asset, TypePath)]
pub struct TestAsset; pub struct TestAsset;

View File

@ -13,7 +13,6 @@ use alloc::{
}; };
use atomicow::CowArc; use atomicow::CowArc;
use bevy_ecs::world::World; use bevy_ecs::world::World;
use bevy_log::warn;
use bevy_platform_support::collections::{HashMap, HashSet}; use bevy_platform_support::collections::{HashMap, HashSet};
use bevy_tasks::{BoxedFuture, ConditionalSendFuture}; use bevy_tasks::{BoxedFuture, ConditionalSendFuture};
use core::any::{Any, TypeId}; use core::any::{Any, TypeId};
@ -458,7 +457,7 @@ impl<'a> LoadContext<'a> {
&mut self, &mut self,
label: String, label: String,
load: impl FnOnce(&mut LoadContext) -> A, load: impl FnOnce(&mut LoadContext) -> A,
) -> Handle<A> { ) -> Result<Handle<A>, DuplicateLabelAssetError> {
let mut context = self.begin_labeled_asset(); let mut context = self.begin_labeled_asset();
let asset = load(&mut context); let asset = load(&mut context);
let complete_asset = context.finish(asset); let complete_asset = context.finish(asset);
@ -475,7 +474,11 @@ impl<'a> LoadContext<'a> {
/// new [`LoadContext`] to track the dependencies for the labeled asset. /// new [`LoadContext`] to track the dependencies for the labeled asset.
/// ///
/// See [`AssetPath`] for more on labeled assets. /// See [`AssetPath`] for more on labeled assets.
pub fn add_labeled_asset<A: Asset>(&mut self, label: String, asset: A) -> Handle<A> { pub fn add_labeled_asset<A: Asset>(
&mut self,
label: String,
asset: A,
) -> Result<Handle<A>, DuplicateLabelAssetError> {
self.labeled_asset_scope(label, |_| asset) self.labeled_asset_scope(label, |_| asset)
} }
@ -488,7 +491,7 @@ impl<'a> LoadContext<'a> {
&mut self, &mut self,
label: impl Into<CowArc<'static, str>>, label: impl Into<CowArc<'static, str>>,
loaded_asset: CompleteLoadedAsset<A>, loaded_asset: CompleteLoadedAsset<A>,
) -> Handle<A> { ) -> Result<Handle<A>, DuplicateLabelAssetError> {
let label = label.into(); let label = label.into();
let CompleteLoadedAsset { let CompleteLoadedAsset {
asset, asset,
@ -499,19 +502,25 @@ impl<'a> LoadContext<'a> {
let handle = self let handle = self
.asset_server .asset_server
.get_or_create_path_handle(labeled_path, None); .get_or_create_path_handle(labeled_path, None);
self.labeled_assets.insert( let has_duplicate = self
label, .labeled_assets
.insert(
label.clone(),
LabeledAsset { LabeledAsset {
asset: loaded_asset, asset: loaded_asset,
handle: handle.clone().untyped(), handle: handle.clone().untyped(),
}, },
); )
.is_some();
if has_duplicate {
return Err(DuplicateLabelAssetError(label.to_string()));
}
for (label, asset) in labeled_assets { for (label, asset) in labeled_assets {
if self.labeled_assets.insert(label.clone(), asset).is_some() { if self.labeled_assets.insert(label.clone(), asset).is_some() {
warn!("A labeled asset with the label \"{label}\" already exists. Replacing with the new asset."); return Err(DuplicateLabelAssetError(label.to_string()));
} }
} }
handle Ok(handle)
} }
/// Returns `true` if an asset with the label `label` exists in this context. /// Returns `true` if an asset with the label `label` exists in this context.
@ -552,8 +561,8 @@ impl<'a> LoadContext<'a> {
let path = path.into(); let path = path.into();
let source = self.asset_server.get_source(path.source())?; let source = self.asset_server.get_source(path.source())?;
let asset_reader = match self.asset_server.mode() { let asset_reader = match self.asset_server.mode() {
AssetServerMode::Unprocessed { .. } => source.reader(), AssetServerMode::Unprocessed => source.reader(),
AssetServerMode::Processed { .. } => source.processed_reader()?, AssetServerMode::Processed => source.processed_reader()?,
}; };
let mut reader = asset_reader.read(path.path()).await?; let mut reader = asset_reader.read(path.path()).await?;
let hash = if self.populate_hashes { let hash = if self.populate_hashes {
@ -661,3 +670,8 @@ pub enum ReadAssetBytesError {
#[error("The LoadContext for this read_asset_bytes call requires hash metadata, but it was not provided. This is likely an internal implementation error.")] #[error("The LoadContext for this read_asset_bytes call requires hash metadata, but it was not provided. This is likely an internal implementation error.")]
MissingAssetHash, MissingAssetHash,
} }
/// An error when labeled assets have the same label, containing the duplicate label.
#[derive(Error, Debug)]
#[error("Encountered a duplicate label while loading an asset: \"{0}\"")]
pub struct DuplicateLabelAssetError(pub String);

View File

@ -207,9 +207,12 @@ impl AssetProcessor {
/// Processes all assets. This will: /// Processes all assets. This will:
/// * For each "processed [`AssetSource`]: /// * For each "processed [`AssetSource`]:
/// * Scan the [`ProcessorTransactionLog`] and recover from any failures detected /// * Scan the [`ProcessorTransactionLog`] and recover from any failures detected
/// * Scan the processed [`AssetReader`](crate::io::AssetReader) to build the current view of already processed assets. /// * Scan the processed [`AssetReader`](crate::io::AssetReader) to build the current view of
/// * Scan the unprocessed [`AssetReader`](crate::io::AssetReader) and remove any final processed assets that are invalid or no longer exist. /// already processed assets.
/// * For each asset in the unprocessed [`AssetReader`](crate::io::AssetReader), kick off a new "process job", which will process the asset /// * Scan the unprocessed [`AssetReader`](crate::io::AssetReader) and remove any final
/// processed assets that are invalid or no longer exist.
/// * For each asset in the unprocessed [`AssetReader`](crate::io::AssetReader), kick off a new
/// "process job", which will process the asset
/// (if the latest version of the asset has not been processed). /// (if the latest version of the asset has not been processed).
#[cfg(all(not(target_arch = "wasm32"), feature = "multi_threaded"))] #[cfg(all(not(target_arch = "wasm32"), feature = "multi_threaded"))]
pub fn process_assets(&self) { pub fn process_assets(&self) {

View File

@ -38,12 +38,13 @@ use std::path::{Path, PathBuf};
use thiserror::Error; use thiserror::Error;
use tracing::{error, info}; use tracing::{error, info};
/// Loads and tracks the state of [`Asset`] values from a configured [`AssetReader`](crate::io::AssetReader). This can be used to kick off new asset loads and /// Loads and tracks the state of [`Asset`] values from a configured [`AssetReader`](crate::io::AssetReader).
/// retrieve their current load states. /// This can be used to kick off new asset loads and retrieve their current load states.
/// ///
/// The general process to load an asset is: /// The general process to load an asset is:
/// 1. Initialize a new [`Asset`] type with the [`AssetServer`] via [`AssetApp::init_asset`], which will internally call [`AssetServer::register_asset`] /// 1. Initialize a new [`Asset`] type with the [`AssetServer`] via [`AssetApp::init_asset`], which
/// and set up related ECS [`Assets`] storage and systems. /// will internally call [`AssetServer::register_asset`] and set up related ECS [`Assets`]
/// storage and systems.
/// 2. Register one or more [`AssetLoader`]s for that asset with [`AssetApp::init_asset_loader`] /// 2. Register one or more [`AssetLoader`]s for that asset with [`AssetApp::init_asset_loader`]
/// 3. Add the asset to your asset folder (defaults to `assets`). /// 3. Add the asset to your asset folder (defaults to `assets`).
/// 4. Call [`AssetServer::load`] with a path to your asset. /// 4. Call [`AssetServer::load`] with a path to your asset.
@ -923,8 +924,8 @@ impl AssetServer {
}; };
let asset_reader = match server.data.mode { let asset_reader = match server.data.mode {
AssetServerMode::Unprocessed { .. } => source.reader(), AssetServerMode::Unprocessed => source.reader(),
AssetServerMode::Processed { .. } => match source.processed_reader() { AssetServerMode::Processed => match source.processed_reader() {
Ok(reader) => reader, Ok(reader) => reader,
Err(_) => { Err(_) => {
error!( error!(
@ -1235,8 +1236,8 @@ impl AssetServer {
// Then the meta reader, if meta exists, will correspond to the meta for the current "version" of the asset. // Then the meta reader, if meta exists, will correspond to the meta for the current "version" of the asset.
// See ProcessedAssetInfo::file_transaction_lock for more context // See ProcessedAssetInfo::file_transaction_lock for more context
let asset_reader = match self.data.mode { let asset_reader = match self.data.mode {
AssetServerMode::Unprocessed { .. } => source.reader(), AssetServerMode::Unprocessed => source.reader(),
AssetServerMode::Processed { .. } => source.processed_reader()?, AssetServerMode::Processed => source.processed_reader()?,
}; };
let reader = asset_reader.read(asset_path.path()).await?; let reader = asset_reader.read(asset_path.path()).await?;
let read_meta = match &self.data.meta_check { let read_meta = match &self.data.meta_check {
@ -1584,14 +1585,14 @@ pub fn handle_internal_asset_events(world: &mut World) {
for source in server.data.sources.iter() { for source in server.data.sources.iter() {
match server.data.mode { match server.data.mode {
AssetServerMode::Unprocessed { .. } => { AssetServerMode::Unprocessed => {
if let Some(receiver) = source.event_receiver() { if let Some(receiver) = source.event_receiver() {
for event in receiver.try_iter() { for event in receiver.try_iter() {
handle_event(source.id(), event); handle_event(source.id(), event);
} }
} }
} }
AssetServerMode::Processed { .. } => { AssetServerMode::Processed => {
if let Some(receiver) = source.processed_event_receiver() { if let Some(receiver) = source.processed_event_receiver() {
for event in receiver.try_iter() { for event in receiver.try_iter() {
handle_event(source.id(), event); handle_event(source.id(), event);

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_audio" name = "bevy_audio"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Provides audio functionality for Bevy Engine" description = "Provides audio functionality for Bevy Engine"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"

View File

@ -1,13 +1,13 @@
[package] [package]
name = "bevy_color" name = "bevy_color"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Types for representing and manipulating color values" description = "Types for representing and manipulating color values"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
keywords = ["bevy", "color"] keywords = ["bevy", "color"]
rust-version = "1.83.0" rust-version = "1.85.0"
[dependencies] [dependencies]
bevy_math = { path = "../bevy_math", version = "0.16.0-dev", default-features = false, features = [ bevy_math = { path = "../bevy_math", version = "0.16.0-dev", default-features = false, features = [

View File

@ -1,7 +1,7 @@
[package] [package]
name = "gen_tests" name = "gen_tests"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition = "2024"
publish = false publish = false
[workspace] [workspace]

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_core_pipeline" name = "bevy_core_pipeline"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
authors = [ authors = [
"Bevy Contributors <bevyengine@gmail.com>", "Bevy Contributors <bevyengine@gmail.com>",
"Carter Anderson <mcanders1@gmail.com>", "Carter Anderson <mcanders1@gmail.com>",

View File

@ -152,7 +152,8 @@ impl ViewNode for BloomNode {
render_context.command_encoder().push_debug_group("bloom"); render_context.command_encoder().push_debug_group("bloom");
let diagnostics = render_context.diagnostic_recorder(); let diagnostics = render_context.diagnostic_recorder();
let time_span = diagnostics.time_span(render_context.command_encoder(), "bloom"); let command_encoder = render_context.command_encoder();
let time_span = diagnostics.time_span(command_encoder, "bloom");
// First downsample pass // First downsample pass
{ {

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_derive" name = "bevy_derive"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Provides derive implementations for Bevy Engine" description = "Provides derive implementations for Bevy Engine"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"

View File

@ -1,6 +1,6 @@
[package] [package]
name = "bevy_derive_compile_fail" name = "bevy_derive_compile_fail"
edition = "2021" edition = "2024"
description = "Compile fail tests for Bevy Engine's various macros" description = "Compile fail tests for Bevy Engine's various macros"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"

View File

@ -10,14 +10,20 @@ pub fn bevy_main(_attr: TokenStream, item: TokenStream) -> TokenStream {
); );
TokenStream::from(quote! { TokenStream::from(quote! {
#[no_mangle] // SAFETY: `#[bevy_main]` should only be placed on a single `main` function
// TODO: Potentially make `bevy_main` and unsafe attribute as there is a safety
// guarantee required from the caller.
#[unsafe(no_mangle)]
#[cfg(target_os = "android")] #[cfg(target_os = "android")]
fn android_main(android_app: bevy::window::android_activity::AndroidApp) { fn android_main(android_app: bevy::window::android_activity::AndroidApp) {
let _ = bevy::window::ANDROID_APP.set(android_app); let _ = bevy::window::ANDROID_APP.set(android_app);
main(); main();
} }
#[no_mangle] // SAFETY: `#[bevy_main]` should only be placed on a single `main` function
// TODO: Potentially make `bevy_main` and unsafe attribute as there is a safety
// guarantee required from the caller.
#[unsafe(no_mangle)]
#[cfg(target_os = "ios")] #[cfg(target_os = "ios")]
extern "C" fn main_rs() { extern "C" fn main_rs() {
main(); main();

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_dev_tools" name = "bevy_dev_tools"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Collection of developer tools for the Bevy Engine" description = "Collection of developer tools for the Bevy Engine"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_diagnostic" name = "bevy_diagnostic"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Provides diagnostic functionality for Bevy Engine" description = "Provides diagnostic functionality for Bevy Engine"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"
@ -54,17 +54,6 @@ critical-section = [
"bevy_tasks?/critical-section", "bevy_tasks?/critical-section",
] ]
## `portable-atomic` provides additional platform support for atomic types and
## operations, even on targets without native support.
portable-atomic = [
"bevy_ecs/portable-atomic",
"bevy_app/portable-atomic",
"bevy_platform_support/portable-atomic",
"bevy_time/portable-atomic",
"bevy_utils/portable-atomic",
"bevy_tasks?/portable-atomic",
]
[dependencies] [dependencies]
# bevy # bevy
bevy_app = { path = "../bevy_app", version = "0.16.0-dev", default-features = false } bevy_app = { path = "../bevy_app", version = "0.16.0-dev", default-features = false }

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_dylib" name = "bevy_dylib"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Force the Bevy Engine to be dynamically linked for faster linking" description = "Force the Bevy Engine to be dynamically linked for faster linking"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"

View File

@ -1,14 +1,14 @@
[package] [package]
name = "bevy_ecs" name = "bevy_ecs"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Bevy Engine's entity component system" description = "Bevy Engine's entity component system"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
keywords = ["ecs", "game", "bevy"] keywords = ["ecs", "game", "bevy"]
categories = ["game-engines", "data-structures"] categories = ["game-engines", "data-structures"]
rust-version = "1.83.0" rust-version = "1.85.0"
[features] [features]
default = ["std", "bevy_reflect", "async_executor"] default = ["std", "bevy_reflect", "async_executor"]
@ -93,15 +93,6 @@ critical-section = [
"bevy_reflect?/critical-section", "bevy_reflect?/critical-section",
] ]
## `portable-atomic` provides additional platform support for atomic types and
## operations, even on targets without native support.
portable-atomic = [
"bevy_tasks?/portable-atomic",
"bevy_platform_support/portable-atomic",
"concurrent-queue/portable-atomic",
"bevy_reflect?/portable-atomic",
]
[dependencies] [dependencies]
bevy_ptr = { path = "../bevy_ptr", version = "0.16.0-dev" } bevy_ptr = { path = "../bevy_ptr", version = "0.16.0-dev" }
bevy_reflect = { path = "../bevy_reflect", version = "0.16.0-dev", features = [ bevy_reflect = { path = "../bevy_reflect", version = "0.16.0-dev", features = [
@ -140,6 +131,11 @@ tracing = { version = "0.1", default-features = false, optional = true }
log = { version = "0.4", default-features = false } log = { version = "0.4", default-features = false }
bumpalo = "3" bumpalo = "3"
[target.'cfg(not(all(target_has_atomic = "8", target_has_atomic = "16", target_has_atomic = "32", target_has_atomic = "64", target_has_atomic = "ptr")))'.dependencies]
concurrent-queue = { version = "2.5.0", default-features = false, features = [
"portable-atomic",
] }
[dev-dependencies] [dev-dependencies]
rand = "0.8" rand = "0.8"
static_assertions = "1.1.0" static_assertions = "1.1.0"

View File

@ -1,6 +1,6 @@
[package] [package]
name = "bevy_ecs_compile_fail" name = "bevy_ecs_compile_fail"
edition = "2021" edition = "2024"
description = "Compile fail tests for Bevy Engine's entity component system" description = "Compile fail tests for Bevy Engine's entity component system"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"

View File

@ -45,7 +45,7 @@ struct MyEvent {
fn sending_system(mut event_writer: EventWriter<MyEvent>) { fn sending_system(mut event_writer: EventWriter<MyEvent>) {
let random_value: f32 = rand::random(); let random_value: f32 = rand::random();
if random_value > 0.5 { if random_value > 0.5 {
event_writer.send(MyEvent { event_writer.write(MyEvent {
message: "A random event with value > 0.5".to_string(), message: "A random event with value > 0.5".to_string(),
random_value, random_value,
}); });

View File

@ -2,7 +2,7 @@
name = "bevy_ecs_macros" name = "bevy_ecs_macros"
version = "0.16.0-dev" version = "0.16.0-dev"
description = "Bevy ECS Macros" description = "Bevy ECS Macros"
edition = "2021" edition = "2024"
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
[lib] [lib]

View File

@ -257,7 +257,7 @@ pub fn derive_component(input: TokenStream) -> TokenStream {
fn visit_entities(data: &Data, bevy_ecs_path: &Path, is_relationship: bool) -> TokenStream2 { fn visit_entities(data: &Data, bevy_ecs_path: &Path, is_relationship: bool) -> TokenStream2 {
match data { match data {
Data::Struct(DataStruct { ref fields, .. }) => { Data::Struct(DataStruct { fields, .. }) => {
let mut visited_fields = Vec::new(); let mut visited_fields = Vec::new();
let mut visited_indices = Vec::new(); let mut visited_indices = Vec::new();
match fields { match fields {
@ -343,8 +343,8 @@ fn visit_entities(data: &Data, bevy_ecs_path: &Path, is_relationship: bool) -> T
let field_member = ident_or_index(field.ident.as_ref(), index); let field_member = ident_or_index(field.ident.as_ref(), index);
let field_ident = format_ident!("field_{}", field_member); let field_ident = format_ident!("field_{}", field_member);
variant_fields.push(quote!(#field_member: ref #field_ident)); variant_fields.push(quote!(#field_member: #field_ident));
variant_fields_mut.push(quote!(#field_member: ref mut #field_ident)); variant_fields_mut.push(quote!(#field_member: #field_ident));
visit_variant_fields.push(quote!(#field_ident.visit_entities(&mut func);)); visit_variant_fields.push(quote!(#field_ident.visit_entities(&mut func);));
visit_variant_fields_mut visit_variant_fields_mut

View File

@ -223,7 +223,7 @@ pub trait DetectChangesMut: DetectChanges {
/// let new_score = 0; /// let new_score = 0;
/// if let Some(Score(previous_score)) = score.replace_if_neq(Score(new_score)) { /// if let Some(Score(previous_score)) = score.replace_if_neq(Score(new_score)) {
/// // If `score` change, emit a `ScoreChanged` event. /// // If `score` change, emit a `ScoreChanged` event.
/// score_changed.send(ScoreChanged { /// score_changed.write(ScoreChanged {
/// current: new_score, /// current: new_score,
/// previous: previous_score, /// previous: previous_score,
/// }); /// });

View File

@ -2060,7 +2060,7 @@ impl RequiredComponents {
// //
// This would be resolved by https://github.com/rust-lang/rust/issues/123430 // This would be resolved by https://github.com/rust-lang/rust/issues/123430
#[cfg(feature = "portable-atomic")] #[cfg(not(target_has_atomic = "ptr"))]
use alloc::boxed::Box; use alloc::boxed::Box;
type Constructor = dyn for<'a, 'b> Fn( type Constructor = dyn for<'a, 'b> Fn(
@ -2072,10 +2072,10 @@ impl RequiredComponents {
MaybeLocation, MaybeLocation,
); );
#[cfg(feature = "portable-atomic")] #[cfg(not(target_has_atomic = "ptr"))]
type Intermediate<T> = Box<T>; type Intermediate<T> = Box<T>;
#[cfg(not(feature = "portable-atomic"))] #[cfg(target_has_atomic = "ptr")]
type Intermediate<T> = Arc<T>; type Intermediate<T> = Arc<T>;
let boxed: Intermediate<Constructor> = Intermediate::new( let boxed: Intermediate<Constructor> = Intermediate::new(

View File

@ -70,6 +70,10 @@ mod unique_slice;
pub use unique_slice::*; pub use unique_slice::*;
mod unique_array;
pub use unique_array::UniqueEntityArray;
use crate::{ use crate::{
archetype::{ArchetypeId, ArchetypeRow}, archetype::{ArchetypeId, ArchetypeRow},
change_detection::MaybeLocation, change_detection::MaybeLocation,

View File

@ -0,0 +1,543 @@
use core::{
array,
borrow::{Borrow, BorrowMut},
fmt::Debug,
ops::{
Bound, Deref, DerefMut, Index, IndexMut, Range, RangeFrom, RangeFull, RangeInclusive,
RangeTo, RangeToInclusive,
},
ptr,
};
use alloc::{
boxed::Box,
collections::{BTreeSet, BinaryHeap, LinkedList, VecDeque},
rc::Rc,
sync::Arc,
vec::Vec,
};
use super::{unique_slice, TrustedEntityBorrow, UniqueEntityIter, UniqueEntitySlice};
/// An array that contains only unique entities.
///
/// It can be obtained through certain methods on [`UniqueEntitySlice`],
/// and some [`TryFrom`] implementations.
///
/// `#[repr(transparent)]` is load-bearing: the `unsafe` constructors in this
/// module cast raw pointers between `[T; N]` and `Self` (e.g. via
/// `ptr::from_ref(array).cast()`), which is only sound when the wrapper is
/// guaranteed to have exactly the layout of its single field.
#[repr(transparent)]
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct UniqueEntityArray<T: TrustedEntityBorrow, const N: usize>([T; N]);
impl<T: TrustedEntityBorrow, const N: usize> UniqueEntityArray<T, N> {
    /// Constructs a `UniqueEntityArray` from a `[T; N]` unsafely.
    ///
    /// # Safety
    ///
    /// `array` must contain only unique elements.
    pub const unsafe fn from_array_unchecked(array: [T; N]) -> Self {
        Self(array)
    }
    /// Constructs a `&UniqueEntityArray` from a `&[T; N]` unsafely.
    ///
    /// # Safety
    ///
    /// `array` must contain only unique elements.
    pub const unsafe fn from_array_ref_unchecked(array: &[T; N]) -> &Self {
        // SAFETY: UniqueEntityArray is a transparent wrapper around [T; N].
        unsafe { &*(ptr::from_ref(array).cast()) }
    }
    /// Constructs a `Box<UniqueEntityArray>` from a [`Box<[T; N]>`] unsafely.
    ///
    /// # Safety
    ///
    /// `array` must contain only unique elements.
    pub unsafe fn from_boxed_array_unchecked(array: Box<[T; N]>) -> Box<Self> {
        // SAFETY: UniqueEntityArray is a transparent wrapper around [T; N].
        unsafe { Box::from_raw(Box::into_raw(array).cast()) }
    }
    /// Casts `self` into the inner array.
    pub fn into_boxed_inner(self: Box<Self>) -> Box<[T; N]> {
        // SAFETY: UniqueEntityArray is a transparent wrapper around [T; N].
        unsafe { Box::from_raw(Box::into_raw(self).cast()) }
    }
    /// Constructs a `Arc<UniqueEntityArray>` from a [`Arc<[T; N]>`] unsafely.
    ///
    /// # Safety
    ///
    /// `slice` must contain only unique elements.
    pub unsafe fn from_arc_array_unchecked(slice: Arc<[T; N]>) -> Arc<Self> {
        // SAFETY: UniqueEntityArray is a transparent wrapper around [T; N].
        unsafe { Arc::from_raw(Arc::into_raw(slice).cast()) }
    }
    /// Casts `self` to the inner array.
    pub fn into_arc_inner(self: Arc<Self>) -> Arc<[T; N]> {
        // SAFETY: UniqueEntityArray is a transparent wrapper around [T; N].
        unsafe { Arc::from_raw(Arc::into_raw(self).cast()) }
    }
    /// Constructs a `Rc<UniqueEntityArray>` from a [`Rc<[T; N]>`] unsafely.
    ///
    /// # Safety
    ///
    /// `slice` must contain only unique elements.
    pub unsafe fn from_rc_array_unchecked(slice: Rc<[T; N]>) -> Rc<Self> {
        // SAFETY: UniqueEntityArray is a transparent wrapper around [T; N].
        unsafe { Rc::from_raw(Rc::into_raw(slice).cast()) }
    }
    /// Casts `self` to the inner array.
    pub fn into_rc_inner(self: Rc<Self>) -> Rc<[T; N]> {
        // SAFETY: UniqueEntityArray is a transparent wrapper around [T; N].
        unsafe { Rc::from_raw(Rc::into_raw(self).cast()) }
    }
    /// Return the inner array.
    pub fn into_inner(self) -> [T; N] {
        self.0
    }
    /// Returns a reference to the inner array.
    pub fn as_inner(&self) -> &[T; N] {
        &self.0
    }
    /// Returns a slice containing the entire array. Equivalent to `&s[..]`.
    pub const fn as_slice(&self) -> &UniqueEntitySlice<T> {
        // SAFETY: All elements in the original array are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked(self.0.as_slice()) }
    }
    /// Returns a mutable slice containing the entire array. Equivalent to
    /// `&mut s[..]`.
    pub fn as_mut_slice(&mut self) -> &mut UniqueEntitySlice<T> {
        // SAFETY: All elements in the original array are unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked_mut(self.0.as_mut_slice()) }
    }
    /// Borrows each element and returns an array of references with the same
    /// size as `self`.
    ///
    /// Equivalent to [`[T; N]::as_ref`](array::each_ref).
    pub fn each_ref(&self) -> UniqueEntityArray<&T, N> {
        UniqueEntityArray(self.0.each_ref())
    }
}
impl<T: TrustedEntityBorrow, const N: usize> Deref for UniqueEntityArray<T, N> {
    type Target = UniqueEntitySlice<T>;

    /// Dereferences to a unique-slice view of the entire array.
    fn deref(&self) -> &Self::Target {
        // `as_slice` already upholds the uniqueness invariant for us.
        self.as_slice()
    }
}
impl<T: TrustedEntityBorrow, const N: usize> DerefMut for UniqueEntityArray<T, N> {
    /// Mutably dereferences to a unique-slice view of the entire array.
    fn deref_mut(&mut self) -> &mut Self::Target {
        // `as_mut_slice` already upholds the uniqueness invariant for us.
        self.as_mut_slice()
    }
}
impl<T: TrustedEntityBorrow> Default for UniqueEntityArray<T, 0> {
    /// Creates an empty `UniqueEntityArray`.
    fn default() -> Self {
        // An empty array trivially contains no duplicate elements.
        Self([])
    }
}
impl<'a, T: TrustedEntityBorrow, const N: usize> IntoIterator for &'a UniqueEntityArray<T, N> {
    type Item = &'a T;
    type IntoIter = unique_slice::Iter<'a, T>;

    /// Returns a borrowing iterator over the array's elements.
    fn into_iter(self) -> Self::IntoIter {
        let elements = self.0.iter();
        // SAFETY: `UniqueEntityArray` can only be constructed over unique elements.
        unsafe { UniqueEntityIter::from_iterator_unchecked(elements) }
    }
}
impl<T: TrustedEntityBorrow, const N: usize> IntoIterator for UniqueEntityArray<T, N> {
    type Item = T;
    type IntoIter = IntoIter<T, N>;

    /// Returns a consuming iterator over the array's elements.
    fn into_iter(self) -> Self::IntoIter {
        let elements = self.0.into_iter();
        // SAFETY: `UniqueEntityArray` can only be constructed over unique elements.
        unsafe { UniqueEntityIter::from_iterator_unchecked(elements) }
    }
}
impl<T: TrustedEntityBorrow, const N: usize> AsRef<UniqueEntitySlice<T>>
    for UniqueEntityArray<T, N>
{
    /// Views the array as a unique slice.
    fn as_ref(&self) -> &UniqueEntitySlice<T> {
        self.as_slice()
    }
}
impl<T: TrustedEntityBorrow, const N: usize> AsMut<UniqueEntitySlice<T>>
    for UniqueEntityArray<T, N>
{
    /// Views the array as a mutable unique slice.
    fn as_mut(&mut self) -> &mut UniqueEntitySlice<T> {
        self.as_mut_slice()
    }
}
impl<T: TrustedEntityBorrow, const N: usize> Borrow<UniqueEntitySlice<T>>
    for UniqueEntityArray<T, N>
{
    /// Borrows the array as a unique slice.
    fn borrow(&self) -> &UniqueEntitySlice<T> {
        self.as_slice()
    }
}
impl<T: TrustedEntityBorrow, const N: usize> BorrowMut<UniqueEntitySlice<T>>
    for UniqueEntityArray<T, N>
{
    /// Mutably borrows the array as a unique slice.
    fn borrow_mut(&mut self) -> &mut UniqueEntitySlice<T> {
        self.as_mut_slice()
    }
}
// Range-based indexing yields unique sub-slices: a subset of a set of unique
// elements is itself unique, so rewrapping the plain slice is sound.
impl<T: TrustedEntityBorrow, const N: usize> Index<(Bound<usize>, Bound<usize>)>
    for UniqueEntityArray<T, N>
{
    type Output = UniqueEntitySlice<T>;
    fn index(&self, bounds: (Bound<usize>, Bound<usize>)) -> &Self::Output {
        // SAFETY: Any subset of the unique original elements is itself unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked(&self.0[bounds]) }
    }
}
impl<T: TrustedEntityBorrow, const N: usize> Index<Range<usize>> for UniqueEntityArray<T, N> {
    type Output = UniqueEntitySlice<T>;
    fn index(&self, range: Range<usize>) -> &Self::Output {
        // SAFETY: Any subset of the unique original elements is itself unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked(&self.0[range]) }
    }
}
impl<T: TrustedEntityBorrow, const N: usize> Index<RangeFrom<usize>> for UniqueEntityArray<T, N> {
    type Output = UniqueEntitySlice<T>;
    fn index(&self, range: RangeFrom<usize>) -> &Self::Output {
        // SAFETY: Any subset of the unique original elements is itself unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked(&self.0[range]) }
    }
}
impl<T: TrustedEntityBorrow, const N: usize> Index<RangeFull> for UniqueEntityArray<T, N> {
    type Output = UniqueEntitySlice<T>;
    fn index(&self, range: RangeFull) -> &Self::Output {
        // SAFETY: Any subset of the unique original elements is itself unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked(&self.0[range]) }
    }
}
impl<T: TrustedEntityBorrow, const N: usize> Index<RangeInclusive<usize>>
    for UniqueEntityArray<T, N>
{
    type Output = UniqueEntitySlice<T>;
    fn index(&self, range: RangeInclusive<usize>) -> &Self::Output {
        // SAFETY: Any subset of the unique original elements is itself unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked(&self.0[range]) }
    }
}
impl<T: TrustedEntityBorrow, const N: usize> Index<RangeTo<usize>> for UniqueEntityArray<T, N> {
    type Output = UniqueEntitySlice<T>;
    fn index(&self, range: RangeTo<usize>) -> &Self::Output {
        // SAFETY: Any subset of the unique original elements is itself unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked(&self.0[range]) }
    }
}
impl<T: TrustedEntityBorrow, const N: usize> Index<RangeToInclusive<usize>>
    for UniqueEntityArray<T, N>
{
    type Output = UniqueEntitySlice<T>;
    fn index(&self, range: RangeToInclusive<usize>) -> &Self::Output {
        // SAFETY: Any subset of the unique original elements is itself unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked(&self.0[range]) }
    }
}
impl<T: TrustedEntityBorrow, const N: usize> Index<usize> for UniqueEntityArray<T, N> {
    type Output = T;
    fn index(&self, position: usize) -> &T {
        // Single-element access never needs the unique-slice wrapper.
        &self.0[position]
    }
}
// Mutable range indexing is offered only for sub-slices (not single elements),
// since the `UniqueEntitySlice` wrapper controls mutation to preserve uniqueness.
impl<T: TrustedEntityBorrow, const N: usize> IndexMut<(Bound<usize>, Bound<usize>)>
    for UniqueEntityArray<T, N>
{
    fn index_mut(&mut self, bounds: (Bound<usize>, Bound<usize>)) -> &mut Self::Output {
        // SAFETY: Any subset of the unique original elements is itself unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked_mut(&mut self.0[bounds]) }
    }
}
impl<T: TrustedEntityBorrow, const N: usize> IndexMut<Range<usize>> for UniqueEntityArray<T, N> {
    fn index_mut(&mut self, range: Range<usize>) -> &mut Self::Output {
        // SAFETY: Any subset of the unique original elements is itself unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked_mut(&mut self.0[range]) }
    }
}
impl<T: TrustedEntityBorrow, const N: usize> IndexMut<RangeFrom<usize>>
    for UniqueEntityArray<T, N>
{
    fn index_mut(&mut self, range: RangeFrom<usize>) -> &mut Self::Output {
        // SAFETY: Any subset of the unique original elements is itself unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked_mut(&mut self.0[range]) }
    }
}
impl<T: TrustedEntityBorrow, const N: usize> IndexMut<RangeFull> for UniqueEntityArray<T, N> {
    fn index_mut(&mut self, range: RangeFull) -> &mut Self::Output {
        // SAFETY: Any subset of the unique original elements is itself unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked_mut(&mut self.0[range]) }
    }
}
impl<T: TrustedEntityBorrow, const N: usize> IndexMut<RangeInclusive<usize>>
    for UniqueEntityArray<T, N>
{
    fn index_mut(&mut self, range: RangeInclusive<usize>) -> &mut Self::Output {
        // SAFETY: Any subset of the unique original elements is itself unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked_mut(&mut self.0[range]) }
    }
}
impl<T: TrustedEntityBorrow, const N: usize> IndexMut<RangeTo<usize>> for UniqueEntityArray<T, N> {
    fn index_mut(&mut self, range: RangeTo<usize>) -> &mut Self::Output {
        // SAFETY: Any subset of the unique original elements is itself unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked_mut(&mut self.0[range]) }
    }
}
impl<T: TrustedEntityBorrow, const N: usize> IndexMut<RangeToInclusive<usize>>
    for UniqueEntityArray<T, N>
{
    fn index_mut(&mut self, range: RangeToInclusive<usize>) -> &mut Self::Output {
        // SAFETY: Any subset of the unique original elements is itself unique.
        unsafe { UniqueEntitySlice::from_slice_unchecked_mut(&mut self.0[range]) }
    }
}
// Arrays of length 0 or 1 can be converted from plain references by cloning:
// with at most one element, uniqueness cannot be violated.
impl<T: TrustedEntityBorrow + Clone> From<&[T; 1]> for UniqueEntityArray<T, 1> {
    fn from(value: &[T; 1]) -> Self {
        let array = value.clone();
        Self(array)
    }
}
impl<T: TrustedEntityBorrow + Clone> From<&[T; 0]> for UniqueEntityArray<T, 0> {
    fn from(value: &[T; 0]) -> Self {
        let array = value.clone();
        Self(array)
    }
}
impl<T: TrustedEntityBorrow + Clone> From<&mut [T; 1]> for UniqueEntityArray<T, 1> {
    fn from(value: &mut [T; 1]) -> Self {
        let array = value.clone();
        Self(array)
    }
}
impl<T: TrustedEntityBorrow + Clone> From<&mut [T; 0]> for UniqueEntityArray<T, 0> {
    fn from(value: &mut [T; 0]) -> Self {
        let array = value.clone();
        Self(array)
    }
}
// These owned-array conversions are only provided for N = 1 and N = 0: an
// array with at most one element trivially contains no duplicates, so no
// uniqueness check is needed and the conversion is infallible.
impl<T: TrustedEntityBorrow> From<[T; 1]> for UniqueEntityArray<T, 1> {
    fn from(value: [T; 1]) -> Self {
        Self(value)
    }
}
impl<T: TrustedEntityBorrow> From<[T; 0]> for UniqueEntityArray<T, 0> {
    fn from(value: [T; 0]) -> Self {
        Self(value)
    }
}
// Conversions into homogeneous tuples of arity 1 through 12, delegating to
// the standard library's array-to-tuple `From` impls on the inner array.
impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 1>> for (T,) {
    fn from(array: UniqueEntityArray<T, 1>) -> Self {
        array.into_inner().into()
    }
}
impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 2>> for (T, T) {
    fn from(array: UniqueEntityArray<T, 2>) -> Self {
        array.into_inner().into()
    }
}
impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 3>> for (T, T, T) {
    fn from(array: UniqueEntityArray<T, 3>) -> Self {
        array.into_inner().into()
    }
}
impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 4>> for (T, T, T, T) {
    fn from(array: UniqueEntityArray<T, 4>) -> Self {
        array.into_inner().into()
    }
}
impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 5>> for (T, T, T, T, T) {
    fn from(array: UniqueEntityArray<T, 5>) -> Self {
        array.into_inner().into()
    }
}
impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 6>> for (T, T, T, T, T, T) {
    fn from(array: UniqueEntityArray<T, 6>) -> Self {
        array.into_inner().into()
    }
}
impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 7>> for (T, T, T, T, T, T, T) {
    fn from(array: UniqueEntityArray<T, 7>) -> Self {
        array.into_inner().into()
    }
}
impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 8>> for (T, T, T, T, T, T, T, T) {
    fn from(array: UniqueEntityArray<T, 8>) -> Self {
        array.into_inner().into()
    }
}
impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 9>> for (T, T, T, T, T, T, T, T, T) {
    fn from(array: UniqueEntityArray<T, 9>) -> Self {
        array.into_inner().into()
    }
}
impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 10>> for (T, T, T, T, T, T, T, T, T, T) {
    fn from(array: UniqueEntityArray<T, 10>) -> Self {
        array.into_inner().into()
    }
}
impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 11>> for (T, T, T, T, T, T, T, T, T, T, T) {
    fn from(array: UniqueEntityArray<T, 11>) -> Self {
        array.into_inner().into()
    }
}
impl<T: TrustedEntityBorrow> From<UniqueEntityArray<T, 12>>
    for (T, T, T, T, T, T, T, T, T, T, T, T)
{
    fn from(array: UniqueEntityArray<T, 12>) -> Self {
        array.into_inner().into()
    }
}
// Conversions into standard collections; the uniqueness invariant is simply
// dropped, so these delegate to the collection's own `From<[T; N]>` impl.
impl<T: TrustedEntityBorrow + Ord, const N: usize> From<UniqueEntityArray<T, N>> for BTreeSet<T> {
    fn from(value: UniqueEntityArray<T, N>) -> Self {
        Self::from(value.into_inner())
    }
}
impl<T: TrustedEntityBorrow + Ord, const N: usize> From<UniqueEntityArray<T, N>> for BinaryHeap<T> {
    fn from(value: UniqueEntityArray<T, N>) -> Self {
        Self::from(value.into_inner())
    }
}
impl<T: TrustedEntityBorrow, const N: usize> From<UniqueEntityArray<T, N>> for LinkedList<T> {
    fn from(value: UniqueEntityArray<T, N>) -> Self {
        Self::from(value.into_inner())
    }
}
impl<T: TrustedEntityBorrow, const N: usize> From<UniqueEntityArray<T, N>> for Vec<T> {
    fn from(value: UniqueEntityArray<T, N>) -> Self {
        Self::from(value.into_inner())
    }
}
impl<T: TrustedEntityBorrow, const N: usize> From<UniqueEntityArray<T, N>> for VecDeque<T> {
    fn from(value: UniqueEntityArray<T, N>) -> Self {
        Self::from(value.into_inner())
    }
}
impl<T: TrustedEntityBorrow + PartialEq<U>, U: TrustedEntityBorrow, const N: usize>
    PartialEq<&UniqueEntitySlice<U>> for UniqueEntityArray<T, N>
{
    fn eq(&self, other: &&UniqueEntitySlice<U>) -> bool {
        // Compare the raw inner array against the slice's inner contents.
        let rhs = other.as_inner();
        self.0.eq(&rhs)
    }
}
impl<T: TrustedEntityBorrow + PartialEq<U>, U: TrustedEntityBorrow, const N: usize>
    PartialEq<UniqueEntitySlice<U>> for UniqueEntityArray<T, N>
{
    fn eq(&self, other: &UniqueEntitySlice<U>) -> bool {
        // Compare the raw inner array against the slice's inner contents.
        let rhs = other.as_inner();
        self.0.eq(rhs)
    }
}
impl<T: PartialEq<U>, U: TrustedEntityBorrow, const N: usize> PartialEq<&UniqueEntityArray<U, N>>
    for Vec<T>
{
    fn eq(&self, other: &&UniqueEntityArray<U, N>) -> bool {
        // Delegate to `Vec`'s comparison against the raw inner array.
        let rhs = &other.0;
        self.eq(rhs)
    }
}
impl<T: PartialEq<U>, U: TrustedEntityBorrow, const N: usize> PartialEq<&UniqueEntityArray<U, N>>
    for VecDeque<T>
{
    fn eq(&self, other: &&UniqueEntityArray<U, N>) -> bool {
        // Delegate to `VecDeque`'s comparison against the raw inner array.
        let rhs = &other.0;
        self.eq(rhs)
    }
}
impl<T: PartialEq<U>, U: TrustedEntityBorrow, const N: usize>
    PartialEq<&mut UniqueEntityArray<U, N>> for VecDeque<T>
{
    fn eq(&self, other: &&mut UniqueEntityArray<U, N>) -> bool {
        // Delegate to `VecDeque`'s comparison against the raw inner array.
        let rhs = &other.0;
        self.eq(rhs)
    }
}
impl<T: PartialEq<U>, U: TrustedEntityBorrow, const N: usize> PartialEq<UniqueEntityArray<U, N>>
    for Vec<T>
{
    fn eq(&self, other: &UniqueEntityArray<U, N>) -> bool {
        // Delegate to `Vec`'s comparison against the raw inner array.
        let rhs = &other.0;
        self.eq(rhs)
    }
}
impl<T: PartialEq<U>, U: TrustedEntityBorrow, const N: usize> PartialEq<UniqueEntityArray<U, N>>
    for VecDeque<T>
{
    fn eq(&self, other: &UniqueEntityArray<U, N>) -> bool {
        // Delegate to `VecDeque`'s comparison against the raw inner array.
        let rhs = &other.0;
        self.eq(rhs)
    }
}
/// A by-value iterator over the elements of a [`UniqueEntityArray`],
/// wrapping [`array::IntoIter`] so the uniqueness invariant carries over
/// to the yielded items.
pub type IntoIter<T, const N: usize> = UniqueEntityIter<array::IntoIter<T, N>>;
impl<T: TrustedEntityBorrow, const N: usize> UniqueEntityIter<array::IntoIter<T, N>> {
    /// Returns an immutable slice of all elements that have not been yielded
    /// yet.
    ///
    /// Equivalent to [`array::IntoIter::as_slice`].
    pub fn as_slice(&self) -> &UniqueEntitySlice<T> {
        // SAFETY: Every element of the wrapped iterator is unique, and the
        // remaining-elements view preserves that property.
        unsafe {
            let remaining = self.as_inner().as_slice();
            UniqueEntitySlice::from_slice_unchecked(remaining)
        }
    }

    /// Returns a mutable slice of all elements that have not been yielded yet.
    ///
    /// Equivalent to [`array::IntoIter::as_mut_slice`].
    pub fn as_mut_slice(&mut self) -> &mut UniqueEntitySlice<T> {
        // SAFETY: Every element of the wrapped iterator is unique, and the
        // remaining-elements view preserves that property.
        unsafe {
            let remaining = self.as_mut_inner().as_mut_slice();
            UniqueEntitySlice::from_slice_unchecked_mut(remaining)
        }
    }
}

View File

@ -1,4 +1,5 @@
use core::{ use core::{
array::TryFromSliceError,
borrow::Borrow, borrow::Borrow,
cmp::Ordering, cmp::Ordering,
fmt::Debug, fmt::Debug,
@ -22,7 +23,7 @@ use alloc::{
use super::{ use super::{
unique_vec, EntitySet, EntitySetIterator, FromEntitySetIterator, TrustedEntityBorrow, unique_vec, EntitySet, EntitySetIterator, FromEntitySetIterator, TrustedEntityBorrow,
UniqueEntityIter, UniqueEntityVec, UniqueEntityArray, UniqueEntityIter, UniqueEntityVec,
}; };
/// A slice that contains only unique entities. /// A slice that contains only unique entities.
@ -128,6 +129,64 @@ impl<T: TrustedEntityBorrow> UniqueEntitySlice<T> {
Some((last, unsafe { Self::from_slice_unchecked(rest) })) Some((last, unsafe { Self::from_slice_unchecked(rest) }))
} }
/// Returns an array reference to the first `N` items in the slice.
///
/// Equivalent to [`[T]::first_chunk`](slice::first_chunk).
pub const fn first_chunk<const N: usize>(&self) -> Option<&UniqueEntityArray<T, N>> {
let Some(chunk) = self.0.first_chunk() else {
return None;
};
// SAFETY: All elements in the original slice are unique.
Some(unsafe { UniqueEntityArray::from_array_ref_unchecked(chunk) })
}
/// Returns an array reference to the first `N` items in the slice and the remaining slice.
///
/// Equivalent to [`[T]::split_first_chunk`](slice::split_first_chunk).
pub const fn split_first_chunk<const N: usize>(
&self,
) -> Option<(&UniqueEntityArray<T, N>, &UniqueEntitySlice<T>)> {
let Some((chunk, rest)) = self.0.split_first_chunk() else {
return None;
};
// SAFETY: All elements in the original slice are unique.
unsafe {
Some((
UniqueEntityArray::from_array_ref_unchecked(chunk),
Self::from_slice_unchecked(rest),
))
}
}
/// Returns an array reference to the last `N` items in the slice and the remaining slice.
///
/// Equivalent to [`[T]::split_last_chunk`](slice::split_last_chunk).
pub const fn split_last_chunk<const N: usize>(
&self,
) -> Option<(&UniqueEntitySlice<T>, &UniqueEntityArray<T, N>)> {
let Some((rest, chunk)) = self.0.split_last_chunk() else {
return None;
};
// SAFETY: All elements in the original slice are unique.
unsafe {
Some((
Self::from_slice_unchecked(rest),
UniqueEntityArray::from_array_ref_unchecked(chunk),
))
}
}
/// Returns an array reference to the last `N` items in the slice.
///
/// Equivalent to [`[T]::last_chunk`](slice::last_chunk).
pub const fn last_chunk<const N: usize>(&self) -> Option<&UniqueEntityArray<T, N>> {
let Some(chunk) = self.0.last_chunk() else {
return None;
};
// SAFETY: All elements in the original slice are unique.
Some(unsafe { UniqueEntityArray::from_array_ref_unchecked(chunk) })
}
/// Returns a reference to a subslice. /// Returns a reference to a subslice.
/// ///
/// Equivalent to the range functionality of [`[T]::get`]. /// Equivalent to the range functionality of [`[T]::get`].
@ -949,6 +1008,15 @@ impl<'a, T: TrustedEntityBorrow + Clone> From<&'a UniqueEntitySlice<T>>
} }
} }
impl<T: TrustedEntityBorrow + Clone, const N: usize> From<UniqueEntityArray<T, N>>
for Box<UniqueEntitySlice<T>>
{
fn from(value: UniqueEntityArray<T, N>) -> Self {
// SAFETY: All elements in the original slice are unique.
unsafe { UniqueEntitySlice::from_boxed_slice_unchecked(Box::new(value.into_inner())) }
}
}
impl<'a, T: TrustedEntityBorrow + Clone> From<Cow<'a, UniqueEntitySlice<T>>> impl<'a, T: TrustedEntityBorrow + Clone> From<Cow<'a, UniqueEntitySlice<T>>>
for Box<UniqueEntitySlice<T>> for Box<UniqueEntitySlice<T>>
{ {
@ -1134,6 +1202,30 @@ impl<T: TrustedEntityBorrow + PartialEq<U>, U, const N: usize> PartialEq<[U; N]>
} }
} }
impl<T: TrustedEntityBorrow + PartialEq<U>, U: TrustedEntityBorrow, const N: usize>
PartialEq<UniqueEntityArray<U, N>> for &UniqueEntitySlice<T>
{
fn eq(&self, other: &UniqueEntityArray<U, N>) -> bool {
self.0.eq(&other.0)
}
}
impl<T: TrustedEntityBorrow + PartialEq<U>, U: TrustedEntityBorrow, const N: usize>
PartialEq<UniqueEntityArray<U, N>> for &mut UniqueEntitySlice<T>
{
fn eq(&self, other: &UniqueEntityArray<U, N>) -> bool {
self.0.eq(&other.0)
}
}
impl<T: TrustedEntityBorrow + PartialEq<U>, U: TrustedEntityBorrow, const N: usize>
PartialEq<UniqueEntityArray<U, N>> for UniqueEntitySlice<T>
{
fn eq(&self, other: &UniqueEntityArray<U, N>) -> bool {
self.0.eq(&other.0)
}
}
impl<T: TrustedEntityBorrow + PartialEq<U>, U> PartialEq<Vec<U>> for &UniqueEntitySlice<T> { impl<T: TrustedEntityBorrow + PartialEq<U>, U> PartialEq<Vec<U>> for &UniqueEntitySlice<T> {
fn eq(&self, other: &Vec<U>) -> bool { fn eq(&self, other: &Vec<U>) -> bool {
self.0.eq(other) self.0.eq(other)
@ -1161,6 +1253,38 @@ impl<T: TrustedEntityBorrow + Clone> ToOwned for UniqueEntitySlice<T> {
} }
} }
impl<'a, T: TrustedEntityBorrow + Copy, const N: usize> TryFrom<&'a UniqueEntitySlice<T>>
for &'a UniqueEntityArray<T, N>
{
type Error = TryFromSliceError;
fn try_from(value: &'a UniqueEntitySlice<T>) -> Result<Self, Self::Error> {
<&[T; N]>::try_from(&value.0).map(|array|
// SAFETY: All elements in the original slice are unique.
unsafe { UniqueEntityArray::from_array_ref_unchecked(array) })
}
}
impl<T: TrustedEntityBorrow + Copy, const N: usize> TryFrom<&UniqueEntitySlice<T>>
for UniqueEntityArray<T, N>
{
type Error = TryFromSliceError;
fn try_from(value: &UniqueEntitySlice<T>) -> Result<Self, Self::Error> {
<&Self>::try_from(value).copied()
}
}
impl<T: TrustedEntityBorrow + Copy, const N: usize> TryFrom<&mut UniqueEntitySlice<T>>
for UniqueEntityArray<T, N>
{
type Error = TryFromSliceError;
fn try_from(value: &mut UniqueEntitySlice<T>) -> Result<Self, Self::Error> {
<Self>::try_from(&*value)
}
}
impl<T: TrustedEntityBorrow> Index<(Bound<usize>, Bound<usize>)> for UniqueEntitySlice<T> { impl<T: TrustedEntityBorrow> Index<(Bound<usize>, Bound<usize>)> for UniqueEntitySlice<T> {
type Output = Self; type Output = Self;
fn index(&self, key: (Bound<usize>, Bound<usize>)) -> &Self { fn index(&self, key: (Bound<usize>, Bound<usize>)) -> &Self {
@ -1280,7 +1404,6 @@ impl<T: TrustedEntityBorrow> IndexMut<RangeToInclusive<usize>> for UniqueEntityS
/// the [`IntoIterator`] impls on it and [`UniqueEntityVec`]. /// the [`IntoIterator`] impls on it and [`UniqueEntityVec`].
/// ///
/// [`iter`]: `UniqueEntitySlice::iter` /// [`iter`]: `UniqueEntitySlice::iter`
/// [`into_iter`]: UniqueEntitySlice::into_iter
pub type Iter<'a, T> = UniqueEntityIter<slice::Iter<'a, T>>; pub type Iter<'a, T> = UniqueEntityIter<slice::Iter<'a, T>>;
impl<'a, T: TrustedEntityBorrow> UniqueEntityIter<slice::Iter<'a, T>> { impl<'a, T: TrustedEntityBorrow> UniqueEntityIter<slice::Iter<'a, T>> {

View File

@ -17,8 +17,8 @@ use alloc::{
}; };
use super::{ use super::{
unique_slice, EntitySet, FromEntitySetIterator, TrustedEntityBorrow, UniqueEntityIter, unique_slice, EntitySet, FromEntitySetIterator, TrustedEntityBorrow, UniqueEntityArray,
UniqueEntitySlice, UniqueEntityIter, UniqueEntitySlice,
}; };
/// A `Vec` that contains only unique entities. /// A `Vec` that contains only unique entities.
@ -550,6 +550,14 @@ impl<T: TrustedEntityBorrow + PartialEq<U>, U, const N: usize> PartialEq<&[U; N]
} }
} }
impl<T: TrustedEntityBorrow + PartialEq<U>, U: TrustedEntityBorrow, const N: usize>
PartialEq<&UniqueEntityArray<U, N>> for UniqueEntityVec<T>
{
fn eq(&self, other: &&UniqueEntityArray<U, N>) -> bool {
self.0.eq(&other.as_inner())
}
}
impl<T: TrustedEntityBorrow + PartialEq<U>, U, const N: usize> PartialEq<&mut [U; N]> impl<T: TrustedEntityBorrow + PartialEq<U>, U, const N: usize> PartialEq<&mut [U; N]>
for UniqueEntityVec<T> for UniqueEntityVec<T>
{ {
@ -558,6 +566,14 @@ impl<T: TrustedEntityBorrow + PartialEq<U>, U, const N: usize> PartialEq<&mut [U
} }
} }
impl<T: TrustedEntityBorrow + PartialEq<U>, U: TrustedEntityBorrow, const N: usize>
PartialEq<&mut UniqueEntityArray<U, N>> for UniqueEntityVec<T>
{
fn eq(&self, other: &&mut UniqueEntityArray<U, N>) -> bool {
self.0.eq(other.as_inner())
}
}
impl<T: TrustedEntityBorrow + PartialEq<U>, U> PartialEq<[U]> for UniqueEntityVec<T> { impl<T: TrustedEntityBorrow + PartialEq<U>, U> PartialEq<[U]> for UniqueEntityVec<T> {
fn eq(&self, other: &[U]) -> bool { fn eq(&self, other: &[U]) -> bool {
self.0.eq(other) self.0.eq(other)
@ -580,6 +596,14 @@ impl<T: TrustedEntityBorrow + PartialEq<U>, U, const N: usize> PartialEq<[U; N]>
} }
} }
impl<T: TrustedEntityBorrow + PartialEq<U>, U: TrustedEntityBorrow, const N: usize>
PartialEq<UniqueEntityArray<U, N>> for UniqueEntityVec<T>
{
fn eq(&self, other: &UniqueEntityArray<U, N>) -> bool {
self.0.eq(other.as_inner())
}
}
impl<T: PartialEq<U>, U: TrustedEntityBorrow> PartialEq<UniqueEntityVec<U>> for Vec<T> { impl<T: PartialEq<U>, U: TrustedEntityBorrow> PartialEq<UniqueEntityVec<U>> for Vec<T> {
fn eq(&self, other: &UniqueEntityVec<U>) -> bool { fn eq(&self, other: &UniqueEntityVec<U>) -> bool {
self.eq(&other.0) self.eq(&other.0)
@ -683,6 +707,28 @@ impl<T: TrustedEntityBorrow> From<[T; 0]> for UniqueEntityVec<T> {
} }
} }
impl<T: TrustedEntityBorrow + Clone, const N: usize> From<&UniqueEntityArray<T, N>>
for UniqueEntityVec<T>
{
fn from(value: &UniqueEntityArray<T, N>) -> Self {
Self(Vec::from(value.as_inner().clone()))
}
}
impl<T: TrustedEntityBorrow + Clone, const N: usize> From<&mut UniqueEntityArray<T, N>>
for UniqueEntityVec<T>
{
fn from(value: &mut UniqueEntityArray<T, N>) -> Self {
Self(Vec::from(value.as_inner().clone()))
}
}
impl<T: TrustedEntityBorrow, const N: usize> From<UniqueEntityArray<T, N>> for UniqueEntityVec<T> {
fn from(value: UniqueEntityArray<T, N>) -> Self {
Self(Vec::from(value.into_inner()))
}
}
impl<T: TrustedEntityBorrow> From<UniqueEntityVec<T>> for Vec<T> { impl<T: TrustedEntityBorrow> From<UniqueEntityVec<T>> for Vec<T> {
fn from(value: UniqueEntityVec<T>) -> Self { fn from(value: UniqueEntityVec<T>) -> Self {
value.0 value.0
@ -755,6 +801,20 @@ impl<T: TrustedEntityBorrow, const N: usize> TryFrom<UniqueEntityVec<T>> for Box
} }
} }
impl<T: TrustedEntityBorrow, const N: usize> TryFrom<UniqueEntityVec<T>>
for Box<UniqueEntityArray<T, N>>
{
type Error = UniqueEntityVec<T>;
fn try_from(value: UniqueEntityVec<T>) -> Result<Self, Self::Error> {
Box::try_from(value.0)
.map(|v|
// SAFETY: All elements in the original Vec are unique.
unsafe { UniqueEntityArray::from_boxed_array_unchecked(v) })
.map_err(UniqueEntityVec)
}
}
impl<T: TrustedEntityBorrow, const N: usize> TryFrom<UniqueEntityVec<T>> for [T; N] { impl<T: TrustedEntityBorrow, const N: usize> TryFrom<UniqueEntityVec<T>> for [T; N] {
type Error = UniqueEntityVec<T>; type Error = UniqueEntityVec<T>;
@ -763,6 +823,20 @@ impl<T: TrustedEntityBorrow, const N: usize> TryFrom<UniqueEntityVec<T>> for [T;
} }
} }
impl<T: TrustedEntityBorrow, const N: usize> TryFrom<UniqueEntityVec<T>>
for UniqueEntityArray<T, N>
{
type Error = UniqueEntityVec<T>;
fn try_from(value: UniqueEntityVec<T>) -> Result<Self, Self::Error> {
<[T; N] as TryFrom<Vec<T>>>::try_from(value.0)
.map(|v|
// SAFETY: All elements in the original Vec are unique.
unsafe { UniqueEntityArray::from_array_unchecked(v) })
.map_err(UniqueEntityVec)
}
}
impl<T: TrustedEntityBorrow> From<BTreeSet<T>> for UniqueEntityVec<T> { impl<T: TrustedEntityBorrow> From<BTreeSet<T>> for UniqueEntityVec<T> {
fn from(value: BTreeSet<T>) -> Self { fn from(value: BTreeSet<T>) -> Self {
Self(value.into_iter().collect::<Vec<T>>()) Self(value.into_iter().collect::<Vec<T>>())
@ -809,7 +883,7 @@ impl<T: TrustedEntityBorrow> Extend<T> for UniqueEntityVec<T> {
let reserve = if self.is_empty() { let reserve = if self.is_empty() {
iter.size_hint().0 iter.size_hint().0
} else { } else {
(iter.size_hint().0 + 1) / 2 iter.size_hint().0.div_ceil(2)
}; };
self.reserve(reserve); self.reserve(reserve);
// Internal iteration (fold/for_each) is known to result in better code generation // Internal iteration (fold/for_each) is known to result in better code generation
@ -836,7 +910,7 @@ impl<'a, T: TrustedEntityBorrow + Copy + 'a> Extend<&'a T> for UniqueEntityVec<T
let reserve = if self.is_empty() { let reserve = if self.is_empty() {
iter.size_hint().0 iter.size_hint().0
} else { } else {
(iter.size_hint().0 + 1) / 2 iter.size_hint().0.div_ceil(2)
}; };
self.reserve(reserve); self.reserve(reserve);
// Internal iteration (fold/for_each) is known to result in better code generation // Internal iteration (fold/for_each) is known to result in better code generation

View File

@ -14,14 +14,14 @@ use bevy_ecs::{
/// #[derive(Event)] /// #[derive(Event)]
/// pub struct MyEvent; // Custom event type. /// pub struct MyEvent; // Custom event type.
/// fn my_system(mut writer: EventWriter<MyEvent>) { /// fn my_system(mut writer: EventWriter<MyEvent>) {
/// writer.send(MyEvent); /// writer.write(MyEvent);
/// } /// }
/// ///
/// # bevy_ecs::system::assert_is_system(my_system); /// # bevy_ecs::system::assert_is_system(my_system);
/// ``` /// ```
/// # Observers /// # Observers
/// ///
/// "Buffered" Events, such as those sent directly in [`Events`] or sent using [`EventWriter`], do _not_ automatically /// "Buffered" Events, such as those sent directly in [`Events`] or written using [`EventWriter`], do _not_ automatically
/// trigger any [`Observer`]s watching for that event, as each [`Event`] has different requirements regarding _if_ it will /// trigger any [`Observer`]s watching for that event, as each [`Event`] has different requirements regarding _if_ it will
/// be triggered, and if so, _when_ it will be triggered in the schedule. /// be triggered, and if so, _when_ it will be triggered in the schedule.
/// ///
@ -32,7 +32,7 @@ use bevy_ecs::{
/// ///
/// # Untyped events /// # Untyped events
/// ///
/// `EventWriter` can only send events of one specific type, which must be known at compile-time. /// `EventWriter` can only write events of one specific type, which must be known at compile-time.
/// This is not a problem most of the time, but you may find a situation where you cannot know /// This is not a problem most of the time, but you may find a situation where you cannot know
/// ahead of time every kind of event you'll need to send. In this case, you can use the "type-erased event" pattern. /// ahead of time every kind of event you'll need to send. In this case, you can use the "type-erased event" pattern.
/// ///
@ -64,13 +64,48 @@ pub struct EventWriter<'w, E: Event> {
} }
impl<'w, E: Event> EventWriter<'w, E> { impl<'w, E: Event> EventWriter<'w, E> {
/// Writes an `event`, which can later be read by [`EventReader`](super::EventReader)s.
/// This method returns the [ID](`EventId`) of the written `event`.
///
/// See [`Events`] for details.
#[doc(alias = "send")]
#[track_caller]
pub fn write(&mut self, event: E) -> EventId<E> {
self.events.send(event)
}
/// Sends a list of `events` all at once, which can later be read by [`EventReader`](super::EventReader)s.
/// This is more efficient than sending each event individually.
/// This method returns the [IDs](`EventId`) of the written `events`.
///
/// See [`Events`] for details.
#[doc(alias = "send_batch")]
#[track_caller]
pub fn write_batch(&mut self, events: impl IntoIterator<Item = E>) -> SendBatchIds<E> {
self.events.send_batch(events)
}
/// Writes the default value of the event. Useful when the event is an empty struct.
/// This method returns the [ID](`EventId`) of the written `event`.
///
/// See [`Events`] for details.
#[doc(alias = "send_default")]
#[track_caller]
pub fn write_default(&mut self) -> EventId<E>
where
E: Default,
{
self.events.send_default()
}
/// Sends an `event`, which can later be read by [`EventReader`](super::EventReader)s. /// Sends an `event`, which can later be read by [`EventReader`](super::EventReader)s.
/// This method returns the [ID](`EventId`) of the sent `event`. /// This method returns the [ID](`EventId`) of the sent `event`.
/// ///
/// See [`Events`] for details. /// See [`Events`] for details.
#[deprecated(since = "0.16.0", note = "Use `EventWriter::write` instead.")]
#[track_caller] #[track_caller]
pub fn send(&mut self, event: E) -> EventId<E> { pub fn send(&mut self, event: E) -> EventId<E> {
self.events.send(event) self.write(event)
} }
/// Sends a list of `events` all at once, which can later be read by [`EventReader`](super::EventReader)s. /// Sends a list of `events` all at once, which can later be read by [`EventReader`](super::EventReader)s.
@ -78,20 +113,22 @@ impl<'w, E: Event> EventWriter<'w, E> {
/// This method returns the [IDs](`EventId`) of the sent `events`. /// This method returns the [IDs](`EventId`) of the sent `events`.
/// ///
/// See [`Events`] for details. /// See [`Events`] for details.
#[deprecated(since = "0.16.0", note = "Use `EventWriter::write_batch` instead.")]
#[track_caller] #[track_caller]
pub fn send_batch(&mut self, events: impl IntoIterator<Item = E>) -> SendBatchIds<E> { pub fn send_batch(&mut self, events: impl IntoIterator<Item = E>) -> SendBatchIds<E> {
self.events.send_batch(events) self.write_batch(events)
} }
/// Sends the default value of the event. Useful when the event is an empty struct. /// Sends the default value of the event. Useful when the event is an empty struct.
/// This method returns the [ID](`EventId`) of the sent `event`. /// This method returns the [ID](`EventId`) of the sent `event`.
/// ///
/// See [`Events`] for details. /// See [`Events`] for details.
#[deprecated(since = "0.16.0", note = "Use `EventWriter::write_default` instead.")]
#[track_caller] #[track_caller]
pub fn send_default(&mut self) -> EventId<E> pub fn send_default(&mut self) -> EventId<E>
where where
E: Default, E: Default,
{ {
self.events.send_default() self.write_default()
} }
} }

View File

@ -2,13 +2,6 @@
unsafe_op_in_unsafe_fn, unsafe_op_in_unsafe_fn,
reason = "See #11590. To be removed once all applicable unsafe code has an unsafe block with a safety comment." reason = "See #11590. To be removed once all applicable unsafe code has an unsafe block with a safety comment."
)] )]
#![cfg_attr(
test,
expect(
dependency_on_unit_never_type_fallback,
reason = "See #17340. To be removed once Edition 2024 is released"
)
)]
#![doc = include_str!("../README.md")] #![doc = include_str!("../README.md")]
#![cfg_attr( #![cfg_attr(
any(docsrs, docsrs_dep), any(docsrs, docsrs_dep),

View File

@ -5,6 +5,7 @@ mod runner;
pub use entity_observer::ObservedBy; pub use entity_observer::ObservedBy;
pub use runner::*; pub use runner::*;
use variadics_please::all_tuples;
use crate::{ use crate::{
archetype::ArchetypeFlags, archetype::ArchetypeFlags,
@ -177,92 +178,108 @@ impl<'w, E, B: Bundle> DerefMut for Trigger<'w, E, B> {
/// will run. /// will run.
pub trait TriggerTargets { pub trait TriggerTargets {
/// The components the trigger should target. /// The components the trigger should target.
fn components(&self) -> &[ComponentId]; fn components(&self) -> impl Iterator<Item = ComponentId> + Clone + '_;
/// The entities the trigger should target. /// The entities the trigger should target.
fn entities(&self) -> &[Entity]; fn entities(&self) -> impl Iterator<Item = Entity> + Clone + '_;
} }
impl TriggerTargets for () { impl<T: TriggerTargets + ?Sized> TriggerTargets for &T {
fn components(&self) -> &[ComponentId] { fn components(&self) -> impl Iterator<Item = ComponentId> + Clone + '_ {
&[] (**self).components()
} }
fn entities(&self) -> &[Entity] { fn entities(&self) -> impl Iterator<Item = Entity> + Clone + '_ {
&[] (**self).entities()
} }
} }
impl TriggerTargets for Entity { impl TriggerTargets for Entity {
fn components(&self) -> &[ComponentId] { fn components(&self) -> impl Iterator<Item = ComponentId> + Clone + '_ {
&[] [].into_iter()
} }
fn entities(&self) -> &[Entity] { fn entities(&self) -> impl Iterator<Item = Entity> + Clone + '_ {
core::slice::from_ref(self) core::iter::once(*self)
}
}
impl TriggerTargets for Vec<Entity> {
fn components(&self) -> &[ComponentId] {
&[]
}
fn entities(&self) -> &[Entity] {
self.as_slice()
}
}
impl<const N: usize> TriggerTargets for [Entity; N] {
fn components(&self) -> &[ComponentId] {
&[]
}
fn entities(&self) -> &[Entity] {
self.as_slice()
} }
} }
impl TriggerTargets for ComponentId { impl TriggerTargets for ComponentId {
fn components(&self) -> &[ComponentId] { fn components(&self) -> impl Iterator<Item = ComponentId> + Clone + '_ {
core::slice::from_ref(self) core::iter::once(*self)
} }
fn entities(&self) -> &[Entity] { fn entities(&self) -> impl Iterator<Item = Entity> + Clone + '_ {
&[] [].into_iter()
} }
} }
impl TriggerTargets for Vec<ComponentId> { impl<T: TriggerTargets> TriggerTargets for Vec<T> {
fn components(&self) -> &[ComponentId] { fn components(&self) -> impl Iterator<Item = ComponentId> + Clone + '_ {
self.as_slice() self.iter().flat_map(T::components)
} }
fn entities(&self) -> &[Entity] { fn entities(&self) -> impl Iterator<Item = Entity> + Clone + '_ {
&[] self.iter().flat_map(T::entities)
} }
} }
impl<const N: usize> TriggerTargets for [ComponentId; N] { impl<const N: usize, T: TriggerTargets> TriggerTargets for [T; N] {
fn components(&self) -> &[ComponentId] { fn components(&self) -> impl Iterator<Item = ComponentId> + Clone + '_ {
self.as_slice() self.iter().flat_map(T::components)
} }
fn entities(&self) -> &[Entity] { fn entities(&self) -> impl Iterator<Item = Entity> + Clone + '_ {
&[] self.iter().flat_map(T::entities)
} }
} }
impl TriggerTargets for &Vec<Entity> { impl<T: TriggerTargets> TriggerTargets for [T] {
fn components(&self) -> &[ComponentId] { fn components(&self) -> impl Iterator<Item = ComponentId> + Clone + '_ {
&[] self.iter().flat_map(T::components)
} }
fn entities(&self) -> &[Entity] { fn entities(&self) -> impl Iterator<Item = Entity> + Clone + '_ {
self.as_slice() self.iter().flat_map(T::entities)
} }
} }
macro_rules! impl_trigger_targets_tuples {
($(#[$meta:meta])* $($trigger_targets: ident),*) => {
#[expect(clippy::allow_attributes, reason = "can't guarantee violation of non_snake_case")]
#[allow(non_snake_case, reason = "`all_tuples!()` generates non-snake-case variable names.")]
$(#[$meta])*
impl<$($trigger_targets: TriggerTargets),*> TriggerTargets for ($($trigger_targets,)*)
{
fn components(&self) -> impl Iterator<Item = ComponentId> + Clone + '_ {
let iter = [].into_iter();
let ($($trigger_targets,)*) = self;
$(
let iter = iter.chain($trigger_targets.components());
)*
iter
}
fn entities(&self) -> impl Iterator<Item = Entity> + Clone + '_ {
let iter = [].into_iter();
let ($($trigger_targets,)*) = self;
$(
let iter = iter.chain($trigger_targets.entities());
)*
iter
}
}
}
}
all_tuples!(
#[doc(fake_variadic)]
impl_trigger_targets_tuples,
0,
15,
T
);
/// A description of what an [`Observer`] observes. /// A description of what an [`Observer`] observes.
#[derive(Default, Clone)] #[derive(Default, Clone)]
pub struct ObserverDescriptor { pub struct ObserverDescriptor {
@ -673,7 +690,8 @@ impl World {
caller: MaybeLocation, caller: MaybeLocation,
) { ) {
let mut world = DeferredWorld::from(self); let mut world = DeferredWorld::from(self);
if targets.entities().is_empty() { let mut entity_targets = targets.entities().peekable();
if entity_targets.peek().is_none() {
// SAFETY: `event_data` is accessible as the type represented by `event_id` // SAFETY: `event_data` is accessible as the type represented by `event_id`
unsafe { unsafe {
world.trigger_observers_with_data::<_, E::Traversal>( world.trigger_observers_with_data::<_, E::Traversal>(
@ -686,12 +704,12 @@ impl World {
); );
}; };
} else { } else {
for target in targets.entities() { for target_entity in entity_targets {
// SAFETY: `event_data` is accessible as the type represented by `event_id` // SAFETY: `event_data` is accessible as the type represented by `event_id`
unsafe { unsafe {
world.trigger_observers_with_data::<_, E::Traversal>( world.trigger_observers_with_data::<_, E::Traversal>(
event_id, event_id,
*target, target_entity,
targets.components(), targets.components(),
event_data, event_data,
E::AUTO_PROPAGATE, E::AUTO_PROPAGATE,
@ -1115,11 +1133,10 @@ mod tests {
fn observer_despawn() { fn observer_despawn() {
let mut world = World::new(); let mut world = World::new();
let observer = world let system: fn(Trigger<OnAdd, A>) = |_| {
.add_observer(|_: Trigger<OnAdd, A>| { panic!("Observer triggered after being despawned.");
panic!("Observer triggered after being despawned.") };
}) let observer = world.add_observer(system).id();
.id();
world.despawn(observer); world.despawn(observer);
world.spawn(A).flush(); world.spawn(A).flush();
} }
@ -1136,11 +1153,11 @@ mod tests {
res.observed("remove_a"); res.observed("remove_a");
}); });
let observer = world let system: fn(Trigger<OnRemove, B>) = |_: Trigger<OnRemove, B>| {
.add_observer(|_: Trigger<OnRemove, B>| { panic!("Observer triggered after being despawned.");
panic!("Observer triggered after being despawned.") };
})
.flush(); let observer = world.add_observer(system).flush();
world.despawn(observer); world.despawn(observer);
world.despawn(entity); world.despawn(entity);
@ -1166,9 +1183,10 @@ mod tests {
let mut world = World::new(); let mut world = World::new();
world.init_resource::<Order>(); world.init_resource::<Order>();
world let system: fn(Trigger<EventA>) = |_| {
.spawn_empty() panic!("Trigger routed to non-targeted entity.");
.observe(|_: Trigger<EventA>| panic!("Trigger routed to non-targeted entity.")); };
world.spawn_empty().observe(system);
world.add_observer(move |obs: Trigger<EventA>, mut res: ResMut<Order>| { world.add_observer(move |obs: Trigger<EventA>, mut res: ResMut<Order>| {
assert_eq!(obs.target(), Entity::PLACEHOLDER); assert_eq!(obs.target(), Entity::PLACEHOLDER);
res.observed("event_a"); res.observed("event_a");
@ -1187,9 +1205,11 @@ mod tests {
let mut world = World::new(); let mut world = World::new();
world.init_resource::<Order>(); world.init_resource::<Order>();
world let system: fn(Trigger<EventA>) = |_| {
.spawn_empty() panic!("Trigger routed to non-targeted entity.");
.observe(|_: Trigger<EventA>| panic!("Trigger routed to non-targeted entity.")); };
world.spawn_empty().observe(system);
let entity = world let entity = world
.spawn_empty() .spawn_empty()
.observe(|_: Trigger<EventA>, mut res: ResMut<Order>| res.observed("a_1")) .observe(|_: Trigger<EventA>, mut res: ResMut<Order>| res.observed("a_1"))
@ -1207,6 +1227,119 @@ mod tests {
assert_eq!(vec!["a_2", "a_1"], world.resource::<Order>().0); assert_eq!(vec!["a_2", "a_1"], world.resource::<Order>().0);
} }
#[test]
fn observer_multiple_targets() {
#[derive(Resource, Default)]
struct R(i32);
let mut world = World::new();
let component_a = world.register_component::<A>();
let component_b = world.register_component::<B>();
world.init_resource::<R>();
// targets (entity_1, A)
let entity_1 = world
.spawn_empty()
.observe(|_: Trigger<EventA, A>, mut res: ResMut<R>| res.0 += 1)
.id();
// targets (entity_2, B)
let entity_2 = world
.spawn_empty()
.observe(|_: Trigger<EventA, B>, mut res: ResMut<R>| res.0 += 10)
.id();
// targets any entity or component
world.add_observer(|_: Trigger<EventA>, mut res: ResMut<R>| res.0 += 100);
// targets any entity, and components A or B
world.add_observer(|_: Trigger<EventA, (A, B)>, mut res: ResMut<R>| res.0 += 1000);
// test all tuples
world.add_observer(|_: Trigger<EventA, (A, B, (A, B))>, mut res: ResMut<R>| res.0 += 10000);
world.add_observer(
|_: Trigger<EventA, (A, B, (A, B), ((A, B), (A, B)))>, mut res: ResMut<R>| {
res.0 += 100000;
},
);
world.add_observer(
|_: Trigger<EventA, (A, B, (A, B), (B, A), (A, B, ((A, B), (B, A))))>,
mut res: ResMut<R>| res.0 += 1000000,
);
// WorldEntityMut does not automatically flush.
world.flush();
// trigger for an entity and a component
world.trigger_targets(EventA, (entity_1, component_a));
world.flush();
// only observer that doesn't trigger is the one only watching entity_2
assert_eq!(1111101, world.resource::<R>().0);
world.resource_mut::<R>().0 = 0;
// trigger for both entities, but no components: trigger once per entity target
world.trigger_targets(EventA, (entity_1, entity_2));
world.flush();
// only the observer that doesn't require components triggers - once per entity
assert_eq!(200, world.resource::<R>().0);
world.resource_mut::<R>().0 = 0;
// trigger for both components, but no entities: trigger once
world.trigger_targets(EventA, (component_a, component_b));
world.flush();
// all component observers trigger, entities are not observed
assert_eq!(1111100, world.resource::<R>().0);
world.resource_mut::<R>().0 = 0;
// trigger for both entities and both components: trigger once per entity target
// we only get 2222211 because a given observer can trigger only once per entity target
world.trigger_targets(EventA, ((component_a, component_b), (entity_1, entity_2)));
world.flush();
assert_eq!(2222211, world.resource::<R>().0);
world.resource_mut::<R>().0 = 0;
// trigger to test complex tuples: (A, B, (A, B))
world.trigger_targets(
EventA,
(component_a, component_b, (component_a, component_b)),
);
world.flush();
// the duplicate components in the tuple don't cause multiple triggers
assert_eq!(1111100, world.resource::<R>().0);
world.resource_mut::<R>().0 = 0;
// trigger to test complex tuples: (A, B, (A, B), ((A, B), (A, B)))
world.trigger_targets(
EventA,
(
component_a,
component_b,
(component_a, component_b),
((component_a, component_b), (component_a, component_b)),
),
);
world.flush();
// the duplicate components in the tuple don't cause multiple triggers
assert_eq!(1111100, world.resource::<R>().0);
world.resource_mut::<R>().0 = 0;
// trigger to test the most complex tuple: (A, B, (A, B), (B, A), (A, B, ((A, B), (B, A))))
world.trigger_targets(
EventA,
(
component_a,
component_b,
(component_a, component_b),
(component_b, component_a),
(
component_a,
component_b,
((component_a, component_b), (component_b, component_a)),
),
),
);
world.flush();
// the duplicate components in the tuple don't cause multiple triggers
assert_eq!(1111100, world.resource::<R>().0);
world.resource_mut::<R>().0 = 0;
}
#[test] #[test]
fn observer_dynamic_component() { fn observer_dynamic_component() {
let mut world = World::new(); let mut world = World::new();

View File

@ -66,9 +66,7 @@ use variadics_please::all_tuples;
/// # bevy_ecs::system::assert_is_system(my_system); /// # bevy_ecs::system::assert_is_system(my_system);
/// ``` /// ```
/// ///
/// [`matches_component_set`]: Self::matches_component_set
/// [`Query`]: crate::system::Query /// [`Query`]: crate::system::Query
/// [`State`]: Self::State
/// ///
/// # Safety /// # Safety
/// ///

View File

@ -80,43 +80,111 @@ impl ScheduleBuildPass for AutoInsertApplyDeferredPass {
let mut sync_point_graph = dependency_flattened.clone(); let mut sync_point_graph = dependency_flattened.clone();
let topo = graph.topsort_graph(dependency_flattened, ReportCycles::Dependency)?; let topo = graph.topsort_graph(dependency_flattened, ReportCycles::Dependency)?;
fn set_has_conditions(graph: &ScheduleGraph, node: NodeId) -> bool {
!graph.set_conditions_at(node).is_empty()
|| graph
.hierarchy()
.graph()
.edges_directed(node, Direction::Incoming)
.any(|(parent, _)| set_has_conditions(graph, parent))
}
fn system_has_conditions(graph: &ScheduleGraph, node: NodeId) -> bool {
assert!(node.is_system());
!graph.system_conditions[node.index()].is_empty()
|| graph
.hierarchy()
.graph()
.edges_directed(node, Direction::Incoming)
.any(|(parent, _)| set_has_conditions(graph, parent))
}
let mut system_has_conditions_cache = HashMap::default();
fn is_valid_explicit_sync_point(
graph: &ScheduleGraph,
system: NodeId,
system_has_conditions_cache: &mut HashMap<usize, bool>,
) -> bool {
let index = system.index();
is_apply_deferred(graph.systems[index].get().unwrap())
&& !*system_has_conditions_cache
.entry(index)
.or_insert_with(|| system_has_conditions(graph, system))
}
// calculate the number of sync points each sync point is from the beginning of the graph // calculate the number of sync points each sync point is from the beginning of the graph
// use the same sync point if the distance is the same let mut distances: HashMap<usize, u32> =
let mut distances: HashMap<usize, Option<u32>> =
HashMap::with_capacity_and_hasher(topo.len(), Default::default()); HashMap::with_capacity_and_hasher(topo.len(), Default::default());
// Keep track of any explicit sync nodes for a specific distance.
let mut distance_to_explicit_sync_node: HashMap<u32, NodeId> = HashMap::default();
for node in &topo { for node in &topo {
let add_sync_after = graph.systems[node.index()].get().unwrap().has_deferred(); let node_system = graph.systems[node.index()].get().unwrap();
let node_needs_sync =
if is_valid_explicit_sync_point(graph, *node, &mut system_has_conditions_cache) {
distance_to_explicit_sync_node.insert(
distances.get(&node.index()).copied().unwrap_or_default(),
*node,
);
// This node just did a sync, so the only reason to do another sync is if one was
// explicitly scheduled afterwards.
false
} else {
node_system.has_deferred()
};
for target in dependency_flattened.neighbors_directed(*node, Direction::Outgoing) { for target in dependency_flattened.neighbors_directed(*node, Direction::Outgoing) {
let add_sync_on_edge = add_sync_after let edge_needs_sync = node_needs_sync
&& !is_apply_deferred(graph.systems[target.index()].get().unwrap()) && !self.no_sync_edges.contains(&(*node, target))
&& !self.no_sync_edges.contains(&(*node, target)); || is_valid_explicit_sync_point(
graph,
target,
&mut system_has_conditions_cache,
);
let weight = if add_sync_on_edge { 1 } else { 0 }; let weight = if edge_needs_sync { 1 } else { 0 };
// Use whichever distance is larger, either the current distance, or the distance to
// the parent plus the weight.
let distance = distances let distance = distances
.get(&target.index()) .get(&target.index())
.unwrap_or(&None) .copied()
.or(Some(0)) .unwrap_or_default()
.map(|distance| { .max(distances.get(&node.index()).copied().unwrap_or_default() + weight);
distance.max(
distances.get(&node.index()).unwrap_or(&None).unwrap_or(0) + weight,
)
});
distances.insert(target.index(), distance); distances.insert(target.index(), distance);
}
}
// Find any edges which have a different number of sync points between them and make sure
// there is a sync point between them.
for node in &topo {
let node_distance = distances.get(&node.index()).copied().unwrap_or_default();
for target in dependency_flattened.neighbors_directed(*node, Direction::Outgoing) {
let target_distance = distances.get(&target.index()).copied().unwrap_or_default();
if node_distance == target_distance {
// These nodes are the same distance, so they don't need an edge between them.
continue;
}
if is_apply_deferred(graph.systems[target.index()].get().unwrap()) {
// We don't need to insert a sync point since ApplyDeferred is a sync point
// already!
continue;
}
let sync_point = distance_to_explicit_sync_node
.get(&target_distance)
.copied()
.unwrap_or_else(|| self.get_sync_point(graph, target_distance));
if add_sync_on_edge {
let sync_point =
self.get_sync_point(graph, distances[&target.index()].unwrap());
sync_point_graph.add_edge(*node, sync_point); sync_point_graph.add_edge(*node, sync_point);
sync_point_graph.add_edge(sync_point, target); sync_point_graph.add_edge(sync_point, target);
// edge is now redundant
sync_point_graph.remove_edge(*node, target); sync_point_graph.remove_edge(*node, target);
} }
} }
}
*dependency_flattened = sync_point_graph; *dependency_flattened = sync_point_graph;
Ok(()) Ok(())

View File

@ -276,7 +276,7 @@ pub fn simple_cycles_in_component(graph: &DiGraph, scc: &[NodeId]) -> Vec<Vec<No
stack.clear(); stack.clear();
stack.push((root, subgraph.neighbors(root))); stack.push((root, subgraph.neighbors(root)));
while !stack.is_empty() { while !stack.is_empty() {
let (ref node, successors) = stack.last_mut().unwrap(); let &mut (ref node, ref mut successors) = stack.last_mut().unwrap();
if let Some(next) = successors.next() { if let Some(next) = successors.next() {
if next == root { if next == root {
// found a cycle // found a cycle

View File

@ -1192,7 +1192,7 @@ mod tests {
let mut schedule = Schedule::new(TestSchedule); let mut schedule = Schedule::new(TestSchedule);
schedule schedule
.set_executor_kind($executor) .set_executor_kind($executor)
.add_systems(|| panic!("Executor ignored Stepping")); .add_systems(|| -> () { panic!("Executor ignored Stepping") });
// Add our schedule to stepping & and enable stepping; this should // Add our schedule to stepping & and enable stepping; this should
// prevent any systems in the schedule from running // prevent any systems in the schedule from running

View File

@ -758,6 +758,26 @@ impl ScheduleGraph {
.unwrap() .unwrap()
} }
/// Returns the conditions for the set at the given [`NodeId`], if it exists.
pub fn get_set_conditions_at(&self, id: NodeId) -> Option<&[BoxedCondition]> {
if !id.is_set() {
return None;
}
self.system_set_conditions
.get(id.index())
.map(Vec::as_slice)
}
/// Returns the conditions for the set at the given [`NodeId`].
///
/// Panics if it doesn't exist.
#[track_caller]
pub fn set_conditions_at(&self, id: NodeId) -> &[BoxedCondition] {
self.get_set_conditions_at(id)
.ok_or_else(|| format!("set with id {id:?} does not exist in this Schedule"))
.unwrap()
}
/// Returns an iterator over all systems in this schedule, along with the conditions for each system. /// Returns an iterator over all systems in this schedule, along with the conditions for each system.
pub fn systems(&self) -> impl Iterator<Item = (NodeId, &ScheduleSystem, &[BoxedCondition])> { pub fn systems(&self) -> impl Iterator<Item = (NodeId, &ScheduleSystem, &[BoxedCondition])> {
self.systems self.systems
@ -2036,7 +2056,7 @@ mod tests {
use bevy_ecs_macros::ScheduleLabel; use bevy_ecs_macros::ScheduleLabel;
use crate::{ use crate::{
prelude::{Res, Resource}, prelude::{ApplyDeferred, Res, Resource},
schedule::{ schedule::{
tests::ResMut, IntoSystemConfigs, IntoSystemSetConfigs, Schedule, tests::ResMut, IntoSystemConfigs, IntoSystemSetConfigs, Schedule,
ScheduleBuildSettings, SystemSet, ScheduleBuildSettings, SystemSet,
@ -2062,12 +2082,12 @@ mod tests {
let mut world = World::new(); let mut world = World::new();
let mut schedule = Schedule::default(); let mut schedule = Schedule::default();
let system: fn() = || {
panic!("This system must not run");
};
schedule.configure_sets(Set.run_if(|| false)); schedule.configure_sets(Set.run_if(|| false));
schedule.add_systems( schedule.add_systems(system.ambiguous_with(|| ()).in_set(Set));
(|| panic!("This system must not run"))
.ambiguous_with(|| ())
.in_set(Set),
);
schedule.run(&mut world); schedule.run(&mut world);
} }
@ -2088,6 +2108,108 @@ mod tests {
assert_eq!(schedule.executable.systems.len(), 3); assert_eq!(schedule.executable.systems.len(), 3);
} }
#[test]
fn explicit_sync_point_used_as_auto_sync_point() {
let mut schedule = Schedule::default();
let mut world = World::default();
schedule.add_systems(
(
|mut commands: Commands| commands.insert_resource(Resource1),
|_: Res<Resource1>| {},
)
.chain(),
);
schedule.add_systems((|| {}, ApplyDeferred, || {}).chain());
schedule.run(&mut world);
// No sync point was inserted, since we can reuse the explicit sync point.
assert_eq!(schedule.executable.systems.len(), 5);
}
#[test]
fn conditional_explicit_sync_point_not_used_as_auto_sync_point() {
let mut schedule = Schedule::default();
let mut world = World::default();
schedule.add_systems(
(
|mut commands: Commands| commands.insert_resource(Resource1),
|_: Res<Resource1>| {},
)
.chain(),
);
schedule.add_systems((|| {}, ApplyDeferred.run_if(|| false), || {}).chain());
schedule.run(&mut world);
// A sync point was inserted, since the explicit sync point is not always run.
assert_eq!(schedule.executable.systems.len(), 6);
}
#[test]
fn conditional_explicit_sync_point_not_used_as_auto_sync_point_condition_on_chain() {
let mut schedule = Schedule::default();
let mut world = World::default();
schedule.add_systems(
(
|mut commands: Commands| commands.insert_resource(Resource1),
|_: Res<Resource1>| {},
)
.chain(),
);
schedule.add_systems((|| {}, ApplyDeferred, || {}).chain().run_if(|| false));
schedule.run(&mut world);
// A sync point was inserted, since the explicit sync point is not always run.
assert_eq!(schedule.executable.systems.len(), 6);
}
#[test]
fn conditional_explicit_sync_point_not_used_as_auto_sync_point_condition_on_system_set() {
#[derive(SystemSet, Debug, Clone, PartialEq, Eq, Hash)]
struct Set;
let mut schedule = Schedule::default();
let mut world = World::default();
schedule.configure_sets(Set.run_if(|| false));
schedule.add_systems(
(
|mut commands: Commands| commands.insert_resource(Resource1),
|_: Res<Resource1>| {},
)
.chain(),
);
schedule.add_systems((|| {}, ApplyDeferred.in_set(Set), || {}).chain());
schedule.run(&mut world);
// A sync point was inserted, since the explicit sync point is not always run.
assert_eq!(schedule.executable.systems.len(), 6);
}
#[test]
fn conditional_explicit_sync_point_not_used_as_auto_sync_point_condition_on_nested_system_set()
{
#[derive(SystemSet, Debug, Clone, PartialEq, Eq, Hash)]
struct Set1;
#[derive(SystemSet, Debug, Clone, PartialEq, Eq, Hash)]
struct Set2;
let mut schedule = Schedule::default();
let mut world = World::default();
schedule.configure_sets(Set2.run_if(|| false));
schedule.configure_sets(Set1.in_set(Set2));
schedule.add_systems(
(
|mut commands: Commands| commands.insert_resource(Resource1),
|_: Res<Resource1>| {},
)
.chain(),
);
schedule.add_systems((|| {}, ApplyDeferred, || {}).chain().in_set(Set1));
schedule.run(&mut world);
// A sync point was inserted, since the explicit sync point is not always run.
assert_eq!(schedule.executable.systems.len(), 6);
}
#[test] #[test]
fn merges_sync_points_into_one() { fn merges_sync_points_into_one() {
let mut schedule = Schedule::default(); let mut schedule = Schedule::default();

View File

@ -1348,7 +1348,9 @@ mod tests {
// //
// first system will be configured as `run_if(|| false)`, so it can // first system will be configured as `run_if(|| false)`, so it can
// just panic if called // just panic if called
let first_system = move || panic!("first_system should not be run"); let first_system: fn() = move || {
panic!("first_system should not be run");
};
// The second system, we need to know when it has been called, so we'll // The second system, we need to know when it has been called, so we'll
// add a resource for tracking if it has been run. The system will // add a resource for tracking if it has been run. The system will

View File

@ -256,7 +256,7 @@ impl BlobArray {
new_capacity: NonZeroUsize, new_capacity: NonZeroUsize,
) { ) {
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
debug_assert_eq!(self.capacity, current_capacity.into()); debug_assert_eq!(self.capacity, current_capacity.get());
if !self.is_zst() { if !self.is_zst() {
// SAFETY: `new_capacity` can't overflow usize // SAFETY: `new_capacity` can't overflow usize
let new_layout = let new_layout =

View File

@ -722,10 +722,10 @@ mod tests {
assert_eq!(sets.len(), 0); assert_eq!(sets.len(), 0);
assert!(sets.is_empty()); assert!(sets.is_empty());
init_component::<TestComponent1>(&mut sets, 1); register_component::<TestComponent1>(&mut sets, 1);
assert_eq!(sets.len(), 1); assert_eq!(sets.len(), 1);
init_component::<TestComponent2>(&mut sets, 2); register_component::<TestComponent2>(&mut sets, 2);
assert_eq!(sets.len(), 2); assert_eq!(sets.len(), 2);
// check its shape by iter // check its shape by iter
@ -739,7 +739,7 @@ mod tests {
vec![(ComponentId::new(1), 0), (ComponentId::new(2), 0),] vec![(ComponentId::new(1), 0), (ComponentId::new(2), 0),]
); );
fn init_component<T: Component>(sets: &mut SparseSets, id: usize) { fn register_component<T: Component>(sets: &mut SparseSets, id: usize) {
let descriptor = ComponentDescriptor::new::<T>(); let descriptor = ComponentDescriptor::new::<T>();
let id = ComponentId::new(id); let id = ComponentId::new(id);
let info = ComponentInfo::new(id, descriptor); let info = ComponentInfo::new(id, descriptor);

View File

@ -87,7 +87,7 @@ impl<T> ThinArrayPtr<T> {
/// - The caller should update their saved `capacity` value to reflect the fact that it was changed /// - The caller should update their saved `capacity` value to reflect the fact that it was changed
pub unsafe fn realloc(&mut self, current_capacity: NonZeroUsize, new_capacity: NonZeroUsize) { pub unsafe fn realloc(&mut self, current_capacity: NonZeroUsize, new_capacity: NonZeroUsize) {
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
assert_eq!(self.capacity, current_capacity.into()); assert_eq!(self.capacity, current_capacity.get());
self.set_capacity(new_capacity.get()); self.set_capacity(new_capacity.get());
if size_of::<T>() != 0 { if size_of::<T>() != 0 {
let new_layout = let new_layout =

View File

@ -79,7 +79,11 @@ pub trait HandleError<Out = ()> {
} }
} }
impl<C: Command<Result<T, E>>, T, E: Into<Error>> HandleError<Result<T, E>> for C { impl<C, T, E> HandleError<Result<T, E>> for C
where
C: Command<Result<T, E>>,
E: Into<Error>,
{
fn handle_error_with(self, error_handler: fn(&mut World, Error)) -> impl Command { fn handle_error_with(self, error_handler: fn(&mut World, Error)) -> impl Command {
move |world: &mut World| match self.apply(world) { move |world: &mut World| match self.apply(world) {
Ok(_) => {} Ok(_) => {}
@ -88,7 +92,10 @@ impl<C: Command<Result<T, E>>, T, E: Into<Error>> HandleError<Result<T, E>> for
} }
} }
impl<C: Command> HandleError for C { impl<C> HandleError for C
where
C: Command,
{
#[inline] #[inline]
fn handle_error_with(self, _error_handler: fn(&mut World, Error)) -> impl Command { fn handle_error_with(self, _error_handler: fn(&mut World, Error)) -> impl Command {
self self

View File

@ -5,6 +5,7 @@
//! [`EntityCommands`](crate::system::EntityCommands). //! [`EntityCommands`](crate::system::EntityCommands).
use alloc::vec::Vec; use alloc::vec::Vec;
use core::fmt;
use log::info; use log::info;
use crate::{ use crate::{
@ -79,8 +80,7 @@ use bevy_ptr::OwningPtr;
/// } /// }
/// ``` /// ```
pub trait EntityCommand<Out = ()>: Send + 'static { pub trait EntityCommand<Out = ()>: Send + 'static {
/// Executes this command for the given [`Entity`] and /// Executes this command for the given [`Entity`].
/// returns a [`Result`] for error handling.
fn apply(self, entity: EntityWorldMut) -> Out; fn apply(self, entity: EntityWorldMut) -> Out;
} }
/// Passes in a specific entity to an [`EntityCommand`], resulting in a [`Command`] that /// Passes in a specific entity to an [`EntityCommand`], resulting in a [`Command`] that
@ -96,7 +96,10 @@ pub trait CommandWithEntity<Out> {
fn with_entity(self, entity: Entity) -> impl Command<Out> + HandleError<Out>; fn with_entity(self, entity: Entity) -> impl Command<Out> + HandleError<Out>;
} }
impl<C: EntityCommand> CommandWithEntity<Result<(), EntityMutableFetchError>> for C { impl<C> CommandWithEntity<Result<(), EntityMutableFetchError>> for C
where
C: EntityCommand,
{
fn with_entity( fn with_entity(
self, self,
entity: Entity, entity: Entity,
@ -110,11 +113,10 @@ impl<C: EntityCommand> CommandWithEntity<Result<(), EntityMutableFetchError>> fo
} }
} }
impl< impl<C, T, Err> CommandWithEntity<Result<T, EntityCommandError<Err>>> for C
where
C: EntityCommand<Result<T, Err>>, C: EntityCommand<Result<T, Err>>,
T, Err: fmt::Debug + fmt::Display + Send + Sync + 'static,
Err: core::fmt::Debug + core::fmt::Display + Send + Sync + 'static,
> CommandWithEntity<Result<T, EntityCommandError<Err>>> for C
{ {
fn with_entity( fn with_entity(
self, self,
@ -245,8 +247,9 @@ pub fn retain<T: Bundle>() -> impl EntityCommand {
/// ///
/// # Note /// # Note
/// ///
/// This will also despawn any [`Children`](crate::hierarchy::Children) entities, and any other [`RelationshipTarget`](crate::relationship::RelationshipTarget) that is configured /// This will also despawn any [`Children`](crate::hierarchy::Children) entities,
/// to despawn descendants. This results in "recursive despawn" behavior. /// and any other [`RelationshipTarget`](crate::relationship::RelationshipTarget) that is configured to despawn descendants.
/// This results in "recursive despawn" behavior.
#[track_caller] #[track_caller]
pub fn despawn() -> impl EntityCommand { pub fn despawn() -> impl EntityCommand {
let caller = MaybeLocation::caller(); let caller = MaybeLocation::caller();

View File

@ -82,21 +82,25 @@ use crate::{
/// // NOTE: type inference fails here, so annotations are required on the closure. /// // NOTE: type inference fails here, so annotations are required on the closure.
/// commands.queue(|w: &mut World| { /// commands.queue(|w: &mut World| {
/// // Mutate the world however you want... /// // Mutate the world however you want...
/// # todo!();
/// }); /// });
/// # } /// # }
/// ``` /// ```
/// ///
/// # Error handling /// # Error handling
/// ///
/// Commands can return a [`Result`](crate::result::Result), which can be passed to /// A [`Command`] can return a [`Result`](crate::result::Result),
/// an error handler. Error handlers are functions/closures of the form /// which will be passed to an error handler if the `Result` is an error.
/// `fn(&mut World, CommandError)`.
/// ///
/// The default error handler panics. It can be configured by enabling the `configurable_error_handler` /// Error handlers are functions/closures of the form `fn(&mut World, Error)`.
/// cargo feature, then setting the `GLOBAL_ERROR_HANDLER`. /// They are granted exclusive access to the [`World`], which enables them to
/// respond to the error in whatever way is necessary.
/// ///
/// Alternatively, you can customize the error handler for a specific command by calling [`Commands::queue_handled`]. /// The [default error handler](error_handler::default) panics.
/// It can be configured by enabling the `configurable_error_handler` cargo feature,
/// then setting the `GLOBAL_ERROR_HANDLER`.
///
/// Alternatively, you can customize the error handler for a specific command
/// by calling [`Commands::queue_handled`].
/// ///
/// The [`error_handler`] module provides some simple error handlers for convenience. /// The [`error_handler`] module provides some simple error handlers for convenience.
/// ///
@ -546,7 +550,8 @@ impl<'w, 's> Commands<'w, 's> {
/// Pushes a generic [`Command`] to the command queue. /// Pushes a generic [`Command`] to the command queue.
/// ///
/// If the [`Command`] returns a [`Result`], it will be handled using the [default error handler](error_handler::default). /// If the [`Command`] returns a [`Result`],
/// it will be handled using the [default error handler](error_handler::default).
/// ///
/// To use a custom error handler, see [`Commands::queue_handled`]. /// To use a custom error handler, see [`Commands::queue_handled`].
/// ///
@ -589,8 +594,11 @@ impl<'w, 's> Commands<'w, 's> {
pub fn queue<C: Command<T> + HandleError<T>, T>(&mut self, command: C) { pub fn queue<C: Command<T> + HandleError<T>, T>(&mut self, command: C) {
self.queue_internal(command.handle_error()); self.queue_internal(command.handle_error());
} }
/// Pushes a generic [`Command`] to the command queue. If the command returns a [`Result`] the given
/// `error_handler` will be used to handle error cases. /// Pushes a generic [`Command`] to the command queue.
///
/// If the [`Command`] returns a [`Result`],
/// the given `error_handler` will be used to handle error cases.
/// ///
/// To implicitly use the default error handler, see [`Commands::queue`]. /// To implicitly use the default error handler, see [`Commands::queue`].
/// ///
@ -1137,7 +1145,7 @@ impl<'w, 's> Commands<'w, 's> {
/// Most [`Commands`] (and thereby [`EntityCommands`]) are deferred: when you call the command, /// Most [`Commands`] (and thereby [`EntityCommands`]) are deferred: when you call the command,
/// if it requires mutable access to the [`World`] (that is, if it removes, adds, or changes something), /// if it requires mutable access to the [`World`] (that is, if it removes, adds, or changes something),
/// it's not executed immediately. Instead, the command is added to a "command queue." /// it's not executed immediately. Instead, the command is added to a "command queue."
/// The command queue is applied between [`Schedules`](bevy_ecs::schedule::Schedule), one by one, /// The command queue is applied between [`Schedules`](crate::schedule::Schedule), one by one,
/// so that each command can have exclusive access to the World. /// so that each command can have exclusive access to the World.
/// ///
/// # Fallible /// # Fallible
@ -1148,14 +1156,19 @@ impl<'w, 's> Commands<'w, 's> {
/// ///
/// # Error handling /// # Error handling
/// ///
/// [`EntityCommands`] can return a [`Result`](crate::result::Result), which can be passed to /// An [`EntityCommand`] can return a [`Result`](crate::result::Result),
/// an error handler. Error handlers are functions/closures of the form /// which will be passed to an error handler if the `Result` is an error.
/// `fn(&mut World, CommandError)`.
/// ///
/// The default error handler panics. It can be configured by enabling the `configurable_error_handler` /// Error handlers are functions/closures of the form `fn(&mut World, Error)`.
/// cargo feature, then setting the `GLOBAL_ERROR_HANDLER`. /// They are granted exclusive access to the [`World`], which enables them to
/// respond to the error in whatever way is necessary.
/// ///
/// Alternatively, you can customize the error handler for a specific command by calling [`EntityCommands::queue_handled`]. /// The [default error handler](error_handler::default) panics.
/// It can be configured by enabling the `configurable_error_handler` cargo feature,
/// then setting the `GLOBAL_ERROR_HANDLER`.
///
/// Alternatively, you can customize the error handler for a specific command
/// by calling [`EntityCommands::queue_handled`].
/// ///
/// The [`error_handler`] module provides some simple error handlers for convenience. /// The [`error_handler`] module provides some simple error handlers for convenience.
pub struct EntityCommands<'a> { pub struct EntityCommands<'a> {
@ -1754,7 +1767,8 @@ impl<'a> EntityCommands<'a> {
/// Pushes an [`EntityCommand`] to the queue, which will get executed for the current [`Entity`]. /// Pushes an [`EntityCommand`] to the queue, which will get executed for the current [`Entity`].
/// ///
/// If the [`EntityCommand`] returns a [`Result`], it will be handled using the [default error handler](error_handler::default). /// If the [`EntityCommand`] returns a [`Result`],
/// it will be handled using the [default error handler](error_handler::default).
/// ///
/// To use a custom error handler, see [`EntityCommands::queue_handled`]. /// To use a custom error handler, see [`EntityCommands::queue_handled`].
/// ///
@ -1788,7 +1802,9 @@ impl<'a> EntityCommands<'a> {
} }
/// Pushes an [`EntityCommand`] to the queue, which will get executed for the current [`Entity`]. /// Pushes an [`EntityCommand`] to the queue, which will get executed for the current [`Entity`].
/// If the command returns a [`Result`] the given `error_handler` will be used to handle error cases. ///
/// If the [`EntityCommand`] returns a [`Result`],
/// the given `error_handler` will be used to handle error cases.
/// ///
/// To implicitly use the default error handler, see [`EntityCommands::queue`]. /// To implicitly use the default error handler, see [`EntityCommands::queue`].
/// ///

View File

@ -1648,7 +1648,10 @@ mod tests {
#[should_panic] #[should_panic]
fn panic_inside_system() { fn panic_inside_system() {
let mut world = World::new(); let mut world = World::new();
run_system(&mut world, || panic!("this system panics")); let system: fn() = || {
panic!("this system panics");
};
run_system(&mut world, system);
} }
#[test] #[test]

View File

@ -685,7 +685,7 @@ unsafe impl<'w, 's, D: ReadOnlyQueryData + 'static, F: QueryFilter + 'static> Re
/// // ... /// // ...
/// # let _event = event; /// # let _event = event;
/// } /// }
/// set.p1().send(MyEvent::new()); /// set.p1().write(MyEvent::new());
/// ///
/// let entities = set.p2().entities(); /// let entities = set.p2().entities();
/// // ... /// // ...

View File

@ -674,7 +674,7 @@ impl<'w> DeferredWorld<'w> {
&mut self, &mut self,
event: ComponentId, event: ComponentId,
mut target: Entity, mut target: Entity,
components: &[ComponentId], components: impl Iterator<Item = ComponentId> + Clone,
data: &mut E, data: &mut E,
mut propagate: bool, mut propagate: bool,
caller: MaybeLocation, caller: MaybeLocation,
@ -686,7 +686,7 @@ impl<'w> DeferredWorld<'w> {
self.reborrow(), self.reborrow(),
event, event,
target, target,
components.iter().copied(), components.clone(),
data, data,
&mut propagate, &mut propagate,
caller, caller,

View File

@ -2653,34 +2653,29 @@ unsafe fn trigger_on_replace_and_on_remove_hooks_and_observers(
bundle_info: &BundleInfo, bundle_info: &BundleInfo,
caller: MaybeLocation, caller: MaybeLocation,
) { ) {
let bundle_components_in_archetype = || {
bundle_info
.iter_explicit_components()
.filter(|component_id| archetype.contains(*component_id))
};
if archetype.has_replace_observer() { if archetype.has_replace_observer() {
deferred_world.trigger_observers( deferred_world.trigger_observers(
ON_REPLACE, ON_REPLACE,
entity, entity,
bundle_info.iter_explicit_components(), bundle_components_in_archetype(),
caller, caller,
); );
} }
deferred_world.trigger_on_replace( deferred_world.trigger_on_replace(archetype, entity, bundle_components_in_archetype(), caller);
archetype,
entity,
bundle_info.iter_explicit_components(),
caller,
);
if archetype.has_remove_observer() { if archetype.has_remove_observer() {
deferred_world.trigger_observers( deferred_world.trigger_observers(
ON_REMOVE, ON_REMOVE,
entity, entity,
bundle_info.iter_explicit_components(), bundle_components_in_archetype(),
caller, caller,
); );
} }
deferred_world.trigger_on_remove( deferred_world.trigger_on_remove(archetype, entity, bundle_components_in_archetype(), caller);
archetype,
entity,
bundle_info.iter_explicit_components(),
caller,
);
} }
/// A view into a single entity and component in a world, which may either be vacant or occupied. /// A view into a single entity and component in a world, which may either be vacant or occupied.
@ -5900,4 +5895,42 @@ mod tests {
assert_eq!(archetype_pointer_before, archetype_pointer_after); assert_eq!(archetype_pointer_before, archetype_pointer_after);
} }
#[test]
fn bundle_remove_only_triggers_for_present_components() {
let mut world = World::default();
#[derive(Component)]
struct A;
#[derive(Component)]
struct B;
#[derive(Resource, PartialEq, Eq, Debug)]
struct Tracker {
a: bool,
b: bool,
}
world.insert_resource(Tracker { a: false, b: false });
let entity = world.spawn(A).id();
world.add_observer(|_: Trigger<OnRemove, A>, mut tracker: ResMut<Tracker>| {
tracker.a = true;
});
world.add_observer(|_: Trigger<OnRemove, B>, mut tracker: ResMut<Tracker>| {
tracker.b = true;
});
world.entity_mut(entity).remove::<(A, B)>();
assert_eq!(
world.resource::<Tracker>(),
&Tracker {
a: true,
// The entity didn't have a B component, so it should not have been triggered.
b: false,
}
);
}
} }

View File

@ -247,6 +247,10 @@ impl World {
} }
/// Registers a new [`Component`] type and returns the [`ComponentId`] created for it. /// Registers a new [`Component`] type and returns the [`ComponentId`] created for it.
///
/// # Usage Notes
/// In most cases, you don't need to call this method directly since component registration
/// happens automatically during system initialization.
pub fn register_component<T: Component>(&mut self) -> ComponentId { pub fn register_component<T: Component>(&mut self) -> ComponentId {
self.components.register_component::<T>() self.components.register_component::<T>()
} }

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_encase_derive" name = "bevy_encase_derive"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Bevy derive macro for encase" description = "Bevy derive macro for encase"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_gilrs" name = "bevy_gilrs"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Gamepad system made using Gilrs for Bevy Engine" description = "Gamepad system made using Gilrs for Bevy Engine"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"

View File

@ -26,7 +26,7 @@ pub fn gilrs_event_startup_system(
gamepads.id_to_entity.insert(id, entity); gamepads.id_to_entity.insert(id, entity);
gamepads.entity_to_id.insert(entity, id); gamepads.entity_to_id.insert(entity, id);
events.send(GamepadConnectionEvent { events.write(GamepadConnectionEvent {
gamepad: entity, gamepad: entity,
connection: GamepadConnection::Connected { connection: GamepadConnection::Connected {
name: gamepad.name().to_string(), name: gamepad.name().to_string(),
@ -69,8 +69,8 @@ pub fn gilrs_event_system(
}, },
); );
events.send(event.clone().into()); events.write(event.clone().into());
connection_events.send(event); connection_events.write(event);
} }
EventType::Disconnected => { EventType::Disconnected => {
let gamepad = gamepads let gamepad = gamepads
@ -79,8 +79,8 @@ pub fn gilrs_event_system(
.copied() .copied()
.expect("mapping should exist from connection"); .expect("mapping should exist from connection");
let event = GamepadConnectionEvent::new(gamepad, GamepadConnection::Disconnected); let event = GamepadConnectionEvent::new(gamepad, GamepadConnection::Disconnected);
events.send(event.clone().into()); events.write(event.clone().into());
connection_events.send(event); connection_events.write(event);
} }
EventType::ButtonChanged(gilrs_button, raw_value, _) => { EventType::ButtonChanged(gilrs_button, raw_value, _) => {
let Some(button) = convert_button(gilrs_button) else { let Some(button) = convert_button(gilrs_button) else {
@ -91,8 +91,8 @@ pub fn gilrs_event_system(
.get(&gilrs_event.id) .get(&gilrs_event.id)
.copied() .copied()
.expect("mapping should exist from connection"); .expect("mapping should exist from connection");
events.send(RawGamepadButtonChangedEvent::new(gamepad, button, raw_value).into()); events.write(RawGamepadButtonChangedEvent::new(gamepad, button, raw_value).into());
button_events.send(RawGamepadButtonChangedEvent::new( button_events.write(RawGamepadButtonChangedEvent::new(
gamepad, button, raw_value, gamepad, button, raw_value,
)); ));
} }
@ -105,8 +105,8 @@ pub fn gilrs_event_system(
.get(&gilrs_event.id) .get(&gilrs_event.id)
.copied() .copied()
.expect("mapping should exist from connection"); .expect("mapping should exist from connection");
events.send(RawGamepadAxisChangedEvent::new(gamepad, axis, raw_value).into()); events.write(RawGamepadAxisChangedEvent::new(gamepad, axis, raw_value).into());
axis_event.send(RawGamepadAxisChangedEvent::new(gamepad, axis, raw_value)); axis_event.write(RawGamepadAxisChangedEvent::new(gamepad, axis, raw_value));
} }
_ => (), _ => (),
}; };

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_gizmos" name = "bevy_gizmos"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Provides gizmos for Bevy Engine" description = "Provides gizmos for Bevy Engine"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_gizmos_macros" name = "bevy_gizmos_macros"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Derive implementations for bevy_gizmos" description = "Derive implementations for bevy_gizmos"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"

View File

@ -820,8 +820,7 @@ where
let polymorphic_color: Color = color.into(); let polymorphic_color: Color = color.into();
let linear_color = LinearRgba::from(polymorphic_color); let linear_color = LinearRgba::from(polymorphic_color);
self.list_colors self.list_colors.extend(iter::repeat_n(linear_color, count));
.extend(iter::repeat(linear_color).take(count));
} }
#[inline] #[inline]

View File

@ -187,9 +187,8 @@ where
/// ///
/// - `isometry` defines the translation and rotation of the grid. /// - `isometry` defines the translation and rotation of the grid.
/// - the translation specifies the center of the grid /// - the translation specifies the center of the grid
/// - defines the orientation of the grid, by default /// - defines the orientation of the grid, by default we assume the grid is contained in a
/// we assume the grid is contained in a plane parallel /// plane parallel to the XY plane
/// to the XY plane
/// - `cell_count`: defines the amount of cells in the x and y axes /// - `cell_count`: defines the amount of cells in the x and y axes
/// - `spacing`: defines the distance between cells along the x and y axes /// - `spacing`: defines the distance between cells along the x and y axes
/// - `color`: color of the grid /// - `color`: color of the grid
@ -242,8 +241,7 @@ where
/// ///
/// - `isometry` defines the translation and rotation of the grid. /// - `isometry` defines the translation and rotation of the grid.
/// - the translation specifies the center of the grid /// - the translation specifies the center of the grid
/// - defines the orientation of the grid, by default /// - defines the orientation of the grid, by default we assume the grid is aligned with all axes
/// we assume the grid is aligned with all axes
/// - `cell_count`: defines the amount of cells in the x, y and z axes /// - `cell_count`: defines the amount of cells in the x, y and z axes
/// - `spacing`: defines the distance between cells along the x, y and z axes /// - `spacing`: defines the distance between cells along the x, y and z axes
/// - `color`: color of the grid /// - `color`: color of the grid
@ -296,8 +294,7 @@ where
/// ///
/// - `isometry` defines the translation and rotation of the grid. /// - `isometry` defines the translation and rotation of the grid.
/// - the translation specifies the center of the grid /// - the translation specifies the center of the grid
/// - defines the orientation of the grid, by default /// - defines the orientation of the grid, by default we assume the grid is aligned with all axes
/// we assume the grid is aligned with all axes
/// - `cell_count`: defines the amount of cells in the x and y axes /// - `cell_count`: defines the amount of cells in the x and y axes
/// - `spacing`: defines the distance between cells along the x and y axes /// - `spacing`: defines the distance between cells along the x and y axes
/// - `color`: color of the grid /// - `color`: color of the grid

View File

@ -7,6 +7,7 @@ use crate::{
use bevy_app::{App, Plugin}; use bevy_app::{App, Plugin};
use bevy_core_pipeline::{ use bevy_core_pipeline::{
core_3d::{Transparent3d, CORE_3D_DEPTH_FORMAT}, core_3d::{Transparent3d, CORE_3D_DEPTH_FORMAT},
oit::OrderIndependentTransparencySettings,
prepass::{DeferredPrepass, DepthPrepass, MotionVectorPrepass, NormalPrepass}, prepass::{DeferredPrepass, DepthPrepass, MotionVectorPrepass, NormalPrepass},
}; };
@ -301,6 +302,7 @@ fn queue_line_gizmos_3d(
Has<DepthPrepass>, Has<DepthPrepass>,
Has<MotionVectorPrepass>, Has<MotionVectorPrepass>,
Has<DeferredPrepass>, Has<DeferredPrepass>,
Has<OrderIndependentTransparencySettings>,
), ),
)>, )>,
) { ) {
@ -314,7 +316,7 @@ fn queue_line_gizmos_3d(
view, view,
msaa, msaa,
render_layers, render_layers,
(normal_prepass, depth_prepass, motion_vector_prepass, deferred_prepass), (normal_prepass, depth_prepass, motion_vector_prepass, deferred_prepass, oit),
) in &views ) in &views
{ {
let Some(transparent_phase) = transparent_render_phases.get_mut(&view.retained_view_entity) let Some(transparent_phase) = transparent_render_phases.get_mut(&view.retained_view_entity)
@ -343,6 +345,10 @@ fn queue_line_gizmos_3d(
view_key |= MeshPipelineKey::DEFERRED_PREPASS; view_key |= MeshPipelineKey::DEFERRED_PREPASS;
} }
if oit {
view_key |= MeshPipelineKey::OIT_ENABLED;
}
for (entity, main_entity, config) in &line_gizmos { for (entity, main_entity, config) in &line_gizmos {
if !config.render_layers.intersects(render_layers) { if !config.render_layers.intersects(render_layers) {
continue; continue;

View File

@ -239,9 +239,8 @@ where
/// ///
/// - `isometry` defines the translation and rotation of the rectangle. /// - `isometry` defines the translation and rotation of the rectangle.
/// - the translation specifies the center of the rectangle /// - the translation specifies the center of the rectangle
/// - defines orientation of the rectangle, by default we /// - defines orientation of the rectangle, by default we assume the rectangle is contained in
/// assume the rectangle is contained in a plane parallel /// a plane parallel to the XY plane.
/// to the XY plane.
/// - `size`: defines the size of the rectangle. This refers to the 'outer size', similar to a bounding box. /// - `size`: defines the size of the rectangle. This refers to the 'outer size', similar to a bounding box.
/// - `color`: color of the rectangle /// - `color`: color of the rectangle
/// ///
@ -294,8 +293,7 @@ where
/// ///
/// - `isometry` defines the translation and rotation of the rectangle. /// - `isometry` defines the translation and rotation of the rectangle.
/// - the translation specifies the center of the rectangle /// - the translation specifies the center of the rectangle
/// - defines orientation of the rectangle, by default we /// - defines orientation of the rectangle, by default we assume the rectangle aligned with all axes.
/// assume the rectangle aligned with all axes.
/// - `size`: defines the size of the rectangle. This refers to the 'outer size', similar to a bounding box. /// - `size`: defines the size of the rectangle. This refers to the 'outer size', similar to a bounding box.
/// - `color`: color of the rectangle /// - `color`: color of the rectangle
/// ///
@ -352,8 +350,7 @@ where
/// ///
/// - `isometry` defines the translation and rotation of the cuboid. /// - `isometry` defines the translation and rotation of the cuboid.
/// - the translation specifies the center of the cuboid /// - the translation specifies the center of the cuboid
/// - defines orientation of the cuboid, by default we /// - defines orientation of the cuboid, by default we assume the cuboid aligned with all axes.
/// assume the cuboid aligned with all axes.
/// - `size`: defines the size of the cuboid. This refers to the 'outer size', similar to a bounding box. /// - `size`: defines the size of the cuboid. This refers to the 'outer size', similar to a bounding box.
/// - `color`: color of the cuboid /// - `color`: color of the cuboid
/// ///

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_gltf" name = "bevy_gltf"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Bevy Engine GLTF loading" description = "Bevy Engine GLTF loading"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"

View File

@ -517,10 +517,12 @@ async fn load_gltf<'a, 'b, 'c>(
); );
} }
} }
let handle = load_context.add_labeled_asset( let handle = load_context
.add_labeled_asset(
GltfAssetLabel::Animation(animation.index()).to_string(), GltfAssetLabel::Animation(animation.index()).to_string(),
animation_clip, animation_clip,
); )
.expect("animation indices are unique, so the label is unique");
if let Some(name) = animation.name() { if let Some(name) = animation.name() {
named_animations.insert(name.into(), handle.clone()); named_animations.insert(name.into(), handle.clone());
} }
@ -540,9 +542,9 @@ async fn load_gltf<'a, 'b, 'c>(
texture: ImageOrPath, texture: ImageOrPath,
) { ) {
let handle = match texture { let handle = match texture {
ImageOrPath::Image { label, image } => { ImageOrPath::Image { label, image } => load_context
load_context.add_labeled_asset(label.to_string(), image) .add_labeled_asset(label.to_string(), image)
} .expect("texture indices are unique, so the label is unique"),
ImageOrPath::Path { ImageOrPath::Path {
path, path,
is_srgb, is_srgb,
@ -696,7 +698,8 @@ async fn load_gltf<'a, 'b, 'c>(
RenderAssetUsages::default(), RenderAssetUsages::default(),
)?; )?;
let handle = load_context let handle = load_context
.add_labeled_asset(morph_targets_label.to_string(), morph_target_image.0); .add_labeled_asset(morph_targets_label.to_string(), morph_target_image.0)
.expect("morph target indices are unique, so the label is unique");
mesh.set_morph_targets(handle); mesh.set_morph_targets(handle);
let extras = gltf_mesh.extras().as_ref(); let extras = gltf_mesh.extras().as_ref();
@ -749,7 +752,9 @@ async fn load_gltf<'a, 'b, 'c>(
}); });
} }
let mesh_handle = load_context.add_labeled_asset(primitive_label.to_string(), mesh); let mesh_handle = load_context
.add_labeled_asset(primitive_label.to_string(), mesh)
.expect("primitive indices are unique, so the label is unique");
primitives.push(super::GltfPrimitive::new( primitives.push(super::GltfPrimitive::new(
&gltf_mesh, &gltf_mesh,
&primitive, &primitive,
@ -766,7 +771,9 @@ async fn load_gltf<'a, 'b, 'c>(
let mesh = let mesh =
super::GltfMesh::new(&gltf_mesh, primitives, get_gltf_extras(gltf_mesh.extras())); super::GltfMesh::new(&gltf_mesh, primitives, get_gltf_extras(gltf_mesh.extras()));
let handle = load_context.add_labeled_asset(mesh.asset_label().to_string(), mesh); let handle = load_context
.add_labeled_asset(mesh.asset_label().to_string(), mesh)
.expect("mesh indices are unique, so the label is unique");
if let Some(name) = gltf_mesh.name() { if let Some(name) = gltf_mesh.name() {
named_meshes.insert(name.into(), handle.clone()); named_meshes.insert(name.into(), handle.clone());
} }
@ -783,16 +790,18 @@ async fn load_gltf<'a, 'b, 'c>(
.map(|mat| Mat4::from_cols_array_2d(&mat)) .map(|mat| Mat4::from_cols_array_2d(&mat))
.collect(); .collect();
load_context.add_labeled_asset( load_context
.add_labeled_asset(
inverse_bind_matrices_label(&gltf_skin), inverse_bind_matrices_label(&gltf_skin),
SkinnedMeshInverseBindposes::from(local_to_bone_bind_matrices), SkinnedMeshInverseBindposes::from(local_to_bone_bind_matrices),
) )
.expect("inverse bind matrix indices are unique, so the label is unique")
}) })
.collect(); .collect();
let mut nodes = HashMap::<usize, Handle<GltfNode>>::default(); let mut nodes = HashMap::<usize, Handle<GltfNode>>::default();
let mut named_nodes = <HashMap<_, _>>::default(); let mut named_nodes = <HashMap<_, _>>::default();
let mut skins = vec![]; let mut skins = <HashMap<_, _>>::default();
let mut named_skins = <HashMap<_, _>>::default(); let mut named_skins = <HashMap<_, _>>::default();
// First, create the node handles. // First, create the node handles.
@ -808,6 +817,9 @@ async fn load_gltf<'a, 'b, 'c>(
// Now populate the nodes. // Now populate the nodes.
for node in gltf.nodes() { for node in gltf.nodes() {
let skin = node.skin().map(|skin| { let skin = node.skin().map(|skin| {
skins
.entry(skin.index())
.or_insert_with(|| {
let joints: Vec<_> = skin let joints: Vec<_> = skin
.joints() .joints()
.map(|joint| nodes.get(&joint.index()).unwrap().clone()) .map(|joint| nodes.get(&joint.index()).unwrap().clone())
@ -831,14 +843,17 @@ async fn load_gltf<'a, 'b, 'c>(
get_gltf_extras(skin.extras()), get_gltf_extras(skin.extras()),
); );
let handle = load_context.add_labeled_asset(skin_label(&skin), gltf_skin); let handle = load_context
.add_labeled_asset(skin_label(&skin), gltf_skin)
.expect("skin indices are unique, so the label is unique");
skins.push(handle.clone());
if let Some(name) = skin.name() { if let Some(name) = skin.name() {
named_skins.insert(name.into(), handle.clone()); named_skins.insert(name.into(), handle.clone());
} }
handle handle
})
.clone()
}); });
let children = node let children = node
@ -863,7 +878,9 @@ async fn load_gltf<'a, 'b, 'c>(
#[cfg(feature = "bevy_animation")] #[cfg(feature = "bevy_animation")]
let gltf_node = gltf_node.with_animation_root(animation_roots.contains(&node.index())); let gltf_node = gltf_node.with_animation_root(animation_roots.contains(&node.index()));
let handle = load_context.add_labeled_asset(gltf_node.asset_label().to_string(), gltf_node); let handle = load_context
.add_labeled_asset(gltf_node.asset_label().to_string(), gltf_node)
.expect("node indices are unique, so the label is unique");
nodes.insert(node.index(), handle.clone()); nodes.insert(node.index(), handle.clone());
if let Some(name) = node.name() { if let Some(name) = node.name() {
named_nodes.insert(name.into(), handle); named_nodes.insert(name.into(), handle);
@ -952,7 +969,9 @@ async fn load_gltf<'a, 'b, 'c>(
}); });
} }
let loaded_scene = scene_load_context.finish(Scene::new(world)); let loaded_scene = scene_load_context.finish(Scene::new(world));
let scene_handle = load_context.add_loaded_labeled_asset(scene_label(&scene), loaded_scene); let scene_handle = load_context
.add_loaded_labeled_asset(scene_label(&scene), loaded_scene)
.expect("scene indices are unique, so the label is unique");
if let Some(name) = scene.name() { if let Some(name) = scene.name() {
named_scenes.insert(name.into(), scene_handle.clone()); named_scenes.insert(name.into(), scene_handle.clone());
@ -969,7 +988,7 @@ async fn load_gltf<'a, 'b, 'c>(
named_scenes, named_scenes,
meshes, meshes,
named_meshes, named_meshes,
skins, skins: skins.into_values().collect(),
named_skins, named_skins,
materials, materials,
named_materials, named_materials,
@ -1119,7 +1138,8 @@ fn load_material(
is_scale_inverted: bool, is_scale_inverted: bool,
) -> Handle<StandardMaterial> { ) -> Handle<StandardMaterial> {
let material_label = material_label(material, is_scale_inverted); let material_label = material_label(material, is_scale_inverted);
load_context.labeled_asset_scope(material_label, |load_context| { load_context
.labeled_asset_scope(material_label, |load_context| {
let pbr = material.pbr_metallic_roughness(); let pbr = material.pbr_metallic_roughness();
// TODO: handle missing label handle errors here? // TODO: handle missing label handle errors here?
@ -1185,8 +1205,11 @@ fn load_material(
}); });
#[cfg(feature = "pbr_transmission_textures")] #[cfg(feature = "pbr_transmission_textures")]
let (specular_transmission, specular_transmission_channel, specular_transmission_texture) = let (
material specular_transmission,
specular_transmission_channel,
specular_transmission_texture,
) = material
.transmission() .transmission()
.map_or((0.0, UvChannel::Uv0, None), |transmission| { .map_or((0.0, UvChannel::Uv0, None), |transmission| {
let specular_transmission_channel = transmission let specular_transmission_channel = transmission
@ -1316,8 +1339,9 @@ fn load_material(
alpha_mode: alpha_mode(material), alpha_mode: alpha_mode(material),
uv_transform, uv_transform,
clearcoat: clearcoat.clearcoat_factor.unwrap_or_default() as f32, clearcoat: clearcoat.clearcoat_factor.unwrap_or_default() as f32,
clearcoat_perceptual_roughness: clearcoat.clearcoat_roughness_factor.unwrap_or_default() clearcoat_perceptual_roughness: clearcoat
as f32, .clearcoat_roughness_factor
.unwrap_or_default() as f32,
#[cfg(feature = "pbr_multi_layer_material_textures")] #[cfg(feature = "pbr_multi_layer_material_textures")]
clearcoat_channel: clearcoat.clearcoat_channel, clearcoat_channel: clearcoat.clearcoat_channel,
#[cfg(feature = "pbr_multi_layer_material_textures")] #[cfg(feature = "pbr_multi_layer_material_textures")]
@ -1344,7 +1368,9 @@ fn load_material(
#[cfg(feature = "pbr_specular_textures")] #[cfg(feature = "pbr_specular_textures")]
specular_texture: specular.specular_texture, specular_texture: specular.specular_texture,
specular_tint: match specular.specular_color_factor { specular_tint: match specular.specular_color_factor {
Some(color) => Color::linear_rgb(color[0] as f32, color[1] as f32, color[2] as f32), Some(color) => {
Color::linear_rgb(color[0] as f32, color[1] as f32, color[2] as f32)
}
None => Color::WHITE, None => Color::WHITE,
}, },
#[cfg(feature = "pbr_specular_textures")] #[cfg(feature = "pbr_specular_textures")]
@ -1354,6 +1380,7 @@ fn load_material(
..Default::default() ..Default::default()
} }
}) })
.expect("material indices are unique, so the label is unique")
} }
fn get_uv_channel(material: &Material, texture_kind: &str, tex_coord: u32) -> UvChannel { fn get_uv_channel(material: &Material, texture_kind: &str, tex_coord: u32) -> UvChannel {

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_image" name = "bevy_image"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Provides image types for Bevy Engine" description = "Provides image types for Bevy Engine"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"

View File

@ -8,7 +8,7 @@ use wgpu_types::{
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
use {bevy_utils::once, tracing::warn}; use {bevy_utils::once, tracing::warn};
use super::{CompressedImageFormats, Image, TextureError}; use super::{CompressedImageFormats, Image, TextureError, TranscodeFormat};
#[cfg(feature = "dds")] #[cfg(feature = "dds")]
pub fn dds_buffer_to_image( pub fn dds_buffer_to_image(
@ -20,7 +20,18 @@ pub fn dds_buffer_to_image(
let mut cursor = Cursor::new(buffer); let mut cursor = Cursor::new(buffer);
let dds = Dds::read(&mut cursor) let dds = Dds::read(&mut cursor)
.map_err(|error| TextureError::InvalidData(format!("Failed to parse DDS file: {error}")))?; .map_err(|error| TextureError::InvalidData(format!("Failed to parse DDS file: {error}")))?;
let texture_format = dds_format_to_texture_format(&dds, is_srgb)?; let (texture_format, transcode_format) = match dds_format_to_texture_format(&dds, is_srgb) {
Ok(format) => (format, None),
Err(TextureError::FormatRequiresTranscodingError(TranscodeFormat::Rgb8)) => {
let format = if is_srgb {
TextureFormat::Bgra8UnormSrgb
} else {
TextureFormat::Bgra8Unorm
};
(format, Some(TranscodeFormat::Rgb8))
}
Err(error) => return Err(error),
};
if !supported_compressed_formats.supports(texture_format) { if !supported_compressed_formats.supports(texture_format) {
return Err(TextureError::UnsupportedTextureFormat(format!( return Err(TextureError::UnsupportedTextureFormat(format!(
"Format not supported by this GPU: {texture_format:?}", "Format not supported by this GPU: {texture_format:?}",
@ -66,10 +77,14 @@ pub fn dds_buffer_to_image(
image.texture_descriptor.format = texture_format; image.texture_descriptor.format = texture_format;
image.texture_descriptor.dimension = if dds.get_depth() > 1 { image.texture_descriptor.dimension = if dds.get_depth() > 1 {
TextureDimension::D3 TextureDimension::D3
} else if image.is_compressed() || dds.get_height() > 1 { // 1x1 textures should generally be interpreted as solid 2D
TextureDimension::D2 } else if ((dds.get_width() > 1 || dds.get_height() > 1)
} else { && !(dds.get_width() > 1 && dds.get_height() > 1))
&& !image.is_compressed()
{
TextureDimension::D1 TextureDimension::D1
} else {
TextureDimension::D2
}; };
if is_cubemap { if is_cubemap {
let dimension = if image.texture_descriptor.size.depth_or_array_layers > 6 { let dimension = if image.texture_descriptor.size.depth_or_array_layers > 6 {
@ -82,7 +97,29 @@ pub fn dds_buffer_to_image(
..Default::default() ..Default::default()
}); });
} }
image.data = Some(dds.data);
// DDS mipmap layout is directly compatible with wgpu's layout (Slice -> Face -> Mip):
// https://learn.microsoft.com/fr-fr/windows/win32/direct3ddds/dx-graphics-dds-reference
image.data = if let Some(transcode_format) = transcode_format {
match transcode_format {
TranscodeFormat::Rgb8 => {
let data = dds
.data
.chunks_exact(3)
.flat_map(|pixel| [pixel[0], pixel[1], pixel[2], u8::MAX])
.collect();
Some(data)
}
_ => {
return Err(TextureError::TranscodeError(format!(
"unsupported transcode from {transcode_format:?} to {texture_format:?}"
)))
}
}
} else {
Some(dds.data)
};
Ok(image) Ok(image)
} }
@ -108,6 +145,9 @@ pub fn dds_format_to_texture_format(
TextureFormat::Bgra8Unorm TextureFormat::Bgra8Unorm
} }
} }
D3DFormat::R8G8B8 => {
return Err(TextureError::FormatRequiresTranscodingError(TranscodeFormat::Rgb8));
},
D3DFormat::G16R16 => TextureFormat::Rg16Uint, D3DFormat::G16R16 => TextureFormat::Rg16Uint,
D3DFormat::A2B10G10R10 => TextureFormat::Rgb10a2Unorm, D3DFormat::A2B10G10R10 => TextureFormat::Rgb10a2Unorm,
D3DFormat::A8L8 => TextureFormat::Rg8Uint, D3DFormat::A8L8 => TextureFormat::Rg8Uint,
@ -149,7 +189,6 @@ pub fn dds_format_to_texture_format(
// FIXME: Map to argb format and user has to know to ignore the alpha channel? // FIXME: Map to argb format and user has to know to ignore the alpha channel?
| D3DFormat::X8B8G8R8 | D3DFormat::X8B8G8R8
| D3DFormat::A2R10G10B10 | D3DFormat::A2R10G10B10
| D3DFormat::R8G8B8
| D3DFormat::X1R5G5B5 | D3DFormat::X1R5G5B5
| D3DFormat::A4R4G4B4 | D3DFormat::A4R4G4B4
| D3DFormat::X4R4G4B4 | D3DFormat::X4R4G4B4

View File

@ -1491,19 +1491,20 @@ pub enum DataFormat {
Rg, Rg,
} }
/// Texture data need to be transcoded from this format for use with `wgpu`.
#[derive(Clone, Copy, Debug)] #[derive(Clone, Copy, Debug)]
pub enum TranscodeFormat { pub enum TranscodeFormat {
Etc1s, Etc1s,
Uastc(DataFormat), Uastc(DataFormat),
// Has to be transcoded to R8Unorm for use with `wgpu` // Has to be transcoded to R8Unorm for use with `wgpu`.
R8UnormSrgb, R8UnormSrgb,
// Has to be transcoded to R8G8Unorm for use with `wgpu` // Has to be transcoded to R8G8Unorm for use with `wgpu`.
Rg8UnormSrgb, Rg8UnormSrgb,
// Has to be transcoded to Rgba8 for use with `wgpu` // Has to be transcoded to Rgba8 for use with `wgpu`.
Rgb8, Rgb8,
} }
/// An error that occurs when accessing specific pixels in a texture /// An error that occurs when accessing specific pixels in a texture.
#[derive(Error, Debug)] #[derive(Error, Debug)]
pub enum TextureAccessError { pub enum TextureAccessError {
#[error("out of bounds (x: {x}, y: {y}, z: {z})")] #[error("out of bounds (x: {x}, y: {y}, z: {z})")]
@ -1514,25 +1515,34 @@ pub enum TextureAccessError {
WrongDimension, WrongDimension,
} }
/// An error that occurs when loading a texture /// An error that occurs when loading a texture.
#[derive(Error, Debug)] #[derive(Error, Debug)]
pub enum TextureError { pub enum TextureError {
/// Image MIME type is invalid.
#[error("invalid image mime type: {0}")] #[error("invalid image mime type: {0}")]
InvalidImageMimeType(String), InvalidImageMimeType(String),
/// Image extension is invalid.
#[error("invalid image extension: {0}")] #[error("invalid image extension: {0}")]
InvalidImageExtension(String), InvalidImageExtension(String),
/// Failed to load an image.
#[error("failed to load an image: {0}")] #[error("failed to load an image: {0}")]
ImageError(#[from] image::ImageError), ImageError(#[from] image::ImageError),
/// Texture format isn't supported.
#[error("unsupported texture format: {0}")] #[error("unsupported texture format: {0}")]
UnsupportedTextureFormat(String), UnsupportedTextureFormat(String),
/// Supercompression isn't supported.
#[error("supercompression not supported: {0}")] #[error("supercompression not supported: {0}")]
SuperCompressionNotSupported(String), SuperCompressionNotSupported(String),
#[error("failed to load an image: {0}")] /// Failed to decompress an image.
#[error("failed to decompress an image: {0}")]
SuperDecompressionError(String), SuperDecompressionError(String),
/// Invalid data.
#[error("invalid data: {0}")] #[error("invalid data: {0}")]
InvalidData(String), InvalidData(String),
/// Transcode error.
#[error("transcode error: {0}")] #[error("transcode error: {0}")]
TranscodeError(String), TranscodeError(String),
/// Format requires transcoding.
#[error("format requires transcoding: {0:?}")] #[error("format requires transcoding: {0:?}")]
FormatRequiresTranscodingError(TranscodeFormat), FormatRequiresTranscodingError(TranscodeFormat),
/// Only cubemaps with six faces are supported. /// Only cubemaps with six faces are supported.

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_input" name = "bevy_input"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Provides input functionality for Bevy Engine" description = "Provides input functionality for Bevy Engine"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"
@ -56,15 +56,6 @@ critical-section = [
"bevy_platform_support/critical-section", "bevy_platform_support/critical-section",
] ]
## `portable-atomic` provides additional platform support for atomic types and
## operations, even on targets without native support.
portable-atomic = [
"bevy_app/portable-atomic",
"bevy_ecs/portable-atomic",
"bevy_reflect?/portable-atomic",
"bevy_platform_support/portable-atomic",
]
## Uses the `libm` maths library instead of the one provided in `std` and `core`. ## Uses the `libm` maths library instead of the one provided in `std` and `core`.
libm = ["bevy_math/libm"] libm = ["bevy_math/libm"]

View File

@ -1539,7 +1539,7 @@ pub fn gamepad_event_processing_system(
match event { match event {
// Connections require inserting/removing components so they are done in a separate system // Connections require inserting/removing components so they are done in a separate system
RawGamepadEvent::Connection(send_event) => { RawGamepadEvent::Connection(send_event) => {
processed_events.send(GamepadEvent::from(send_event.clone())); processed_events.write(GamepadEvent::from(send_event.clone()));
} }
RawGamepadEvent::Axis(RawGamepadAxisChangedEvent { RawGamepadEvent::Axis(RawGamepadAxisChangedEvent {
gamepad, gamepad,
@ -1559,8 +1559,8 @@ pub fn gamepad_event_processing_system(
gamepad_axis.analog.set(axis, filtered_value.raw); gamepad_axis.analog.set(axis, filtered_value.raw);
let send_event = let send_event =
GamepadAxisChangedEvent::new(gamepad, axis, filtered_value.scaled.to_f32()); GamepadAxisChangedEvent::new(gamepad, axis, filtered_value.scaled.to_f32());
processed_axis_events.send(send_event); processed_axis_events.write(send_event);
processed_events.send(GamepadEvent::from(send_event)); processed_events.write(GamepadEvent::from(send_event));
} }
RawGamepadEvent::Button(RawGamepadButtonChangedEvent { RawGamepadEvent::Button(RawGamepadButtonChangedEvent {
gamepad, gamepad,
@ -1583,7 +1583,7 @@ pub fn gamepad_event_processing_system(
if button_settings.is_released(filtered_value.raw) { if button_settings.is_released(filtered_value.raw) {
// Check if button was previously pressed // Check if button was previously pressed
if gamepad_buttons.pressed(button) { if gamepad_buttons.pressed(button) {
processed_digital_events.send(GamepadButtonStateChangedEvent::new( processed_digital_events.write(GamepadButtonStateChangedEvent::new(
gamepad, gamepad,
button, button,
ButtonState::Released, ButtonState::Released,
@ -1595,7 +1595,7 @@ pub fn gamepad_event_processing_system(
} else if button_settings.is_pressed(filtered_value.raw) { } else if button_settings.is_pressed(filtered_value.raw) {
// Check if button was previously not pressed // Check if button was previously not pressed
if !gamepad_buttons.pressed(button) { if !gamepad_buttons.pressed(button) {
processed_digital_events.send(GamepadButtonStateChangedEvent::new( processed_digital_events.write(GamepadButtonStateChangedEvent::new(
gamepad, gamepad,
button, button,
ButtonState::Pressed, ButtonState::Pressed,
@ -1615,8 +1615,8 @@ pub fn gamepad_event_processing_system(
button_state, button_state,
filtered_value.scaled.to_f32(), filtered_value.scaled.to_f32(),
); );
processed_analog_events.send(send_event); processed_analog_events.write(send_event);
processed_events.send(GamepadEvent::from(send_event)); processed_events.write(GamepadEvent::from(send_event));
} }
} }
} }
@ -1699,7 +1699,7 @@ impl GamepadRumbleIntensity {
/// gamepads: Query<Entity, With<Gamepad>>, /// gamepads: Query<Entity, With<Gamepad>>,
/// ) { /// ) {
/// for entity in gamepads.iter() { /// for entity in gamepads.iter() {
/// rumble_requests.send(GamepadRumbleRequest::Add { /// rumble_requests.write(GamepadRumbleRequest::Add {
/// gamepad: entity, /// gamepad: entity,
/// intensity: GamepadRumbleIntensity::MAX, /// intensity: GamepadRumbleIntensity::MAX,
/// duration: Duration::from_secs_f32(0.5), /// duration: Duration::from_secs_f32(0.5),

View File

@ -1,13 +1,13 @@
[package] [package]
name = "bevy_input_focus" name = "bevy_input_focus"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Keyboard focus management" description = "Keyboard focus management"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
keywords = ["bevy"] keywords = ["bevy"]
rust-version = "1.83.0" rust-version = "1.85.0"
[features] [features]
default = ["std", "bevy_reflect", "bevy_ecs/async_executor"] default = ["std", "bevy_reflect", "bevy_ecs/async_executor"]
@ -55,15 +55,6 @@ critical-section = [
"bevy_input/critical-section", "bevy_input/critical-section",
] ]
## `portable-atomic` provides additional platform support for atomic types and
## operations, even on targets without native support.
portable-atomic = [
"bevy_app/portable-atomic",
"bevy_ecs/portable-atomic",
"bevy_reflect?/portable-atomic",
"bevy_input/portable-atomic",
]
## Uses the `libm` maths library instead of the one provided in `std` and `core`. ## Uses the `libm` maths library instead of the one provided in `std` and `core`.
libm = ["bevy_math/libm", "bevy_window/libm"] libm = ["bevy_math/libm", "bevy_window/libm"]

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_internal" name = "bevy_internal"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "An internal Bevy crate used to facilitate optional dynamic linking via the 'dynamic_linking' feature" description = "An internal Bevy crate used to facilitate optional dynamic linking via the 'dynamic_linking' feature"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"
@ -272,6 +272,9 @@ reflect_auto_register = [
"bevy_ecs/reflect_auto_register", "bevy_ecs/reflect_auto_register",
] ]
# Enable documentation reflection
reflect_documentation = ["bevy_reflect/documentation"]
# Enable winit custom cursor support # Enable winit custom cursor support
custom_cursor = ["bevy_winit/custom_cursor"] custom_cursor = ["bevy_winit/custom_cursor"]

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_log" name = "bevy_log"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Provides logging for Bevy Engine" description = "Provides logging for Bevy Engine"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"

View File

@ -117,7 +117,10 @@ pub(crate) struct FlushGuard(SyncCell<tracing_chrome::FlushGuard>);
/// # use bevy_app::{App, NoopPluginGroup as DefaultPlugins, PluginGroup}; /// # use bevy_app::{App, NoopPluginGroup as DefaultPlugins, PluginGroup};
/// # use bevy_log::LogPlugin; /// # use bevy_log::LogPlugin;
/// fn main() { /// fn main() {
/// # // SAFETY: Single-threaded
/// # unsafe {
/// std::env::set_var("NO_COLOR", "1"); /// std::env::set_var("NO_COLOR", "1");
/// # }
/// App::new() /// App::new()
/// .add_plugins(DefaultPlugins) /// .add_plugins(DefaultPlugins)
/// .run(); /// .run();

View File

@ -1,7 +1,7 @@
[package] [package]
name = "bevy_macro_utils" name = "bevy_macro_utils"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "A collection of utils for Bevy Engine" description = "A collection of utils for Bevy Engine"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"

View File

@ -1,13 +1,13 @@
[package] [package]
name = "bevy_math" name = "bevy_math"
version = "0.16.0-dev" version = "0.16.0-dev"
edition = "2021" edition = "2024"
description = "Provides math functionality for Bevy Engine" description = "Provides math functionality for Bevy Engine"
homepage = "https://bevyengine.org" homepage = "https://bevyengine.org"
repository = "https://github.com/bevyengine/bevy" repository = "https://github.com/bevyengine/bevy"
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
keywords = ["bevy"] keywords = ["bevy"]
rust-version = "1.83.0" rust-version = "1.85.0"
[dependencies] [dependencies]
glam = { version = "0.29", default-features = false, features = ["bytemuck"] } glam = { version = "0.29", default-features = false, features = ["bytemuck"] }

Some files were not shown because too many files have changed in this diff Show More