Merge branch 'main' into reflect-auto-registration

commit a0325cda32

.github/FUNDING.yml  (vendored, 2 changes)
@@ -1 +1 @@
-custom: https://bevyengine.org/donate/
+custom: https://bevy.org/donate/
.github/ISSUE_TEMPLATE/docs_improvement.md  (vendored, 2 changes)
@@ -10,4 +10,4 @@ assignees: ''

 Provide a link to the documentation and describe how it could be improved. In what ways is it incomplete, incorrect, or misleading?

-If you have suggestions on exactly what the new docs should say, feel free to include them here. Alternatively, make the changes yourself and [create a pull request](https://bevyengine.org/learn/contribute/helping-out/writing-docs/) instead.
+If you have suggestions on exactly what the new docs should say, feel free to include them here. Alternatively, make the changes yourself and [create a pull request](https://bevy.org/learn/contribute/helping-out/writing-docs/) instead.
.github/workflows/ci.yml  (vendored, 2 changes)
@@ -13,7 +13,7 @@ env:
 CARGO_PROFILE_TEST_DEBUG: 0
 CARGO_PROFILE_DEV_DEBUG: 0
 # If nightly is breaking CI, modify this variable to target a specific nightly version.
-NIGHTLY_TOOLCHAIN: nightly-2025-05-16 # pinned until a fix for https://github.com/rust-lang/miri/issues/4323 is released
+NIGHTLY_TOOLCHAIN: nightly
 RUSTFLAGS: "-D warnings"
 BINSTALL_VERSION: "v1.12.3"
.github/workflows/docs.yml  (vendored, 2 changes)
@@ -82,7 +82,7 @@ jobs:
 - name: Finalize documentation
   run: |
     echo "<meta http-equiv=\"refresh\" content=\"0; url=bevy/index.html\">" > target/doc/index.html
-    echo "dev-docs.bevyengine.org" > target/doc/CNAME
+    echo "dev-docs.bevy.org" > target/doc/CNAME
    echo $'User-Agent: *\nDisallow: /' > target/doc/robots.txt
     rm target/doc/.lock
.github/workflows/welcome.yml  (vendored, 2 changes)
@@ -43,5 +43,5 @@ jobs:
 repo: context.repo.repo,
 body: `**Welcome**, new contributor!

-Please make sure you've read our [contributing guide](https://bevyengine.org/learn/contribute/introduction) and we look forward to reviewing your pull request shortly ✨`
+Please make sure you've read our [contributing guide](https://bevy.org/learn/contribute/introduction) and we look forward to reviewing your pull request shortly ✨`
 })
@@ -1,4 +1,4 @@
 # Contributing to Bevy

 If you'd like to help build Bevy, start by reading this
-[introduction](https://bevyengine.org/learn/contribute/introduction). Thanks for contributing!
+[introduction](https://bevy.org/learn/contribute/introduction). Thanks for contributing!
Cargo.toml  (78 changes)
@@ -5,12 +5,12 @@ edition = "2024"
 categories = ["game-engines", "graphics", "gui", "rendering"]
 description = "A refreshingly simple data-driven game engine and app framework"
 exclude = ["assets/", "tools/", ".github/", "crates/", "examples/wasm/assets/"]
-homepage = "https://bevyengine.org"
+homepage = "https://bevy.org"
 keywords = ["game", "engine", "gamedev", "graphics", "bevy"]
 license = "MIT OR Apache-2.0"
 repository = "https://github.com/bevyengine/bevy"
 documentation = "https://docs.rs/bevy"
-rust-version = "1.85.0"
+rust-version = "1.86.0"

 [workspace]
 resolver = "2"
@@ -74,6 +74,7 @@ allow_attributes_without_reason = "warn"

 [workspace.lints.rust]
 missing_docs = "warn"
+mismatched_lifetime_syntaxes = "allow"
 unexpected_cfgs = { level = "warn", check-cfg = ['cfg(docsrs_dep)'] }
 unsafe_code = "deny"
 unsafe_op_in_unsafe_fn = "warn"
@@ -543,6 +544,9 @@ libm = ["bevy_internal/libm"]
 # Enables use of browser APIs. Note this is currently only applicable on `wasm32` architectures.
 web = ["bevy_internal/web"]

+# Enable hotpatching of Bevy systems
+hotpatching = ["bevy_internal/hotpatching"]
+
 [dependencies]
 bevy_internal = { path = "crates/bevy_internal", version = "0.16.0-dev", default-features = false }
 tracing = { version = "0.1", default-features = false, optional = true }
@@ -557,7 +561,7 @@ rand_chacha = "0.3.1"
 ron = "0.8.0"
 flate2 = "1.0"
 serde = { version = "1", features = ["derive"] }
-serde_json = "1"
+serde_json = "1.0.140"
 bytemuck = "1.7"
 bevy_render = { path = "crates/bevy_render", version = "0.16.0-dev", default-features = false }
 # The following explicit dependencies are needed for proc macros to work inside of examples as they are part of the bevy crate itself.
@@ -578,7 +582,7 @@ hyper = { version = "1", features = ["server", "http1"] }
 http-body-util = "0.1"
 anyhow = "1"
 macro_rules_attribute = "0.2"
-accesskit = "0.18"
+accesskit = "0.19"
 nonmax = "0.5"

 [target.'cfg(not(target_family = "wasm"))'.dev-dependencies]
@@ -591,6 +595,17 @@ ureq = { version = "3.0.8", features = ["json"] }
 wasm-bindgen = { version = "0.2" }
 web-sys = { version = "0.3", features = ["Window"] }

+[[example]]
+name = "context_menu"
+path = "examples/usages/context_menu.rs"
+doc-scrape-examples = true
+
+[package.metadata.example.context_menu]
+name = "Context Menu"
+description = "Example of a context menu"
+category = "Usage"
+wasm = true
+
 [[example]]
 name = "hello_world"
 path = "examples/hello_world.rs"
@@ -3438,6 +3453,28 @@ description = "An example for CSS Grid layout"
 category = "UI (User Interface)"
 wasm = true

+[[example]]
+name = "gradients"
+path = "examples/ui/gradients.rs"
+doc-scrape-examples = true
+
+[package.metadata.example.gradients]
+name = "Gradients"
+description = "An example demonstrating gradients"
+category = "UI (User Interface)"
+wasm = true
+
+[[example]]
+name = "stacked_gradients"
+path = "examples/ui/stacked_gradients.rs"
+doc-scrape-examples = true
+
+[package.metadata.example.stacked_gradients]
+name = "Stacked Gradients"
+description = "An example demonstrating stacked gradients"
+category = "UI (User Interface)"
+wasm = true
+
 [[example]]
 name = "scroll"
 path = "examples/ui/scroll.rs"
@@ -3909,6 +3946,16 @@ description = "A simple 2D screen shake effect"
 category = "Camera"
 wasm = true

+[[example]]
+name = "2d_on_ui"
+path = "examples/camera/2d_on_ui.rs"
+doc-scrape-examples = true
+
+[package.metadata.example.2d_on_ui]
+name = "2D on Bevy UI"
+description = "Shows how to render 2D objects on top of Bevy UI"
+category = "Camera"
+wasm = true

 [package.metadata.example.fps_overlay]
 name = "FPS overlay"
@@ -4377,3 +4424,26 @@ name = "Extended Bindless Material"
 description = "Demonstrates bindless `ExtendedMaterial`"
 category = "Shaders"
 wasm = false
+
+[[example]]
+name = "cooldown"
+path = "examples/usage/cooldown.rs"
+doc-scrape-examples = true
+
+[package.metadata.example.cooldown]
+name = "Cooldown"
+description = "Example for cooldown on button clicks"
+category = "Usage"
+wasm = true
+
+[[example]]
+name = "hotpatching_systems"
+path = "examples/ecs/hotpatching_systems.rs"
+doc-scrape-examples = true
+required-features = ["hotpatching"]
+
+[package.metadata.example.hotpatching_systems]
+name = "Hotpatching Systems"
+description = "Demonstrates how to hotpatch systems"
+category = "ECS (Entity Component System)"
+wasm = false
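Note that the new `hotpatching_systems` example is gated behind the new `hotpatching` cargo feature (`required-features = ["hotpatching"]`), so it would presumably be run with something like `cargo run --example hotpatching_systems --features hotpatching`.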
README.md  (26 changes)
@@ -1,4 +1,4 @@
-# [](https://bevyengine.org)
+# [](https://bevy.org)

 [](https://github.com/bevyengine/bevy#license)
 [](https://crates.io/crates/bevy)
@@ -13,7 +13,7 @@ Bevy is a refreshingly simple data-driven game engine built in Rust. It is free

 ## WARNING

-Bevy is still in the early stages of development. Important features are missing. Documentation is sparse. A new version of Bevy containing breaking changes to the API is released [approximately once every 3 months](https://bevyengine.org/news/bevy-0-6/#the-train-release-schedule). We provide [migration guides](https://bevyengine.org/learn/migration-guides/), but we can't guarantee migrations will always be easy. Use only if you are willing to work in this environment.
+Bevy is still in the early stages of development. Important features are missing. Documentation is sparse. A new version of Bevy containing breaking changes to the API is released [approximately once every 3 months](https://bevy.org/news/bevy-0-6/#the-train-release-schedule). We provide [migration guides](https://bevy.org/learn/migration-guides/), but we can't guarantee migrations will always be easy. Use only if you are willing to work in this environment.

 **MSRV:** Bevy relies heavily on improvements in the Rust language and compiler.
 As a result, the Minimum Supported Rust Version (MSRV) is generally close to "the latest stable release" of Rust.
@@ -29,15 +29,15 @@ As a result, the Minimum Supported Rust Version (MSRV) is generally close to "th

 ## About

-* **[Features](https://bevyengine.org):** A quick overview of Bevy's features.
-* **[News](https://bevyengine.org/news/)**: A development blog that covers our progress, plans and shiny new features.
+* **[Features](https://bevy.org):** A quick overview of Bevy's features.
+* **[News](https://bevy.org/news/)**: A development blog that covers our progress, plans and shiny new features.

 ## Docs

-* **[Quick Start Guide](https://bevyengine.org/learn/quick-start/introduction):** Bevy's official Quick Start Guide. The best place to start learning Bevy.
+* **[Quick Start Guide](https://bevy.org/learn/quick-start/introduction):** Bevy's official Quick Start Guide. The best place to start learning Bevy.
 * **[Bevy Rust API Docs](https://docs.rs/bevy):** Bevy's Rust API docs, which are automatically generated from the doc comments in this repo.
 * **[Official Examples](https://github.com/bevyengine/bevy/tree/latest/examples):** Bevy's dedicated, runnable examples, which are great for digging into specific concepts.
-* **[Community-Made Learning Resources](https://bevyengine.org/assets/#learning)**: More tutorials, documentation, and examples made by the Bevy community.
+* **[Community-Made Learning Resources](https://bevy.org/assets/#learning)**: More tutorials, documentation, and examples made by the Bevy community.

 ## Community

@@ -46,11 +46,11 @@ Before contributing or participating in discussions with the community, you shou
 * **[Discord](https://discord.gg/bevy):** Bevy's official discord server.
 * **[Reddit](https://reddit.com/r/bevy):** Bevy's official subreddit.
 * **[GitHub Discussions](https://github.com/bevyengine/bevy/discussions):** The best place for questions about Bevy, answered right here!
-* **[Bevy Assets](https://bevyengine.org/assets/):** A collection of awesome Bevy projects, tools, plugins and learning materials.
+* **[Bevy Assets](https://bevy.org/assets/):** A collection of awesome Bevy projects, tools, plugins and learning materials.

 ### Contributing

-If you'd like to help build Bevy, check out the **[Contributor's Guide](https://bevyengine.org/learn/contribute/introduction)**.
+If you'd like to help build Bevy, check out the **[Contributor's Guide](https://bevy.org/learn/contribute/introduction)**.
 For simple problems, feel free to [open an issue](https://github.com/bevyengine/bevy/issues) or
 [PR](https://github.com/bevyengine/bevy/pulls) and tackle it yourself!

@@ -58,9 +58,9 @@ For more complex architecture decisions and experimental mad science, please ope

 ## Getting Started

-We recommend checking out the [Quick Start Guide](https://bevyengine.org/learn/quick-start/introduction) for a brief introduction.
+We recommend checking out the [Quick Start Guide](https://bevy.org/learn/quick-start/introduction) for a brief introduction.

-Follow the [Setup guide](https://bevyengine.org/learn/quick-start/getting-started/setup) to ensure your development environment is set up correctly.
+Follow the [Setup guide](https://bevy.org/learn/quick-start/getting-started/setup) to ensure your development environment is set up correctly.
 Once set up, you can quickly try out the [examples](https://github.com/bevyengine/bevy/tree/latest/examples) by cloning this repo and running the following commands:

 ```sh
@@ -75,7 +75,7 @@ To draw a window with standard functionality enabled, use:
 ```rust
 use bevy::prelude::*;

-fn main(){
+fn main() {
     App::new()
         .add_plugins(DefaultPlugins)
         .run();
@@ -84,7 +84,7 @@ fn main(){

 ### Fast Compiles

-Bevy can be built just fine using default configuration on stable Rust. However for really fast iterative compiles, you should enable the "fast compiles" setup by [following the instructions here](https://bevyengine.org/learn/quick-start/getting-started/setup).
+Bevy can be built just fine using default configuration on stable Rust. However for really fast iterative compiles, you should enable the "fast compiles" setup by [following the instructions here](https://bevy.org/learn/quick-start/getting-started/setup).

 ## [Bevy Cargo Features][cargo_features]

@@ -96,7 +96,7 @@ This [list][cargo_features] outlines the different cargo features supported by B

 Bevy is the result of the hard work of many people. A huge thanks to all Bevy contributors, the many open source projects that have come before us, the [Rust gamedev ecosystem](https://arewegameyet.rs/), and the many libraries we build on.

-A huge thanks to Bevy's [generous sponsors](https://bevyengine.org). Bevy will always be free and open source, but it isn't free to make. Please consider [sponsoring our work](https://bevyengine.org/donate/) if you like what we're building.
+A huge thanks to Bevy's [generous sponsors](https://bevy.org). Bevy will always be free and open source, but it isn't free to make. Please consider [sponsoring our work](https://bevy.org/donate/) if you like what we're building.

 <!-- This next line need to stay exactly as is. It is required for BrowserStack sponsorship. -->
 This project is tested with BrowserStack.
BIN  assets/textures/food_kenney.png  (new binary file, 66 KiB — not shown)
@@ -45,7 +45,6 @@ pub fn heavy_compute(c: &mut Criterion) {

         let mut system = IntoSystem::into_system(sys);
         system.initialize(&mut world);
-        system.update_archetype_component_access(world.as_unsafe_world_cell());

         b.iter(move || system.run((), &mut world));
     });

@@ -37,7 +37,6 @@ impl Benchmark {

         let mut system = IntoSystem::into_system(query_system);
         system.initialize(&mut world);
-        system.update_archetype_component_access(world.as_unsafe_world_cell());
         Self(world, Box::new(system))
     }
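These two hunks appear to reflect that manually running a system no longer involves an explicit archetype-component-access update; only `initialize()` followed by `run()` remains. A minimal illustrative sketch of that remaining pattern (the `Health` component and `heal` system below are hypothetical, not from the diff):

```rust
use bevy_ecs::prelude::*;

#[derive(Component)]
struct Health(f32);

// Hypothetical system used only for illustration.
fn heal(mut query: Query<&mut Health>) {
    for mut health in &mut query {
        health.0 += 1.0;
    }
}

fn main() {
    let mut world = World::default();
    // After this change, running a system by hand is just initialize() + run().
    let mut system = IntoSystem::into_system(heal);
    system.initialize(&mut world);
    let _ = system.run((), &mut world);
}
```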
@@ -62,6 +62,31 @@ pub fn spawn_commands(criterion: &mut Criterion) {
     group.finish();
 }

+pub fn nonempty_spawn_commands(criterion: &mut Criterion) {
+    let mut group = criterion.benchmark_group("nonempty_spawn_commands");
+    group.warm_up_time(core::time::Duration::from_millis(500));
+    group.measurement_time(core::time::Duration::from_secs(4));
+
+    for entity_count in [100, 1_000, 10_000] {
+        group.bench_function(format!("{}_entities", entity_count), |bencher| {
+            let mut world = World::default();
+            let mut command_queue = CommandQueue::default();
+
+            bencher.iter(|| {
+                let mut commands = Commands::new(&mut command_queue, &world);
+                for i in 0..entity_count {
+                    if black_box(i % 2 == 0) {
+                        commands.spawn(A);
+                    }
+                }
+                command_queue.apply(&mut world);
+            });
+        });
+    }
+
+    group.finish();
+}
+
 #[derive(Default, Component)]
 struct Matrix([[f32; 4]; 4]);

@@ -17,6 +17,7 @@ criterion_group!(
     benches,
     empty_commands,
     spawn_commands,
+    nonempty_spawn_commands,
     insert_commands,
     fake_commands,
     zero_sized_commands,
@@ -32,7 +32,7 @@ fn segment_ease(c: &mut Criterion) {

 fn curve_position(c: &mut Criterion) {
     /// A helper function that benchmarks calling [`CubicCurve::position()`] over a generic [`VectorSpace`].
-    fn bench_curve<M: Measurement, P: VectorSpace>(
+    fn bench_curve<M: Measurement, P: VectorSpace<Scalar = f32>>(
         group: &mut BenchmarkGroup<M>,
         name: &str,
         curve: CubicCurve<P>,
@@ -41,7 +41,6 @@ disallowed-methods = [
     { path = "f32::asinh", reason = "use bevy_math::ops::asinh instead for libm determinism" },
     { path = "f32::acosh", reason = "use bevy_math::ops::acosh instead for libm determinism" },
     { path = "f32::atanh", reason = "use bevy_math::ops::atanh instead for libm determinism" },
-    { path = "criterion::black_box", reason = "use core::hint::black_box instead" },
 ]

 # Require `bevy_ecs::children!` to use `[]` braces, instead of `()` or `{}`.
@@ -3,7 +3,7 @@ name = "bevy_a11y"
 version = "0.16.0-dev"
 edition = "2024"
 description = "Provides accessibility support for Bevy Engine"
-homepage = "https://bevyengine.org"
+homepage = "https://bevy.org"
 repository = "https://github.com/bevyengine/bevy"
 license = "MIT OR Apache-2.0"
 keywords = ["bevy", "accessibility", "a11y"]
@@ -46,7 +46,7 @@ bevy_ecs = { path = "../bevy_ecs", version = "0.16.0-dev", default-features = fa
 bevy_reflect = { path = "../bevy_reflect", version = "0.16.0-dev", default-features = false, optional = true }

 # other
-accesskit = { version = "0.18", default-features = false }
+accesskit = { version = "0.19", default-features = false }
 serde = { version = "1", default-features = false, features = [
     "alloc",
 ], optional = true }

@@ -1,8 +1,8 @@
 #![forbid(unsafe_code)]
 #![cfg_attr(docsrs, feature(doc_auto_cfg))]
 #![doc(
-    html_logo_url = "https://bevyengine.org/assets/icon.png",
-    html_favicon_url = "https://bevyengine.org/assets/icon.png"
+    html_logo_url = "https://bevy.org/assets/icon.png",
+    html_favicon_url = "https://bevy.org/assets/icon.png"
 )]
 #![no_std]
@@ -3,7 +3,7 @@ name = "bevy_animation"
 version = "0.16.0-dev"
 edition = "2024"
 description = "Provides animation functionality for Bevy Engine"
-homepage = "https://bevyengine.org"
+homepage = "https://bevy.org"
 repository = "https://github.com/bevyengine/bevy"
 license = "MIT OR Apache-2.0"
 keywords = ["bevy"]
@@ -55,7 +55,7 @@ pub struct CubicKeyframeCurve<T> {

 impl<V> Curve<V> for CubicKeyframeCurve<V>
 where
-    V: VectorSpace,
+    V: VectorSpace<Scalar = f32>,
 {
     #[inline]
     fn domain(&self) -> Interval {
@@ -179,7 +179,7 @@ pub struct WideLinearKeyframeCurve<T> {

 impl<T> IterableCurve<T> for WideLinearKeyframeCurve<T>
 where
-    T: VectorSpace,
+    T: VectorSpace<Scalar = f32>,
 {
     #[inline]
     fn domain(&self) -> Interval {
@@ -289,7 +289,7 @@ pub struct WideCubicKeyframeCurve<T> {

 impl<T> IterableCurve<T> for WideCubicKeyframeCurve<T>
 where
-    T: VectorSpace,
+    T: VectorSpace<Scalar = f32>,
 {
     #[inline]
     fn domain(&self) -> Interval {
@@ -406,7 +406,7 @@ fn cubic_spline_interpolation<T>(
     step_duration: f32,
 ) -> T
 where
-    T: VectorSpace,
+    T: VectorSpace<Scalar = f32>,
 {
     let coeffs = (vec4(2.0, 1.0, -2.0, 1.0) * lerp + vec4(-3.0, -2.0, 3.0, -1.0)) * lerp;
     value_start * (coeffs.x * lerp + 1.0)
@@ -415,7 +415,7 @@ where
         + tangent_in_end * step_duration * lerp * coeffs.w
 }

-fn cubic_spline_interpolate_slices<'a, T: VectorSpace>(
+fn cubic_spline_interpolate_slices<'a, T: VectorSpace<Scalar = f32>>(
     width: usize,
     first: &'a [T],
     second: &'a [T],
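Across these hunks the keyframe curves now require `VectorSpace<Scalar = f32>` rather than a bare `VectorSpace`, pinning the curve's scalar type to `f32`. As a minimal illustrative sketch (the `lerp_keyframes` helper below is hypothetical, not from the diff), code written against the new bound looks like this:

```rust
use bevy_math::VectorSpace;

// Any vector space whose associated `Scalar` type is `f32` can be blended
// with an `f32` interpolation factor, which is what the keyframe code relies on.
fn lerp_keyframes<V: VectorSpace<Scalar = f32>>(a: V, b: V, t: f32) -> V {
    a * (1.0 - t) + b * t
}
```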
@@ -1,8 +1,8 @@
 #![cfg_attr(docsrs, feature(doc_auto_cfg))]
 #![forbid(unsafe_code)]
 #![doc(
-    html_logo_url = "https://bevyengine.org/assets/icon.png",
-    html_favicon_url = "https://bevyengine.org/assets/icon.png"
+    html_logo_url = "https://bevy.org/assets/icon.png",
+    html_favicon_url = "https://bevy.org/assets/icon.png"
 )]

 //! Animation for the game engine Bevy
@@ -3,7 +3,7 @@ name = "bevy_anti_aliasing"
 version = "0.16.0-dev"
 edition = "2024"
 description = "Provides various anti aliasing implementations for Bevy Engine"
-homepage = "https://bevyengine.org"
+homepage = "https://bevy.org"
 repository = "https://github.com/bevyengine/bevy"
 license = "MIT OR Apache-2.0"
 keywords = ["bevy"]
@@ -1,5 +1,5 @@
 use bevy_app::prelude::*;
-use bevy_asset::{load_internal_asset, weak_handle, Handle};
+use bevy_asset::{embedded_asset, load_embedded_asset, Handle};
 use bevy_core_pipeline::{
     core_2d::graph::{Core2d, Node2d},
     core_3d::graph::{Core3d, Node3d},
@@ -95,20 +95,12 @@ impl ExtractComponent for ContrastAdaptiveSharpening {
     }
 }

-const CONTRAST_ADAPTIVE_SHARPENING_SHADER_HANDLE: Handle<Shader> =
-    weak_handle!("ef83f0a5-51df-4b51-9ab7-b5fd1ae5a397");
-
 /// Adds Support for Contrast Adaptive Sharpening (CAS).
 pub struct CasPlugin;

 impl Plugin for CasPlugin {
     fn build(&self, app: &mut App) {
-        load_internal_asset!(
-            app,
-            CONTRAST_ADAPTIVE_SHARPENING_SHADER_HANDLE,
-            "robust_contrast_adaptive_sharpening.wgsl",
-            Shader::from_wgsl
-        );
+        embedded_asset!(app, "robust_contrast_adaptive_sharpening.wgsl");

         app.register_type::<ContrastAdaptiveSharpening>();
         app.add_plugins((
@@ -171,6 +163,7 @@ impl Plugin for CasPlugin {
 pub struct CasPipeline {
     texture_bind_group: BindGroupLayout,
     sampler: Sampler,
+    shader: Handle<Shader>,
 }

 impl FromWorld for CasPipeline {
@@ -194,6 +187,7 @@ impl FromWorld for CasPipeline {
         CasPipeline {
             texture_bind_group,
             sampler,
+            shader: load_embedded_asset!(render_world, "robust_contrast_adaptive_sharpening.wgsl"),
         }
     }
 }
@@ -217,7 +211,7 @@ impl SpecializedRenderPipeline for CasPipeline {
             layout: vec![self.texture_bind_group.clone()],
             vertex: fullscreen_shader_vertex_state(),
             fragment: Some(FragmentState {
-                shader: CONTRAST_ADAPTIVE_SHARPENING_SHADER_HANDLE,
+                shader: self.shader.clone(),
                 shader_defs,
                 entry_point: "fragment".into(),
                 targets: vec![Some(ColorTargetState {
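The CAS changes above, and the FXAA, SMAA, and TAA changes below, all follow the same migration: the global `weak_handle!` shader constant and `load_internal_asset!` call are replaced by `embedded_asset!` in `Plugin::build`, a `Handle<Shader>` field on the pipeline filled in via `load_embedded_asset!`, and `self.shader.clone()` at specialization time. A condensed, illustrative sketch of that pattern (the plugin, pipeline, and shader file names here are placeholders, not items from this diff):

```rust
use bevy_app::{App, Plugin};
use bevy_asset::{embedded_asset, load_embedded_asset, Handle};
use bevy_ecs::world::{FromWorld, World};
use bevy_render::render_resource::Shader;

struct MyEffectPlugin;

struct MyEffectPipeline {
    shader: Handle<Shader>,
}

impl Plugin for MyEffectPlugin {
    fn build(&self, app: &mut App) {
        // Register the shader source that sits next to this Rust file.
        embedded_asset!(app, "my_effect.wgsl");
    }
}

impl FromWorld for MyEffectPipeline {
    fn from_world(world: &mut World) -> Self {
        Self {
            // Resolve a handle to the embedded shader; no global weak handle needed.
            shader: load_embedded_asset!(world, "my_effect.wgsl"),
        }
    }
}
```

At specialization time the pipeline then references `self.shader.clone()` instead of a shared constant handle.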
@@ -1,9 +0,0 @@
-//! Experimental rendering features.
-//!
-//! Experimental features are features with known problems, missing features,
-//! compatibility issues, low performance, and/or future breaking changes, but
-//! are included nonetheless for testing purposes.
-
-pub mod taa {
-    pub use crate::taa::{TemporalAntiAliasNode, TemporalAntiAliasPlugin, TemporalAntiAliasing};
-}
@@ -1,5 +1,5 @@
 use bevy_app::prelude::*;
-use bevy_asset::{load_internal_asset, weak_handle, Handle};
+use bevy_asset::{embedded_asset, load_embedded_asset, Handle};
 use bevy_core_pipeline::{
     core_2d::graph::{Core2d, Node2d},
     core_3d::graph::{Core3d, Node3d},
@@ -80,13 +80,11 @@ impl Default for Fxaa {
     }
 }

-const FXAA_SHADER_HANDLE: Handle<Shader> = weak_handle!("fc58c0a8-01c0-46e9-94cc-83a794bae7b0");
-
 /// Adds support for Fast Approximate Anti-Aliasing (FXAA)
 pub struct FxaaPlugin;
 impl Plugin for FxaaPlugin {
     fn build(&self, app: &mut App) {
-        load_internal_asset!(app, FXAA_SHADER_HANDLE, "fxaa.wgsl", Shader::from_wgsl);
+        embedded_asset!(app, "fxaa.wgsl");

         app.register_type::<Fxaa>();
         app.add_plugins(ExtractComponentPlugin::<Fxaa>::default());
@@ -132,6 +130,7 @@ impl Plugin for FxaaPlugin {
 pub struct FxaaPipeline {
     texture_bind_group: BindGroupLayout,
     sampler: Sampler,
+    shader: Handle<Shader>,
 }

 impl FromWorld for FxaaPipeline {
@@ -158,6 +157,7 @@ impl FromWorld for FxaaPipeline {
         FxaaPipeline {
             texture_bind_group,
             sampler,
+            shader: load_embedded_asset!(render_world, "fxaa.wgsl"),
         }
     }
 }
@@ -183,7 +183,7 @@ impl SpecializedRenderPipeline for FxaaPipeline {
             layout: vec![self.texture_bind_group.clone()],
             vertex: fullscreen_shader_vertex_state(),
             fragment: Some(FragmentState {
-                shader: FXAA_SHADER_HANDLE,
+                shader: self.shader.clone(),
                 shader_defs: vec![
                     format!("EDGE_THRESH_{}", key.edge_threshold.get_str()).into(),
                     format!("EDGE_THRESH_MIN_{}", key.edge_threshold_min.get_str()).into(),
@@ -2,26 +2,25 @@
 #![forbid(unsafe_code)]
 #![cfg_attr(docsrs, feature(doc_auto_cfg))]
 #![doc(
-    html_logo_url = "https://bevyengine.org/assets/icon.png",
-    html_favicon_url = "https://bevyengine.org/assets/icon.png"
+    html_logo_url = "https://bevy.org/assets/icon.png",
+    html_favicon_url = "https://bevy.org/assets/icon.png"
 )]

 use bevy_app::Plugin;
 use contrast_adaptive_sharpening::CasPlugin;
 use fxaa::FxaaPlugin;
 use smaa::SmaaPlugin;
+use taa::TemporalAntiAliasPlugin;

 pub mod contrast_adaptive_sharpening;
-pub mod experimental;
 pub mod fxaa;
 pub mod smaa;

-mod taa;
+pub mod taa;

 #[derive(Default)]
 pub struct AntiAliasingPlugin;
 impl Plugin for AntiAliasingPlugin {
     fn build(&self, app: &mut bevy_app::App) {
-        app.add_plugins((FxaaPlugin, CasPlugin, SmaaPlugin));
+        app.add_plugins((FxaaPlugin, SmaaPlugin, TemporalAntiAliasPlugin, CasPlugin));
     }
 }
@@ -32,7 +32,7 @@
 use bevy_app::{App, Plugin};
 #[cfg(feature = "smaa_luts")]
 use bevy_asset::load_internal_binary_asset;
-use bevy_asset::{load_internal_asset, weak_handle, Handle};
+use bevy_asset::{embedded_asset, load_embedded_asset, weak_handle, Handle};
 #[cfg(not(feature = "smaa_luts"))]
 use bevy_core_pipeline::tonemapping::lut_placeholder;
 use bevy_core_pipeline::{
@@ -80,8 +80,6 @@ use bevy_render::{
 };
 use bevy_utils::prelude::default;

-/// The handle of the `smaa.wgsl` shader.
-const SMAA_SHADER_HANDLE: Handle<Shader> = weak_handle!("fdd9839f-1ab4-4e0d-88a0-240b67da2ddf");
 /// The handle of the area LUT, a KTX2 format texture that SMAA uses internally.
 const SMAA_AREA_LUT_TEXTURE_HANDLE: Handle<Image> =
     weak_handle!("569c4d67-c7fa-4958-b1af-0836023603c0");
@@ -147,6 +145,8 @@ struct SmaaEdgeDetectionPipeline {
     postprocess_bind_group_layout: BindGroupLayout,
     /// The bind group layout for data specific to this pass.
     edge_detection_bind_group_layout: BindGroupLayout,
+    /// The shader asset handle.
+    shader: Handle<Shader>,
 }

 /// The pipeline data for phase 2 of SMAA: blending weight calculation.
@@ -155,6 +155,8 @@ struct SmaaBlendingWeightCalculationPipeline {
     postprocess_bind_group_layout: BindGroupLayout,
     /// The bind group layout for data specific to this pass.
     blending_weight_calculation_bind_group_layout: BindGroupLayout,
+    /// The shader asset handle.
+    shader: Handle<Shader>,
 }

 /// The pipeline data for phase 3 of SMAA: neighborhood blending.
@@ -163,6 +165,8 @@ struct SmaaNeighborhoodBlendingPipeline {
     postprocess_bind_group_layout: BindGroupLayout,
     /// The bind group layout for data specific to this pass.
     neighborhood_blending_bind_group_layout: BindGroupLayout,
+    /// The shader asset handle.
+    shader: Handle<Shader>,
 }

 /// A unique identifier for a set of SMAA pipelines.
@@ -287,7 +291,7 @@ pub struct SmaaSpecializedRenderPipelines {
 impl Plugin for SmaaPlugin {
     fn build(&self, app: &mut App) {
         // Load the shader.
-        load_internal_asset!(app, SMAA_SHADER_HANDLE, "smaa.wgsl", Shader::from_wgsl);
+        embedded_asset!(app, "smaa.wgsl");

         // Load the two lookup textures. These are compressed textures in KTX2
         // format.
@@ -431,18 +435,23 @@ impl FromWorld for SmaaPipelines {
             ),
         );

+        let shader = load_embedded_asset!(world, "smaa.wgsl");
+
         SmaaPipelines {
             edge_detection: SmaaEdgeDetectionPipeline {
                 postprocess_bind_group_layout: postprocess_bind_group_layout.clone(),
                 edge_detection_bind_group_layout,
+                shader: shader.clone(),
             },
             blending_weight_calculation: SmaaBlendingWeightCalculationPipeline {
                 postprocess_bind_group_layout: postprocess_bind_group_layout.clone(),
                 blending_weight_calculation_bind_group_layout,
+                shader: shader.clone(),
             },
             neighborhood_blending: SmaaNeighborhoodBlendingPipeline {
                 postprocess_bind_group_layout,
                 neighborhood_blending_bind_group_layout,
+                shader,
             },
         }
     }
@@ -472,13 +481,13 @@ impl SpecializedRenderPipeline for SmaaEdgeDetectionPipeline {
                 self.edge_detection_bind_group_layout.clone(),
             ],
             vertex: VertexState {
-                shader: SMAA_SHADER_HANDLE,
+                shader: self.shader.clone(),
                 shader_defs: shader_defs.clone(),
                 entry_point: "edge_detection_vertex_main".into(),
                 buffers: vec![],
             },
             fragment: Some(FragmentState {
-                shader: SMAA_SHADER_HANDLE,
+                shader: self.shader.clone(),
                 shader_defs,
                 entry_point: "luma_edge_detection_fragment_main".into(),
                 targets: vec![Some(ColorTargetState {
@@ -532,13 +541,13 @@ impl SpecializedRenderPipeline for SmaaBlendingWeightCalculationPipeline {
                 self.blending_weight_calculation_bind_group_layout.clone(),
             ],
             vertex: VertexState {
-                shader: SMAA_SHADER_HANDLE,
+                shader: self.shader.clone(),
                 shader_defs: shader_defs.clone(),
                 entry_point: "blending_weight_calculation_vertex_main".into(),
                 buffers: vec![],
             },
             fragment: Some(FragmentState {
-                shader: SMAA_SHADER_HANDLE,
+                shader: self.shader.clone(),
                 shader_defs,
                 entry_point: "blending_weight_calculation_fragment_main".into(),
                 targets: vec![Some(ColorTargetState {
@@ -580,13 +589,13 @@ impl SpecializedRenderPipeline for SmaaNeighborhoodBlendingPipeline {
                 self.neighborhood_blending_bind_group_layout.clone(),
             ],
             vertex: VertexState {
-                shader: SMAA_SHADER_HANDLE,
+                shader: self.shader.clone(),
                 shader_defs: shader_defs.clone(),
                 entry_point: "neighborhood_blending_vertex_main".into(),
                 buffers: vec![],
             },
             fragment: Some(FragmentState {
-                shader: SMAA_SHADER_HANDLE,
+                shader: self.shader.clone(),
                 shader_defs,
                 entry_point: "neighborhood_blending_fragment_main".into(),
                 targets: vec![Some(ColorTargetState {
@@ -1,5 +1,5 @@
 use bevy_app::{App, Plugin};
-use bevy_asset::{load_internal_asset, weak_handle, Handle};
+use bevy_asset::{embedded_asset, load_embedded_asset, Handle};
 use bevy_core_pipeline::{
     core_3d::graph::{Core3d, Node3d},
     fullscreen_vertex_shader::fullscreen_shader_vertex_state,
@@ -40,8 +40,6 @@ use bevy_render::{
 };
 use tracing::warn;

-const TAA_SHADER_HANDLE: Handle<Shader> = weak_handle!("fea20d50-86b6-4069-aa32-374346aec00c");
-
 /// Plugin for temporal anti-aliasing.
 ///
 /// See [`TemporalAntiAliasing`] for more details.
@@ -49,7 +47,7 @@ pub struct TemporalAntiAliasPlugin;

 impl Plugin for TemporalAntiAliasPlugin {
     fn build(&self, app: &mut App) {
-        load_internal_asset!(app, TAA_SHADER_HANDLE, "taa.wgsl", Shader::from_wgsl);
+        embedded_asset!(app, "taa.wgsl");

         app.register_type::<TemporalAntiAliasing>();

@@ -64,7 +62,7 @@ impl Plugin for TemporalAntiAliasPlugin {
             .add_systems(
                 Render,
                 (
-                    prepare_taa_jitter_and_mip_bias.in_set(RenderSystems::ManageViews),
+                    prepare_taa_jitter.in_set(RenderSystems::ManageViews),
                     prepare_taa_pipelines.in_set(RenderSystems::Prepare),
                     prepare_taa_history_textures.in_set(RenderSystems::PrepareResources),
                 ),
@@ -115,7 +113,6 @@ impl Plugin for TemporalAntiAliasPlugin {
 ///
 /// # Usage Notes
 ///
-/// The [`TemporalAntiAliasPlugin`] must be added to your app.
 /// Any camera with this component must also disable [`Msaa`] by setting it to [`Msaa::Off`].
 ///
 /// [Currently](https://github.com/bevyengine/bevy/issues/8423), TAA cannot be used with [`bevy_render::camera::OrthographicProjection`].
@@ -128,11 +125,9 @@ impl Plugin for TemporalAntiAliasPlugin {
 ///
 /// 1. Write particle motion vectors to the motion vectors prepass texture
 /// 2. Render particles after TAA
-///
-/// If no [`MipBias`] component is attached to the camera, TAA will add a `MipBias(-1.0)` component.
 #[derive(Component, Reflect, Clone)]
 #[reflect(Component, Default, Clone)]
-#[require(TemporalJitter, DepthPrepass, MotionVectorPrepass)]
+#[require(TemporalJitter, MipBias, DepthPrepass, MotionVectorPrepass)]
 #[doc(alias = "Taa")]
 pub struct TemporalAntiAliasing {
     /// Set to true to delete the saved temporal history (past frames).
@@ -243,6 +238,7 @@ struct TaaPipeline {
     taa_bind_group_layout: BindGroupLayout,
     nearest_sampler: Sampler,
     linear_sampler: Sampler,
+    shader: Handle<Shader>,
 }

 impl FromWorld for TaaPipeline {
@@ -287,6 +283,7 @@ impl FromWorld for TaaPipeline {
             taa_bind_group_layout,
             nearest_sampler,
             linear_sampler,
+            shader: load_embedded_asset!(world, "taa.wgsl"),
         }
     }
 }
@@ -319,7 +316,7 @@ impl SpecializedRenderPipeline for TaaPipeline {
             layout: vec![self.taa_bind_group_layout.clone()],
             vertex: fullscreen_shader_vertex_state(),
             fragment: Some(FragmentState {
-                shader: TAA_SHADER_HANDLE,
+                shader: self.shader.clone(),
                 shader_defs,
                 entry_point: "taa".into(),
                 targets: vec![
@@ -345,16 +342,11 @@ impl SpecializedRenderPipeline for TaaPipeline {
 }

 fn extract_taa_settings(mut commands: Commands, mut main_world: ResMut<MainWorld>) {
-    let mut cameras_3d = main_world.query_filtered::<(
+    let mut cameras_3d = main_world.query::<(
         RenderEntity,
         &Camera,
         &Projection,
-        &mut TemporalAntiAliasing,
-    ), (
-        With<Camera3d>,
-        With<TemporalJitter>,
-        With<DepthPrepass>,
-        With<MotionVectorPrepass>,
+        Option<&mut TemporalAntiAliasing>,
     )>();

     for (entity, camera, camera_projection, mut taa_settings) in
@@ -364,14 +356,12 @@ fn extract_taa_settings(mut commands: Commands, mut main_world: ResMut<MainWorld>)
         let mut entity_commands = commands
             .get_entity(entity)
             .expect("Camera entity wasn't synced.");
-        if camera.is_active && has_perspective_projection {
-            entity_commands.insert(taa_settings.clone());
-            taa_settings.reset = false;
+        if taa_settings.is_some() && camera.is_active && has_perspective_projection {
+            entity_commands.insert(taa_settings.as_deref().unwrap().clone());
+            taa_settings.as_mut().unwrap().reset = false;
         } else {
             // TODO: needs better strategy for cleaning up
             entity_commands.remove::<(
                 TemporalAntiAliasing,
                 // components added in prepare systems (because `TemporalAntiAliasNode` does not query extracted components)
                 TemporalAntiAliasHistoryTextures,
                 TemporalAntiAliasPipelineId,
             )>();
@@ -379,13 +369,22 @@ fn extract_taa_settings(mut commands: Commands, mut main_world: ResMut<MainWorld>)
     }
 }

-fn prepare_taa_jitter_and_mip_bias(
+fn prepare_taa_jitter(
     frame_count: Res<FrameCount>,
-    mut query: Query<(Entity, &mut TemporalJitter, Option<&MipBias>), With<TemporalAntiAliasing>>,
-    mut commands: Commands,
+    mut query: Query<
+        &mut TemporalJitter,
+        (
+            With<TemporalAntiAliasing>,
+            With<Camera3d>,
+            With<TemporalJitter>,
+            With<DepthPrepass>,
+            With<MotionVectorPrepass>,
+        ),
+    >,
 ) {
-    // Halton sequence (2, 3) - 0.5, skipping i = 0
+    // Halton sequence (2, 3) - 0.5
     let halton_sequence = [
+        vec2(0.0, 0.0),
         vec2(0.0, -0.16666666),
         vec2(-0.25, 0.16666669),
         vec2(0.25, -0.3888889),
@@ -393,17 +392,12 @@ fn prepare_taa_jitter_and_mip_bias(
         vec2(0.125, 0.2777778),
         vec2(-0.125, -0.2777778),
         vec2(0.375, 0.055555582),
-        vec2(-0.4375, 0.3888889),
     ];

     let offset = halton_sequence[frame_count.0 as usize % halton_sequence.len()];

-    for (entity, mut jitter, mip_bias) in &mut query {
+    for mut jitter in &mut query {
         jitter.offset = offset;
-
-        if mip_bias.is_none() {
-            commands.entity(entity).insert(MipBias(-1.0));
-        }
     }
 }
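Taken together, these changes make `MipBias` a required component of `TemporalAntiAliasing` (instead of being inserted by a render-world system) and register `TemporalAntiAliasPlugin` through `AntiAliasingPlugin`. As a hedged sketch of the resulting user-facing setup (illustrative, not code from this diff):

```rust
use bevy::anti_aliasing::taa::TemporalAntiAliasing;
use bevy::prelude::*;

// Enabling TAA on a 3D camera. The required components (TemporalJitter,
// MipBias, DepthPrepass, MotionVectorPrepass) are inserted automatically via
// #[require(...)]; MSAA still has to be disabled explicitly.
fn setup(mut commands: Commands) {
    commands.spawn((
        Camera3d::default(),
        TemporalAntiAliasing::default(),
        Msaa::Off,
    ));
}
```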
@@ -3,7 +3,7 @@ name = "bevy_app"
 version = "0.16.0-dev"
 edition = "2024"
 description = "Provides core App functionality for Bevy Engine"
-homepage = "https://bevyengine.org"
+homepage = "https://bevy.org"
 repository = "https://github.com/bevyengine/bevy"
 license = "MIT OR Apache-2.0"
 keywords = ["bevy"]
@@ -52,7 +52,6 @@ std = [
   "bevy_ecs/std",
   "dep:ctrlc",
-  "downcast-rs/std",
   "bevy_utils/std",
   "bevy_tasks/std",
   "bevy_platform/std",
 ]
@@ -77,14 +76,18 @@ web = [
   "dep:console_error_panic_hook",
 ]

+hotpatching = [
+  "bevy_ecs/hotpatching",
+  "dep:dioxus-devtools",
+  "dep:crossbeam-channel",
+]
+
 [dependencies]
 # bevy
 bevy_derive = { path = "../bevy_derive", version = "0.16.0-dev" }
 bevy_ecs = { path = "../bevy_ecs", version = "0.16.0-dev", default-features = false }
 bevy_reflect = { path = "../bevy_reflect", version = "0.16.0-dev", default-features = false, optional = true }
-bevy_utils = { path = "../bevy_utils", version = "0.16.0-dev", default-features = false, features = [
-  "alloc",
-] }
+bevy_utils = { path = "../bevy_utils", version = "0.16.0-dev", default-features = false }
 bevy_tasks = { path = "../bevy_tasks", version = "0.16.0-dev", default-features = false }
 bevy_platform = { path = "../bevy_platform", version = "0.16.0-dev", default-features = false }
@@ -95,8 +98,10 @@ variadics_please = "1.1"
 tracing = { version = "0.1", default-features = false, optional = true }
 log = { version = "0.4", default-features = false }
 cfg-if = "1.0.0"
+dioxus-devtools = { version = "0.7.0-alpha.1", optional = true }
+crossbeam-channel = { version = "0.5.0", optional = true }

-[target.'cfg(any(unix, windows))'.dependencies]
+[target.'cfg(any(all(unix, not(target_os = "horizon")), windows))'.dependencies]
 ctrlc = { version = "3.4.4", optional = true }

 [target.'cfg(target_arch = "wasm32")'.dependencies]
@@ -1311,6 +1311,8 @@ impl App {

     /// Spawns an [`Observer`] entity, which will watch for and respond to the given event.
     ///
+    /// `observer` can be any system whose first parameter is a [`Trigger`].
+    ///
     /// # Examples
     ///
     /// ```rust
@@ -1331,7 +1333,7 @@ impl App {
     /// # #[derive(Component)]
     /// # struct Friend;
     /// #
-    /// // An observer system can be any system where the first parameter is a trigger
+    ///
     /// app.add_observer(|trigger: Trigger<Party>, friends: Query<Entity, With<Friend>>, mut commands: Commands| {
     ///     if trigger.event().friends_allowed {
     ///         for friend in friends.iter() {
crates/bevy_app/src/hotpatch.rs  (new file, 42 lines)
@@ -0,0 +1,42 @@
//! Utilities for hotpatching code.
extern crate alloc;

use alloc::sync::Arc;

use bevy_ecs::{event::EventWriter, HotPatched};
#[cfg(not(target_family = "wasm"))]
use dioxus_devtools::connect_subsecond;
use dioxus_devtools::subsecond;

pub use dioxus_devtools::subsecond::{call, HotFunction};

use crate::{Last, Plugin};

/// Plugin connecting to Dioxus CLI to enable hot patching.
#[derive(Default)]
pub struct HotPatchPlugin;

impl Plugin for HotPatchPlugin {
    fn build(&self, app: &mut crate::App) {
        let (sender, receiver) = crossbeam_channel::bounded::<HotPatched>(1);

        // Connects to the dioxus CLI that will handle rebuilds
        // This will open a connection to the dioxus CLI to receive updated jump tables
        // Sends a `HotPatched` message through the channel when the jump table is updated
        #[cfg(not(target_family = "wasm"))]
        connect_subsecond();
        subsecond::register_handler(Arc::new(move || {
            sender.send(HotPatched).unwrap();
        }));

        // Adds a system that will read the channel for new `HotPatched`, and forward them as event to the ECS
        app.add_event::<HotPatched>().add_systems(
            Last,
            move |mut events: EventWriter<HotPatched>| {
                if receiver.try_recv().is_ok() {
                    events.write_default();
                }
            },
        );
    }
}
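For context, a minimal sketch of how an app might opt in to this plugin (illustrative only; how it is wired into `DefaultPlugins` is not part of this diff):

```rust
use bevy_app::{hotpatch::HotPatchPlugin, App, Update};

// With the `hotpatching` cargo feature enabled, adding the plugin lets systems
// be swapped at runtime via the Dioxus CLI's jump-table updates.
fn main() {
    App::new()
        .add_plugins(HotPatchPlugin)
        .add_systems(Update, || println!("hello, hotpatchable world"))
        .run();
}
```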
@@ -8,8 +8,8 @@
 #![cfg_attr(any(docsrs, docsrs_dep), feature(doc_auto_cfg, rustdoc_internals))]
 #![forbid(unsafe_code)]
 #![doc(
-    html_logo_url = "https://bevyengine.org/assets/icon.png",
-    html_favicon_url = "https://bevyengine.org/assets/icon.png"
+    html_logo_url = "https://bevy.org/assets/icon.png",
+    html_favicon_url = "https://bevy.org/assets/icon.png"
 )]
 #![no_std]

@@ -28,21 +28,26 @@ mod main_schedule;
 mod panic_handler;
 mod plugin;
 mod plugin_group;
+mod propagate;
 mod schedule_runner;
 mod sub_app;
 mod task_pool_plugin;
-#[cfg(all(any(unix, windows), feature = "std"))]
+#[cfg(all(any(all(unix, not(target_os = "horizon")), windows), feature = "std"))]
 mod terminal_ctrl_c_handler;

+#[cfg(feature = "hotpatching")]
+pub mod hotpatch;
+
 pub use app::*;
 pub use main_schedule::*;
 pub use panic_handler::*;
 pub use plugin::*;
 pub use plugin_group::*;
+pub use propagate::*;
 pub use schedule_runner::*;
 pub use sub_app::*;
 pub use task_pool_plugin::*;
-#[cfg(all(any(unix, windows), feature = "std"))]
+#[cfg(all(any(all(unix, not(target_os = "horizon")), windows), feature = "std"))]
 pub use terminal_ctrl_c_handler::*;

 /// The app prelude.

@@ -1,4 +1,4 @@
-//! This module provides panic handlers for [Bevy](https://bevyengine.org)
+//! This module provides panic handlers for [Bevy](https://bevy.org)
 //! apps, and automatically configures platform specifics (i.e. Wasm or Android).
 //!
 //! By default, the [`PanicHandlerPlugin`] from this crate is included in Bevy's `DefaultPlugins`.
crates/bevy_app/src/propagate.rs  (new file, 551 lines)
@@ -0,0 +1,551 @@
use alloc::vec::Vec;
use core::marker::PhantomData;

use crate::{App, Plugin, Update};
use bevy_ecs::{
    component::Component,
    entity::Entity,
    hierarchy::ChildOf,
    query::{Changed, Or, QueryFilter, With, Without},
    relationship::{Relationship, RelationshipTarget},
    removal_detection::RemovedComponents,
    schedule::{IntoScheduleConfigs, SystemSet},
    system::{Commands, Local, Query},
};

/// Plugin to automatically propagate a component value to all direct and transient relationship
/// targets (e.g. [`bevy_ecs::hierarchy::Children`]) of entities with a [`Propagate`] component.
///
/// The plugin will maintain the target component over hierarchy changes, adding or removing
/// `C` when a relationship `R` (e.g. [`ChildOf`]) is added to or removed from a
/// relationship tree with a [`Propagate<C>`] source, or if the [`Propagate<C>`] component
/// is added, changed or removed.
///
/// Optionally you can include a query filter `F` to restrict the entities that are updated.
/// Note that the filter is not rechecked dynamically: changes to the filter state will not be
/// picked up until the [`Propagate`] component is touched, or the hierarchy is changed.
/// All members of the tree between source and target must match the filter for propagation
/// to reach a given target.
/// Individual entities can be skipped or terminate the propagation with the [`PropagateOver`]
/// and [`PropagateStop`] components.
pub struct HierarchyPropagatePlugin<
    C: Component + Clone + PartialEq,
    F: QueryFilter = (),
    R: Relationship = ChildOf,
>(PhantomData<fn() -> (C, F, R)>);
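A hedged usage sketch of the plugin introduced above (illustrative only, not part of `propagate.rs`; `TeamColor` is a hypothetical component):

```rust
use bevy_app::{App, HierarchyPropagatePlugin, Propagate};
use bevy_ecs::{component::Component, hierarchy::ChildOf};

#[derive(Component, Clone, PartialEq)]
struct TeamColor(u8);

fn main() {
    let mut app = App::new();
    // Propagate `TeamColor` from any entity carrying `Propagate<TeamColor>`
    // to all of its (transitive) children.
    app.add_plugins(HierarchyPropagatePlugin::<TeamColor>::default());

    let parent = app.world_mut().spawn(Propagate(TeamColor(1))).id();
    let child = app.world_mut().spawn(ChildOf(parent)).id();

    app.update();
    assert!(app.world().entity(child).contains::<TeamColor>());
}
```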
|
||||
/// Causes the inner component to be added to this entity and all direct and transient relationship
|
||||
/// targets. A target with a [`Propagate<C>`] component of its own will override propagation from
|
||||
/// that point in the tree.
|
||||
#[derive(Component, Clone, PartialEq)]
|
||||
pub struct Propagate<C: Component + Clone + PartialEq>(pub C);
|
||||
|
||||
/// Stops the output component being added to this entity.
|
||||
/// Relationship targets will still inherit the component from this entity or its parents.
|
||||
#[derive(Component)]
|
||||
pub struct PropagateOver<C>(PhantomData<fn() -> C>);
|
||||
|
||||
/// Stops the propagation at this entity. Children will not inherit the component.
|
||||
#[derive(Component)]
|
||||
pub struct PropagateStop<C>(PhantomData<fn() -> C>);
|
||||
|
||||
/// The set in which propagation systems are added. You can schedule your logic relative to this set.
|
||||
#[derive(SystemSet, Clone, PartialEq, PartialOrd, Ord)]
|
||||
pub struct PropagateSet<C: Component + Clone + PartialEq> {
|
||||
_p: PhantomData<fn() -> C>,
|
||||
}
|
||||
|
||||
/// Internal struct for managing propagation
|
||||
#[derive(Component, Clone, PartialEq)]
|
||||
pub struct Inherited<C: Component + Clone + PartialEq>(pub C);
|
||||
|
||||
impl<C: Component + Clone + PartialEq, F: QueryFilter, R: Relationship> Default
|
||||
for HierarchyPropagatePlugin<C, F, R>
|
||||
{
|
||||
fn default() -> Self {
|
||||
Self(Default::default())
|
||||
}
|
||||
}
|
||||
|
||||
impl<C> Default for PropagateOver<C> {
|
||||
fn default() -> Self {
|
||||
Self(Default::default())
|
||||
}
|
||||
}
|
||||
|
||||
impl<C> Default for PropagateStop<C> {
|
||||
fn default() -> Self {
|
||||
Self(Default::default())
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: Component + Clone + PartialEq> core::fmt::Debug for PropagateSet<C> {
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
f.debug_struct("PropagateSet")
|
||||
.field("_p", &self._p)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: Component + Clone + PartialEq> Eq for PropagateSet<C> {}
|
||||
impl<C: Component + Clone + PartialEq> core::hash::Hash for PropagateSet<C> {
|
||||
fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
|
||||
self._p.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: Component + Clone + PartialEq> Default for PropagateSet<C> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
_p: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: Component + Clone + PartialEq, F: QueryFilter + 'static, R: Relationship> Plugin
|
||||
for HierarchyPropagatePlugin<C, F, R>
|
||||
{
|
||||
fn build(&self, app: &mut App) {
|
||||
app.add_systems(
|
||||
Update,
|
||||
(
|
||||
update_source::<C, F>,
|
||||
update_stopped::<C, F>,
|
||||
update_reparented::<C, F, R>,
|
||||
propagate_inherited::<C, F, R>,
|
||||
propagate_output::<C, F>,
|
||||
)
|
||||
.chain()
|
||||
.in_set(PropagateSet::<C>::default()),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/// add/remove `Inherited::<C>` and `C` for entities with a direct `Propagate::<C>`
|
||||
pub fn update_source<C: Component + Clone + PartialEq, F: QueryFilter>(
|
||||
mut commands: Commands,
|
||||
changed: Query<
|
||||
(Entity, &Propagate<C>),
|
||||
(
|
||||
Or<(Changed<Propagate<C>>, Without<Inherited<C>>)>,
|
||||
Without<PropagateStop<C>>,
|
||||
),
|
||||
>,
|
||||
mut removed: RemovedComponents<Propagate<C>>,
|
||||
) {
|
||||
for (entity, source) in &changed {
|
||||
commands
|
||||
.entity(entity)
|
||||
.try_insert(Inherited(source.0.clone()));
|
||||
}
|
||||
|
||||
for removed in removed.read() {
|
||||
if let Ok(mut commands) = commands.get_entity(removed) {
|
||||
commands.remove::<(Inherited<C>, C)>();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// remove `Inherited::<C>` and `C` for entities with a `PropagateStop::<C>`
|
||||
pub fn update_stopped<C: Component + Clone + PartialEq, F: QueryFilter>(
|
||||
mut commands: Commands,
|
||||
q: Query<Entity, (With<Inherited<C>>, With<PropagateStop<C>>, F)>,
|
||||
) {
|
||||
for entity in q.iter() {
|
||||
let mut cmds = commands.entity(entity);
|
||||
cmds.remove::<(Inherited<C>, C)>();
|
||||
}
|
||||
}
|
||||
|
||||
/// add/remove `Inherited::<C>` and `C` for entities which have changed relationship
|
||||
pub fn update_reparented<C: Component + Clone + PartialEq, F: QueryFilter, R: Relationship>(
|
||||
mut commands: Commands,
|
||||
moved: Query<
|
||||
(Entity, &R, Option<&Inherited<C>>),
|
||||
(
|
||||
Changed<R>,
|
||||
Without<Propagate<C>>,
|
||||
Without<PropagateStop<C>>,
|
||||
F,
|
||||
),
|
||||
>,
|
||||
relations: Query<&Inherited<C>>,
|
||||
orphaned: Query<Entity, (With<Inherited<C>>, Without<Propagate<C>>, Without<R>, F)>,
|
||||
) {
|
||||
for (entity, relation, maybe_inherited) in &moved {
|
||||
if let Ok(inherited) = relations.get(relation.get()) {
|
||||
commands.entity(entity).try_insert(inherited.clone());
|
||||
} else if maybe_inherited.is_some() {
|
||||
commands.entity(entity).remove::<(Inherited<C>, C)>();
|
||||
}
|
||||
}
|
||||
|
||||
for orphan in &orphaned {
|
||||
commands.entity(orphan).remove::<(Inherited<C>, C)>();
|
||||
}
|
||||
}
|
||||
|
||||
/// add/remove `Inherited::<C>` for targets of entities with modified `Inherited::<C>`
|
||||
pub fn propagate_inherited<C: Component + Clone + PartialEq, F: QueryFilter, R: Relationship>(
|
||||
mut commands: Commands,
|
||||
changed: Query<
|
||||
(&Inherited<C>, &R::RelationshipTarget),
|
||||
(Changed<Inherited<C>>, Without<PropagateStop<C>>, F),
|
||||
>,
|
||||
recurse: Query<
|
||||
(Option<&R::RelationshipTarget>, Option<&Inherited<C>>),
|
||||
(Without<Propagate<C>>, Without<PropagateStop<C>>, F),
|
||||
>,
|
||||
mut removed: RemovedComponents<Inherited<C>>,
|
||||
mut to_process: Local<Vec<(Entity, Option<Inherited<C>>)>>,
|
||||
) {
|
||||
// gather changed
|
||||
for (inherited, targets) in &changed {
|
||||
to_process.extend(
|
||||
targets
|
||||
.iter()
|
||||
.map(|target| (target, Some(inherited.clone()))),
|
||||
);
|
||||
}
|
||||
|
||||
// and removed
|
||||
for entity in removed.read() {
|
||||
if let Ok((Some(targets), _)) = recurse.get(entity) {
|
||||
to_process.extend(targets.iter().map(|target| (target, None)));
|
||||
}
|
||||
}
|
||||
|
||||
// propagate
|
||||
while let Some((entity, maybe_inherited)) = (*to_process).pop() {
|
||||
let Ok((maybe_targets, maybe_current)) = recurse.get(entity) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
if maybe_current == maybe_inherited.as_ref() {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(targets) = maybe_targets {
|
||||
to_process.extend(
|
||||
targets
|
||||
.iter()
|
||||
.map(|target| (target, maybe_inherited.clone())),
|
||||
);
|
||||
}
|
||||
|
||||
if let Some(inherited) = maybe_inherited {
|
||||
commands.entity(entity).try_insert(inherited.clone());
|
||||
} else {
|
||||
commands.entity(entity).remove::<(Inherited<C>, C)>();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// add `C` to entities with `Inherited::<C>`
|
||||
pub fn propagate_output<C: Component + Clone + PartialEq, F: QueryFilter>(
|
||||
mut commands: Commands,
|
||||
changed: Query<
|
||||
(Entity, &Inherited<C>, Option<&C>),
|
||||
(Changed<Inherited<C>>, Without<PropagateOver<C>>, F),
|
||||
>,
|
||||
) {
|
||||
for (entity, inherited, maybe_current) in &changed {
|
||||
if maybe_current.is_some_and(|c| &inherited.0 == c) {
|
||||
continue;
|
||||
}
|
||||
|
||||
commands.entity(entity).try_insert(inherited.0.clone());
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use bevy_ecs::schedule::Schedule;

    use crate::{App, Update};

    use super::*;

    #[derive(Component, Clone, PartialEq, Debug)]
    struct TestValue(u32);

    #[test]
    fn test_simple_propagate() {
        let mut app = App::new();
        app.add_schedule(Schedule::new(Update));
        app.add_plugins(HierarchyPropagatePlugin::<TestValue>::default());

        let propagator = app.world_mut().spawn(Propagate(TestValue(1))).id();
        let intermediate = app.world_mut().spawn_empty().insert(ChildOf(propagator)).id();
        let propagatee = app.world_mut().spawn_empty().insert(ChildOf(intermediate)).id();

        app.update();

        assert!(app
            .world_mut()
            .query::<&TestValue>()
            .get(app.world(), propagatee)
            .is_ok());
    }

    #[test]
    fn test_reparented() {
        let mut app = App::new();
        app.add_schedule(Schedule::new(Update));
        app.add_plugins(HierarchyPropagatePlugin::<TestValue>::default());

        let propagator = app.world_mut().spawn(Propagate(TestValue(1))).id();
        let propagatee = app.world_mut().spawn_empty().insert(ChildOf(propagator)).id();

        app.update();

        assert!(app
            .world_mut()
            .query::<&TestValue>()
            .get(app.world(), propagatee)
            .is_ok());
    }

    #[test]
    fn test_reparented_with_prior() {
        let mut app = App::new();
        app.add_schedule(Schedule::new(Update));
        app.add_plugins(HierarchyPropagatePlugin::<TestValue>::default());

        let propagator_a = app.world_mut().spawn(Propagate(TestValue(1))).id();
        let propagator_b = app.world_mut().spawn(Propagate(TestValue(2))).id();
        let propagatee = app.world_mut().spawn_empty().insert(ChildOf(propagator_a)).id();

        app.update();
        assert_eq!(
            app.world_mut().query::<&TestValue>().get(app.world(), propagatee),
            Ok(&TestValue(1))
        );
        app.world_mut().commands().entity(propagatee).insert(ChildOf(propagator_b));
        app.update();
        assert_eq!(
            app.world_mut().query::<&TestValue>().get(app.world(), propagatee),
            Ok(&TestValue(2))
        );
    }

    #[test]
    fn test_remove_orphan() {
        let mut app = App::new();
        app.add_schedule(Schedule::new(Update));
        app.add_plugins(HierarchyPropagatePlugin::<TestValue>::default());

        let propagator = app.world_mut().spawn(Propagate(TestValue(1))).id();
        let propagatee = app.world_mut().spawn_empty().insert(ChildOf(propagator)).id();

        app.update();
        assert!(app
            .world_mut()
            .query::<&TestValue>()
            .get(app.world(), propagatee)
            .is_ok());
        app.world_mut().commands().entity(propagatee).remove::<ChildOf>();
        app.update();
        assert!(app
            .world_mut()
            .query::<&TestValue>()
            .get(app.world(), propagatee)
            .is_err());
    }

    #[test]
    fn test_remove_propagated() {
        let mut app = App::new();
        app.add_schedule(Schedule::new(Update));
        app.add_plugins(HierarchyPropagatePlugin::<TestValue>::default());

        let propagator = app.world_mut().spawn(Propagate(TestValue(1))).id();
        let propagatee = app.world_mut().spawn_empty().insert(ChildOf(propagator)).id();

        app.update();
        assert!(app
            .world_mut()
            .query::<&TestValue>()
            .get(app.world(), propagatee)
            .is_ok());
        app.world_mut().commands().entity(propagator).remove::<Propagate<TestValue>>();
        app.update();
        assert!(app
            .world_mut()
            .query::<&TestValue>()
            .get(app.world(), propagatee)
            .is_err());
    }

    #[test]
    fn test_propagate_over() {
        let mut app = App::new();
        app.add_schedule(Schedule::new(Update));
        app.add_plugins(HierarchyPropagatePlugin::<TestValue>::default());

        let propagator = app.world_mut().spawn(Propagate(TestValue(1))).id();
        let propagate_over = app.world_mut().spawn(TestValue(2)).insert(ChildOf(propagator)).id();
        let propagatee = app.world_mut().spawn_empty().insert(ChildOf(propagate_over)).id();

        app.update();
        assert_eq!(
            app.world_mut().query::<&TestValue>().get(app.world(), propagatee),
            Ok(&TestValue(1))
        );
    }

    #[test]
    fn test_propagate_stop() {
        let mut app = App::new();
        app.add_schedule(Schedule::new(Update));
        app.add_plugins(HierarchyPropagatePlugin::<TestValue>::default());

        let propagator = app.world_mut().spawn(Propagate(TestValue(1))).id();
        let propagate_stop = app
            .world_mut()
            .spawn(PropagateStop::<TestValue>::default())
            .insert(ChildOf(propagator))
            .id();
        let no_propagatee = app.world_mut().spawn_empty().insert(ChildOf(propagate_stop)).id();

        app.update();
        assert!(app
            .world_mut()
            .query::<&TestValue>()
            .get(app.world(), no_propagatee)
            .is_err());
    }

    #[test]
    fn test_intermediate_override() {
        let mut app = App::new();
        app.add_schedule(Schedule::new(Update));
        app.add_plugins(HierarchyPropagatePlugin::<TestValue>::default());

        let propagator = app.world_mut().spawn(Propagate(TestValue(1))).id();
        let intermediate = app.world_mut().spawn_empty().insert(ChildOf(propagator)).id();
        let propagatee = app.world_mut().spawn_empty().insert(ChildOf(intermediate)).id();

        app.update();
        assert_eq!(
            app.world_mut().query::<&TestValue>().get(app.world(), propagatee),
            Ok(&TestValue(1))
        );

        app.world_mut().entity_mut(intermediate).insert(Propagate(TestValue(2)));
        app.update();
        assert_eq!(
            app.world_mut().query::<&TestValue>().get(app.world(), propagatee),
            Ok(&TestValue(2))
        );
    }

    #[test]
    fn test_filter() {
        #[derive(Component)]
        struct Marker;

        let mut app = App::new();
        app.add_schedule(Schedule::new(Update));
        app.add_plugins(HierarchyPropagatePlugin::<TestValue, With<Marker>>::default());

        let propagator = app.world_mut().spawn(Propagate(TestValue(1))).id();
        let propagatee = app.world_mut().spawn_empty().insert(ChildOf(propagator)).id();

        app.update();
        assert!(app
            .world_mut()
            .query::<&TestValue>()
            .get(app.world(), propagatee)
            .is_err());

        // NOTE: changes to the filter condition are not rechecked
        app.world_mut().entity_mut(propagator).insert(Marker);
        app.world_mut().entity_mut(propagatee).insert(Marker);
        app.update();
        assert!(app
            .world_mut()
            .query::<&TestValue>()
            .get(app.world(), propagatee)
            .is_err());

        app.world_mut().entity_mut(propagator).insert(Propagate(TestValue(1)));
        app.update();
        assert!(app
            .world_mut()
            .query::<&TestValue>()
            .get(app.world(), propagatee)
            .is_ok());
    }
}
@ -3,7 +3,7 @@ name = "bevy_asset"
|
||||
version = "0.16.0-dev"
|
||||
edition = "2024"
|
||||
description = "Provides asset functionality for Bevy Engine"
|
||||
homepage = "https://bevyengine.org"
|
||||
homepage = "https://bevy.org"
|
||||
repository = "https://github.com/bevyengine/bevy"
|
||||
license = "MIT OR Apache-2.0"
|
||||
keywords = ["bevy"]
|
||||
|
||||
@ -3,7 +3,7 @@ name = "bevy_asset_macros"
|
||||
version = "0.16.0-dev"
|
||||
edition = "2024"
|
||||
description = "Derive implementations for bevy_asset"
|
||||
homepage = "https://bevyengine.org"
|
||||
homepage = "https://bevy.org"
|
||||
repository = "https://github.com/bevyengine/bevy"
|
||||
license = "MIT OR Apache-2.0"
|
||||
keywords = ["bevy"]
|
||||
|
||||
@ -106,7 +106,7 @@ impl<'w, A: AsAssetId> AssetChangeCheck<'w, A> {
|
||||
/// - Removed assets are not detected.
|
||||
///
|
||||
/// The list of changed assets only gets updated in the [`AssetEventSystems`] system set,
|
||||
/// which runs in `Last`. Therefore, `AssetChanged` will only pick up asset changes in schedules
|
||||
/// which runs in `PostUpdate`. Therefore, `AssetChanged` will only pick up asset changes in schedules
|
||||
/// following [`AssetEventSystems`] or the next frame. Consider adding the system in the `Last` schedule
|
||||
/// after [`AssetEventSystems`] if you need to react without frame delay to asset changes.
|
||||
///
|
||||
|
||||
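As a usage illustration of the ordering described above, here is a hedged sketch (not part of the diff): the wrapper component, its `AsAssetId` impl, and the exact import paths are assumptions.

```rust
use bevy::asset::{AsAssetId, AssetChanged, AssetEventSystems, AssetId};
use bevy::prelude::*;

// Hypothetical component whose tracked asset is the mesh it points at.
#[derive(Component)]
struct MeshRef(Handle<Mesh>);

impl AsAssetId for MeshRef {
    type Asset = Mesh;
    fn as_asset_id(&self) -> AssetId<Mesh> {
        self.0.id()
    }
}

// Only matches entities whose referenced mesh asset was modified.
fn react_to_mesh_edits(changed: Query<Entity, AssetChanged<MeshRef>>) {
    for entity in &changed {
        info!("mesh used by {entity} changed");
    }
}

fn main() {
    App::new()
        .add_plugins(DefaultPlugins)
        // Running after `AssetEventSystems` (per the doc above) avoids the one-frame delay.
        .add_systems(Last, react_to_mesh_edits.after(AssetEventSystems))
        .run();
}
```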
@ -437,6 +437,18 @@ impl<A: Asset> Assets<A> {
        result
    }

    /// Retrieves a mutable reference to the [`Asset`] with the given `id`, if it exists.
    ///
    /// This is the same as [`Assets::get_mut`] except it doesn't emit [`AssetEvent::Modified`].
    #[inline]
    pub fn get_mut_untracked(&mut self, id: impl Into<AssetId<A>>) -> Option<&mut A> {
        let id: AssetId<A> = id.into();
        match id {
            AssetId::Index { index, .. } => self.dense_storage.get_mut(index),
            AssetId::Uuid { uuid } => self.hash_map.get_mut(&uuid),
        }
    }

    /// Removes (and returns) the [`Asset`] with the given `id`, if it exists.
    /// Note that this supports anything that implements `Into<AssetId<A>>`, which includes [`Handle`] and [`AssetId`].
    pub fn remove(&mut self, id: impl Into<AssetId<A>>) -> Option<A> {
@ -450,6 +462,8 @@ impl<A: Asset> Assets<A> {

    /// Removes (and returns) the [`Asset`] with the given `id`, if it exists. This skips emitting [`AssetEvent::Removed`].
    /// Note that this supports anything that implements `Into<AssetId<A>>`, which includes [`Handle`] and [`AssetId`].
    ///
    /// This is the same as [`Assets::remove`] except it doesn't emit [`AssetEvent::Removed`].
    pub fn remove_untracked(&mut self, id: impl Into<AssetId<A>>) -> Option<A> {
        let id: AssetId<A> = id.into();
        self.duplicate_handles.remove(&id);
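For orientation, a hedged sketch of when the untracked accessors are useful; the `Score` asset and `ScoreHandle` resource are invented for this example.

```rust
use bevy::prelude::*;

#[derive(Asset, TypePath)]
struct Score(u32);

#[derive(Resource)]
struct ScoreHandle(Handle<Score>);

// Mutates the asset every frame without emitting `AssetEvent::Modified`,
// so systems reacting to modification events are not triggered each tick.
fn tick_score(mut scores: ResMut<Assets<Score>>, handle: Res<ScoreHandle>) {
    if let Some(score) = scores.get_mut_untracked(&handle.0) {
        score.0 += 1;
    }
}

// Likewise, `remove_untracked` drops the asset without an `AssetEvent::Removed`.
fn clear_score(mut scores: ResMut<Assets<Score>>, handle: Res<ScoreHandle>) {
    let _ = scores.remove_untracked(&handle.0);
}
```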
@ -81,7 +81,10 @@ impl HttpWasmAssetReader {
                let reader = VecReader::new(bytes);
                Ok(reader)
            }
            404 => Err(AssetReaderError::NotFound(path)),
            // Some web servers, including itch.io's CDN, return 403 when a requested file isn't present.
            // TODO: remove handling of 403 as not found when it's easier to configure
            // see https://github.com/bevyengine/bevy/pull/19268#pullrequestreview-2882410105
            403 | 404 => Err(AssetReaderError::NotFound(path)),
            status => Err(AssetReaderError::HttpError(status)),
        }
    }

@ -141,8 +141,8 @@
#![expect(missing_docs, reason = "Not all docs are written yet, see #3492.")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc(
    html_logo_url = "https://bevyengine.org/assets/icon.png",
    html_favicon_url = "https://bevyengine.org/assets/icon.png"
    html_logo_url = "https://bevy.org/assets/icon.png",
    html_favicon_url = "https://bevy.org/assets/icon.png"
)]
#![no_std]
@ -12,7 +12,7 @@ use alloc::{
    vec::Vec,
};
use atomicow::CowArc;
use bevy_ecs::world::World;
use bevy_ecs::{error::BevyError, world::World};
use bevy_platform::collections::{HashMap, HashSet};
use bevy_tasks::{BoxedFuture, ConditionalSendFuture};
use core::any::{Any, TypeId};

@ -34,7 +34,7 @@ pub trait AssetLoader: Send + Sync + 'static {
    /// The settings type used by this [`AssetLoader`].
    type Settings: Settings + Default + Serialize + for<'a> Deserialize<'a>;
    /// The type of [error](`std::error::Error`) which could be encountered by this loader.
    type Error: Into<Box<dyn core::error::Error + Send + Sync + 'static>>;
    type Error: Into<BevyError>;
    /// Asynchronously loads [`AssetLoader::Asset`] (and any other labeled assets) from the bytes provided by [`Reader`].
    fn load(
        &self,

@ -58,10 +58,7 @@ pub trait ErasedAssetLoader: Send + Sync + 'static {
        reader: &'a mut dyn Reader,
        meta: &'a dyn AssetMetaDyn,
        load_context: LoadContext<'a>,
    ) -> BoxedFuture<
        'a,
        Result<ErasedLoadedAsset, Box<dyn core::error::Error + Send + Sync + 'static>>,
    >;
    ) -> BoxedFuture<'a, Result<ErasedLoadedAsset, BevyError>>;

    /// Returns a list of extensions supported by this asset loader, without the preceding dot.
    fn extensions(&self) -> &[&str];

@ -89,10 +86,7 @@ where
        reader: &'a mut dyn Reader,
        meta: &'a dyn AssetMetaDyn,
        mut load_context: LoadContext<'a>,
    ) -> BoxedFuture<
        'a,
        Result<ErasedLoadedAsset, Box<dyn core::error::Error + Send + Sync + 'static>>,
    > {
    ) -> BoxedFuture<'a, Result<ErasedLoadedAsset, BevyError>> {
        Box::pin(async move {
            let settings = meta
                .loader_settings()

@ -394,15 +388,15 @@ impl<'a> LoadContext<'a> {
    /// result with [`LoadContext::add_labeled_asset`].
    ///
    /// See [`AssetPath`] for more on labeled assets.
    pub fn labeled_asset_scope<A: Asset>(
    pub fn labeled_asset_scope<A: Asset, E>(
        &mut self,
        label: String,
        load: impl FnOnce(&mut LoadContext) -> A,
    ) -> Handle<A> {
        load: impl FnOnce(&mut LoadContext) -> Result<A, E>,
    ) -> Result<Handle<A>, E> {
        let mut context = self.begin_labeled_asset();
        let asset = load(&mut context);
        let asset = load(&mut context)?;
        let loaded_asset = context.finish(asset);
        self.add_loaded_labeled_asset(label, loaded_asset)
        Ok(self.add_loaded_labeled_asset(label, loaded_asset))
    }

    /// This will add the given `asset` as a "labeled [`Asset`]" with the `label` label.
@ -416,7 +410,8 @@ impl<'a> LoadContext<'a> {
    ///
    /// See [`AssetPath`] for more on labeled assets.
    pub fn add_labeled_asset<A: Asset>(&mut self, label: String, asset: A) -> Handle<A> {
        self.labeled_asset_scope(label, |_| asset)
        self.labeled_asset_scope(label, |_| Ok::<_, ()>(asset))
            .expect("the closure returns Ok")
    }

    /// Add a [`LoadedAsset`] that is a "labeled sub asset" of the root path of this load context.
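To illustrate how the `BevyError`-based signatures above fit together, here is a sketch of a custom loader. All of the types, the asset format, and the import paths are hypothetical; only the trait shapes come from the diff.

```rust
use bevy::asset::{io::Reader, AssetLoader, LoadContext};
use bevy::prelude::*;

#[derive(Asset, TypePath)]
struct TextAsset(String);

#[derive(Asset, TypePath)]
struct TextBundleAsset {
    body: Handle<TextAsset>,
}

#[derive(Default)]
struct TextBundleLoader;

impl AssetLoader for TextBundleLoader {
    type Asset = TextBundleAsset;
    type Settings = ();
    // Any error that converts into `BevyError` now satisfies the bound.
    type Error = BevyError;

    async fn load(
        &self,
        reader: &mut dyn Reader,
        _settings: &(),
        load_context: &mut LoadContext<'_>,
    ) -> Result<Self::Asset, Self::Error> {
        let mut bytes = Vec::new();
        reader.read_to_end(&mut bytes).await?;
        // `labeled_asset_scope` is now fallible: the closure returns a `Result`
        // and the error propagates out instead of forcing a panic inside the scope.
        let body = load_context.labeled_asset_scope("body".to_string(), |_ctx| {
            let text = String::from_utf8(bytes)?;
            Ok::<_, BevyError>(TextAsset(text))
        })?;
        Ok(TextBundleAsset { body })
    }
}
```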
@ -490,7 +490,7 @@ impl<'a> AssetPath<'a> {
    }

    /// Returns `true` if this [`AssetPath`] points to a file that is
    /// outside of it's [`AssetSource`](crate::io::AssetSource) folder.
    /// outside of its [`AssetSource`](crate::io::AssetSource) folder.
    ///
    /// ## Example
    /// ```

@ -1945,7 +1945,7 @@ pub enum AssetLoadError {
pub struct AssetLoaderError {
    path: AssetPath<'static>,
    loader_name: &'static str,
    error: Arc<dyn core::error::Error + Send + Sync + 'static>,
    error: Arc<BevyError>,
}

impl AssetLoaderError {
@ -3,7 +3,7 @@ name = "bevy_audio"
|
||||
version = "0.16.0-dev"
|
||||
edition = "2024"
|
||||
description = "Provides audio functionality for Bevy Engine"
|
||||
homepage = "https://bevyengine.org"
|
||||
homepage = "https://bevy.org"
|
||||
repository = "https://github.com/bevyengine/bevy"
|
||||
license = "MIT OR Apache-2.0"
|
||||
keywords = ["bevy"]
|
||||
|
||||
@ -57,6 +57,16 @@ pub struct PlaybackSettings {
    /// Optional scale factor applied to the positions of this audio source and the listener,
    /// overriding the default value configured on [`AudioPlugin::default_spatial_scale`](crate::AudioPlugin::default_spatial_scale).
    pub spatial_scale: Option<SpatialScale>,
    /// The point in time in the audio clip where playback should start. If set to `None`, it will
    /// play from the beginning of the clip.
    ///
    /// If the playback mode is set to `Loop`, each loop will start from this position.
    pub start_position: Option<core::time::Duration>,
    /// How long the audio should play before stopping. If set, the clip will play for at most
    /// the specified duration. If set to `None`, it will play for as long as it can.
    ///
    /// If the playback mode is set to `Loop`, each loop will last for this duration.
    pub duration: Option<core::time::Duration>,
}

impl Default for PlaybackSettings {

@ -81,6 +91,8 @@ impl PlaybackSettings {
        muted: false,
        spatial: false,
        spatial_scale: None,
        start_position: None,
        duration: None,
    };

    /// Will play the associated audio source in a loop.

@ -136,6 +148,18 @@ impl PlaybackSettings {
        self.spatial_scale = Some(spatial_scale);
        self
    }

    /// Helper to use a custom playback start position.
    pub const fn with_start_position(mut self, start_position: core::time::Duration) -> Self {
        self.start_position = Some(start_position);
        self
    }

    /// Helper to use a custom playback duration.
    pub const fn with_duration(mut self, duration: core::time::Duration) -> Self {
        self.duration = Some(duration);
        self
    }
}

/// Settings for the listener for spatial audio sources.
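A small usage sketch of the new fields and helpers (not part of the diff; the asset path is a placeholder):

```rust
use bevy::prelude::*;
use core::time::Duration;

// Loops a five-second window of the clip, starting two seconds in.
fn play_excerpt(mut commands: Commands, asset_server: Res<AssetServer>) {
    commands.spawn((
        AudioPlayer::new(asset_server.load("sounds/music.ogg")),
        PlaybackSettings::LOOP
            .with_start_position(Duration::from_secs(2))
            .with_duration(Duration::from_secs(5)),
    ));
}
```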
|
||||
@ -156,12 +156,49 @@ pub(crate) fn play_queued_audio_system<Source: Asset + Decodable>(
|
||||
}
|
||||
};
|
||||
|
||||
let decoder = audio_source.decoder();
|
||||
|
||||
match settings.mode {
|
||||
PlaybackMode::Loop => sink.append(audio_source.decoder().repeat_infinite()),
|
||||
PlaybackMode::Loop => match (settings.start_position, settings.duration) {
|
||||
// custom start position and duration
|
||||
(Some(start_position), Some(duration)) => sink.append(
|
||||
decoder
|
||||
.skip_duration(start_position)
|
||||
.take_duration(duration)
|
||||
.repeat_infinite(),
|
||||
),
|
||||
|
||||
// custom start position
|
||||
(Some(start_position), None) => {
|
||||
sink.append(decoder.skip_duration(start_position).repeat_infinite());
|
||||
}
|
||||
|
||||
// custom duration
|
||||
(None, Some(duration)) => {
|
||||
sink.append(decoder.take_duration(duration).repeat_infinite());
|
||||
}
|
||||
|
||||
// full clip
|
||||
(None, None) => sink.append(decoder.repeat_infinite()),
|
||||
},
|
||||
PlaybackMode::Once | PlaybackMode::Despawn | PlaybackMode::Remove => {
|
||||
sink.append(audio_source.decoder());
|
||||
match (settings.start_position, settings.duration) {
|
||||
(Some(start_position), Some(duration)) => sink.append(
|
||||
decoder
|
||||
.skip_duration(start_position)
|
||||
.take_duration(duration),
|
||||
),
|
||||
|
||||
(Some(start_position), None) => {
|
||||
sink.append(decoder.skip_duration(start_position));
|
||||
}
|
||||
|
||||
(None, Some(duration)) => sink.append(decoder.take_duration(duration)),
|
||||
|
||||
(None, None) => sink.append(decoder),
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
let mut sink = SpatialAudioSink::new(sink);
|
||||
|
||||
@ -196,12 +233,49 @@ pub(crate) fn play_queued_audio_system<Source: Asset + Decodable>(
|
||||
}
|
||||
};
|
||||
|
||||
let decoder = audio_source.decoder();
|
||||
|
||||
match settings.mode {
|
||||
PlaybackMode::Loop => sink.append(audio_source.decoder().repeat_infinite()),
|
||||
PlaybackMode::Loop => match (settings.start_position, settings.duration) {
|
||||
// custom start position and duration
|
||||
(Some(start_position), Some(duration)) => sink.append(
|
||||
decoder
|
||||
.skip_duration(start_position)
|
||||
.take_duration(duration)
|
||||
.repeat_infinite(),
|
||||
),
|
||||
|
||||
// custom start position
|
||||
(Some(start_position), None) => {
|
||||
sink.append(decoder.skip_duration(start_position).repeat_infinite());
|
||||
}
|
||||
|
||||
// custom duration
|
||||
(None, Some(duration)) => {
|
||||
sink.append(decoder.take_duration(duration).repeat_infinite());
|
||||
}
|
||||
|
||||
// full clip
|
||||
(None, None) => sink.append(decoder.repeat_infinite()),
|
||||
},
|
||||
PlaybackMode::Once | PlaybackMode::Despawn | PlaybackMode::Remove => {
|
||||
sink.append(audio_source.decoder());
|
||||
match (settings.start_position, settings.duration) {
|
||||
(Some(start_position), Some(duration)) => sink.append(
|
||||
decoder
|
||||
.skip_duration(start_position)
|
||||
.take_duration(duration),
|
||||
),
|
||||
|
||||
(Some(start_position), None) => {
|
||||
sink.append(decoder.skip_duration(start_position));
|
||||
}
|
||||
|
||||
(None, Some(duration)) => sink.append(decoder.take_duration(duration)),
|
||||
|
||||
(None, None) => sink.append(decoder),
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
let mut sink = AudioSink::new(sink);
|
||||
|
||||
|
||||
@ -1,8 +1,8 @@
#![forbid(unsafe_code)]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc(
    html_logo_url = "https://bevyengine.org/assets/icon.png",
    html_favicon_url = "https://bevyengine.org/assets/icon.png"
    html_logo_url = "https://bevy.org/assets/icon.png",
    html_favicon_url = "https://bevy.org/assets/icon.png"
)]

//! Audio support for the game engine Bevy

@ -42,6 +42,14 @@ pub trait AudioSinkPlayback {
    /// No effect if not paused.
    fn play(&self);

    /// Returns the position of the sound that's being played.
    ///
    /// This takes into account any speedup or delay applied.
    ///
    /// Example: if you [`set_speed(2.0)`](Self::set_speed) and [`position()`](Self::position) returns *5s*,
    /// then the position in the recording is *10s* from its start.
    fn position(&self) -> Duration;

    /// Attempts to seek to a given position in the current source.
    ///
    /// This blocks between 0 and ~5 milliseconds.

@ -181,6 +189,10 @@ impl AudioSinkPlayback for AudioSink {
        self.sink.play();
    }

    fn position(&self) -> Duration {
        self.sink.get_pos()
    }

    fn try_seek(&self, pos: Duration) -> Result<(), SeekError> {
        self.sink.try_seek(pos)
    }

@ -281,6 +293,10 @@ impl AudioSinkPlayback for SpatialAudioSink {
        self.sink.play();
    }

    fn position(&self) -> Duration {
        self.sink.get_pos()
    }

    fn try_seek(&self, pos: Duration) -> Result<(), SeekError> {
        self.sink.try_seek(pos)
    }
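A short sketch of the new trait surface in use; the system and thresholds are invented for illustration, and it assumes `AudioSinkPlayback` is in scope via the prelude.

```rust
use bevy::prelude::*;
use core::time::Duration;

// Rewind any sink that has played past 30 seconds back to the 10 second mark.
fn scrub_long_sounds(sinks: Query<&AudioSink>) {
    for sink in &sinks {
        if sink.position() > Duration::from_secs(30) {
            // `try_seek` may fail for sources that don't support seeking.
            let _ = sink.try_seek(Duration::from_secs(10));
        }
    }
}
```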
|
||||
@ -34,7 +34,7 @@ impl GlobalVolume {
#[derive(Clone, Copy, Debug, Reflect)]
#[reflect(Clone, Debug, PartialEq)]
pub enum Volume {
    /// Create a new [`Volume`] from the given volume in linear scale.
    /// Create a new [`Volume`] from the given volume in the linear scale.
    ///
    /// In a linear scale, the value `1.0` represents the "normal" volume,
    /// meaning the audio is played at its original level. Values greater than

@ -144,7 +144,7 @@ impl Volume {

    /// Returns the volume in decibels as a float.
    ///
    /// If the volume is silent / off / muted, i.e. it's underlying linear scale
    /// If the volume is silent / off / muted, i.e., its underlying linear scale
    /// is `0.0`, this method returns negative infinity.
    pub fn to_decibels(&self) -> f32 {
        match self {
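For reference, the linear/decibel relationship the doc above relies on, as a hedged worked example: the usual amplitude convention is dB = 20 * log10(linear) and linear = 10^(dB / 20); the exact helper names are internal to `bevy_audio`.

```rust
fn volume_conversion_example() {
    let linear: f32 = 0.5;
    let db = 20.0 * linear.log10(); // ≈ -6.02 dB, i.e. "half volume"
    assert!((db + 6.0206).abs() < 1e-3);
    // The degenerate case from the doc: a linear scale of 0.0 maps to negative infinity dB.
    assert_eq!(20.0 * 0.0_f32.log10(), f32::NEG_INFINITY);
}
```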
@ -155,57 +155,95 @@ impl Volume {
|
||||
|
||||
/// The silent volume. Also known as "off" or "muted".
|
||||
pub const SILENT: Self = Volume::Linear(0.0);
|
||||
}
|
||||
|
||||
impl core::ops::Add<Self> for Volume {
|
||||
type Output = Self;
|
||||
|
||||
fn add(self, rhs: Self) -> Self {
|
||||
use Volume::{Decibels, Linear};
|
||||
|
||||
match (self, rhs) {
|
||||
(Linear(a), Linear(b)) => Linear(a + b),
|
||||
(Decibels(a), Decibels(b)) => Decibels(linear_to_decibels(
|
||||
decibels_to_linear(a) + decibels_to_linear(b),
|
||||
)),
|
||||
// {Linear, Decibels} favors the left hand side of the operation by
|
||||
// first converting the right hand side to the same type as the left
|
||||
// hand side and then performing the operation.
|
||||
(Linear(..), Decibels(db)) => self + Linear(decibels_to_linear(db)),
|
||||
(Decibels(..), Linear(l)) => self + Decibels(linear_to_decibels(l)),
|
||||
}
|
||||
/// Increases the volume by the specified percentage.
|
||||
///
|
||||
/// This method works in the linear domain, where a 100% increase
|
||||
/// means doubling the volume (equivalent to +6.02dB).
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `percentage` - The percentage to increase (50.0 means 50% increase)
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
/// use bevy_audio::Volume;
|
||||
///
|
||||
/// let volume = Volume::Linear(1.0);
|
||||
/// let increased = volume.increase_by_percentage(100.0);
|
||||
/// assert_eq!(increased.to_linear(), 2.0);
|
||||
/// ```
|
||||
pub fn increase_by_percentage(&self, percentage: f32) -> Self {
|
||||
let factor = 1.0 + (percentage / 100.0);
|
||||
Volume::Linear(self.to_linear() * factor)
|
||||
}
|
||||
}
|
||||
|
||||
impl core::ops::AddAssign<Self> for Volume {
|
||||
fn add_assign(&mut self, rhs: Self) {
|
||||
*self = *self + rhs;
|
||||
/// Decreases the volume by the specified percentage.
|
||||
///
|
||||
/// This method works in the linear domain, where a 50% decrease
|
||||
/// means halving the volume (equivalent to -6.02dB).
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `percentage` - The percentage to decrease (50.0 means 50% decrease)
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
/// use bevy_audio::Volume;
|
||||
///
|
||||
/// let volume = Volume::Linear(1.0);
|
||||
/// let decreased = volume.decrease_by_percentage(50.0);
|
||||
/// assert_eq!(decreased.to_linear(), 0.5);
|
||||
/// ```
|
||||
pub fn decrease_by_percentage(&self, percentage: f32) -> Self {
|
||||
let factor = 1.0 - (percentage / 100.0).clamp(0.0, 1.0);
|
||||
Volume::Linear(self.to_linear() * factor)
|
||||
}
|
||||
}
|
||||
|
||||
impl core::ops::Sub<Self> for Volume {
|
||||
type Output = Self;
|
||||
|
||||
fn sub(self, rhs: Self) -> Self {
|
||||
use Volume::{Decibels, Linear};
|
||||
|
||||
match (self, rhs) {
|
||||
(Linear(a), Linear(b)) => Linear(a - b),
|
||||
(Decibels(a), Decibels(b)) => Decibels(linear_to_decibels(
|
||||
decibels_to_linear(a) - decibels_to_linear(b),
|
||||
)),
|
||||
// {Linear, Decibels} favors the left hand side of the operation by
|
||||
// first converting the right hand side to the same type as the left
|
||||
// hand side and then performing the operation.
|
||||
(Linear(..), Decibels(db)) => self - Linear(decibels_to_linear(db)),
|
||||
(Decibels(..), Linear(l)) => self - Decibels(linear_to_decibels(l)),
|
||||
}
|
||||
/// Scales the volume to a specific linear factor relative to the current volume.
|
||||
///
|
||||
/// This is different from `adjust_by_linear` as it sets the volume to be
|
||||
/// exactly the factor times the original volume, rather than applying
|
||||
/// the factor to the current volume.
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `factor` - The scaling factor (2.0 = twice as loud, 0.5 = half as loud)
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
/// use bevy_audio::Volume;
|
||||
///
|
||||
/// let volume = Volume::Linear(0.8);
|
||||
/// let scaled = volume.scale_to_factor(1.25);
|
||||
/// assert_eq!(scaled.to_linear(), 1.0);
|
||||
/// ```
|
||||
pub fn scale_to_factor(&self, factor: f32) -> Self {
|
||||
Volume::Linear(self.to_linear() * factor)
|
||||
}
|
||||
}
|
||||
|
||||
impl core::ops::SubAssign<Self> for Volume {
|
||||
fn sub_assign(&mut self, rhs: Self) {
|
||||
*self = *self - rhs;
|
||||
/// Creates a fade effect by interpolating between current volume and target volume.
|
||||
///
|
||||
/// This method performs linear interpolation in the linear domain, which
|
||||
/// provides a more natural-sounding fade effect.
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `target` - The target volume to fade towards
|
||||
/// * `factor` - The interpolation factor (0.0 = current volume, 1.0 = target volume)
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
/// use bevy_audio::Volume;
|
||||
///
|
||||
/// let current = Volume::Linear(1.0);
|
||||
/// let target = Volume::Linear(0.0);
|
||||
/// let faded = current.fade_towards(target, 0.5);
|
||||
/// assert_eq!(faded.to_linear(), 0.5);
|
||||
/// ```
|
||||
pub fn fade_towards(&self, target: Volume, factor: f32) -> Self {
|
||||
let current_linear = self.to_linear();
|
||||
let target_linear = target.to_linear();
|
||||
let factor_clamped = factor.clamp(0.0, 1.0);
|
||||
|
||||
let interpolated = current_linear + (target_linear - current_linear) * factor_clamped;
|
||||
Volume::Linear(interpolated)
|
||||
}
|
||||
}
|
||||
|
||||
@ -337,8 +375,9 @@ mod tests {
|
||||
Linear(f32::NEG_INFINITY).to_decibels().is_infinite(),
|
||||
"Negative infinite linear scale is equivalent to infinite decibels"
|
||||
);
|
||||
assert!(
|
||||
Decibels(f32::NEG_INFINITY).to_linear().abs() == 0.0,
|
||||
assert_eq!(
|
||||
Decibels(f32::NEG_INFINITY).to_linear().abs(),
|
||||
0.0,
|
||||
"Negative infinity decibels is equivalent to zero linear scale"
|
||||
);
|
||||
|
||||
@ -361,6 +400,74 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_increase_by_percentage() {
|
||||
let volume = Linear(1.0);
|
||||
|
||||
// 100% increase should double the volume
|
||||
let increased = volume.increase_by_percentage(100.0);
|
||||
assert_eq!(increased.to_linear(), 2.0);
|
||||
|
||||
// 50% increase
|
||||
let increased = volume.increase_by_percentage(50.0);
|
||||
assert_eq!(increased.to_linear(), 1.5);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_decrease_by_percentage() {
|
||||
let volume = Linear(1.0);
|
||||
|
||||
// 50% decrease should halve the volume
|
||||
let decreased = volume.decrease_by_percentage(50.0);
|
||||
assert_eq!(decreased.to_linear(), 0.5);
|
||||
|
||||
// 25% decrease
|
||||
let decreased = volume.decrease_by_percentage(25.0);
|
||||
assert_eq!(decreased.to_linear(), 0.75);
|
||||
|
||||
// 100% decrease should result in silence
|
||||
let decreased = volume.decrease_by_percentage(100.0);
|
||||
assert_eq!(decreased.to_linear(), 0.0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_scale_to_factor() {
|
||||
let volume = Linear(0.8);
|
||||
let scaled = volume.scale_to_factor(1.25);
|
||||
assert_eq!(scaled.to_linear(), 1.0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fade_towards() {
|
||||
let current = Linear(1.0);
|
||||
let target = Linear(0.0);
|
||||
|
||||
// 50% fade should result in 0.5 linear volume
|
||||
let faded = current.fade_towards(target, 0.5);
|
||||
assert_eq!(faded.to_linear(), 0.5);
|
||||
|
||||
// 0% fade should keep current volume
|
||||
let faded = current.fade_towards(target, 0.0);
|
||||
assert_eq!(faded.to_linear(), 1.0);
|
||||
|
||||
// 100% fade should reach target volume
|
||||
let faded = current.fade_towards(target, 1.0);
|
||||
assert_eq!(faded.to_linear(), 0.0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_decibel_math_properties() {
|
||||
let volume = Linear(1.0);
|
||||
|
||||
// Adding 20dB should multiply linear volume by 10
|
||||
let adjusted = volume * Decibels(20.0);
|
||||
assert_approx_eq(adjusted, Linear(10.0));
|
||||
|
||||
// Subtracting 20dB should divide linear volume by 10
|
||||
let adjusted = volume / Decibels(20.0);
|
||||
assert_approx_eq(adjusted, Linear(0.1));
|
||||
}
|
||||
|
||||
fn assert_approx_eq(a: Volume, b: Volume) {
|
||||
const EPSILON: f32 = 0.0001;
|
||||
|
||||
@ -380,52 +487,6 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn volume_ops_add() {
|
||||
// Linear to Linear.
|
||||
assert_approx_eq(Linear(0.5) + Linear(0.5), Linear(1.0));
|
||||
assert_approx_eq(Linear(0.5) + Linear(0.1), Linear(0.6));
|
||||
assert_approx_eq(Linear(0.5) + Linear(-0.5), Linear(0.0));
|
||||
|
||||
// Decibels to Decibels.
|
||||
assert_approx_eq(Decibels(0.0) + Decibels(0.0), Decibels(6.0206003));
|
||||
assert_approx_eq(Decibels(6.0) + Decibels(6.0), Decibels(12.020599));
|
||||
assert_approx_eq(Decibels(-6.0) + Decibels(-6.0), Decibels(0.020599423));
|
||||
|
||||
// {Linear, Decibels} favors the left hand side of the operation.
|
||||
assert_approx_eq(Linear(0.5) + Decibels(0.0), Linear(1.5));
|
||||
assert_approx_eq(Decibels(0.0) + Linear(0.5), Decibels(3.521825));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn volume_ops_add_assign() {
|
||||
// Linear to Linear.
|
||||
let mut volume = Linear(0.5);
|
||||
volume += Linear(0.5);
|
||||
assert_approx_eq(volume, Linear(1.0));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn volume_ops_sub() {
|
||||
// Linear to Linear.
|
||||
assert_approx_eq(Linear(0.5) - Linear(0.5), Linear(0.0));
|
||||
assert_approx_eq(Linear(0.5) - Linear(0.1), Linear(0.4));
|
||||
assert_approx_eq(Linear(0.5) - Linear(-0.5), Linear(1.0));
|
||||
|
||||
// Decibels to Decibels.
|
||||
assert_eq!(Decibels(0.0) - Decibels(0.0), Decibels(f32::NEG_INFINITY));
|
||||
assert_approx_eq(Decibels(6.0) - Decibels(4.0), Decibels(-7.736506));
|
||||
assert_eq!(Decibels(-6.0) - Decibels(-6.0), Decibels(f32::NEG_INFINITY));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn volume_ops_sub_assign() {
|
||||
// Linear to Linear.
|
||||
let mut volume = Linear(0.5);
|
||||
volume -= Linear(0.5);
|
||||
assert_approx_eq(volume, Linear(0.0));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn volume_ops_mul() {
|
||||
// Linear to Linear.
|
||||
|
||||
@ -3,7 +3,7 @@ name = "bevy_color"
|
||||
version = "0.16.0-dev"
|
||||
edition = "2024"
|
||||
description = "Types for representing and manipulating color values"
|
||||
homepage = "https://bevyengine.org"
|
||||
homepage = "https://bevy.org"
|
||||
repository = "https://github.com/bevyengine/bevy"
|
||||
license = "MIT OR Apache-2.0"
|
||||
keywords = ["bevy", "color"]
|
||||
|
||||
@ -1,8 +1,8 @@
|
||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||
#![forbid(unsafe_code)]
|
||||
#![doc(
|
||||
html_logo_url = "https://bevyengine.org/assets/icon.png",
|
||||
html_favicon_url = "https://bevyengine.org/assets/icon.png"
|
||||
html_logo_url = "https://bevy.org/assets/icon.png",
|
||||
html_favicon_url = "https://bevy.org/assets/icon.png"
|
||||
)]
|
||||
#![no_std]
|
||||
|
||||
@ -262,6 +262,7 @@ macro_rules! impl_componentwise_vector_space {
|
||||
}
|
||||
|
||||
impl bevy_math::VectorSpace for $ty {
|
||||
type Scalar = f32;
|
||||
const ZERO: Self = Self {
|
||||
$($element: 0.0,)+
|
||||
};
|
||||
|
||||
@ -177,8 +177,8 @@ impl Srgba {
|
||||
pub fn to_hex(&self) -> String {
|
||||
let [r, g, b, a] = self.to_u8_array();
|
||||
match a {
|
||||
255 => format!("#{:02X}{:02X}{:02X}", r, g, b),
|
||||
_ => format!("#{:02X}{:02X}{:02X}{:02X}", r, g, b, a),
|
||||
255 => format!("#{r:02X}{g:02X}{b:02X}"),
|
||||
_ => format!("#{r:02X}{g:02X}{b:02X}{a:02X}"),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -7,7 +7,7 @@ authors = [
|
||||
"Carter Anderson <mcanders1@gmail.com>",
|
||||
]
|
||||
description = "Provides a core render pipeline for Bevy Engine."
|
||||
homepage = "https://bevyengine.org"
|
||||
homepage = "https://bevy.org"
|
||||
repository = "https://github.com/bevyengine/bevy"
|
||||
license = "MIT OR Apache-2.0"
|
||||
keywords = ["bevy"]
|
||||
|
||||
@ -1,12 +1,12 @@
|
||||
use bevy_app::prelude::*;
|
||||
use bevy_asset::{load_internal_asset, AssetApp, Assets, Handle};
|
||||
use bevy_asset::{embedded_asset, AssetApp, Assets, Handle};
|
||||
use bevy_ecs::prelude::*;
|
||||
use bevy_render::{
|
||||
extract_component::ExtractComponentPlugin,
|
||||
render_asset::RenderAssetPlugin,
|
||||
render_graph::RenderGraphApp,
|
||||
render_resource::{
|
||||
Buffer, BufferDescriptor, BufferUsages, PipelineCache, Shader, SpecializedComputePipelines,
|
||||
Buffer, BufferDescriptor, BufferUsages, PipelineCache, SpecializedComputePipelines,
|
||||
},
|
||||
renderer::RenderDevice,
|
||||
ExtractSchedule, Render, RenderApp, RenderSystems,
|
||||
@ -21,9 +21,7 @@ mod settings;
|
||||
use buffers::{extract_buffers, prepare_buffers, AutoExposureBuffers};
|
||||
pub use compensation_curve::{AutoExposureCompensationCurve, AutoExposureCompensationCurveError};
|
||||
use node::AutoExposureNode;
|
||||
use pipeline::{
|
||||
AutoExposurePass, AutoExposurePipeline, ViewAutoExposurePipeline, METERING_SHADER_HANDLE,
|
||||
};
|
||||
use pipeline::{AutoExposurePass, AutoExposurePipeline, ViewAutoExposurePipeline};
|
||||
pub use settings::AutoExposure;
|
||||
|
||||
use crate::{
|
||||
@ -43,12 +41,7 @@ struct AutoExposureResources {
|
||||
|
||||
impl Plugin for AutoExposurePlugin {
|
||||
fn build(&self, app: &mut App) {
|
||||
load_internal_asset!(
|
||||
app,
|
||||
METERING_SHADER_HANDLE,
|
||||
"auto_exposure.wgsl",
|
||||
Shader::from_wgsl
|
||||
);
|
||||
embedded_asset!(app, "auto_exposure.wgsl");
|
||||
|
||||
app.add_plugins(RenderAssetPlugin::<GpuAutoExposureCompensationCurve>::default())
|
||||
.register_type::<AutoExposureCompensationCurve>()
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
use super::compensation_curve::{
|
||||
AutoExposureCompensationCurve, AutoExposureCompensationCurveUniform,
|
||||
};
|
||||
use bevy_asset::{prelude::*, weak_handle};
|
||||
use bevy_asset::{load_embedded_asset, prelude::*};
|
||||
use bevy_ecs::prelude::*;
|
||||
use bevy_image::Image;
|
||||
use bevy_render::{
|
||||
@ -44,9 +44,6 @@ pub enum AutoExposurePass {
|
||||
Average,
|
||||
}
|
||||
|
||||
pub const METERING_SHADER_HANDLE: Handle<Shader> =
|
||||
weak_handle!("05c84384-afa4-41d9-844e-e9cd5e7609af");
|
||||
|
||||
pub const HISTOGRAM_BIN_COUNT: u64 = 64;
|
||||
|
||||
impl FromWorld for AutoExposurePipeline {
|
||||
@ -71,7 +68,7 @@ impl FromWorld for AutoExposurePipeline {
|
||||
),
|
||||
),
|
||||
),
|
||||
histogram_shader: METERING_SHADER_HANDLE.clone(),
|
||||
histogram_shader: load_embedded_asset!(world, "auto_exposure.wgsl"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -5,7 +5,7 @@ use bevy_asset::Handle;
|
||||
use bevy_ecs::{prelude::Component, reflect::ReflectComponent};
|
||||
use bevy_image::Image;
|
||||
use bevy_reflect::{std_traits::ReflectDefault, Reflect};
|
||||
use bevy_render::extract_component::ExtractComponent;
|
||||
use bevy_render::{extract_component::ExtractComponent, view::Hdr};
|
||||
use bevy_utils::default;
|
||||
|
||||
/// Component that enables auto exposure for an HDR-enabled 2d or 3d camera.
|
||||
@ -25,6 +25,7 @@ use bevy_utils::default;
|
||||
/// **Auto Exposure requires compute shaders and is not compatible with WebGL2.**
|
||||
#[derive(Component, Clone, Reflect, ExtractComponent)]
|
||||
#[reflect(Component, Default, Clone)]
|
||||
#[require(Hdr)]
|
||||
pub struct AutoExposure {
|
||||
/// The range of exposure values for the histogram.
|
||||
///
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
use bevy_app::{App, Plugin};
|
||||
use bevy_asset::{load_internal_asset, weak_handle, Handle};
|
||||
use bevy_asset::{embedded_asset, load_embedded_asset, Handle};
|
||||
use bevy_ecs::prelude::*;
|
||||
use bevy_render::{
|
||||
render_resource::{
|
||||
@ -12,14 +12,12 @@ use bevy_render::{
|
||||
|
||||
use crate::fullscreen_vertex_shader::fullscreen_shader_vertex_state;
|
||||
|
||||
pub const BLIT_SHADER_HANDLE: Handle<Shader> = weak_handle!("59be3075-c34e-43e7-bf24-c8fe21a0192e");
|
||||
|
||||
/// Adds support for specialized "blit pipelines", which can be used to write one texture to another.
|
||||
pub struct BlitPlugin;
|
||||
|
||||
impl Plugin for BlitPlugin {
|
||||
fn build(&self, app: &mut App) {
|
||||
load_internal_asset!(app, BLIT_SHADER_HANDLE, "blit.wgsl", Shader::from_wgsl);
|
||||
embedded_asset!(app, "blit.wgsl");
|
||||
|
||||
if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
|
||||
render_app.allow_ambiguous_resource::<SpecializedRenderPipelines<BlitPipeline>>();
|
||||
@ -40,6 +38,7 @@ impl Plugin for BlitPlugin {
|
||||
pub struct BlitPipeline {
|
||||
pub texture_bind_group: BindGroupLayout,
|
||||
pub sampler: Sampler,
|
||||
pub shader: Handle<Shader>,
|
||||
}
|
||||
|
||||
impl FromWorld for BlitPipeline {
|
||||
@ -62,6 +61,7 @@ impl FromWorld for BlitPipeline {
|
||||
BlitPipeline {
|
||||
texture_bind_group,
|
||||
sampler,
|
||||
shader: load_embedded_asset!(render_world, "blit.wgsl"),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -82,7 +82,7 @@ impl SpecializedRenderPipeline for BlitPipeline {
|
||||
layout: vec![self.texture_bind_group.clone()],
|
||||
vertex: fullscreen_shader_vertex_state(),
|
||||
fragment: Some(FragmentState {
|
||||
shader: BLIT_SHADER_HANDLE,
|
||||
shader: self.shader.clone(),
|
||||
shader_defs: vec![],
|
||||
entry_point: "fs_main".into(),
|
||||
targets: vec![Some(ColorTargetState {
|
||||
|
||||
@ -1,5 +1,6 @@
|
||||
use super::{Bloom, BLOOM_SHADER_HANDLE, BLOOM_TEXTURE_FORMAT};
|
||||
use super::{Bloom, BLOOM_TEXTURE_FORMAT};
|
||||
use crate::fullscreen_vertex_shader::fullscreen_shader_vertex_state;
|
||||
use bevy_asset::{load_embedded_asset, Handle};
|
||||
use bevy_ecs::{
|
||||
prelude::{Component, Entity},
|
||||
resource::Resource,
|
||||
@ -26,6 +27,8 @@ pub struct BloomDownsamplingPipeline {
|
||||
/// Layout with a texture, a sampler, and uniforms
|
||||
pub bind_group_layout: BindGroupLayout,
|
||||
pub sampler: Sampler,
|
||||
/// The shader asset handle.
|
||||
pub shader: Handle<Shader>,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Hash, Clone)]
|
||||
@ -78,6 +81,7 @@ impl FromWorld for BloomDownsamplingPipeline {
|
||||
BloomDownsamplingPipeline {
|
||||
bind_group_layout,
|
||||
sampler,
|
||||
shader: load_embedded_asset!(world, "bloom.wgsl"),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -120,7 +124,7 @@ impl SpecializedRenderPipeline for BloomDownsamplingPipeline {
|
||||
layout,
|
||||
vertex: fullscreen_shader_vertex_state(),
|
||||
fragment: Some(FragmentState {
|
||||
shader: BLOOM_SHADER_HANDLE,
|
||||
shader: self.shader.clone(),
|
||||
shader_defs,
|
||||
entry_point,
|
||||
targets: vec![Some(ColorTargetState {
|
||||
|
||||
@ -9,7 +9,7 @@ use crate::{
|
||||
core_3d::graph::{Core3d, Node3d},
|
||||
};
|
||||
use bevy_app::{App, Plugin};
|
||||
use bevy_asset::{load_internal_asset, weak_handle, Handle};
|
||||
use bevy_asset::embedded_asset;
|
||||
use bevy_color::{Gray, LinearRgba};
|
||||
use bevy_ecs::{prelude::*, query::QueryItem};
|
||||
use bevy_math::{ops, UVec2};
|
||||
@ -36,15 +36,13 @@ use upsampling_pipeline::{
|
||||
prepare_upsampling_pipeline, BloomUpsamplingPipeline, UpsamplingPipelineIds,
|
||||
};
|
||||
|
||||
const BLOOM_SHADER_HANDLE: Handle<Shader> = weak_handle!("c9190ddc-573b-4472-8b21-573cab502b73");
|
||||
|
||||
const BLOOM_TEXTURE_FORMAT: TextureFormat = TextureFormat::Rg11b10Ufloat;
|
||||
|
||||
pub struct BloomPlugin;
|
||||
|
||||
impl Plugin for BloomPlugin {
|
||||
fn build(&self, app: &mut App) {
|
||||
load_internal_asset!(app, BLOOM_SHADER_HANDLE, "bloom.wgsl", Shader::from_wgsl);
|
||||
embedded_asset!(app, "bloom.wgsl");
|
||||
|
||||
app.register_type::<Bloom>();
|
||||
app.register_type::<BloomPrefilter>();
|
||||
|
||||
@ -1,8 +1,12 @@
|
||||
use super::downsampling_pipeline::BloomUniforms;
|
||||
use bevy_ecs::{prelude::Component, query::QueryItem, reflect::ReflectComponent};
|
||||
use bevy_ecs::{
|
||||
prelude::Component,
|
||||
query::{QueryItem, With},
|
||||
reflect::ReflectComponent,
|
||||
};
|
||||
use bevy_math::{AspectRatio, URect, UVec4, Vec2, Vec4};
|
||||
use bevy_reflect::{std_traits::ReflectDefault, Reflect};
|
||||
use bevy_render::{extract_component::ExtractComponent, prelude::Camera};
|
||||
use bevy_render::{extract_component::ExtractComponent, prelude::Camera, view::Hdr};
|
||||
|
||||
/// Applies a bloom effect to an HDR-enabled 2d or 3d camera.
|
||||
///
|
||||
@ -26,6 +30,7 @@ use bevy_render::{extract_component::ExtractComponent, prelude::Camera};
|
||||
/// used in Bevy as well as a visualization of the curve's respective scattering profile.
|
||||
#[derive(Component, Reflect, Clone)]
|
||||
#[reflect(Component, Default, Clone)]
|
||||
#[require(Hdr)]
|
||||
pub struct Bloom {
|
||||
/// Controls the baseline of how much the image is scattered (default: 0.15).
|
||||
///
|
||||
@ -219,7 +224,7 @@ pub enum BloomCompositeMode {
|
||||
impl ExtractComponent for Bloom {
|
||||
type QueryData = (&'static Self, &'static Camera);
|
||||
|
||||
type QueryFilter = ();
|
||||
type QueryFilter = With<Hdr>;
|
||||
type Out = (Self, BloomUniforms);
|
||||
|
||||
fn extract_component((bloom, camera): QueryItem<'_, Self::QueryData>) -> Option<Self::Out> {
|
||||
@ -228,9 +233,8 @@ impl ExtractComponent for Bloom {
|
||||
camera.physical_viewport_size(),
|
||||
camera.physical_target_size(),
|
||||
camera.is_active,
|
||||
camera.hdr,
|
||||
) {
|
||||
(Some(URect { min: origin, .. }), Some(size), Some(target_size), true, true)
|
||||
(Some(URect { min: origin, .. }), Some(size), Some(target_size), true)
|
||||
if size.x != 0 && size.y != 0 =>
|
||||
{
|
||||
let threshold = bloom.prefilter.threshold;
|
||||
|
||||
@ -1,8 +1,8 @@
|
||||
use super::{
|
||||
downsampling_pipeline::BloomUniforms, Bloom, BloomCompositeMode, BLOOM_SHADER_HANDLE,
|
||||
BLOOM_TEXTURE_FORMAT,
|
||||
downsampling_pipeline::BloomUniforms, Bloom, BloomCompositeMode, BLOOM_TEXTURE_FORMAT,
|
||||
};
|
||||
use crate::fullscreen_vertex_shader::fullscreen_shader_vertex_state;
|
||||
use bevy_asset::{load_embedded_asset, Handle};
|
||||
use bevy_ecs::{
|
||||
prelude::{Component, Entity},
|
||||
resource::Resource,
|
||||
@ -27,6 +27,8 @@ pub struct UpsamplingPipelineIds {
|
||||
#[derive(Resource)]
|
||||
pub struct BloomUpsamplingPipeline {
|
||||
pub bind_group_layout: BindGroupLayout,
|
||||
/// The shader asset handle.
|
||||
pub shader: Handle<Shader>,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Hash, Clone)]
|
||||
@ -54,7 +56,10 @@ impl FromWorld for BloomUpsamplingPipeline {
|
||||
),
|
||||
);
|
||||
|
||||
BloomUpsamplingPipeline { bind_group_layout }
|
||||
BloomUpsamplingPipeline {
|
||||
bind_group_layout,
|
||||
shader: load_embedded_asset!(world, "bloom.wgsl"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -105,7 +110,7 @@ impl SpecializedRenderPipeline for BloomUpsamplingPipeline {
|
||||
layout: vec![self.bind_group_layout.clone()],
|
||||
vertex: fullscreen_shader_vertex_state(),
|
||||
fragment: Some(FragmentState {
|
||||
shader: BLOOM_SHADER_HANDLE,
|
||||
shader: self.shader.clone(),
|
||||
shader_defs: vec![],
|
||||
entry_point: "upsample".into(),
|
||||
targets: vec![Some(ColorTargetState {
|
||||
|
||||
@ -3,7 +3,7 @@ use crate::{
|
||||
prepass::{DeferredPrepass, ViewPrepassTextures},
|
||||
};
|
||||
use bevy_app::prelude::*;
|
||||
use bevy_asset::{load_internal_asset, weak_handle, Handle};
|
||||
use bevy_asset::{embedded_asset, load_embedded_asset};
|
||||
use bevy_ecs::prelude::*;
|
||||
use bevy_math::UVec2;
|
||||
use bevy_render::{
|
||||
@ -23,18 +23,11 @@ use bevy_render::{
|
||||
|
||||
use super::DEFERRED_LIGHTING_PASS_ID_DEPTH_FORMAT;
|
||||
|
||||
pub const COPY_DEFERRED_LIGHTING_ID_SHADER_HANDLE: Handle<Shader> =
|
||||
weak_handle!("70d91342-1c43-4b20-973f-aa6ce93aa617");
|
||||
pub struct CopyDeferredLightingIdPlugin;
|
||||
|
||||
impl Plugin for CopyDeferredLightingIdPlugin {
|
||||
fn build(&self, app: &mut App) {
|
||||
load_internal_asset!(
|
||||
app,
|
||||
COPY_DEFERRED_LIGHTING_ID_SHADER_HANDLE,
|
||||
"copy_deferred_lighting_id.wgsl",
|
||||
Shader::from_wgsl
|
||||
);
|
||||
embedded_asset!(app, "copy_deferred_lighting_id.wgsl");
|
||||
let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
|
||||
return;
|
||||
};
|
||||
@ -137,6 +130,8 @@ impl FromWorld for CopyDeferredLightingIdPipeline {
|
||||
),
|
||||
);
|
||||
|
||||
let shader = load_embedded_asset!(world, "copy_deferred_lighting_id.wgsl");
|
||||
|
||||
let pipeline_id =
|
||||
world
|
||||
.resource_mut::<PipelineCache>()
|
||||
@ -145,7 +140,7 @@ impl FromWorld for CopyDeferredLightingIdPipeline {
|
||||
layout: vec![layout.clone()],
|
||||
vertex: fullscreen_shader_vertex_state(),
|
||||
fragment: Some(FragmentState {
|
||||
shader: COPY_DEFERRED_LIGHTING_ID_SHADER_HANDLE,
|
||||
shader,
|
||||
shader_defs: vec![],
|
||||
entry_point: "fragment".into(),
|
||||
targets: vec![],
|
||||
|
||||
@ -15,7 +15,7 @@
|
||||
//! [Depth of field]: https://en.wikipedia.org/wiki/Depth_of_field
|
||||
|
||||
use bevy_app::{App, Plugin};
|
||||
use bevy_asset::{load_internal_asset, weak_handle, Handle};
|
||||
use bevy_asset::{embedded_asset, load_embedded_asset, AssetServer, Handle};
|
||||
use bevy_derive::{Deref, DerefMut};
|
||||
use bevy_ecs::{
|
||||
component::Component,
|
||||
@ -69,8 +69,6 @@ use crate::{
|
||||
fullscreen_vertex_shader::fullscreen_shader_vertex_state,
|
||||
};
|
||||
|
||||
const DOF_SHADER_HANDLE: Handle<Shader> = weak_handle!("c3580ddc-2cbc-4535-a02b-9a2959066b52");
|
||||
|
||||
/// A plugin that adds support for the depth of field effect to Bevy.
|
||||
pub struct DepthOfFieldPlugin;
|
||||
|
||||
@ -206,7 +204,7 @@ enum DofPass {
|
||||
|
||||
impl Plugin for DepthOfFieldPlugin {
|
||||
fn build(&self, app: &mut App) {
|
||||
load_internal_asset!(app, DOF_SHADER_HANDLE, "dof.wgsl", Shader::from_wgsl);
|
||||
embedded_asset!(app, "dof.wgsl");
|
||||
|
||||
app.register_type::<DepthOfField>();
|
||||
app.register_type::<DepthOfFieldMode>();
|
||||
@ -327,6 +325,8 @@ pub struct DepthOfFieldPipeline {
|
||||
/// The bind group layout shared among all invocations of the depth of field
|
||||
/// shader.
|
||||
global_bind_group_layout: BindGroupLayout,
|
||||
/// The shader asset handle.
|
||||
shader: Handle<Shader>,
|
||||
}
|
||||
|
||||
impl ViewNode for DepthOfFieldNode {
|
||||
@ -678,11 +678,13 @@ pub fn prepare_depth_of_field_pipelines(
|
||||
&ViewDepthOfFieldBindGroupLayouts,
|
||||
&Msaa,
|
||||
)>,
|
||||
asset_server: Res<AssetServer>,
|
||||
) {
|
||||
for (entity, view, depth_of_field, view_bind_group_layouts, msaa) in view_targets.iter() {
|
||||
let dof_pipeline = DepthOfFieldPipeline {
|
||||
view_bind_group_layouts: view_bind_group_layouts.clone(),
|
||||
global_bind_group_layout: global_bind_group_layout.layout.clone(),
|
||||
shader: load_embedded_asset!(asset_server.as_ref(), "dof.wgsl"),
|
||||
};
|
||||
|
||||
// We'll need these two flags to create the `DepthOfFieldPipelineKey`s.
|
||||
@ -800,7 +802,7 @@ impl SpecializedRenderPipeline for DepthOfFieldPipeline {
|
||||
depth_stencil: None,
|
||||
multisample: default(),
|
||||
fragment: Some(FragmentState {
|
||||
shader: DOF_SHADER_HANDLE,
|
||||
shader: self.shader.clone(),
|
||||
shader_defs,
|
||||
entry_point: match key.pass {
|
||||
DofPass::GaussianHorizontal => "gaussian_horizontal".into(),
|
||||
|
||||
@ -2,8 +2,8 @@
|
||||
#![forbid(unsafe_code)]
|
||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||
#![doc(
|
||||
html_logo_url = "https://bevyengine.org/assets/icon.png",
|
||||
html_favicon_url = "https://bevyengine.org/assets/icon.png"
|
||||
html_logo_url = "https://bevy.org/assets/icon.png",
|
||||
html_favicon_url = "https://bevy.org/assets/icon.png"
|
||||
)]
|
||||
|
||||
pub mod auto_exposure;
|
||||
|
||||
@ -7,7 +7,7 @@ use crate::{
|
||||
prepass::{DepthPrepass, MotionVectorPrepass},
|
||||
};
|
||||
use bevy_app::{App, Plugin};
|
||||
use bevy_asset::{load_internal_asset, weak_handle, Handle};
|
||||
use bevy_asset::embedded_asset;
|
||||
use bevy_ecs::{
|
||||
component::Component,
|
||||
query::{QueryItem, With},
|
||||
@ -19,7 +19,7 @@ use bevy_render::{
|
||||
camera::Camera,
|
||||
extract_component::{ExtractComponent, ExtractComponentPlugin, UniformComponentPlugin},
|
||||
render_graph::{RenderGraphApp, ViewNodeRunner},
|
||||
render_resource::{Shader, ShaderType, SpecializedRenderPipelines},
|
||||
render_resource::{ShaderType, SpecializedRenderPipelines},
|
||||
Render, RenderApp, RenderSystems,
|
||||
};
|
||||
|
||||
@ -126,19 +126,12 @@ pub struct MotionBlurUniform {
|
||||
_webgl2_padding: bevy_math::Vec2,
|
||||
}
|
||||
|
||||
pub const MOTION_BLUR_SHADER_HANDLE: Handle<Shader> =
|
||||
weak_handle!("d9ca74af-fa0a-4f11-b0f2-19613b618b93");
|
||||
|
||||
/// Adds support for per-object motion blur to the app. See [`MotionBlur`] for details.
|
||||
pub struct MotionBlurPlugin;
|
||||
impl Plugin for MotionBlurPlugin {
|
||||
fn build(&self, app: &mut App) {
|
||||
load_internal_asset!(
|
||||
app,
|
||||
MOTION_BLUR_SHADER_HANDLE,
|
||||
"motion_blur.wgsl",
|
||||
Shader::from_wgsl
|
||||
);
|
||||
embedded_asset!(app, "motion_blur.wgsl");
|
||||
|
||||
app.add_plugins((
|
||||
ExtractComponentPlugin::<MotionBlur>::default(),
|
||||
UniformComponentPlugin::<MotionBlurUniform>::default(),
|
||||
|
||||
@ -1,3 +1,4 @@
|
||||
use bevy_asset::{load_embedded_asset, Handle};
|
||||
use bevy_ecs::{
|
||||
component::Component,
|
||||
entity::Entity,
|
||||
@ -16,9 +17,9 @@ use bevy_render::{
|
||||
},
|
||||
BindGroupLayout, BindGroupLayoutEntries, CachedRenderPipelineId, ColorTargetState,
|
||||
ColorWrites, FragmentState, MultisampleState, PipelineCache, PrimitiveState,
|
||||
RenderPipelineDescriptor, Sampler, SamplerBindingType, SamplerDescriptor, ShaderDefVal,
|
||||
ShaderStages, ShaderType, SpecializedRenderPipeline, SpecializedRenderPipelines,
|
||||
TextureFormat, TextureSampleType,
|
||||
RenderPipelineDescriptor, Sampler, SamplerBindingType, SamplerDescriptor, Shader,
|
||||
ShaderDefVal, ShaderStages, ShaderType, SpecializedRenderPipeline,
|
||||
SpecializedRenderPipelines, TextureFormat, TextureSampleType,
|
||||
},
|
||||
renderer::RenderDevice,
|
||||
view::{ExtractedView, Msaa, ViewTarget},
|
||||
@ -26,17 +27,18 @@ use bevy_render::{
|
||||
|
||||
use crate::fullscreen_vertex_shader::fullscreen_shader_vertex_state;
|
||||
|
||||
use super::{MotionBlurUniform, MOTION_BLUR_SHADER_HANDLE};
|
||||
use super::MotionBlurUniform;
|
||||
|
||||
#[derive(Resource)]
|
||||
pub struct MotionBlurPipeline {
|
||||
pub(crate) sampler: Sampler,
|
||||
pub(crate) layout: BindGroupLayout,
|
||||
pub(crate) layout_msaa: BindGroupLayout,
|
||||
pub(crate) shader: Handle<Shader>,
|
||||
}
|
||||
|
||||
impl MotionBlurPipeline {
|
||||
pub(crate) fn new(render_device: &RenderDevice) -> Self {
|
||||
pub(crate) fn new(render_device: &RenderDevice, shader: Handle<Shader>) -> Self {
|
||||
let mb_layout = &BindGroupLayoutEntries::sequential(
|
||||
ShaderStages::FRAGMENT,
|
||||
(
|
||||
@ -82,6 +84,7 @@ impl MotionBlurPipeline {
|
||||
sampler,
|
||||
layout,
|
||||
layout_msaa,
|
||||
shader,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -89,7 +92,9 @@ impl MotionBlurPipeline {
|
||||
impl FromWorld for MotionBlurPipeline {
|
||||
fn from_world(render_world: &mut bevy_ecs::world::World) -> Self {
let render_device = render_world.resource::<RenderDevice>().clone();
MotionBlurPipeline::new(&render_device)

let shader = load_embedded_asset!(render_world, "motion_blur.wgsl");
MotionBlurPipeline::new(&render_device, shader)
}
}

@ -125,7 +130,7 @@ impl SpecializedRenderPipeline for MotionBlurPipeline {
layout,
vertex: fullscreen_shader_vertex_state(),
fragment: Some(FragmentState {
shader: MOTION_BLUR_SHADER_HANDLE,
shader: self.shader.clone(),
shader_defs,
entry_point: "fragment".into(),
targets: vec![Some(ColorTargetState {

@ -1,7 +1,6 @@
//! Order Independent Transparency (OIT) for 3d rendering. See [`OrderIndependentTransparencyPlugin`] for more details.

use bevy_app::prelude::*;
use bevy_asset::{load_internal_asset, weak_handle, Handle};
use bevy_ecs::{component::*, prelude::*};
use bevy_math::UVec2;
use bevy_platform::collections::HashSet;
@ -10,10 +9,9 @@ use bevy_reflect::{std_traits::ReflectDefault, Reflect};
use bevy_render::{
camera::{Camera, ExtractedCamera},
extract_component::{ExtractComponent, ExtractComponentPlugin},
load_shader_library,
render_graph::{RenderGraphApp, ViewNodeRunner},
render_resource::{
BufferUsages, BufferVec, DynamicUniformBuffer, Shader, ShaderType, TextureUsages,
},
render_resource::{BufferUsages, BufferVec, DynamicUniformBuffer, ShaderType, TextureUsages},
renderer::{RenderDevice, RenderQueue},
view::Msaa,
Render, RenderApp, RenderSystems,
@ -33,10 +31,6 @@ use crate::core_3d::{
/// Module that defines the necessary systems to resolve the OIT buffer and render it to the screen.
pub mod resolve;

/// Shader handle for the shader that draws the transparent meshes to the OIT layers buffer.
pub const OIT_DRAW_SHADER_HANDLE: Handle<Shader> =
weak_handle!("0cd3c764-39b8-437b-86b4-4e45635fc03d");

/// Used to identify which camera will use OIT to render transparent meshes
/// and to configure OIT.
// TODO consider supporting multiple OIT techniques like WBOIT, Moment Based OIT,
@ -105,12 +99,7 @@ impl Component for OrderIndependentTransparencySettings {
pub struct OrderIndependentTransparencyPlugin;
impl Plugin for OrderIndependentTransparencyPlugin {
fn build(&self, app: &mut App) {
load_internal_asset!(
app,
OIT_DRAW_SHADER_HANDLE,
"oit_draw.wgsl",
Shader::from_wgsl
);
load_shader_library!(app, "oit_draw.wgsl");

app.add_plugins((
ExtractComponentPlugin::<OrderIndependentTransparencySettings>::default(),

@ -3,7 +3,7 @@ use crate::{
oit::OrderIndependentTransparencySettings,
};
use bevy_app::Plugin;
use bevy_asset::{load_internal_asset, weak_handle, Handle};
use bevy_asset::{embedded_asset, load_embedded_asset, AssetServer};
use bevy_derive::Deref;
use bevy_ecs::{
entity::{EntityHashMap, EntityHashSet},
@ -16,7 +16,7 @@ use bevy_render::{
BindGroup, BindGroupEntries, BindGroupLayout, BindGroupLayoutEntries, BlendComponent,
BlendState, CachedRenderPipelineId, ColorTargetState, ColorWrites, DownlevelFlags,
FragmentState, MultisampleState, PipelineCache, PrimitiveState, RenderPipelineDescriptor,
Shader, ShaderDefVal, ShaderStages, TextureFormat,
ShaderDefVal, ShaderStages, TextureFormat,
},
renderer::{RenderAdapter, RenderDevice},
view::{ExtractedView, ViewTarget, ViewUniform, ViewUniforms},
@ -26,10 +26,6 @@ use tracing::warn;

use super::OitBuffers;

/// Shader handle for the shader that sorts the OIT layers, blends the colors based on depth and renders them to the screen.
pub const OIT_RESOLVE_SHADER_HANDLE: Handle<Shader> =
weak_handle!("562d2917-eb06-444d-9ade-41de76b0f5ae");

/// Contains the render node used to run the resolve pass.
pub mod node;

@ -40,12 +36,7 @@ pub const OIT_REQUIRED_STORAGE_BUFFERS: u32 = 2;
pub struct OitResolvePlugin;
impl Plugin for OitResolvePlugin {
fn build(&self, app: &mut bevy_app::App) {
load_internal_asset!(
app,
OIT_RESOLVE_SHADER_HANDLE,
"oit_resolve.wgsl",
Shader::from_wgsl
);
embedded_asset!(app, "oit_resolve.wgsl");
}

fn finish(&self, app: &mut bevy_app::App) {
@ -165,6 +156,7 @@ pub fn queue_oit_resolve_pipeline(
),
With<OrderIndependentTransparencySettings>,
>,
asset_server: Res<AssetServer>,
// Store the key with the id to make the clean up logic easier.
// This also means it will always replace the entry if the key changes so nothing to clean up.
mut cached_pipeline_id: Local<EntityHashMap<(OitResolvePipelineKey, CachedRenderPipelineId)>>,
@ -184,7 +176,7 @@ pub fn queue_oit_resolve_pipeline(
}
}

let desc = specialize_oit_resolve_pipeline(key, &resolve_pipeline);
let desc = specialize_oit_resolve_pipeline(key, &resolve_pipeline, &asset_server);

let pipeline_id = pipeline_cache.queue_render_pipeline(desc);
commands.entity(e).insert(OitResolvePipelineId(pipeline_id));
@ -202,6 +194,7 @@ pub fn queue_oit_resolve_pipeline(
fn specialize_oit_resolve_pipeline(
key: OitResolvePipelineKey,
resolve_pipeline: &OitResolvePipeline,
asset_server: &AssetServer,
) -> RenderPipelineDescriptor {
let format = if key.hdr {
ViewTarget::TEXTURE_FORMAT_HDR
@ -217,7 +210,7 @@ fn specialize_oit_resolve_pipeline(
],
fragment: Some(FragmentState {
entry_point: "fragment".into(),
shader: OIT_RESOLVE_SHADER_HANDLE,
shader: load_embedded_asset!(asset_server, "oit_resolve.wgsl"),
shader_defs: vec![ShaderDefVal::UInt(
"LAYER_COUNT".into(),
key.layer_count as u32,

@ -3,7 +3,7 @@
//! Currently, this consists only of chromatic aberration.

use bevy_app::{App, Plugin};
use bevy_asset::{load_internal_asset, weak_handle, Assets, Handle};
use bevy_asset::{embedded_asset, load_embedded_asset, weak_handle, Assets, Handle};
use bevy_derive::{Deref, DerefMut};
use bevy_ecs::{
component::Component,
@ -20,6 +20,7 @@ use bevy_reflect::{std_traits::ReflectDefault, Reflect};
use bevy_render::{
camera::Camera,
extract_component::{ExtractComponent, ExtractComponentPlugin},
load_shader_library,
render_asset::{RenderAssetUsages, RenderAssets},
render_graph::{
NodeRunError, RenderGraphApp as _, RenderGraphContext, ViewNode, ViewNodeRunner,
@ -46,13 +47,6 @@ use crate::{
fullscreen_vertex_shader,
};

/// The handle to the built-in postprocessing shader `post_process.wgsl`.
const POST_PROCESSING_SHADER_HANDLE: Handle<Shader> =
weak_handle!("5e8e627a-7531-484d-a988-9a38acb34e52");
/// The handle to the chromatic aberration shader `chromatic_aberration.wgsl`.
const CHROMATIC_ABERRATION_SHADER_HANDLE: Handle<Shader> =
weak_handle!("e598550e-71c3-4f5a-ba29-aebc3f88c7b5");

/// The handle to the default chromatic aberration lookup texture.
///
/// This is just a 3x1 image consisting of one red pixel, one green pixel, and
@ -136,6 +130,8 @@ pub struct PostProcessingPipeline {
source_sampler: Sampler,
/// Specifies how to sample the chromatic aberration gradient.
chromatic_aberration_lut_sampler: Sampler,
/// The shader asset handle.
shader: Handle<Shader>,
}

/// A key that uniquely identifies a built-in postprocessing pipeline.
@ -188,18 +184,9 @@ pub struct PostProcessingNode;

impl Plugin for PostProcessingPlugin {
fn build(&self, app: &mut App) {
load_internal_asset!(
app,
POST_PROCESSING_SHADER_HANDLE,
"post_process.wgsl",
Shader::from_wgsl
);
load_internal_asset!(
app,
CHROMATIC_ABERRATION_SHADER_HANDLE,
"chromatic_aberration.wgsl",
Shader::from_wgsl
);
load_shader_library!(app, "chromatic_aberration.wgsl");

embedded_asset!(app, "post_process.wgsl");

// Load the default chromatic aberration LUT.
let mut assets = app.world_mut().resource_mut::<Assets<_>>();
@ -321,6 +308,7 @@ impl FromWorld for PostProcessingPipeline {
bind_group_layout,
source_sampler,
chromatic_aberration_lut_sampler,
shader: load_embedded_asset!(world, "post_process.wgsl"),
}
}
}
@ -334,7 +322,7 @@ impl SpecializedRenderPipeline for PostProcessingPipeline {
layout: vec![self.bind_group_layout.clone()],
vertex: fullscreen_vertex_shader::fullscreen_shader_vertex_state(),
fragment: Some(FragmentState {
shader: POST_PROCESSING_SHADER_HANDLE,
shader: self.shader.clone(),
shader_defs: vec![],
entry_point: "fragment_main".into(),
targets: vec![Some(ColorTargetState {

@ -1,5 +1,5 @@
use bevy_app::{App, Plugin};
use bevy_asset::{load_internal_asset, weak_handle, Handle};
use bevy_asset::{embedded_asset, load_embedded_asset, Handle};
use bevy_ecs::{
prelude::{Component, Entity},
query::{QueryItem, With},
@ -28,25 +28,18 @@ use bevy_render::{
Render, RenderApp, RenderSystems,
};
use bevy_transform::components::Transform;
use prepass::{SkyboxPrepassPipeline, SKYBOX_PREPASS_SHADER_HANDLE};
use prepass::SkyboxPrepassPipeline;

use crate::{core_3d::CORE_3D_DEPTH_FORMAT, prepass::PreviousViewUniforms};

const SKYBOX_SHADER_HANDLE: Handle<Shader> = weak_handle!("a66cf9cc-cab8-47f8-ac32-db82fdc4f29b");

pub mod prepass;

pub struct SkyboxPlugin;

impl Plugin for SkyboxPlugin {
fn build(&self, app: &mut App) {
load_internal_asset!(app, SKYBOX_SHADER_HANDLE, "skybox.wgsl", Shader::from_wgsl);
load_internal_asset!(
app,
SKYBOX_PREPASS_SHADER_HANDLE,
"skybox_prepass.wgsl",
Shader::from_wgsl
);
embedded_asset!(app, "skybox.wgsl");
embedded_asset!(app, "skybox_prepass.wgsl");

app.register_type::<Skybox>().add_plugins((
ExtractComponentPlugin::<Skybox>::default(),
@ -76,9 +69,10 @@ impl Plugin for SkyboxPlugin {
let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
return;
};
let shader = load_embedded_asset!(render_app.world(), "skybox.wgsl");
let render_device = render_app.world().resource::<RenderDevice>().clone();
render_app
.insert_resource(SkyboxPipeline::new(&render_device))
.insert_resource(SkyboxPipeline::new(&render_device, shader))
.init_resource::<SkyboxPrepassPipeline>();
}
}
@ -158,10 +152,11 @@ pub struct SkyboxUniforms {
#[derive(Resource)]
struct SkyboxPipeline {
bind_group_layout: BindGroupLayout,
shader: Handle<Shader>,
}

impl SkyboxPipeline {
fn new(render_device: &RenderDevice) -> Self {
fn new(render_device: &RenderDevice, shader: Handle<Shader>) -> Self {
Self {
bind_group_layout: render_device.create_bind_group_layout(
"skybox_bind_group_layout",
@ -176,6 +171,7 @@ impl SkyboxPipeline {
),
),
),
shader,
}
}
}
@ -196,7 +192,7 @@ impl SpecializedRenderPipeline for SkyboxPipeline {
layout: vec![self.bind_group_layout.clone()],
push_constant_ranges: Vec::new(),
vertex: VertexState {
shader: SKYBOX_SHADER_HANDLE,
shader: self.shader.clone(),
shader_defs: Vec::new(),
entry_point: "skybox_vertex".into(),
buffers: Vec::new(),
@ -224,7 +220,7 @@ impl SpecializedRenderPipeline for SkyboxPipeline {
alpha_to_coverage_enabled: false,
},
fragment: Some(FragmentState {
shader: SKYBOX_SHADER_HANDLE,
shader: self.shader.clone(),
shader_defs: Vec::new(),
entry_point: "skybox_fragment".into(),
targets: vec![Some(ColorTargetState {

@ -1,6 +1,6 @@
//! Adds motion vector support to skyboxes. See [`SkyboxPrepassPipeline`] for details.

use bevy_asset::{weak_handle, Handle};
use bevy_asset::{load_embedded_asset, Handle};
use bevy_ecs::{
component::Component,
entity::Entity,
@ -30,9 +30,6 @@ use crate::{
Skybox,
};

pub const SKYBOX_PREPASS_SHADER_HANDLE: Handle<Shader> =
weak_handle!("7a292435-bfe6-4ed9-8d30-73bf7aa673b0");

/// This pipeline writes motion vectors to the prepass for all [`Skybox`]es.
///
/// This allows features like motion blur and TAA to work correctly on the skybox. Without this, for
@ -41,6 +38,7 @@ pub const SKYBOX_PREPASS_SHADER_HANDLE: Handle<Shader> =
#[derive(Resource)]
pub struct SkyboxPrepassPipeline {
bind_group_layout: BindGroupLayout,
shader: Handle<Shader>,
}

/// Used to specialize the [`SkyboxPrepassPipeline`].
@ -75,6 +73,7 @@ impl FromWorld for SkyboxPrepassPipeline {
),
),
),
shader: load_embedded_asset!(world, "skybox_prepass.wgsl"),
}
}
}
@ -102,7 +101,7 @@ impl SpecializedRenderPipeline for SkyboxPrepassPipeline {
alpha_to_coverage_enabled: false,
},
fragment: Some(FragmentState {
shader: SKYBOX_PREPASS_SHADER_HANDLE,
shader: self.shader.clone(),
shader_defs: vec![],
entry_point: "fragment".into(),
targets: prepass_target_descriptors(key.normal_prepass, true, false),

@ -1,6 +1,6 @@
use crate::fullscreen_vertex_shader::fullscreen_shader_vertex_state;
use bevy_app::prelude::*;
use bevy_asset::{load_internal_asset, weak_handle, Assets, Handle};
use bevy_asset::{embedded_asset, load_embedded_asset, Assets, Handle};
use bevy_ecs::prelude::*;
use bevy_image::{CompressedImageFormats, Image, ImageSampler, ImageType};
use bevy_reflect::{std_traits::ReflectDefault, Reflect};
@ -8,6 +8,7 @@ use bevy_render::{
camera::Camera,
extract_component::{ExtractComponent, ExtractComponentPlugin},
extract_resource::{ExtractResource, ExtractResourcePlugin},
load_shader_library,
render_asset::{RenderAssetUsages, RenderAssets},
render_resource::{
binding_types::{sampler, texture_2d, texture_3d, uniform_buffer},
@ -27,45 +28,22 @@ mod node;
use bevy_utils::default;
pub use node::TonemappingNode;

const TONEMAPPING_SHADER_HANDLE: Handle<Shader> =
weak_handle!("e239c010-c25c-42a1-b4e8-08818764d667");

const TONEMAPPING_SHARED_SHADER_HANDLE: Handle<Shader> =
weak_handle!("61dbc544-4b30-4ca9-83bd-4751b5cfb1b1");

const TONEMAPPING_LUT_BINDINGS_SHADER_HANDLE: Handle<Shader> =
weak_handle!("d50e3a70-c85e-4725-a81e-72fc83281145");

/// 3D LUT (look up table) textures used for tonemapping
#[derive(Resource, Clone, ExtractResource)]
pub struct TonemappingLuts {
blender_filmic: Handle<Image>,
agx: Handle<Image>,
tony_mc_mapface: Handle<Image>,
pub blender_filmic: Handle<Image>,
pub agx: Handle<Image>,
pub tony_mc_mapface: Handle<Image>,
}

pub struct TonemappingPlugin;

impl Plugin for TonemappingPlugin {
fn build(&self, app: &mut App) {
load_internal_asset!(
app,
TONEMAPPING_SHADER_HANDLE,
"tonemapping.wgsl",
Shader::from_wgsl
);
load_internal_asset!(
app,
TONEMAPPING_SHARED_SHADER_HANDLE,
"tonemapping_shared.wgsl",
Shader::from_wgsl
);
load_internal_asset!(
app,
TONEMAPPING_LUT_BINDINGS_SHADER_HANDLE,
"lut_bindings.wgsl",
Shader::from_wgsl
);
load_shader_library!(app, "tonemapping_shared.wgsl");
load_shader_library!(app, "lut_bindings.wgsl");

embedded_asset!(app, "tonemapping.wgsl");

if !app.world().is_resource_added::<TonemappingLuts>() {
let mut images = app.world_mut().resource_mut::<Assets<Image>>();
@ -134,6 +112,7 @@ impl Plugin for TonemappingPlugin {
pub struct TonemappingPipeline {
texture_bind_group: BindGroupLayout,
sampler: Sampler,
shader: Handle<Shader>,
}

/// Optionally enables a tonemapping shader that attempts to map linear input stimulus into a perceptually uniform image for a given [`Camera`] entity.
@ -296,7 +275,7 @@ impl SpecializedRenderPipeline for TonemappingPipeline {
layout: vec![self.texture_bind_group.clone()],
vertex: fullscreen_shader_vertex_state(),
fragment: Some(FragmentState {
shader: TONEMAPPING_SHADER_HANDLE,
shader: self.shader.clone(),
shader_defs,
entry_point: "fragment".into(),
targets: vec![Some(ColorTargetState {
@ -340,6 +319,7 @@ impl FromWorld for TonemappingPipeline {
TonemappingPipeline {
texture_bind_group: tonemap_texture_bind_group,
sampler,
shader: load_embedded_asset!(render_world, "tonemapping.wgsl"),
}
}
}
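The hunks above all apply the same migration: shader sources previously registered through `load_internal_asset!` against a `weak_handle!` constant are now registered with `embedded_asset!` (or `load_shader_library!` for non-entry-point WGSL) and resolved with `load_embedded_asset!` when the pipeline resource is built. A minimal sketch of the new pattern, using a hypothetical `MyEffectPipeline` and `my_effect.wgsl` rather than any real Bevy module:

```rust
use bevy_app::{App, Plugin};
use bevy_asset::{embedded_asset, load_embedded_asset, Handle};
use bevy_ecs::prelude::{FromWorld, Resource, World};
use bevy_render::render_resource::Shader;

struct MyEffectPlugin;

impl Plugin for MyEffectPlugin {
    fn build(&self, app: &mut App) {
        // Register the WGSL source as an embedded asset; no weak-handle constant is needed.
        embedded_asset!(app, "my_effect.wgsl");
    }
}

#[derive(Resource)]
struct MyEffectPipeline {
    shader: Handle<Shader>,
}

impl FromWorld for MyEffectPipeline {
    fn from_world(world: &mut World) -> Self {
        Self {
            // Resolve the embedded path into a strong handle; pipeline specialization
            // then clones `self.shader` instead of referencing a global handle.
            shader: load_embedded_asset!(world, "my_effect.wgsl"),
        }
    }
}
```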
@ -3,7 +3,7 @@ name = "bevy_derive"
version = "0.16.0-dev"
edition = "2024"
description = "Provides derive implementations for Bevy Engine"
homepage = "https://bevyengine.org"
homepage = "https://bevy.org"
repository = "https://github.com/bevyengine/bevy"
license = "MIT OR Apache-2.0"
keywords = ["bevy"]

@ -2,7 +2,7 @@
name = "bevy_derive_compile_fail"
edition = "2024"
description = "Compile fail tests for Bevy Engine's various macros"
homepage = "https://bevyengine.org"
homepage = "https://bevy.org"
repository = "https://github.com/bevyengine/bevy"
license = "MIT OR Apache-2.0"
publish = false

@ -1,9 +1,10 @@
#![expect(missing_docs, reason = "Not all docs are written yet, see #3492.")]
//! Assorted proc macro derive functions.

#![forbid(unsafe_code)]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc(
html_logo_url = "https://bevyengine.org/assets/icon.png",
html_favicon_url = "https://bevyengine.org/assets/icon.png"
html_logo_url = "https://bevy.org/assets/icon.png",
html_favicon_url = "https://bevy.org/assets/icon.png"
)]

extern crate proc_macro;
@ -188,11 +189,34 @@ pub fn derive_deref_mut(input: TokenStream) -> TokenStream {
derefs::derive_deref_mut(input)
}

/// Generates the required main function boilerplate for Android.
#[proc_macro_attribute]
pub fn bevy_main(attr: TokenStream, item: TokenStream) -> TokenStream {
bevy_main::bevy_main(attr, item)
}

/// Adds `enum_variant_index` and `enum_variant_name` functions to enums.
///
/// # Example
///
/// ```
/// use bevy_derive::{EnumVariantMeta};
///
/// #[derive(EnumVariantMeta)]
/// enum MyEnum {
/// A,
/// B,
/// }
///
/// let a = MyEnum::A;
/// let b = MyEnum::B;
///
/// assert_eq!(0, a.enum_variant_index());
/// assert_eq!("A", a.enum_variant_name());
///
/// assert_eq!(1, b.enum_variant_index());
/// assert_eq!("B", b.enum_variant_name());
/// ```
#[proc_macro_derive(EnumVariantMeta)]
pub fn derive_enum_variant_meta(input: TokenStream) -> TokenStream {
enum_variant_meta::derive_enum_variant_meta(input)
@ -205,8 +229,6 @@ pub fn derive_enum_variant_meta(input: TokenStream) -> TokenStream {
pub fn derive_app_label(input: TokenStream) -> TokenStream {
let input = syn::parse_macro_input!(input as syn::DeriveInput);
let mut trait_path = BevyManifest::shared().get_path("bevy_app");
let mut dyn_eq_path = trait_path.clone();
trait_path.segments.push(format_ident!("AppLabel").into());
dyn_eq_path.segments.push(format_ident!("DynEq").into());
derive_label(input, "AppLabel", &trait_path, &dyn_eq_path)
derive_label(input, "AppLabel", &trait_path)
}

@ -3,7 +3,7 @@ name = "bevy_dev_tools"
version = "0.16.0-dev"
edition = "2024"
description = "Collection of developer tools for the Bevy Engine"
homepage = "https://bevyengine.org"
homepage = "https://bevy.org"
repository = "https://github.com/bevyengine/bevy"
license = "MIT OR Apache-2.0"
keywords = ["bevy"]

@ -1,11 +1,11 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![forbid(unsafe_code)]
#![doc(
html_logo_url = "https://bevyengine.org/assets/icon.png",
html_favicon_url = "https://bevyengine.org/assets/icon.png"
html_logo_url = "https://bevy.org/assets/icon.png",
html_favicon_url = "https://bevy.org/assets/icon.png"
)]

//! This crate provides additional utilities for the [Bevy game engine](https://bevyengine.org),
//! This crate provides additional utilities for the [Bevy game engine](https://bevy.org),
//! focused on improving developer experience.

use bevy_app::prelude::*;

@ -1,7 +1,6 @@
//! Text and on-screen debugging tools

use bevy_app::prelude::*;
use bevy_asset::prelude::*;
use bevy_color::prelude::*;
use bevy_ecs::prelude::*;
use bevy_picking::backend::HitData;
@ -248,25 +247,18 @@ pub fn debug_draw(
pointers: Query<(Entity, &PointerId, &PointerDebug)>,
scale: Res<UiScale>,
) {
let font_handle: Handle<Font> = Default::default();
for (entity, id, debug) in pointers.iter() {
for (entity, id, debug) in &pointers {
let Some(pointer_location) = &debug.location else {
continue;
};
let text = format!("{id:?}\n{debug}");

for camera in camera_query
.iter()
.map(|(entity, camera)| {
(
entity,
camera.target.normalize(primary_window.single().ok()),
)
})
.filter_map(|(entity, target)| Some(entity).zip(target))
.filter(|(_entity, target)| target == &pointer_location.target)
.map(|(cam_entity, _target)| cam_entity)
{
for (camera, _) in camera_query.iter().filter(|(_, camera)| {
camera
.target
.normalize(primary_window.single().ok())
.is_some_and(|target| target == pointer_location.target)
}) {
let mut pointer_pos = pointer_location.position;
if let Some(viewport) = camera_query
.get(camera)
@ -278,23 +270,21 @@ pub fn debug_draw(

commands
.entity(entity)
.despawn_related::<Children>()
.insert((
Text::new(text.clone()),
TextFont {
font: font_handle.clone(),
font_size: 12.0,
..Default::default()
},
TextColor(Color::WHITE),
Node {
position_type: PositionType::Absolute,
left: Val::Px(pointer_pos.x + 5.0) / scale.0,
top: Val::Px(pointer_pos.y + 5.0) / scale.0,
padding: UiRect::px(10.0, 10.0, 8.0, 6.0),
..Default::default()
},
))
.insert(Pickable::IGNORE)
.insert(UiTargetCamera(camera));
BackgroundColor(Color::BLACK.with_alpha(0.75)),
GlobalZIndex(i32::MAX),
Pickable::IGNORE,
UiTargetCamera(camera),
children![(Text::new(text.clone()), TextFont::from_font_size(12.0))],
));
}
}
}

@ -3,7 +3,7 @@ name = "bevy_diagnostic"
version = "0.16.0-dev"
edition = "2024"
description = "Provides diagnostic functionality for Bevy Engine"
homepage = "https://bevyengine.org"
homepage = "https://bevy.org"
repository = "https://github.com/bevyengine/bevy"
license = "MIT OR Apache-2.0"
keywords = ["bevy"]
@ -18,7 +18,6 @@ serialize = [
"dep:serde",
"bevy_ecs/serialize",
"bevy_time/serialize",
"bevy_utils/serde",
"bevy_platform/serialize",
]

@ -39,7 +38,6 @@ std = [
"bevy_app/std",
"bevy_platform/std",
"bevy_time/std",
"bevy_utils/std",
"bevy_tasks/std",
]

@ -50,7 +48,6 @@ critical-section = [
"bevy_app/critical-section",
"bevy_platform/critical-section",
"bevy_time/critical-section",
"bevy_utils/critical-section",
"bevy_tasks/critical-section",
]

@ -59,9 +56,7 @@ critical-section = [
bevy_app = { path = "../bevy_app", version = "0.16.0-dev", default-features = false }
bevy_ecs = { path = "../bevy_ecs", version = "0.16.0-dev", default-features = false }
bevy_time = { path = "../bevy_time", version = "0.16.0-dev", default-features = false }
bevy_utils = { path = "../bevy_utils", version = "0.16.0-dev", default-features = false, features = [
"alloc",
] }
bevy_utils = { path = "../bevy_utils", version = "0.16.0-dev", default-features = false }
bevy_tasks = { path = "../bevy_tasks", version = "0.16.0-dev", default-features = false }
bevy_platform = { path = "../bevy_platform", version = "0.16.0-dev", default-features = false, features = [
"alloc",

@ -113,7 +113,9 @@ impl core::fmt::Display for DiagnosticPath {
/// A single measurement of a [`Diagnostic`].
#[derive(Debug)]
pub struct DiagnosticMeasurement {
/// When this measurement was taken.
pub time: Instant,
/// Value of the measurement.
pub value: f64,
}

@ -122,12 +124,14 @@ pub struct DiagnosticMeasurement {
#[derive(Debug)]
pub struct Diagnostic {
path: DiagnosticPath,
/// Suffix to use when logging measurements for this [`Diagnostic`], for example to show units.
pub suffix: Cow<'static, str>,
history: VecDeque<DiagnosticMeasurement>,
sum: f64,
ema: f64,
ema_smoothing_factor: f64,
max_history_length: usize,
/// Disabled [`Diagnostic`]s are not measured or logged.
pub is_enabled: bool,
}

@ -219,6 +223,7 @@ impl Diagnostic {
self
}

/// Get the [`DiagnosticPath`] that identifies this [`Diagnostic`].
pub fn path(&self) -> &DiagnosticPath {
&self.path
}
@ -282,10 +287,12 @@ impl Diagnostic {
self.max_history_length
}

/// All measured values from this [`Diagnostic`], up to the configured maximum history length.
pub fn values(&self) -> impl Iterator<Item = &f64> {
self.history.iter().map(|x| &x.value)
}

/// All measurements from this [`Diagnostic`], up to the configured maximum history length.
pub fn measurements(&self) -> impl Iterator<Item = &DiagnosticMeasurement> {
self.history.iter()
}
@ -293,6 +300,8 @@ impl Diagnostic {
/// Clear the history of this diagnostic.
pub fn clear_history(&mut self) {
self.history.clear();
self.sum = 0.0;
self.ema = 0.0;
}
}

@ -310,10 +319,12 @@ impl DiagnosticsStore {
self.diagnostics.insert(diagnostic.path.clone(), diagnostic);
}

/// Get the [`DiagnosticMeasurement`] with the given [`DiagnosticPath`], if it exists.
pub fn get(&self, path: &DiagnosticPath) -> Option<&Diagnostic> {
self.diagnostics.get(path)
}

/// Mutably get the [`DiagnosticMeasurement`] with the given [`DiagnosticPath`], if it exists.
pub fn get_mut(&mut self, path: &DiagnosticPath) -> Option<&mut Diagnostic> {
self.diagnostics.get_mut(path)
}
@ -420,3 +431,31 @@ impl RegisterDiagnostic for App {
self
}
}

#[cfg(test)]
mod tests {
use super::*;

#[test]
fn test_clear_history() {
const MEASUREMENT: f64 = 20.0;

let mut diagnostic =
Diagnostic::new(DiagnosticPath::new("test")).with_max_history_length(5);
let mut now = Instant::now();

for _ in 0..3 {
for _ in 0..5 {
diagnostic.add_measurement(DiagnosticMeasurement {
time: now,
value: MEASUREMENT,
});
// Increase time to test smoothed average.
now += Duration::from_secs(1);
}
assert!((diagnostic.average().unwrap() - MEASUREMENT).abs() < 0.1);
assert!((diagnostic.smoothed().unwrap() - MEASUREMENT).abs() < 0.1);
diagnostic.clear_history();
}
}
}

@ -19,8 +19,10 @@ impl Plugin for EntityCountDiagnosticsPlugin {
}

impl EntityCountDiagnosticsPlugin {
/// Number of currently allocated entities.
pub const ENTITY_COUNT: DiagnosticPath = DiagnosticPath::const_new("entity_count");

/// Updates entity count measurement.
pub fn diagnostic_system(mut diagnostics: Diagnostics, entities: &Entities) {
diagnostics.add_measurement(&Self::ENTITY_COUNT, || entities.len() as f64);
}

@ -58,10 +58,16 @@ impl Plugin for FrameTimeDiagnosticsPlugin {
}

impl FrameTimeDiagnosticsPlugin {
/// Frames per second.
pub const FPS: DiagnosticPath = DiagnosticPath::const_new("fps");

/// Total frames since application start.
pub const FRAME_COUNT: DiagnosticPath = DiagnosticPath::const_new("frame_count");

/// Frame time in ms.
pub const FRAME_TIME: DiagnosticPath = DiagnosticPath::const_new("frame_time");

/// Updates frame count, frame time and fps measurements.
pub fn diagnostic_system(
mut diagnostics: Diagnostics,
time: Res<Time<Real>>,
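With the path constants documented above, the published measurements can be read back from `DiagnosticsStore` in an ordinary system. A small sketch, assuming `FrameTimeDiagnosticsPlugin` has been added to the app and the `log` crate is available:

```rust
use bevy_diagnostic::{DiagnosticsStore, FrameTimeDiagnosticsPlugin};
use bevy_ecs::prelude::Res;

// Logs the smoothed FPS measurement published by FrameTimeDiagnosticsPlugin, if any.
fn report_fps(store: Res<DiagnosticsStore>) {
    if let Some(fps) = store
        .get(&FrameTimeDiagnosticsPlugin::FPS)
        .and_then(|diagnostic| diagnostic.smoothed())
    {
        log::info!("fps: {fps:.1}");
    }
}
```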
@ -1,13 +1,12 @@
#![expect(missing_docs, reason = "Not all docs are written yet, see #3492.")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![forbid(unsafe_code)]
#![doc(
html_logo_url = "https://bevyengine.org/assets/icon.png",
html_favicon_url = "https://bevyengine.org/assets/icon.png"
html_logo_url = "https://bevy.org/assets/icon.png",
html_favicon_url = "https://bevy.org/assets/icon.png"
)]
#![no_std]

//! This crate provides a straightforward solution for integrating diagnostics in the [Bevy game engine](https://bevyengine.org/).
//! This crate provides a straightforward solution for integrating diagnostics in the [Bevy game engine](https://bevy.org/).
//! It allows users to easily add diagnostic functionality to their Bevy applications, enhancing
//! their ability to monitor and optimize their game's.

@ -29,7 +28,7 @@ pub use diagnostic::*;
pub use entity_count_diagnostics_plugin::EntityCountDiagnosticsPlugin;
pub use frame_count_diagnostics_plugin::{update_frame_count, FrameCount, FrameCountPlugin};
pub use frame_time_diagnostics_plugin::FrameTimeDiagnosticsPlugin;
pub use log_diagnostics_plugin::LogDiagnosticsPlugin;
pub use log_diagnostics_plugin::{LogDiagnosticsPlugin, LogDiagnosticsState};
#[cfg(feature = "sysinfo_plugin")]
pub use system_information_diagnostics_plugin::{SystemInfo, SystemInformationDiagnosticsPlugin};

@ -1,7 +1,8 @@
use super::{Diagnostic, DiagnosticPath, DiagnosticsStore};
use alloc::vec::Vec;

use bevy_app::prelude::*;
use bevy_ecs::prelude::*;
use bevy_platform::collections::HashSet;
use bevy_time::{Real, Time, Timer, TimerMode};
use core::time::Duration;
use log::{debug, info};
@ -14,16 +15,76 @@ use log::{debug, info};
///
/// When no diagnostics are provided, this plugin does nothing.
pub struct LogDiagnosticsPlugin {
/// If `true` then the `Debug` representation of each `Diagnostic` is logged.
/// If `false` then a (smoothed) current value and historical average are logged.
///
/// Defaults to `false`.
pub debug: bool,
/// Time to wait between logging diagnostics and logging them again.
pub wait_duration: Duration,
pub filter: Option<Vec<DiagnosticPath>>,
/// If `Some` then only these diagnostics are logged.
pub filter: Option<HashSet<DiagnosticPath>>,
}

/// State used by the [`LogDiagnosticsPlugin`]
#[derive(Resource)]
struct LogDiagnosticsState {
pub struct LogDiagnosticsState {
timer: Timer,
filter: Option<Vec<DiagnosticPath>>,
filter: Option<HashSet<DiagnosticPath>>,
}

impl LogDiagnosticsState {
/// Sets a new duration for the log timer
pub fn set_timer_duration(&mut self, duration: Duration) {
self.timer.set_duration(duration);
self.timer.set_elapsed(Duration::ZERO);
}

/// Add a filter to the log state, returning `true` if the [`DiagnosticPath`]
/// was not present
pub fn add_filter(&mut self, diagnostic_path: DiagnosticPath) -> bool {
if let Some(filter) = &mut self.filter {
filter.insert(diagnostic_path)
} else {
self.filter = Some(HashSet::from_iter([diagnostic_path]));
true
}
}

/// Extends the filter of the log state with multiple [`DiagnosticPaths`](DiagnosticPath)
pub fn extend_filter(&mut self, iter: impl IntoIterator<Item = DiagnosticPath>) {
if let Some(filter) = &mut self.filter {
filter.extend(iter);
} else {
self.filter = Some(HashSet::from_iter(iter));
}
}

/// Removes a filter from the log state, returning `true` if it was present
pub fn remove_filter(&mut self, diagnostic_path: &DiagnosticPath) -> bool {
if let Some(filter) = &mut self.filter {
filter.remove(diagnostic_path)
} else {
false
}
}

/// Clears the filters of the log state
pub fn clear_filter(&mut self) {
if let Some(filter) = &mut self.filter {
filter.clear();
}
}

/// Enables filtering with empty filters
pub fn enable_filtering(&mut self) {
self.filter = Some(HashSet::new());
}

/// Disables filtering
pub fn disable_filtering(&mut self) {
self.filter = None;
}
}

impl Default for LogDiagnosticsPlugin {
@ -52,7 +113,8 @@ impl Plugin for LogDiagnosticsPlugin {
}

impl LogDiagnosticsPlugin {
pub fn filtered(filter: Vec<DiagnosticPath>) -> Self {
/// Filter logging to only the paths in `filter`.
pub fn filtered(filter: HashSet<DiagnosticPath>) -> Self {
LogDiagnosticsPlugin {
filter: Some(filter),
..Default::default()
@ -65,7 +127,7 @@ impl LogDiagnosticsPlugin {
mut callback: impl FnMut(&Diagnostic),
) {
if let Some(filter) = &state.filter {
for path in filter {
for path in filter.iter() {
if let Some(diagnostic) = diagnostics.get(path) {
if diagnostic.is_enabled {
callback(diagnostic);
@ -128,7 +190,7 @@ impl LogDiagnosticsPlugin {
time: Res<Time<Real>>,
diagnostics: Res<DiagnosticsStore>,
) {
if state.timer.tick(time.delta()).finished() {
if state.timer.tick(time.delta()).is_finished() {
Self::log_diagnostics(&state, &diagnostics);
}
}
@ -138,7 +200,7 @@ impl LogDiagnosticsPlugin {
time: Res<Time<Real>>,
diagnostics: Res<DiagnosticsStore>,
) {
if state.timer.tick(time.delta()).finished() {
if state.timer.tick(time.delta()).is_finished() {
Self::for_each_diagnostic(&state, &diagnostics, |diagnostic| {
debug!("{:#?}\n", diagnostic);
});
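Because `LogDiagnosticsState` is now a public resource, the filter and timer can be adjusted at runtime from an ordinary system. A sketch, assuming `LogDiagnosticsPlugin` has been added so the resource exists:

```rust
use bevy_diagnostic::{FrameTimeDiagnosticsPlugin, LogDiagnosticsState};
use bevy_ecs::prelude::ResMut;
use core::time::Duration;

// Narrows logging to the FPS diagnostic and slows the log cadence to every five seconds.
fn tune_diagnostic_logging(mut state: ResMut<LogDiagnosticsState>) {
    state.set_timer_duration(Duration::from_secs(5));
    state.enable_filtering();
    state.add_filter(FrameTimeDiagnosticsPlugin::FPS);
}
```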
@ -46,10 +46,15 @@ impl SystemInformationDiagnosticsPlugin {
/// [`SystemInformationDiagnosticsPlugin`] for more information.
#[derive(Debug, Resource)]
pub struct SystemInfo {
/// OS name and version.
pub os: String,
/// System kernel version.
pub kernel: String,
/// CPU model name.
pub cpu: String,
/// Physical core count.
pub core_count: String,
/// System RAM.
pub memory: String,
}

@ -3,7 +3,7 @@ name = "bevy_dylib"
version = "0.16.0-dev"
edition = "2024"
description = "Force the Bevy Engine to be dynamically linked for faster linking"
homepage = "https://bevyengine.org"
homepage = "https://bevy.org"
repository = "https://github.com/bevyengine/bevy"
license = "MIT OR Apache-2.0"
keywords = ["bevy"]

@ -1,7 +1,7 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc(
html_logo_url = "https://bevyengine.org/assets/icon.png",
html_favicon_url = "https://bevyengine.org/assets/icon.png"
html_logo_url = "https://bevy.org/assets/icon.png",
html_favicon_url = "https://bevy.org/assets/icon.png"
)]

//! Forces dynamic linking of Bevy.

@ -3,12 +3,12 @@ name = "bevy_ecs"
version = "0.16.0-dev"
edition = "2024"
description = "Bevy Engine's entity component system"
homepage = "https://bevyengine.org"
homepage = "https://bevy.org"
repository = "https://github.com/bevyengine/bevy"
license = "MIT OR Apache-2.0"
keywords = ["ecs", "game", "bevy"]
categories = ["game-engines", "data-structures"]
rust-version = "1.85.0"
rust-version = "1.86.0"

[features]
default = ["std", "bevy_reflect", "async_executor", "backtrace"]
@ -20,12 +20,7 @@ default = ["std", "bevy_reflect", "async_executor", "backtrace"]
multi_threaded = ["bevy_tasks/multi_threaded", "dep:arrayvec"]

## Adds serialization support through `serde`.
serialize = [
"dep:serde",
"bevy_utils/serde",
"bevy_platform/serialize",
"indexmap/serde",
]
serialize = ["dep:serde", "bevy_platform/serialize", "indexmap/serde"]

## Adds runtime reflection support using `bevy_reflect`.
bevy_reflect = ["dep:bevy_reflect"]
@ -68,7 +63,7 @@ async_executor = ["std", "bevy_tasks/async_executor"]
std = [
"bevy_reflect?/std",
"bevy_tasks/std",
"bevy_utils/std",
"bevy_utils/parallel",
"bitflags/std",
"concurrent-queue/std",
"disqualified/alloc",
@ -89,15 +84,15 @@ critical-section = [
"bevy_reflect?/critical-section",
]

hotpatching = ["dep:subsecond"]

[dependencies]
bevy_ptr = { path = "../bevy_ptr", version = "0.16.0-dev" }
bevy_reflect = { path = "../bevy_reflect", version = "0.16.0-dev", features = [
"smallvec",
], default-features = false, optional = true }
bevy_tasks = { path = "../bevy_tasks", version = "0.16.0-dev", default-features = false }
bevy_utils = { path = "../bevy_utils", version = "0.16.0-dev", default-features = false, features = [
"alloc",
] }
bevy_utils = { path = "../bevy_utils", version = "0.16.0-dev", default-features = false }
bevy_ecs_macros = { path = "macros", version = "0.16.0-dev" }
bevy_platform = { path = "../bevy_platform", version = "0.16.0-dev", default-features = false, features = [
"alloc",
@ -125,6 +120,7 @@ variadics_please = { version = "1.1", default-features = false }
tracing = { version = "0.1", default-features = false, optional = true }
log = { version = "0.4", default-features = false }
bumpalo = "3"
subsecond = { version = "0.7.0-alpha.1", optional = true }

concurrent-queue = { version = "2.5.0", default-features = false }
[target.'cfg(not(all(target_has_atomic = "8", target_has_atomic = "16", target_has_atomic = "32", target_has_atomic = "64", target_has_atomic = "ptr")))'.dependencies]

@ -349,4 +349,4 @@ world.flush();
world.trigger_targets(Explode, entity);
```

[bevy]: https://bevyengine.org/
[bevy]: https://bevy.org/

@ -2,7 +2,7 @@
name = "bevy_ecs_compile_fail"
edition = "2024"
description = "Compile fail tests for Bevy Engine's entity component system"
homepage = "https://bevyengine.org"
homepage = "https://bevy.org"
repository = "https://github.com/bevyengine/bevy"
license = "MIT OR Apache-2.0"
publish = false

@ -84,7 +84,7 @@ fn print_changed_entities(
entity_with_mutated_component: Query<(Entity, &Age), Changed<Age>>,
) {
for entity in &entity_with_added_component {
println!(" {entity} has it's first birthday!");
println!(" {entity} has its first birthday!");
}
for (entity, value) in &entity_with_mutated_component {
println!(" {entity} is now {value:?} frames old");

@ -38,7 +38,7 @@ pub fn derive_event(input: TokenStream) -> TokenStream {
traversal = meta.value()?.parse()?;
Ok(())
}
Some(ident) => Err(meta.error(format!("unsupported attribute: {}", ident))),
Some(ident) => Err(meta.error(format!("unsupported attribute: {ident}"))),
None => Err(meta.error("expected identifier")),
}) {
return e.to_compile_error().into();

@ -34,7 +34,7 @@ pub fn derive_bundle(input: TokenStream) -> TokenStream {
let ast = parse_macro_input!(input as DeriveInput);
let ecs_path = bevy_ecs_path();

let named_fields = match get_struct_fields(&ast.data) {
let named_fields = match get_struct_fields(&ast.data, "derive(Bundle)") {
Ok(fields) => fields,
Err(e) => return e.into_compile_error().into(),
};
@ -191,12 +191,14 @@ pub fn derive_bundle(input: TokenStream) -> TokenStream {
pub fn derive_map_entities(input: TokenStream) -> TokenStream {
let ast = parse_macro_input!(input as DeriveInput);
let ecs_path = bevy_ecs_path();

let map_entities_impl = map_entities(
&ast.data,
Ident::new("self", Span::call_site()),
false,
false,
);

let struct_name = &ast.ident;
let (impl_generics, type_generics, where_clause) = &ast.generics.split_for_impl();
TokenStream::from(quote! {
@ -240,7 +242,7 @@ pub fn derive_system_param(input: TokenStream) -> TokenStream {
.as_ref()
.map(|f| quote! { #f })
.unwrap_or_else(|| quote! { #i });
field_names.push(format!("::{}", field_value));
field_names.push(format!("::{field_value}"));
fields.push(field_value);
field_types.push(&field.ty);
let mut field_message = None;
@ -430,11 +432,6 @@ pub fn derive_system_param(input: TokenStream) -> TokenStream {
}
}

unsafe fn new_archetype(state: &mut Self::State, archetype: &#path::archetype::Archetype, system_meta: &mut #path::system::SystemMeta) {
// SAFETY: The caller ensures that `archetype` is from the World the state was initialized from in `init_state`.
unsafe { <#fields_alias::<'_, '_, #punctuated_generic_idents> as #path::system::SystemParam>::new_archetype(&mut state.state, archetype, system_meta) }
}

fn apply(state: &mut Self::State, system_meta: &#path::system::SystemMeta, world: &mut #path::world::World) {
<#fields_alias::<'_, '_, #punctuated_generic_idents> as #path::system::SystemParam>::apply(&mut state.state, system_meta, world);
}
@ -445,7 +442,7 @@ pub fn derive_system_param(input: TokenStream) -> TokenStream {

#[inline]
unsafe fn validate_param<'w, 's>(
state: &'s Self::State,
state: &'s mut Self::State,
_system_meta: &#path::system::SystemMeta,
_world: #path::world::unsafe_world_cell::UnsafeWorldCell<'w>,
) -> Result<(), #path::system::SystemParamValidationError> {
@ -503,12 +500,10 @@ pub fn derive_schedule_label(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
let mut trait_path = bevy_ecs_path();
trait_path.segments.push(format_ident!("schedule").into());
let mut dyn_eq_path = trait_path.clone();
trait_path
.segments
.push(format_ident!("ScheduleLabel").into());
dyn_eq_path.segments.push(format_ident!("DynEq").into());
derive_label(input, "ScheduleLabel", &trait_path, &dyn_eq_path)
derive_label(input, "ScheduleLabel", &trait_path)
}

/// Derive macro generating an impl of the trait `SystemSet`.
@ -519,10 +514,8 @@ pub fn derive_system_set(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
let mut trait_path = bevy_ecs_path();
trait_path.segments.push(format_ident!("schedule").into());
let mut dyn_eq_path = trait_path.clone();
trait_path.segments.push(format_ident!("SystemSet").into());
dyn_eq_path.segments.push(format_ident!("DynEq").into());
derive_label(input, "SystemSet", &trait_path, &dyn_eq_path)
derive_label(input, "SystemSet", &trait_path)
}

pub(crate) fn bevy_ecs_path() -> syn::Path {

@ -92,7 +92,7 @@ pub(crate) fn world_query_impl(
}
}

// SAFETY: `update_component_access` and `update_archetype_component_access` are called on every field
// SAFETY: `update_component_access` is called on every field
unsafe impl #user_impl_generics #path::query::WorldQuery
for #struct_name #user_ty_generics #user_where_clauses {

@ -23,15 +23,21 @@ use crate::{
bundle::BundleId,
component::{ComponentId, Components, RequiredComponentConstructor, StorageType},
entity::{Entity, EntityLocation},
event::Event,
observer::Observers,
storage::{ImmutableSparseSet, SparseArray, SparseSet, SparseSetIndex, TableId, TableRow},
storage::{ImmutableSparseSet, SparseArray, SparseSet, TableId, TableRow},
};
use alloc::{boxed::Box, vec::Vec};
use bevy_platform::collections::HashMap;
use bevy_platform::collections::{hash_map::Entry, HashMap};
use core::{
hash::Hash,
ops::{Index, IndexMut, RangeFrom},
};
use nonmax::NonMaxU32;

#[derive(Event)]
#[expect(dead_code, reason = "Prepare for the upcoming Query as Entities")]
pub(crate) struct ArchetypeCreated(pub ArchetypeId);

/// An opaque location within a [`Archetype`].
///
@ -44,23 +50,30 @@ use core::{
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
// SAFETY: Must be repr(transparent) due to the safety requirements on EntityLocation
#[repr(transparent)]
pub struct ArchetypeRow(u32);
pub struct ArchetypeRow(NonMaxU32);

impl ArchetypeRow {
/// Index indicating an invalid archetype row.
/// This is meant to be used as a placeholder.
pub const INVALID: ArchetypeRow = ArchetypeRow(u32::MAX);
// TODO: Deprecate in favor of options, since `INVALID` is, technically, valid.
pub const INVALID: ArchetypeRow = ArchetypeRow(NonMaxU32::MAX);

/// Creates a `ArchetypeRow`.
#[inline]
pub const fn new(index: usize) -> Self {
Self(index as u32)
pub const fn new(index: NonMaxU32) -> Self {
Self(index)
}

/// Gets the index of the row.
#[inline]
pub const fn index(self) -> usize {
self.0 as usize
self.0.get() as usize
}

/// Gets the index of the row.
#[inline]
pub const fn index_u32(self) -> u32 {
self.0.get()
}
}
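The switch from a bare `u32` to `nonmax::NonMaxU32` buys a niche: `u32::MAX` is reserved, so wrapping the row in `Option` costs no extra space. A small standalone check of that property (not Bevy code, just the `nonmax` crate pulled in above):

```rust
use core::mem::size_of;
use nonmax::NonMaxU32;

// Mirrors the new ArchetypeRow layout: a newtype over NonMaxU32.
struct Row(NonMaxU32);

fn main() {
    // The niche makes Option<Row> the same size as Row itself.
    assert_eq!(size_of::<Row>(), 4);
    assert_eq!(size_of::<Option<Row>>(), 4);

    let row = NonMaxU32::new(7).map(Row).expect("7 is not u32::MAX");
    assert_eq!(row.0.get(), 7);
}
```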
||||
@ -341,7 +354,6 @@ pub(crate) struct ArchetypeSwapRemoveResult {
|
||||
/// [`Component`]: crate::component::Component
|
||||
struct ArchetypeComponentInfo {
|
||||
storage_type: StorageType,
|
||||
archetype_component_id: ArchetypeComponentId,
|
||||
}
|
||||
|
||||
bitflags::bitflags! {
|
||||
@ -386,14 +398,14 @@ impl Archetype {
|
||||
observers: &Observers,
|
||||
id: ArchetypeId,
|
||||
table_id: TableId,
|
||||
table_components: impl Iterator<Item = (ComponentId, ArchetypeComponentId)>,
|
||||
sparse_set_components: impl Iterator<Item = (ComponentId, ArchetypeComponentId)>,
|
||||
table_components: impl Iterator<Item = ComponentId>,
|
||||
sparse_set_components: impl Iterator<Item = ComponentId>,
|
||||
) -> Self {
|
||||
let (min_table, _) = table_components.size_hint();
|
||||
let (min_sparse, _) = sparse_set_components.size_hint();
|
||||
let mut flags = ArchetypeFlags::empty();
|
||||
let mut archetype_components = SparseSet::with_capacity(min_table + min_sparse);
|
||||
for (idx, (component_id, archetype_component_id)) in table_components.enumerate() {
|
||||
for (idx, component_id) in table_components.enumerate() {
|
||||
// SAFETY: We are creating an archetype that includes this component so it must exist
|
||||
let info = unsafe { components.get_info_unchecked(component_id) };
|
||||
info.update_archetype_flags(&mut flags);
|
||||
@ -402,7 +414,6 @@ impl Archetype {
|
||||
component_id,
|
||||
ArchetypeComponentInfo {
|
||||
storage_type: StorageType::Table,
|
||||
archetype_component_id,
|
||||
},
|
||||
);
|
||||
// NOTE: the `table_components` are sorted AND they were inserted in the `Table` in the same
|
||||
@ -414,7 +425,7 @@ impl Archetype {
|
||||
.insert(id, ArchetypeRecord { column: Some(idx) });
|
||||
}
|
||||
|
||||
for (component_id, archetype_component_id) in sparse_set_components {
|
||||
for component_id in sparse_set_components {
|
||||
// SAFETY: We are creating an archetype that includes this component so it must exist
|
||||
let info = unsafe { components.get_info_unchecked(component_id) };
|
||||
info.update_archetype_flags(&mut flags);
|
||||
@ -423,7 +434,6 @@ impl Archetype {
|
||||
component_id,
|
||||
ArchetypeComponentInfo {
|
||||
storage_type: StorageType::SparseSet,
|
||||
archetype_component_id,
|
||||
},
|
||||
);
|
||||
component_index
|
||||
@ -467,6 +477,27 @@ impl Archetype {
|
||||
&self.entities
|
||||
}
|
||||
|
||||
/// Fetches the entities contained in this archetype.
|
||||
#[inline]
|
||||
pub fn entities_with_location(&self) -> impl Iterator<Item = (Entity, EntityLocation)> {
|
||||
self.entities.iter().enumerate().map(
|
||||
|(archetype_row, &ArchetypeEntity { entity, table_row })| {
|
||||
(
|
||||
entity,
|
||||
EntityLocation {
|
||||
archetype_id: self.id,
|
||||
// SAFETY: The entities in the archetype must be unique and there are never more than u32::MAX entities.
|
||||
archetype_row: unsafe {
|
||||
ArchetypeRow::new(NonMaxU32::new_unchecked(archetype_row as u32))
|
||||
},
|
||||
table_id: self.table_id,
|
||||
table_row,
|
||||
},
|
||||
)
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
/// Gets an iterator of all of the components stored in [`Table`]s.
|
||||
///
|
||||
/// All of the IDs are unique.
|
||||
@ -507,16 +538,6 @@ impl Archetype {
|
||||
self.components.len()
|
||||
}
|
||||
|
||||
/// Gets an iterator of all of the components in the archetype, along with
|
||||
/// their archetype component ID.
|
||||
pub(crate) fn components_with_archetype_component_id(
|
||||
&self,
|
||||
) -> impl Iterator<Item = (ComponentId, ArchetypeComponentId)> + '_ {
|
||||
self.components
|
||||
.iter()
|
||||
.map(|(component_id, info)| (*component_id, info.archetype_component_id))
|
||||
}
|
||||
|
||||
/// Fetches an immutable reference to the archetype's [`Edges`], a cache of
|
||||
/// archetypal relationships.
|
||||
#[inline]
|
||||
@ -569,7 +590,8 @@ impl Archetype {
|
||||
entity: Entity,
|
||||
table_row: TableRow,
|
||||
) -> EntityLocation {
|
||||
let archetype_row = ArchetypeRow::new(self.entities.len());
|
||||
// SAFETY: An entity can not have multiple archetype rows and there can not be more than u32::MAX entities.
|
||||
let archetype_row = unsafe { ArchetypeRow::new(NonMaxU32::new_unchecked(self.len())) };
|
||||
self.entities.push(ArchetypeEntity { entity, table_row });
|
||||
|
||||
EntityLocation {
|
||||
@ -606,8 +628,10 @@ impl Archetype {
|
||||
|
||||
/// Gets the total number of entities that belong to the archetype.
|
||||
#[inline]
|
||||
pub fn len(&self) -> usize {
|
||||
self.entities.len()
|
||||
pub fn len(&self) -> u32 {
|
||||
// No entity may have more than one archetype row, so there are no duplicates,
|
||||
// and there may only ever be u32::MAX entities, so the length never exceeds u32's cappacity.
|
||||
self.entities.len() as u32
|
||||
}
|
||||
|
||||
/// Checks if the archetype has any entities.
|
||||
@ -632,19 +656,6 @@ impl Archetype {
|
||||
.map(|info| info.storage_type)
|
||||
}
|
||||
|
||||
/// Fetches the corresponding [`ArchetypeComponentId`] for a component in the archetype.
|
||||
/// Returns `None` if the component is not part of the archetype.
|
||||
/// This runs in `O(1)` time.
|
||||
#[inline]
|
||||
pub fn get_archetype_component_id(
|
||||
&self,
|
||||
component_id: ComponentId,
|
||||
) -> Option<ArchetypeComponentId> {
|
||||
self.components
|
||||
.get(component_id)
|
||||
.map(|info| info.archetype_component_id)
|
||||
}
|
||||
|
||||
/// Clears all entities from the archetype.
|
||||
pub(crate) fn clear_entities(&mut self) {
|
||||
self.entities.clear();
|
||||
@ -742,46 +753,6 @@ struct ArchetypeComponents {
|
||||
sparse_set_components: Box<[ComponentId]>,
|
||||
}
|
||||
|
||||
/// An opaque unique joint ID for a [`Component`] in an [`Archetype`] within a [`World`].
|
||||
///
|
||||
/// A component may be present within multiple archetypes, but each component within
|
||||
/// each archetype has its own unique `ArchetypeComponentId`. This is leveraged by the system
|
||||
/// schedulers to opportunistically run multiple systems in parallel that would otherwise
|
||||
/// conflict. For example, `Query<&mut A, With<B>>` and `Query<&mut A, Without<B>>` can run in
|
||||
/// parallel as the matched `ArchetypeComponentId` sets for both queries are disjoint, even
|
||||
/// though `&mut A` on both queries point to the same [`ComponentId`].
|
||||
///
|
||||
/// In SQL terms, these IDs are composite keys on a [many-to-many relationship] between archetypes
|
||||
/// and components. Each component type will have only one [`ComponentId`], but may have many
|
||||
/// [`ArchetypeComponentId`]s, one for every archetype the component is present in. Likewise, each
|
||||
/// archetype will have only one [`ArchetypeId`] but may have many [`ArchetypeComponentId`]s, one
|
||||
/// for each component that belongs to the archetype.
|
||||
///
|
||||
/// Every [`Resource`] is also assigned one of these IDs. As resources do not belong to any
|
||||
/// particular archetype, a resource's ID uniquely identifies it.
|
||||
///
|
||||
/// These IDs are only valid within a given World, and are not globally unique.
|
||||
/// Attempting to use an ID on a world that it wasn't sourced from will
|
||||
/// not point to the same archetype nor the same component.
|
||||
///
|
||||
/// [`Component`]: crate::component::Component
|
||||
/// [`World`]: crate::world::World
|
||||
/// [`Resource`]: crate::resource::Resource
|
||||
/// [many-to-many relationship]: https://en.wikipedia.org/wiki/Many-to-many_(data_model)
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
|
||||
pub struct ArchetypeComponentId(usize);
|
||||
|
||||
impl SparseSetIndex for ArchetypeComponentId {
|
||||
#[inline]
|
||||
fn sparse_set_index(&self) -> usize {
|
||||
self.0
|
||||
}
|
||||
|
||||
fn get_sparse_set_index(value: usize) -> Self {
|
||||
Self(value)
|
||||
}
|
||||
}
|
||||
|
||||
/// Maps a [`ComponentId`] to the list of [`Archetypes`]([`Archetype`]) that contain the [`Component`](crate::component::Component),
|
||||
/// along with an [`ArchetypeRecord`] which contains some metadata about how the component is stored in the archetype.
|
||||
pub type ComponentIndex = HashMap<ComponentId, HashMap<ArchetypeId, ArchetypeRecord>>;
@@ -794,7 +765,6 @@ pub type ComponentIndex = HashMap<ComponentId, HashMap<ArchetypeId, ArchetypeRec
 /// [module level documentation]: crate::archetype
 pub struct Archetypes {
     pub(crate) archetypes: Vec<Archetype>,
-    archetype_component_count: usize,
     /// find the archetype id by the archetype's components
     by_components: HashMap<ArchetypeComponents, ArchetypeId>,
     /// find all the archetypes that contain a component
@@ -818,7 +788,6 @@ impl Archetypes {
             archetypes: Vec::new(),
             by_components: Default::default(),
             by_component: Default::default(),
-            archetype_component_count: 0,
         };
         // SAFETY: Empty archetype has no components
         unsafe {
@@ -873,22 +842,6 @@ impl Archetypes {
         }
     }

-    /// Generate and store a new [`ArchetypeComponentId`].
-    ///
-    /// This simply increment the counter and return the new value.
-    ///
-    /// # Panics
-    ///
-    /// On archetype component id overflow.
-    pub(crate) fn new_archetype_component_id(&mut self) -> ArchetypeComponentId {
-        let id = ArchetypeComponentId(self.archetype_component_count);
-        self.archetype_component_count = self
-            .archetype_component_count
-            .checked_add(1)
-            .expect("archetype_component_count overflow");
-        id
-    }
-
     /// Fetches an immutable reference to an [`Archetype`] using its
     /// ID. Returns `None` if no corresponding archetype exists.
     #[inline]
@@ -921,6 +874,10 @@ impl Archetypes {
     }

     /// Gets the archetype id matching the given inputs or inserts a new one if it doesn't exist.
+    ///
+    /// Specifically, it returns a tuple where the first element
+    /// is the [`ArchetypeId`] that the given inputs belong to, and the second element is a boolean indicating whether a new archetype was created.
+    ///
     /// `table_components` and `sparse_set_components` must be sorted
     ///
     /// # Safety
@@ -933,56 +890,35 @@ impl Archetypes {
         table_id: TableId,
         table_components: Vec<ComponentId>,
         sparse_set_components: Vec<ComponentId>,
-    ) -> ArchetypeId {
+    ) -> (ArchetypeId, bool) {
         let archetype_identity = ArchetypeComponents {
             sparse_set_components: sparse_set_components.into_boxed_slice(),
             table_components: table_components.into_boxed_slice(),
         };

         let archetypes = &mut self.archetypes;
-        let archetype_component_count = &mut self.archetype_component_count;
         let component_index = &mut self.by_component;
-        *self
-            .by_components
-            .entry(archetype_identity)
-            .or_insert_with_key(move |identity| {
+        match self.by_components.entry(archetype_identity) {
+            Entry::Occupied(occupied) => (*occupied.get(), false),
+            Entry::Vacant(vacant) => {
                 let ArchetypeComponents {
                     table_components,
                     sparse_set_components,
-                } = identity;
+                } = vacant.key();
                 let id = ArchetypeId::new(archetypes.len());
-                let table_start = *archetype_component_count;
-                *archetype_component_count += table_components.len();
-                let table_archetype_components =
-                    (table_start..*archetype_component_count).map(ArchetypeComponentId);
-                let sparse_start = *archetype_component_count;
-                *archetype_component_count += sparse_set_components.len();
-                let sparse_set_archetype_components =
-                    (sparse_start..*archetype_component_count).map(ArchetypeComponentId);
                 archetypes.push(Archetype::new(
                     components,
                     component_index,
                     observers,
                     id,
                     table_id,
-                    table_components
-                        .iter()
-                        .copied()
-                        .zip(table_archetype_components),
-                    sparse_set_components
-                        .iter()
-                        .copied()
-                        .zip(sparse_set_archetype_components),
+                    table_components.iter().copied(),
+                    sparse_set_components.iter().copied(),
                 ));
-                id
-            })
-    }
-
-    /// Returns the number of components that are stored in archetypes.
-    /// Note that if some component `T` is stored in more than one archetype, it will be counted once for each archetype it's present in.
-    #[inline]
-    pub fn archetype_components_len(&self) -> usize {
-        self.archetype_component_count
+                vacant.insert(id);
+                (id, true)
+            }
+        }
     }

     /// Clears all entities from all archetypes.
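The `get_id_or_insert` rewrite above trades `or_insert_with_key` for an explicit `Entry` match so the caller also learns whether the archetype was just created, which is what lets the bundle code later in this diff fire `ArchetypeCreated`. A minimal, generic sketch of that pattern using plain `std` types (names illustrative only, not the Bevy API itself):

```rust
use std::collections::{hash_map::Entry, HashMap};

/// "Get or insert, and report whether a new entry was created."
fn get_or_insert_with<K: Eq + std::hash::Hash, V>(
    map: &mut HashMap<K, V>,
    key: K,
    make: impl FnOnce(&K) -> V,
) -> (&mut V, bool) {
    match map.entry(key) {
        // Already present: hand back the existing value, nothing was created.
        Entry::Occupied(occupied) => (occupied.into_mut(), false),
        // Missing: build the value from the key, insert it, and flag the creation.
        Entry::Vacant(vacant) => {
            let value = make(vacant.key());
            (vacant.insert(value), true)
        }
    }
}

fn main() {
    let mut archetype_ids: HashMap<&str, usize> = HashMap::new();
    let (_, created) = get_or_insert_with(&mut archetype_ids, "A+B", |_| 0);
    assert!(created);
    let (_, created) = get_or_insert_with(&mut archetype_ids, "A+B", |_| 1);
    assert!(!created);
}
```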

@@ -22,7 +22,7 @@ use core::ops::Range;
 /// [`EventReader::par_read`]: crate::event::EventReader::par_read
 #[derive(Clone, Debug)]
 pub struct BatchingStrategy {
-    /// The upper and lower limits for a batch of entities.
+    /// The upper and lower limits for a batch of items.
    ///
    /// Setting the bounds to the same value will result in a fixed
    /// batch size.
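Since this hunk only touches the doc wording, here is a hedged usage sketch of what that doc describes: giving the strategy equal lower and upper bounds yields a fixed batch size. It assumes `BatchingStrategy::fixed` and `QueryParIter::batching_strategy` exist as in recent `bevy_ecs` releases; the `Velocity` component and the system are illustrative.

```rust
use bevy_ecs::batching::BatchingStrategy;
use bevy_ecs::prelude::*;

#[derive(Component)]
struct Velocity(f32);

// Hedged sketch (not part of this diff): a parallel query with a fixed batch
// size, i.e. equal lower and upper bounds on a batch of items.
fn clamp_velocities(mut query: Query<&mut Velocity>) {
    query
        .par_iter_mut()
        .batching_strategy(BatchingStrategy::fixed(1024))
        .for_each(|mut velocity| velocity.0 = velocity.0.clamp(-10.0, 10.0));
}
```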

@@ -6,8 +6,8 @@ pub use bevy_ecs_macros::Bundle;

 use crate::{
     archetype::{
-        Archetype, ArchetypeAfterBundleInsert, ArchetypeId, Archetypes, BundleComponentStatus,
-        ComponentStatus, SpawnBundleStatus,
+        Archetype, ArchetypeAfterBundleInsert, ArchetypeCreated, ArchetypeId, Archetypes,
+        BundleComponentStatus, ComponentStatus, SpawnBundleStatus,
     },
     change_detection::MaybeLocation,
     component::{
@@ -732,7 +732,7 @@ impl BundleInfo {
         }
     }

-    /// Inserts a bundle into the given archetype and returns the resulting archetype.
+    /// Inserts a bundle into the given archetype and returns the resulting archetype and whether a new archetype was created.
     /// This could be the same [`ArchetypeId`], in the event that inserting the given bundle
     /// does not result in an [`Archetype`] change.
     ///
@@ -747,12 +747,12 @@ impl BundleInfo {
         components: &Components,
         observers: &Observers,
         archetype_id: ArchetypeId,
-    ) -> ArchetypeId {
+    ) -> (ArchetypeId, bool) {
         if let Some(archetype_after_insert_id) = archetypes[archetype_id]
             .edges()
             .get_archetype_after_bundle_insert(self.id)
         {
-            return archetype_after_insert_id;
+            return (archetype_after_insert_id, false);
         }
         let mut new_table_components = Vec::new();
         let mut new_sparse_set_components = Vec::new();
@@ -806,7 +806,7 @@ impl BundleInfo {
                 added,
                 existing,
             );
-            archetype_id
+            (archetype_id, false)
         } else {
             let table_id;
             let table_components;
@@ -842,13 +842,14 @@ impl BundleInfo {
                 };
             };
             // SAFETY: ids in self must be valid
-            let new_archetype_id = archetypes.get_id_or_insert(
+            let (new_archetype_id, is_new_created) = archetypes.get_id_or_insert(
                 components,
                 observers,
                 table_id,
                 table_components,
                 sparse_set_components,
             );
+
             // Add an edge from the old archetype to the new archetype.
             archetypes[archetype_id]
                 .edges_mut()
@@ -860,11 +861,11 @@ impl BundleInfo {
                 added,
                 existing,
             );
-            new_archetype_id
+            (new_archetype_id, is_new_created)
         }
     }

-    /// Removes a bundle from the given archetype and returns the resulting archetype
+    /// Removes a bundle from the given archetype and returns the resulting archetype and whether a new archetype was created.
     /// (or `None` if the removal was invalid).
     /// This could be the same [`ArchetypeId`], in the event that removing the given bundle
     /// does not result in an [`Archetype`] change.
@@ -887,7 +888,7 @@ impl BundleInfo {
         observers: &Observers,
         archetype_id: ArchetypeId,
         intersection: bool,
-    ) -> Option<ArchetypeId> {
+    ) -> (Option<ArchetypeId>, bool) {
         // Check the archetype graph to see if the bundle has been
         // removed from this archetype in the past.
         let archetype_after_remove_result = {
@@ -898,9 +899,9 @@ impl BundleInfo {
                 edges.get_archetype_after_bundle_take(self.id())
             }
         };
-        let result = if let Some(result) = archetype_after_remove_result {
+        let (result, is_new_created) = if let Some(result) = archetype_after_remove_result {
             // This bundle removal result is cached. Just return that!
-            result
+            (result, false)
         } else {
             let mut next_table_components;
             let mut next_sparse_set_components;
@@ -925,7 +926,7 @@ impl BundleInfo {
                         current_archetype
                             .edges_mut()
                             .cache_archetype_after_bundle_take(self.id(), None);
-                        return None;
+                        return (None, false);
                     }
                 }

@@ -953,14 +954,14 @@ impl BundleInfo {
                 };
             }

-            let new_archetype_id = archetypes.get_id_or_insert(
+            let (new_archetype_id, is_new_created) = archetypes.get_id_or_insert(
                 components,
                 observers,
                 next_table_id,
                 next_table_components,
                 next_sparse_set_components,
             );
-            Some(new_archetype_id)
+            (Some(new_archetype_id), is_new_created)
         };
         let current_archetype = &mut archetypes[archetype_id];
         // Cache the result in an edge.
@@ -973,7 +974,7 @@ impl BundleInfo {
                 .edges_mut()
                 .cache_archetype_after_bundle_take(self.id(), result);
         }
-        result
+        (result, is_new_created)
     }
 }

@@ -1036,14 +1037,15 @@ impl<'w> BundleInserter<'w> {
         // SAFETY: We will not make any accesses to the command queue, component or resource data of this world
         let bundle_info = world.bundles.get_unchecked(bundle_id);
         let bundle_id = bundle_info.id();
-        let new_archetype_id = bundle_info.insert_bundle_into_archetype(
+        let (new_archetype_id, is_new_created) = bundle_info.insert_bundle_into_archetype(
             &mut world.archetypes,
             &mut world.storages,
             &world.components,
             &world.observers,
             archetype_id,
         );
-        if new_archetype_id == archetype_id {
+
+        let inserter = if new_archetype_id == archetype_id {
             let archetype = &mut world.archetypes[archetype_id];
             // SAFETY: The edge is assured to be initialized when we called insert_bundle_into_archetype
             let archetype_after_insert = unsafe {
@@ -1103,7 +1105,15 @@ impl<'w> BundleInserter<'w> {
                 world: world.as_unsafe_world_cell(),
             }
-        }
+        };
+
+        if is_new_created {
+            inserter
+                .world
+                .into_deferred()
+                .trigger(ArchetypeCreated(new_archetype_id));
+        }
+        inserter
     }

     /// # Safety
@@ -1193,16 +1203,16 @@ impl<'w> BundleInserter<'w> {
                        unsafe { entities.get(swapped_entity).debug_checked_unwrap() };
                    entities.set(
                        swapped_entity.index(),
-                        EntityLocation {
+                        Some(EntityLocation {
                            archetype_id: swapped_location.archetype_id,
                            archetype_row: location.archetype_row,
                            table_id: swapped_location.table_id,
                            table_row: swapped_location.table_row,
-                        },
+                        }),
                    );
                }
                let new_location = new_archetype.allocate(entity, result.table_row);
-                entities.set(entity.index(), new_location);
+                entities.set(entity.index(), Some(new_location));
                let after_effect = bundle_info.write_components(
                    table,
                    sparse_sets,
@@ -1242,19 +1252,19 @@ impl<'w> BundleInserter<'w> {
                        unsafe { entities.get(swapped_entity).debug_checked_unwrap() };
                    entities.set(
                        swapped_entity.index(),
-                        EntityLocation {
+                        Some(EntityLocation {
                            archetype_id: swapped_location.archetype_id,
                            archetype_row: location.archetype_row,
                            table_id: swapped_location.table_id,
                            table_row: swapped_location.table_row,
-                        },
+                        }),
                    );
                }
                // PERF: store "non bundle" components in edge, then just move those to avoid
                // redundant copies
                let move_result = table.move_to_superset_unchecked(result.table_row, new_table);
                let new_location = new_archetype.allocate(entity, move_result.new_row);
-                entities.set(entity.index(), new_location);
+                entities.set(entity.index(), Some(new_location));

                // If an entity was moved into this entity's table spot, update its table row.
                if let Some(swapped_entity) = move_result.swapped_entity {
@@ -1264,12 +1274,12 @@ impl<'w> BundleInserter<'w> {

                    entities.set(
                        swapped_entity.index(),
-                        EntityLocation {
+                        Some(EntityLocation {
                            archetype_id: swapped_location.archetype_id,
                            archetype_row: swapped_location.archetype_row,
                            table_id: swapped_location.table_id,
                            table_row: result.table_row,
-                        },
+                        }),
                    );

                    if archetype.id() == swapped_location.archetype_id {
@@ -1421,7 +1431,7 @@ impl<'w> BundleRemover<'w> {
     ) -> Option<Self> {
         let bundle_info = world.bundles.get_unchecked(bundle_id);
         // SAFETY: Caller ensures archetype and bundle ids are correct.
-        let new_archetype_id = unsafe {
+        let (new_archetype_id, is_new_created) = unsafe {
             bundle_info.remove_bundle_from_archetype(
                 &mut world.archetypes,
                 &mut world.storages,
@@ -1429,11 +1439,14 @@ impl<'w> BundleRemover<'w> {
                 &world.observers,
                 archetype_id,
                 !require_all,
-            )?
+            )
         };
+        let new_archetype_id = new_archetype_id?;
+
         if new_archetype_id == archetype_id {
             return None;
         }
+
         let (old_archetype, new_archetype) =
             world.archetypes.get_2_mut(archetype_id, new_archetype_id);

@@ -1447,13 +1460,20 @@ impl<'w> BundleRemover<'w> {
             Some((old.into(), new.into()))
         };

-        Some(Self {
+        let remover = Self {
             bundle_info: bundle_info.into(),
             new_archetype: new_archetype.into(),
             old_archetype: old_archetype.into(),
             old_and_new_table: tables,
             world: world.as_unsafe_world_cell(),
-        })
+        };
+        if is_new_created {
+            remover
+                .world
+                .into_deferred()
+                .trigger(ArchetypeCreated(new_archetype_id));
+        }
+        Some(remover)
     }

     /// This can be passed to [`remove`](Self::remove) as the `pre_remove` function if you don't want to do anything before removing.
@@ -1573,12 +1593,12 @@ impl<'w> BundleRemover<'w> {

                world.entities.set(
                    swapped_entity.index(),
-                    EntityLocation {
+                    Some(EntityLocation {
                        archetype_id: swapped_location.archetype_id,
                        archetype_row: location.archetype_row,
                        table_id: swapped_location.table_id,
                        table_row: swapped_location.table_row,
-                    },
+                    }),
                );
            }

@@ -1614,12 +1634,12 @@ impl<'w> BundleRemover<'w> {

            world.entities.set(
                swapped_entity.index(),
-                EntityLocation {
+                Some(EntityLocation {
                    archetype_id: swapped_location.archetype_id,
                    archetype_row: swapped_location.archetype_row,
                    table_id: swapped_location.table_id,
                    table_row: location.table_row,
-                },
+                }),
            );
            world.archetypes[swapped_location.archetype_id]
                .set_entity_table_row(swapped_location.archetype_row, location.table_row);
@@ -1635,7 +1655,7 @@ impl<'w> BundleRemover<'w> {

         // SAFETY: The entity is valid and has been moved to the new location already.
         unsafe {
-            world.entities.set(entity.index(), new_location);
+            world.entities.set(entity.index(), Some(new_location));
         }

         (new_location, pre_remove_result)
@@ -1675,22 +1695,30 @@ impl<'w> BundleSpawner<'w> {
         change_tick: Tick,
     ) -> Self {
         let bundle_info = world.bundles.get_unchecked(bundle_id);
-        let new_archetype_id = bundle_info.insert_bundle_into_archetype(
+        let (new_archetype_id, is_new_created) = bundle_info.insert_bundle_into_archetype(
             &mut world.archetypes,
             &mut world.storages,
             &world.components,
             &world.observers,
             ArchetypeId::EMPTY,
         );
+
         let archetype = &mut world.archetypes[new_archetype_id];
         let table = &mut world.storages.tables[archetype.table_id()];
-        Self {
+        let spawner = Self {
             bundle_info: bundle_info.into(),
             table: table.into(),
             archetype: archetype.into(),
             change_tick,
             world: world.as_unsafe_world_cell(),
+        };
+        if is_new_created {
+            spawner
+                .world
+                .into_deferred()
+                .trigger(ArchetypeCreated(new_archetype_id));
         }
+        spawner
     }

     #[inline]
@@ -1736,7 +1764,8 @@ impl<'w> BundleSpawner<'w> {
                 InsertMode::Replace,
                 caller,
             );
-            entities.set_spawn_despawn(entity.index(), location, caller, self.change_tick);
+            entities.set(entity.index(), Some(location));
+            entities.mark_spawn_despawn(entity.index(), caller, self.change_tick);
             (location, after_effect)
         };

@@ -2042,7 +2071,9 @@ fn sorted_remove<T: Eq + Ord + Copy>(source: &mut Vec<T>, remove: &[T]) {

 #[cfg(test)]
 mod tests {
-    use crate::{component::HookContext, prelude::*, world::DeferredWorld};
+    use crate::{
+        archetype::ArchetypeCreated, component::HookContext, prelude::*, world::DeferredWorld,
+    };
     use alloc::vec;

     #[derive(Component)]
@@ -2237,6 +2268,28 @@ mod tests {
         assert_eq!(entity.get(), Some(&V("one")));
     }

+    #[derive(Component, Debug, Eq, PartialEq)]
+    #[component(storage = "SparseSet")]
+    pub struct SparseV(&'static str);
+
+    #[derive(Component, Debug, Eq, PartialEq)]
+    #[component(storage = "SparseSet")]
+    pub struct SparseA;
+
+    #[test]
+    fn sparse_set_insert_if_new() {
+        let mut world = World::new();
+        let id = world.spawn(SparseV("one")).id();
+        let mut entity = world.entity_mut(id);
+        entity.insert_if_new(SparseV("two"));
+        entity.insert_if_new((SparseA, SparseV("three")));
+        entity.flush();
+        // should still contain "one"
+        let entity = world.entity(id);
+        assert!(entity.contains::<SparseA>());
+        assert_eq!(entity.get(), Some(&SparseV("one")));
+    }
+
     #[test]
     fn sorted_remove() {
         let mut a = vec![1, 2, 3, 4, 5, 6, 7];
@@ -2257,4 +2310,23 @@ mod tests {

         assert_eq!(a, vec![1]);
     }
+
+    #[test]
+    fn new_archetype_created() {
+        let mut world = World::new();
+        #[derive(Resource, Default)]
+        struct Count(u32);
+        world.init_resource::<Count>();
+        world.add_observer(|_t: Trigger<ArchetypeCreated>, mut count: ResMut<Count>| {
+            count.0 += 1;
+        });
+
+        let mut e = world.spawn((A, B));
+        e.insert(C);
+        e.remove::<A>();
+        e.insert(A);
+        e.insert(A);
+
+        assert_eq!(world.resource::<Count>().0, 3);
+    }
 }

@@ -1517,7 +1517,7 @@ impl MaybeLocation {
     /// within a non-tracked function body.
     #[inline]
     #[track_caller]
-    pub fn caller() -> Self {
+    pub const fn caller() -> Self {
         // Note that this cannot use `new_with`, since `FnOnce` invocations cannot be annotated with `#[track_caller]`.
         MaybeLocation {
             #[cfg(feature = "track_location")]

@@ -481,7 +481,7 @@ use thiserror::Error;
 /// ```
 /// # use std::cell::RefCell;
 /// # use bevy_ecs::component::Component;
-/// use bevy_utils::synccell::SyncCell;
+/// use bevy_platform::cell::SyncCell;
 ///
 /// // This will compile.
 /// #[derive(Component)]
@@ -490,7 +490,7 @@ use thiserror::Error;
 /// }
 /// ```
 ///
-/// [`SyncCell`]: bevy_utils::synccell::SyncCell
+/// [`SyncCell`]: bevy_platform::cell::SyncCell
 /// [`Exclusive`]: https://doc.rust-lang.org/nightly/std/sync/struct.Exclusive.html
 #[diagnostic::on_unimplemented(
     message = "`{Self}` is not a `Component`",
@@ -2400,7 +2400,7 @@ impl Components {
     /// * [`World::component_id()`]
     #[inline]
     pub fn valid_component_id<T: Component>(&self) -> Option<ComponentId> {
-        self.get_id(TypeId::of::<T>())
+        self.get_valid_id(TypeId::of::<T>())
     }

     /// Type-erased equivalent of [`Components::valid_resource_id()`].
@@ -2431,7 +2431,7 @@ impl Components {
     /// * [`Components::get_resource_id()`]
     #[inline]
     pub fn valid_resource_id<T: Resource>(&self) -> Option<ComponentId> {
-        self.get_resource_id(TypeId::of::<T>())
+        self.get_valid_resource_id(TypeId::of::<T>())
     }

     /// Type-erased equivalent of [`Components::component_id()`].

@@ -5,6 +5,7 @@ use bumpalo::Bump;
 use core::any::TypeId;

 use crate::{
+    archetype::Archetype,
     bundle::Bundle,
     component::{Component, ComponentCloneBehavior, ComponentCloneFn, ComponentId, ComponentInfo},
     entity::{hash_map::EntityHashMap, Entities, Entity, EntityMapper},
@@ -340,6 +341,7 @@ impl<'a, 'b> ComponentCloneCtx<'a, 'b> {
 pub struct EntityCloner {
     filter_allows_components: bool,
     filter: HashSet<ComponentId>,
+    filter_required: HashSet<ComponentId>,
     clone_behavior_overrides: HashMap<ComponentId, ComponentCloneBehavior>,
     move_components: bool,
     linked_cloning: bool,
@@ -356,6 +358,7 @@ impl Default for EntityCloner {
             linked_cloning: false,
             default_clone_fn: ComponentCloneBehavior::global_default_fn(),
             filter: Default::default(),
+            filter_required: Default::default(),
             clone_behavior_overrides: Default::default(),
             clone_queue: Default::default(),
             deferred_commands: Default::default(),
@@ -459,6 +462,12 @@ impl EntityCloner {
     {
         let world = world.as_unsafe_world_cell();
         let source_entity = world.get_entity(source).expect("Source entity must exist");
+        let target_archetype = (!self.filter_required.is_empty()).then(|| {
+            world
+                .get_entity(target)
+                .expect("Target entity must exist")
+                .archetype()
+        });

         #[cfg(feature = "bevy_reflect")]
         // SAFETY: we have unique access to `world`, nothing else accesses the registry at this moment, and we clone
@@ -475,7 +484,7 @@ impl EntityCloner {
             bundle_scratch = BundleScratch::with_capacity(archetype.component_count());

             for component in archetype.components() {
-                if !self.is_cloning_allowed(&component) {
+                if !self.is_cloning_allowed(&component, target_archetype) {
                     continue;
                 }

@@ -599,9 +608,19 @@ impl EntityCloner {
         target
     }

-    fn is_cloning_allowed(&self, component: &ComponentId) -> bool {
-        (self.filter_allows_components && self.filter.contains(component))
-            || (!self.filter_allows_components && !self.filter.contains(component))
+    fn is_cloning_allowed(
+        &self,
+        component: &ComponentId,
+        target_archetype: Option<&Archetype>,
+    ) -> bool {
+        if self.filter_allows_components {
+            self.filter.contains(component)
+                || target_archetype.is_some_and(|archetype| {
+                    !archetype.contains(*component) && self.filter_required.contains(component)
+                })
+        } else {
+            !self.filter.contains(component) && !self.filter_required.contains(component)
+        }
     }
 }

@@ -686,7 +705,7 @@ impl<'w> EntityClonerBuilder<'w> {
     /// [`deny_all`](`Self::deny_all`) before calling any of the `allow` methods.
     pub fn allow_by_type_ids(&mut self, ids: impl IntoIterator<Item = TypeId>) -> &mut Self {
         for type_id in ids {
-            if let Some(id) = self.world.components().get_id(type_id) {
+            if let Some(id) = self.world.components().get_valid_id(type_id) {
                 self.filter_allow(id);
             }
         }
@@ -721,7 +740,7 @@ impl<'w> EntityClonerBuilder<'w> {
     /// Extends the list of components that shouldn't be cloned by type ids.
     pub fn deny_by_type_ids(&mut self, ids: impl IntoIterator<Item = TypeId>) -> &mut Self {
         for type_id in ids {
-            if let Some(id) = self.world.components().get_id(type_id) {
+            if let Some(id) = self.world.components().get_valid_id(type_id) {
                 self.filter_deny(id);
             }
         }
@@ -743,7 +762,7 @@ impl<'w> EntityClonerBuilder<'w> {
         &mut self,
         clone_behavior: ComponentCloneBehavior,
     ) -> &mut Self {
-        if let Some(id) = self.world.components().component_id::<T>() {
+        if let Some(id) = self.world.components().valid_component_id::<T>() {
             self.entity_cloner
                 .clone_behavior_overrides
                 .insert(id, clone_behavior);
@@ -768,7 +787,7 @@ impl<'w> EntityClonerBuilder<'w> {

     /// Removes a previously set override of [`ComponentCloneBehavior`] for a component in this builder.
     pub fn remove_clone_behavior_override<T: Component>(&mut self) -> &mut Self {
-        if let Some(id) = self.world.components().component_id::<T>() {
+        if let Some(id) = self.world.components().valid_component_id::<T>() {
             self.entity_cloner.clone_behavior_overrides.remove(&id);
         }
         self
@@ -803,9 +822,9 @@ impl<'w> EntityClonerBuilder<'w> {
             if let Some(info) = self.world.components().get_info(id) {
                 for required_id in info.required_components().iter_ids() {
                     if self.entity_cloner.filter_allows_components {
-                        self.entity_cloner.filter.insert(required_id);
+                        self.entity_cloner.filter_required.insert(required_id);
                     } else {
-                        self.entity_cloner.filter.remove(&required_id);
+                        self.entity_cloner.filter_required.remove(&required_id);
                     }
                 }
             }
@@ -823,9 +842,9 @@ impl<'w> EntityClonerBuilder<'w> {
             if let Some(info) = self.world.components().get_info(id) {
                 for required_id in info.required_components().iter_ids() {
                     if self.entity_cloner.filter_allows_components {
-                        self.entity_cloner.filter.remove(&required_id);
+                        self.entity_cloner.filter_required.remove(&required_id);
                     } else {
-                        self.entity_cloner.filter.insert(required_id);
+                        self.entity_cloner.filter_required.insert(required_id);
                     }
                 }
             }
@@ -1400,4 +1419,36 @@ mod tests {
         );
         assert!(world.resource::<FromWorldCalled>().0);
     }
+
+    #[test]
+    fn cloning_with_required_components_preserves_existing() {
+        #[derive(Component, Clone, PartialEq, Debug, Default)]
+        #[require(B(5))]
+        struct A;
+
+        #[derive(Component, Clone, PartialEq, Debug)]
+        struct B(u32);
+
+        let mut world = World::default();
+
+        let e = world.spawn((A, B(0))).id();
+        let e_clone = world.spawn(B(1)).id();
+
+        EntityCloner::build(&mut world)
+            .deny_all()
+            .allow::<A>()
+            .clone_entity(e, e_clone);
+
+        assert_eq!(world.entity(e_clone).get::<A>(), Some(&A));
+        assert_eq!(world.entity(e_clone).get::<B>(), Some(&B(1)));
+
+        let e_clone2 = world.spawn(B(2)).id();
+
+        EntityCloner::build(&mut world)
+            .allow_all()
+            .deny::<A>()
+            .clone_entity(e, e_clone2);
+
+        assert_eq!(world.entity(e_clone2).get::<B>(), Some(&B(2)));
+    }
 }

@@ -358,7 +358,10 @@ mod tests {
         // Next allocated entity should be a further generation on the same index
         let entity = world.spawn_empty().id();
         assert_eq!(entity.index(), dead_ref.index());
-        assert!(entity.generation() > dead_ref.generation());
+        assert!(entity
+            .generation()
+            .cmp_approx(&dead_ref.generation())
+            .is_gt());
     }

     #[test]
@@ -373,7 +376,10 @@ mod tests {
         // Next allocated entity should be a further generation on the same index
         let entity = world.spawn_empty().id();
         assert_eq!(entity.index(), dead_ref.index());
-        assert!(entity.generation() > dead_ref.generation());
+        assert!(entity
+            .generation()
+            .cmp_approx(&dead_ref.generation())
+            .is_gt());
     }

     #[test]
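Looping back to the `EntityClonerBuilder` hunks above (`allow_by_type_ids`, `deny_by_type_ids`, and the required-components test), here is a hedged usage sketch of the type-id based filter. The builder methods (`build`, `deny_all`, `allow_by_type_ids`, `clone_entity`) are taken from the diff itself; the import path and the `Health` component are assumptions.

```rust
use core::any::TypeId;

use bevy_ecs::entity::EntityCloner; // assumed export path
use bevy_ecs::prelude::*;

#[derive(Component, Clone)]
struct Health(u32);

// Hedged sketch (not part of this diff): clone only the components whose
// `TypeId`s are explicitly allowed, leaving everything else on `target` untouched.
fn copy_health_only(world: &mut World, source: Entity, target: Entity) {
    EntityCloner::build(world)
        .deny_all()
        .allow_by_type_ids([TypeId::of::<Health>()])
        .clone_entity(source, target);
}
```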