Merge branch 'main' into http-asset-sources
commit e1f593fae0
.github/example-run/testbed_ui.ron (2 changes, vendored)

@@ -1,6 +1,4 @@
 (
     events: [
-        (100, Screenshot),
-        (200, AppExit),
     ]
 )
.github/pull_request_template.md (8 changes, vendored)

@@ -36,11 +36,3 @@ println!("My super cool code.");
 ```
 
 </details>
-
-## Migration Guide
-
-> This section is optional. If there are no breaking changes, you can delete this section.
-
-- If this PR is a breaking change (relative to the last release of Bevy), describe how a user might need to migrate their code to support these changes
-- Simply adding new functionality is not a breaking change.
-- Fixing behavior that was definitely a bug, rather than a questionable design choice is not a breaking change.
.github/workflows/action-on-PR-labeled.yml (54 changes, vendored)

@@ -12,19 +12,63 @@ permissions:
   pull-requests: 'write'
 
 jobs:
-  comment-on-breaking-change-label:
+  comment-on-migration-guide-label:
     runs-on: ubuntu-latest
-    if: github.event.label.name == 'M-Needs-Migration-Guide' && !contains(github.event.pull_request.body, '## Migration Guide')
+    if: github.event.label.name == 'M-Needs-Migration-Guide'
     steps:
+      - uses: actions/checkout@v4
+        with:
+          ref: ${{ github.event.pull_request.head.sha }}
+          fetch-depth: 2
+      - name: Get changes
+        id: get_changes
+        shell: bash {0}
+        run: |
+          git fetch --depth=1 origin $BASE_SHA
+          git diff --exit-code $BASE_SHA $HEAD_SHA -- ./release-content/migration-guides
+          echo "found_changes=$?" >> $GITHUB_OUTPUT
+        env:
+          BASE_SHA: ${{ github.event.pull_request.base.sha }}
+          HEAD_SHA: ${{ github.event.pull_request.head.sha }}
       - uses: actions/github-script@v7
+        if: steps.get_changes.outputs.found_changes == '0'
         with:
           script: |
             await github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
-             body: `It looks like your PR is a breaking change, but you didn't provide a migration guide.
+             body: `It looks like your PR is a breaking change, but **you didn't provide a migration guide**.
 
-             Could you add some context on what users should update when this change get released in a new version of Bevy?
-             It will be used to help writing the migration guide for the version. Putting it after a \`## Migration Guide\` will help it get automatically picked up by our tooling.`
+             Please review the [instructions for writing migration guides](https://github.com/bevyengine/bevy/tree/main/release-content/migration_guides.md), then expand or revise the content in the [migration guides directory](https://github.com/bevyengine/bevy/tree/main/release-content/migration-guides) to reflect your changes.`
+            })
+
+  comment-on-release-note-label:
+    runs-on: ubuntu-latest
+    if: github.event.label.name == 'M-Needs-Release-Note'
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          ref: ${{ github.event.pull_request.head.sha }}
+          fetch-depth: 2
+      - name: Get changes
+        id: get_changes
+        shell: bash {0}
+        run: |
+          git fetch --depth=1 origin $BASE_SHA
+          git diff --exit-code $BASE_SHA $HEAD_SHA -- ./release-content/release-notes
+          echo "found_changes=$?" >> $GITHUB_OUTPUT
+        env:
+          BASE_SHA: ${{ github.event.pull_request.base.sha }}
+          HEAD_SHA: ${{ github.event.pull_request.head.sha }}
+      - uses: actions/github-script@v7
+        if: steps.get_changes.outputs.found_changes == '0'
+        with:
+          script: |
+            await github.rest.issues.createComment({
+             issue_number: context.issue.number,
+             owner: context.repo.owner,
+             repo: context.repo.repo,
+             body: `It looks like your PR has been selected for a highlight in the next release blog post, but **you didn't provide a release note**.
+
+             Please review the [instructions for writing release notes](https://github.com/bevyengine/bevy/tree/main/release-content/release_notes.md), then expand or revise the content in the [release notes directory](https://github.com/bevyengine/bevy/tree/main/release-content/release_notes) to showcase your changes.`
            })
.github/workflows/ci-comment-failures.yml (106 changes, vendored)

@@ -48,8 +48,21 @@ jobs:
            return "true"
      - run: unzip missing-examples.zip
        if: ${{ steps.find-artifact.outputs.result == 'true' }}
-      - name: 'Comment on PR'
+      - name: "Check if last comment is already from actions"
        if: ${{ steps.find-artifact.outputs.result == 'true' }}
+        id: check-last-comment
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          PR=`cat ./NR`
+          if [[ `gh api --jq '.[-1].user.login' /repos/bevyengine/bevy/issues/$PR/comments` == 'github-actions[bot]' ]]
+          then
+            echo "result=true" >> $GITHUB_OUTPUT
+          else
+            echo "result=false" >> $GITHUB_OUTPUT
+          fi
+      - name: "Comment on PR"
+        if: ${{ steps.find-artifact.outputs.result == 'true' && steps.check-last-comment.outputs.result == 'false' }}
        uses: actions/github-script@v7
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}

@@ -106,8 +119,21 @@ jobs:
            return "true"
      - run: unzip missing-features.zip
        if: ${{ steps.find-artifact.outputs.result == 'true' }}
-      - name: 'Comment on PR'
+      - name: "Check if last comment is already from actions"
        if: ${{ steps.find-artifact.outputs.result == 'true' }}
+        id: check-last-comment
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          PR=`cat ./NR`
+          if [[ `gh api --jq '.[-1].user.login' /repos/bevyengine/bevy/issues/$PR/comments` == 'github-actions[bot]' ]]
+          then
+            echo "result=true" >> $GITHUB_OUTPUT
+          else
+            echo "result=false" >> $GITHUB_OUTPUT
+          fi
+      - name: "Comment on PR"
+        if: ${{ steps.find-artifact.outputs.result == 'true' && steps.check-last-comment.outputs.result == 'false' }}
        uses: actions/github-script@v7
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}

@@ -164,8 +190,21 @@ jobs:
            return "true"
      - run: unzip msrv.zip
        if: ${{ steps.find-artifact.outputs.result == 'true' }}
-      - name: 'Comment on PR'
+      - name: "Check if last comment is already from actions"
        if: ${{ steps.find-artifact.outputs.result == 'true' }}
+        id: check-last-comment
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          PR=`cat ./NR`
+          if [[ `gh api --jq '.[-1].user.login' /repos/bevyengine/bevy/issues/$PR/comments` == 'github-actions[bot]' ]]
+          then
+            echo "result=true" >> $GITHUB_OUTPUT
+          else
+            echo "result=false" >> $GITHUB_OUTPUT
+          fi
+      - name: "Comment on PR"
+        if: ${{ steps.find-artifact.outputs.result == 'true' && steps.check-last-comment.outputs.result == 'false' }}
        uses: actions/github-script@v7
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}

@@ -178,64 +217,3 @@ jobs:
            issue_number: issue_number,
            body: 'Your PR increases Bevy Minimum Supported Rust Version. Please update the `rust-version` field in the root Cargo.toml file.'
          });
-
-  make-macos-screenshots-available:
-    runs-on: ubuntu-latest
-    timeout-minutes: 30
-    outputs:
-      branch-name: ${{ steps.branch-name.outputs.result }}
-    steps:
-      - name: 'Download artifact'
-        id: find-artifact
-        uses: actions/github-script@v7
-        with:
-          result-encoding: string
-          script: |
-            var artifacts = await github.rest.actions.listWorkflowRunArtifacts({
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              run_id: ${{github.event.workflow_run.id }},
-            });
-            var matchArtifacts = artifacts.data.artifacts.filter((artifact) => {
-              return artifact.name == "screenshots-macos"
-            });
-            if (matchArtifacts.length == 0) { return "false" }
-            var matchArtifact = matchArtifacts[0];
-            var download = await github.rest.actions.downloadArtifact({
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              artifact_id: matchArtifact.id,
-              archive_format: 'zip',
-            });
-            var fs = require('fs');
-            fs.writeFileSync('${{github.workspace}}/screenshots-macos.zip', Buffer.from(download.data));
-            return "true"
-      - name: prepare artifact folder
-        run: |
-          unzip screenshots-macos.zip
-          mkdir screenshots
-          mv screenshots-* screenshots/
-      - name: save screenshots
-        uses: actions/upload-artifact@v4
-        with:
-          name: screenshots-macos
-          path: screenshots
-      - name: branch name
-        id: branch-name
-        run: |
-          if [ -f PR ]; then
-            echo "result=PR-$(cat PR)-${{ github.event.workflow_run.head_branch }}" >> $GITHUB_OUTPUT
-          else
-            echo "result=${{ github.event.workflow_run.head_branch }}" >> $GITHUB_OUTPUT
-          fi
-
-  compare-macos-screenshots:
-    name: Compare macOS screenshots
-    needs: [make-macos-screenshots-available]
-    uses: ./.github/workflows/send-screenshots-to-pixeleagle.yml
-    with:
-      commit: ${{ github.event.workflow_run.head_sha }}
-      branch: ${{ needs.make-macos-screenshots-available.outputs.branch-name }}
-      artifact: screenshots-macos
-      os: macos
-    secrets: inherit
.github/workflows/ci.yml (104 changes, vendored)

@@ -5,7 +5,6 @@ on:
  pull_request:
  push:
    branches:
-      - main
      - release-*
 
env:

@@ -152,7 +151,56 @@ jobs:
      - name: Install Linux dependencies
        uses: ./.github/actions/install-linux-deps
      - name: Check Compile
-        run: cargo run -p ci -- compile-check-no-std
+        run: cargo check -p bevy --no-default-features --features default_no_std --target x86_64-unknown-none
+
+  check-compiles-no-std-portable-atomic:
+    runs-on: ubuntu-latest
+    timeout-minutes: 30
+    needs: ci
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/cache@v4
+        with:
+          path: |
+            ~/.cargo/bin/
+            ~/.cargo/registry/index/
+            ~/.cargo/registry/cache/
+            ~/.cargo/git/db/
+            target/
+            crates/bevy_ecs_compile_fail_tests/target/
+            crates/bevy_reflect_compile_fail_tests/target/
+          key: ${{ runner.os }}-cargo-check-compiles-no-std-portable-atomic-${{ hashFiles('**/Cargo.toml') }}
+      - uses: dtolnay/rust-toolchain@stable
+        with:
+          targets: thumbv6m-none-eabi
+      - name: Install Linux dependencies
+        uses: ./.github/actions/install-linux-deps
+      - name: Check Compile
+        run: cargo check -p bevy --no-default-features --features default_no_std --target thumbv6m-none-eabi
+
+  check-compiles-no-std-examples:
+    runs-on: ubuntu-latest
+    timeout-minutes: 30
+    needs: ci
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/cache@v4
+        with:
+          path: |
+            ~/.cargo/bin/
+            ~/.cargo/registry/index/
+            ~/.cargo/registry/cache/
+            ~/.cargo/git/db/
+            target/
+            crates/bevy_ecs_compile_fail_tests/target/
+            crates/bevy_reflect_compile_fail_tests/target/
+          key: ${{ runner.os }}-cargo-check-compiles-no-std-examples-${{ hashFiles('**/Cargo.toml') }}
+      - uses: dtolnay/rust-toolchain@stable
+        with:
+          targets: x86_64-unknown-none
+      - name: Install Linux dependencies
+        uses: ./.github/actions/install-linux-deps
+      - name: Check Compile
+        run: cd examples/no_std/library && cargo check --no-default-features --features libm,critical-section --target x86_64-unknown-none
 
  build-wasm:
    runs-on: ubuntu-latest

@@ -211,7 +259,7 @@ jobs:
          # Full git history is needed to get a proper list of changed files within `super-linter`
          fetch-depth: 0
      - name: Run Markdown Lint
-        uses: docker://ghcr.io/github/super-linter:slim-v4
+        uses: super-linter/super-linter/slim@v7.3.0
        env:
          MULTI_STATUS: false
          VALIDATE_ALL_CODEBASE: false

@@ -244,7 +292,7 @@ jobs:
    steps:
      - uses: actions/checkout@v4
      - name: Check for typos
-        uses: crate-ci/typos@v1.29.5
+        uses: crate-ci/typos@v1.31.1
      - name: Typos info
        if: failure()
        run: |

@@ -254,49 +302,6 @@ jobs:
          echo 'if you use VSCode, you can also install `Typos Spell Checker'
          echo 'You can find the extension here: https://marketplace.visualstudio.com/items?itemName=tekumara.typos-vscode'
 
-  run-examples-macos-metal:
-    runs-on: macos-latest
-    timeout-minutes: 30
-    steps:
-      - uses: actions/checkout@v4
-      - uses: dtolnay/rust-toolchain@stable
-      - name: Disable audio
-        # Disable audio through a patch. on github m1 runners, audio timeouts after 15 minutes
-        run: git apply --ignore-whitespace tools/example-showcase/disable-audio.patch
-      - name: Run examples
-        run: |
-          for example in .github/example-run/*.ron; do
-            example_name=`basename $example .ron`
-            echo -n $example_name > last_example_run
-            echo "running $example_name - "`date`
-            time TRACE_CHROME=trace-$example_name.json CI_TESTING_CONFIG=$example cargo run --example $example_name --features "bevy_ci_testing,trace,trace_chrome"
-            sleep 10
-            if [ `find ./ -maxdepth 1 -name 'screenshot-*.png' -print -quit` ]; then
-              mkdir screenshots-$example_name
-              mv screenshot-*.png screenshots-$example_name/
-            fi
-          done
-          mkdir traces && mv trace*.json traces/
-          mkdir screenshots && mv screenshots-* screenshots/
-      - name: save traces
-        uses: actions/upload-artifact@v4
-        with:
-          name: example-traces-macos
-          path: traces
-      - name: Save PR number
-        if: ${{ github.event_name == 'pull_request' }}
-        run: |
-          echo ${{ github.event.number }} > ./screenshots/PR
-      - name: save screenshots
-        uses: actions/upload-artifact@v4
-        with:
-          name: screenshots-macos
-          path: screenshots
-      - uses: actions/upload-artifact@v4
-        if: ${{ failure() && github.event_name == 'pull_request' }}
-        with:
-          name: example-run-macos
-          path: example-run/
 
  check-doc:
    runs-on: ubuntu-latest
    timeout-minutes: 30

@@ -335,6 +340,7 @@ jobs:
    timeout-minutes: 30
    steps:
      - uses: actions/checkout@v4
+      - uses: dtolnay/rust-toolchain@stable
      - name: check for missing metadata
        id: missing-metadata
        run: cargo run -p build-templated-pages -- check-missing examples

@@ -369,6 +375,7 @@ jobs:
    needs: check-missing-examples-in-docs
    steps:
      - uses: actions/checkout@v4
+      - uses: dtolnay/rust-toolchain@stable
      - name: check for missing features
        id: missing-features
        run: cargo run -p build-templated-pages -- check-missing features

@@ -412,6 +419,7 @@ jobs:
            ~/.cargo/git/db/
            target/
          key: ${{ runner.os }}-cargo-msrv-${{ hashFiles('**/Cargo.toml') }}
+      - uses: dtolnay/rust-toolchain@stable
      - name: get MSRV
        id: msrv
        run: |

@@ -445,7 +453,7 @@ jobs:
        shell: bash
        run: |
          errors=""
-          for file in $(find examples tests -name '*.rs'); do
+          for file in $(find examples tests -name '*.rs' -not -path 'examples/mobile/*'); do
            if grep -q "use bevy_" "$file"; then
              errors+="ERROR: Detected internal Bevy import in $file\n"
            fi
.github/workflows/docs.yml (2 changes, vendored)

@@ -59,7 +59,7 @@ jobs:
        env:
          # needs to be in sync with [package.metadata.docs.rs]
          RUSTFLAGS: --cfg docsrs_dep
-          RUSTDOCFLAGS: -Zunstable-options --cfg=docsrs --generate-link-to-definition
+          RUSTDOCFLAGS: -Zunstable-options --cfg=docsrs --generate-link-to-definition --html-after-content docs-rs/trait-tags.html
        run: |
          cargo doc \
            -Zunstable-options \
.github/workflows/example-run-report.yml (120 changes, vendored, new file)

@@ -0,0 +1,120 @@
+name: Example Run - PR Comments
+
+# This workflow has write permissions on the repo
+# It must not checkout a PR and run untrusted code
+
+# Also requesting write permissions on PR to be able to comment
+permissions:
+  pull-requests: "write"
+
+on:
+  workflow_run:
+    workflows: ["Example Run"]
+    types:
+      - completed
+
+jobs:
+  make-macos-screenshots-available:
+    if: github.event.workflow_run.event == 'pull_request'
+    runs-on: ubuntu-latest
+    timeout-minutes: 30
+    outputs:
+      branch-name: ${{ steps.branch-name.outputs.result }}
+      pr-number: ${{ steps.pr-number.outputs.result }}
+    steps:
+      - name: "Download artifact"
+        id: find-artifact
+        uses: actions/github-script@v7
+        with:
+          result-encoding: string
+          script: |
+            var artifacts = await github.rest.actions.listWorkflowRunArtifacts({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              run_id: ${{github.event.workflow_run.id }},
+            });
+            var matchArtifacts = artifacts.data.artifacts.filter((artifact) => {
+              return artifact.name == "screenshots-macos"
+            });
+            if (matchArtifacts.length == 0) { return "false" }
+            var matchArtifact = matchArtifacts[0];
+            var download = await github.rest.actions.downloadArtifact({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              artifact_id: matchArtifact.id,
+              archive_format: 'zip',
+            });
+            var fs = require('fs');
+            fs.writeFileSync('${{github.workspace}}/screenshots-macos.zip', Buffer.from(download.data));
+            return "true"
+      - name: prepare artifact folder
+        run: |
+          unzip screenshots-macos.zip
+          mkdir screenshots
+          mv screenshots-* screenshots/
+      - name: save screenshots
+        uses: actions/upload-artifact@v4
+        with:
+          name: screenshots-macos
+          path: screenshots
+      - name: branch name
+        id: branch-name
+        run: |
+          echo "result=PR-$(cat PR)-${{ github.event.workflow_run.head_branch }}" >> $GITHUB_OUTPUT
+      - name: PR number
+        id: pr-number
+        run: |
+          echo "result=$(cat PR)" >> $GITHUB_OUTPUT
+
+  compare-macos-screenshots:
+    name: Compare macOS screenshots
+    needs: [make-macos-screenshots-available]
+    uses: ./.github/workflows/send-screenshots-to-pixeleagle.yml
+    with:
+      commit: ${{ github.event.workflow_run.head_sha }}
+      branch: ${{ needs.make-macos-screenshots-available.outputs.branch-name }}
+      artifact: screenshots-macos
+      os: macos
+    secrets: inherit
+
+  comment-on-pr:
+    name: Comment on PR
+    runs-on: ubuntu-latest
+    needs: [make-macos-screenshots-available, compare-macos-screenshots]
+    if: ${{ always() && needs.compare-macos-screenshots.result == 'failure' }}
+    steps:
+      - uses: actions/checkout@v4
+      - name: "Check if PR already has label"
+        id: check-label
+        env:
+          PR: ${{ needs.make-macos-screenshots-available.outputs.pr-number }}
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          if [[ `gh api --jq '.labels.[].name' /repos/bevyengine/bevy/pulls/$PR` =~ "M-Deliberate-Rendering-Change" ]]
+          then
+            echo "result=true" >> $GITHUB_OUTPUT
+          else
+            echo "result=false" >> $GITHUB_OUTPUT
+          fi
+      - name: "Check if last comment is already from actions"
+        id: check-last-comment
+        env:
+          PR: ${{ needs.make-macos-screenshots-available.outputs.pr-number }}
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          if [[ `gh api --jq '.[-1].user.login' /repos/bevyengine/bevy/issues/$PR/comments` == 'github-actions[bot' ]]
+          then
+            echo "result=true" >> $GITHUB_OUTPUT
+          else
+            echo "result=false" >> $GITHUB_OUTPUT
+          fi
+      - name: "Comment on PR"
+        if: ${{ steps.check-label.outputs.result == 'false' && steps.check-last-comment.outputs.result == 'false' }}
+        env:
+          PROJECT: B04F67C0-C054-4A6F-92EC-F599FEC2FD1D
+          PR: ${{ needs.make-macos-screenshots-available.outputs.pr-number }}
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          LF=$'\n'
+          COMMENT_BODY="Your PR caused a change in the graphical output of an example or rendering test. This might be intentional, but it could also mean that something broke! ${LF}You can review it at https://pixel-eagle.com/project/$PROJECT?filter=PR-$PR ${LF} ${LF}If it's expected, please add the M-Deliberate-Rendering-Change label. ${LF} ${LF}If this change seems unrelated to your PR, you can consider updating your PR to target the latest main branch, either by rebasing or merging main into it."
+          gh issue comment $PR --body "$COMMENT_BODY"
.github/workflows/example-run.yml (187 changes, vendored, new file)

@@ -0,0 +1,187 @@
+name: Example Run
+
+on:
+  merge_group:
+  pull_request:
+  # also run when pushed to main to update reference screenshots
+  push:
+    branches:
+      - main
+
+env:
+  CARGO_TERM_COLOR: always
+  CARGO_INCREMENTAL: 0
+  CARGO_PROFILE_TEST_DEBUG: 0
+  CARGO_PROFILE_DEV_DEBUG: 0
+
+jobs:
+  run-examples-macos-metal:
+    runs-on: macos-latest
+    timeout-minutes: 30
+    steps:
+      - uses: actions/checkout@v4
+      - uses: dtolnay/rust-toolchain@stable
+      - name: Disable audio
+        # Disable audio through a patch. on github m1 runners, audio timeouts after 15 minutes
+        run: git apply --ignore-whitespace tools/example-showcase/disable-audio.patch
+      - name: Run examples
+        run: |
+          for example in .github/example-run/*.ron; do
+            example_name=`basename $example .ron`
+            echo -n $example_name > last_example_run
+            echo "running $example_name - "`date`
+            time TRACE_CHROME=trace-$example_name.json CI_TESTING_CONFIG=$example cargo run --example $example_name --features "bevy_ci_testing,trace,trace_chrome"
+            sleep 10
+            if [ `find ./ -maxdepth 1 -name 'screenshot-*.png' -print -quit` ]; then
+              mkdir screenshots-$example_name
+              mv screenshot-*.png screenshots-$example_name/
+            fi
+          done
+          mkdir traces && mv trace*.json traces/
+          mkdir screenshots && mv screenshots-* screenshots/
+      - name: save traces
+        uses: actions/upload-artifact@v4
+        with:
+          name: example-traces-macos
+          path: traces
+      - name: Save PR number
+        if: ${{ github.event_name == 'pull_request' }}
+        run: |
+          echo ${{ github.event.number }} > ./screenshots/PR
+      - name: save screenshots
+        uses: actions/upload-artifact@v4
+        with:
+          name: screenshots-macos
+          path: screenshots
+      - uses: actions/upload-artifact@v4
+        if: ${{ failure() && github.event_name == 'pull_request' }}
+        with:
+          name: example-run-macos
+          path: example-run/
+
+  compare-macos-screenshots:
+    if: ${{ github.event_name != 'pull_request' }}
+    name: Compare Macos screenshots
+    needs: [run-examples-macos-metal]
+    uses: ./.github/workflows/send-screenshots-to-pixeleagle.yml
+    with:
+      commit: ${{ github.sha }}
+      branch: ${{ github.ref_name }}
+      artifact: screenshots-macos
+      os: macos
+    secrets: inherit
+
+  run-examples-linux-vulkan:
+    if: ${{ github.event_name != 'pull_request' }}
+    runs-on: ubuntu-latest
+    timeout-minutes: 30
+    steps:
+      - uses: actions/checkout@v4
+      - name: Install Linux dependencies
+        uses: ./.github/actions/install-linux-deps
+      # At some point this may be merged into `install-linux-deps`, but for now it is its own step.
+      - name: Install additional Linux dependencies for Vulkan
+        run: |
+          sudo add-apt-repository ppa:kisak/turtle -y
+          sudo apt-get install --no-install-recommends libxkbcommon-x11-0 xvfb libgl1-mesa-dri libxcb-xfixes0-dev mesa-vulkan-drivers
+      - uses: actions/cache@v4
+        with:
+          path: |
+            ~/.cargo/bin/
+            ~/.cargo/registry/index/
+            ~/.cargo/registry/cache/
+            ~/.cargo/git/db/
+            target/
+          key: ${{ runner.os }}-cargo-run-examples-${{ hashFiles('**/Cargo.toml') }}
+      - uses: dtolnay/rust-toolchain@stable
+      - name: Run examples
+        run: |
+          for example in .github/example-run/*.ron; do
+            example_name=`basename $example .ron`
+            echo -n $example_name > last_example_run
+            echo "running $example_name - "`date`
+            time TRACE_CHROME=trace-$example_name.json CI_TESTING_CONFIG=$example xvfb-run cargo run --example $example_name --features "bevy_ci_testing,trace,trace_chrome"
+            sleep 10
+            if [ `find ./ -maxdepth 1 -name 'screenshot-*.png' -print -quit` ]; then
+              mkdir screenshots-$example_name
+              mv screenshot-*.png screenshots-$example_name/
+            fi
+          done
+          mkdir traces && mv trace*.json traces/
+          mkdir screenshots && mv screenshots-* screenshots/
+      - name: save traces
+        uses: actions/upload-artifact@v4
+        with:
+          name: example-traces-linux
+          path: traces
+      - name: save screenshots
+        uses: actions/upload-artifact@v4
+        with:
+          name: screenshots-linux
+          path: screenshots
+      - uses: actions/upload-artifact@v4
+        if: ${{ failure() && github.event_name == 'pull_request' }}
+        with:
+          name: example-run-linux
+          path: example-run/
+
+  compare-linux-screenshots:
+    name: Compare Linux screenshots
+    needs: [run-examples-linux-vulkan]
+    uses: ./.github/workflows/send-screenshots-to-pixeleagle.yml
+    with:
+      commit: ${{ github.sha }}
+      branch: ${{ github.ref_name }}
+      artifact: screenshots-linux
+      os: linux
+    secrets: inherit
+
+  run-examples-on-windows-dx12:
+    if: ${{ github.event_name != 'pull_request' }}
+    runs-on: windows-latest
+    timeout-minutes: 30
+    steps:
+      - uses: actions/checkout@v4
+      - uses: dtolnay/rust-toolchain@stable
+      - name: Run examples
+        shell: bash
+        run: |
+          for example in .github/example-run/*.ron; do
+            example_name=`basename $example .ron`
+            echo -n $example_name > last_example_run
+            echo "running $example_name - "`date`
+            time WGPU_BACKEND=dx12 TRACE_CHROME=trace-$example_name.json CI_TESTING_CONFIG=$example cargo run --example $example_name --features "statically-linked-dxc,bevy_ci_testing,trace,trace_chrome"
+            sleep 10
+            if [ `find ./ -maxdepth 1 -name 'screenshot-*.png' -print -quit` ]; then
+              mkdir screenshots-$example_name
+              mv screenshot-*.png screenshots-$example_name/
+            fi
+          done
+          mkdir traces && mv trace*.json traces/
+          mkdir screenshots && mv screenshots-* screenshots/
+      - name: save traces
+        uses: actions/upload-artifact@v4
+        with:
+          name: example-traces-windows
+          path: traces
+      - name: save screenshots
+        uses: actions/upload-artifact@v4
+        with:
+          name: screenshots-windows
+          path: screenshots
+      - uses: actions/upload-artifact@v4
+        if: ${{ failure() && github.event_name == 'pull_request' }}
+        with:
+          name: example-run-windows
+          path: example-run/
+
+  compare-windows-screenshots:
+    name: Compare Windows screenshots
+    needs: [run-examples-on-windows-dx12]
+    uses: ./.github/workflows/send-screenshots-to-pixeleagle.yml
+    with:
+      commit: ${{ github.sha }}
+      branch: ${{ github.ref_name }}
+      artifact: screenshots-windows
+      os: windows
+    secrets: inherit
@@ -48,7 +48,7 @@ jobs:
        run: |
          # Create a new run with its associated metadata
          metadata='{"os":"${{ inputs.os }}", "commit": "${{ inputs.commit }}", "branch": "${{ inputs.branch }}"}'
-          run=`curl https://pixel-eagle.vleue.com/$project/runs --json "$metadata" --oauth2-bearer ${{ secrets.PIXELEAGLE_TOKEN }} | jq '.id'`
+          run=`curl https://pixel-eagle.com/$project/runs --json "$metadata" --oauth2-bearer ${{ secrets.PIXELEAGLE_TOKEN }} | jq '.id'`
 
          SAVEIFS=$IFS
 

@@ -71,11 +71,11 @@ jobs:
          IFS=$SAVEIFS
 
          # Upload screenshots with unknown hashes
-          curl https://pixel-eagle.vleue.com/$project/runs/$run/hashes --json "$hashes" --oauth2-bearer ${{ secrets.PIXELEAGLE_TOKEN }} | jq '.[]|[.name] | @tsv' |
+          curl https://pixel-eagle.com/$project/runs/$run/hashes --json "$hashes" --oauth2-bearer ${{ secrets.PIXELEAGLE_TOKEN }} | jq '.[]|[.name] | @tsv' |
            while IFS=$'\t' read -r name; do
              name=`echo $name | tr -d '"'`
              echo "Uploading $name"
-              curl https://pixel-eagle.vleue.com/$project/runs/$run/screenshots -F "data=@./screenshots-$name" -F "screenshot=$name" --oauth2-bearer ${{ secrets.PIXELEAGLE_TOKEN }}
+              curl https://pixel-eagle.com/$project/runs/$run/screenshots -F "data=@./screenshots-$name" -F "screenshot=$name" --oauth2-bearer ${{ secrets.PIXELEAGLE_TOKEN }}
              echo
            done
 

@@ -84,7 +84,7 @@ jobs:
          cd ..
 
          # Trigger comparison with the main branch on the same os
-          curl https://pixel-eagle.vleue.com/$project/runs/$run/compare/auto --json '{"os":"<equal>", "branch": "main"}' --oauth2-bearer ${{ secrets.PIXELEAGLE_TOKEN }} > pixeleagle.json
+          curl https://pixel-eagle.com/$project/runs/$run/compare/auto --json '{"os":"<equal>", "branch": "main"}' --oauth2-bearer ${{ secrets.PIXELEAGLE_TOKEN }} > pixeleagle.json
 
          # Log results
          compared_with=`cat pixeleagle.json | jq '.to'`
.github/workflows/validation-jobs.yml (114 changes, vendored)

@@ -5,7 +5,6 @@ on:
  pull_request:
  push:
    branches:
-      - main
      - release-*
 
concurrency:

@@ -80,112 +79,6 @@ jobs:
      - name: Build app for Android
        run: cd examples/mobile/android_example && chmod +x gradlew && ./gradlew build
-
-  run-examples-linux-vulkan:
-    # also run when pushed to main to update reference screenshots
-    if: ${{ github.event_name != 'pull_request' }}
-    runs-on: ubuntu-latest
-    timeout-minutes: 30
-    steps:
-      - uses: actions/checkout@v4
-      - name: Install Linux dependencies
-        uses: ./.github/actions/install-linux-deps
-      # At some point this may be merged into `install-linux-deps`, but for now it is its own step.
-      - name: Install additional Linux dependencies for Vulkan
-        run: |
-          sudo add-apt-repository ppa:kisak/turtle -y
-          sudo apt-get install --no-install-recommends libxkbcommon-x11-0 xvfb libgl1-mesa-dri libxcb-xfixes0-dev mesa-vulkan-drivers
-      - uses: actions/cache@v4
-        with:
-          path: |
-            ~/.cargo/bin/
-            ~/.cargo/registry/index/
-            ~/.cargo/registry/cache/
-            ~/.cargo/git/db/
-            target/
-          key: ${{ runner.os }}-cargo-run-examples-${{ hashFiles('**/Cargo.toml') }}
-      - uses: dtolnay/rust-toolchain@stable
-      - name: Run examples
-        run: |
-          for example in .github/example-run/*.ron; do
-            example_name=`basename $example .ron`
-            echo -n $example_name > last_example_run
-            echo "running $example_name - "`date`
-            time TRACE_CHROME=trace-$example_name.json CI_TESTING_CONFIG=$example xvfb-run cargo run --example $example_name --features "bevy_ci_testing,trace,trace_chrome"
-            sleep 10
-            if [ `find ./ -maxdepth 1 -name 'screenshot-*.png' -print -quit` ]; then
-              mkdir screenshots-$example_name
-              mv screenshot-*.png screenshots-$example_name/
-            fi
-          done
-          mkdir traces && mv trace*.json traces/
-          mkdir screenshots && mv screenshots-* screenshots/
-      - name: save traces
-        uses: actions/upload-artifact@v4
-        with:
-          name: example-traces-linux
-          path: traces
-      - name: save screenshots
-        uses: actions/upload-artifact@v4
-        with:
-          name: screenshots-linux
-          path: screenshots
-      - uses: actions/upload-artifact@v4
-        if: ${{ failure() && github.event_name == 'pull_request' }}
-        with:
-          name: example-run-linux
-          path: example-run/
-
-  compare-linux-screenshots:
-    name: Compare Linux screenshots
-    needs: [run-examples-linux-vulkan]
-    uses: ./.github/workflows/send-screenshots-to-pixeleagle.yml
-    with:
-      commit: ${{ github.sha }}
-      branch: ${{ github.ref_name }}
-      artifact: screenshots-linux
-      os: linux
-    secrets: inherit
-
-  run-examples-on-windows-dx12:
-    # also run when pushed to main to update reference screenshots
-    if: ${{ github.event_name != 'pull_request' }}
-    runs-on: windows-latest
-    timeout-minutes: 30
-    steps:
-      - uses: actions/checkout@v4
-      - uses: dtolnay/rust-toolchain@stable
-      - name: Run examples
-        shell: bash
-        run: |
-          for example in .github/example-run/*.ron; do
-            example_name=`basename $example .ron`
-            echo -n $example_name > last_example_run
-            echo "running $example_name - "`date`
-            time WGPU_BACKEND=dx12 TRACE_CHROME=trace-$example_name.json CI_TESTING_CONFIG=$example cargo run --example $example_name --features "bevy_ci_testing,trace,trace_chrome"
-            sleep 10
-            if [ `find ./ -maxdepth 1 -name 'screenshot-*.png' -print -quit` ]; then
-              mkdir screenshots-$example_name
-              mv screenshot-*.png screenshots-$example_name/
-            fi
-          done
-          mkdir traces && mv trace*.json traces/
-          mkdir screenshots && mv screenshots-* screenshots/
-      - name: save traces
-        uses: actions/upload-artifact@v4
-        with:
-          name: example-traces-windows
-          path: traces
-      - name: save screenshots
-        uses: actions/upload-artifact@v4
-        with:
-          name: screenshots-windows
-          path: screenshots
-      - uses: actions/upload-artifact@v4
-        if: ${{ failure() && github.event_name == 'pull_request' }}
-        with:
-          name: example-run-windows
-          path: example-run/
 
  run-examples-on-wasm:
    if: ${{ github.event_name == 'merge_group' }}
    runs-on: ubuntu-latest

@@ -208,13 +101,6 @@ jobs:
            target/
          key: ${{ runner.os }}-wasm-run-examples-${{ hashFiles('**/Cargo.toml') }}
-
-      - name: install xvfb, llvmpipe and lavapipe
-        run: |
-          sudo apt-get update -y -qq
-          sudo add-apt-repository ppa:kisak/turtle -y
-          sudo apt-get update
-          sudo apt install -y xvfb libgl1-mesa-dri libxcb-xfixes0-dev mesa-vulkan-drivers
 
      - name: Install wasm-bindgen
        run: cargo install --force wasm-bindgen-cli
.github/workflows/weekly.yml (30 changes, vendored)

@@ -12,6 +12,7 @@ env:
  CARGO_INCREMENTAL: 0
  CARGO_PROFILE_TEST_DEBUG: 0
  CARGO_PROFILE_DEV_DEBUG: 0
+  ISSUE_TITLE: Main branch fails to compile on Rust beta.
 
# The jobs listed here are intentionally skipped when running on forks, for a number of reasons:
#

@@ -82,6 +83,30 @@ jobs:
        # See tools/ci/src/main.rs for the commands this runs
        run: cargo run -p ci -- compile
+
+  close-any-open-issues:
+    runs-on: ubuntu-latest
+    needs: ['test', 'lint', 'check-compiles']
+    permissions:
+      issues: write
+    steps:
+      - name: Close issues
+        run: |
+          previous_issue_number=$(gh issue list \
+            --search "$ISSUE_TITLE in:title" \
+            --json number \
+            --jq '.[0].number')
+          if [[ -n $previous_issue_number ]]; then
+            gh issue close $previous_issue_number \
+              -r completed \
+              -c $COMMENT
+          fi
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          GH_REPO: ${{ github.repository }}
+          COMMENT: |
+            [Last pipeline run](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) successfully completed. Closing issue.
+
 
  open-issue:
    name: Warn that weekly CI fails
    runs-on: ubuntu-latest

@@ -95,7 +120,7 @@ jobs:
      - name: Create issue
        run: |
          previous_issue_number=$(gh issue list \
-            --search "$TITLE in:title" \
+            --search "$ISSUE_TITLE in:title" \
            --json number \
            --jq '.[0].number')
          if [[ -n $previous_issue_number ]]; then

@@ -103,14 +128,13 @@ jobs:
              --body "Weekly pipeline still fails: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
          else
            gh issue create \
-              --title "$TITLE" \
+              --title "$ISSUE_TITLE" \
              --label "$LABELS" \
              --body "$BODY"
          fi
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GH_REPO: ${{ github.repository }}
-          TITLE: Main branch fails to compile on Rust beta.
          LABELS: C-Bug,S-Needs-Triage
          BODY: |
            ## Weekly CI run has failed.
@ -20,8 +20,8 @@
|
|||||||
* Cake from [Kenney's Food Kit](https://www.kenney.nl/assets/food-kit) (CC0 1.0 Universal)
|
* Cake from [Kenney's Food Kit](https://www.kenney.nl/assets/food-kit) (CC0 1.0 Universal)
|
||||||
* Ground tile from [Kenney's Tower Defense Kit](https://www.kenney.nl/assets/tower-defense-kit) (CC0 1.0 Universal)
|
* Ground tile from [Kenney's Tower Defense Kit](https://www.kenney.nl/assets/tower-defense-kit) (CC0 1.0 Universal)
|
||||||
* Game icons from [Kenney's Game Icons](https://www.kenney.nl/assets/game-icons) (CC0 1.0 Universal)
|
* Game icons from [Kenney's Game Icons](https://www.kenney.nl/assets/game-icons) (CC0 1.0 Universal)
|
||||||
* Space ships from [Kenny's Simple Space Kit](https://www.kenney.nl/assets/simple-space) (CC0 1.0 Universal)
|
* Space ships from [Kenney's Simple Space Kit](https://www.kenney.nl/assets/simple-space) (CC0 1.0 Universal)
|
||||||
* UI borders from [Kenny's Fantasy UI Borders Kit](https://kenney.nl/assets/fantasy-ui-borders) (CC0 1.0 Universal)
|
* UI borders from [Kenney's Fantasy UI Borders Kit](https://kenney.nl/assets/fantasy-ui-borders) (CC0 1.0 Universal)
|
||||||
* glTF animated fox from [glTF Sample Models][fox]
|
* glTF animated fox from [glTF Sample Models][fox]
|
||||||
* Low poly fox [by PixelMannen] (CC0 1.0 Universal)
|
* Low poly fox [by PixelMannen] (CC0 1.0 Universal)
|
||||||
* Rigging and animation [by @tomkranis on Sketchfab] ([CC-BY 4.0])
|
* Rigging and animation [by @tomkranis on Sketchfab] ([CC-BY 4.0])
|
||||||
@ -32,7 +32,7 @@
|
|||||||
* Epic orchestra music sample, modified to loop, from [Migfus20](https://freesound.org/people/Migfus20/sounds/560449/) ([CC BY 4.0 DEED](https://creativecommons.org/licenses/by/4.0/))
|
* Epic orchestra music sample, modified to loop, from [Migfus20](https://freesound.org/people/Migfus20/sounds/560449/) ([CC BY 4.0 DEED](https://creativecommons.org/licenses/by/4.0/))
|
||||||
|
|
||||||
[MorphStressTest]: https://github.com/KhronosGroup/glTF-Sample-Models/tree/master/2.0/MorphStressTest
|
[MorphStressTest]: https://github.com/KhronosGroup/glTF-Sample-Models/tree/master/2.0/MorphStressTest
|
||||||
[fox]: https://github.com/KhronosGroup/glTF-Sample-Models/tree/master/2.0/Fox
|
[fox]: https://github.com/KhronosGroup/glTF-Sample-Assets/tree/main/Models/Fox
|
||||||
[by PixelMannen]: https://opengameart.org/content/fox-and-shiba
|
[by PixelMannen]: https://opengameart.org/content/fox-and-shiba
|
||||||
[by @tomkranis on Sketchfab]: https://sketchfab.com/models/371dea88d7e04a76af5763f2a36866bc
|
[by @tomkranis on Sketchfab]: https://sketchfab.com/models/371dea88d7e04a76af5763f2a36866bc
|
||||||
[CC-BY 4.0]: https://creativecommons.org/licenses/by/4.0/
|
[CC-BY 4.0]: https://creativecommons.org/licenses/by/4.0/
|
||||||
|
|||||||
189
Cargo.toml
189
Cargo.toml
@ -1,7 +1,7 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "bevy"
|
name = "bevy"
|
||||||
version = "0.16.0-dev"
|
version = "0.16.0-dev"
|
||||||
edition = "2021"
|
edition = "2024"
|
||||||
categories = ["game-engines", "graphics", "gui", "rendering"]
|
categories = ["game-engines", "graphics", "gui", "rendering"]
|
||||||
description = "A refreshingly simple data-driven game engine and app framework"
|
description = "A refreshingly simple data-driven game engine and app framework"
|
||||||
exclude = ["assets/", "tools/", ".github/", "crates/", "examples/wasm/assets/"]
|
exclude = ["assets/", "tools/", ".github/", "crates/", "examples/wasm/assets/"]
|
||||||
@ -10,7 +10,7 @@ keywords = ["game", "engine", "gamedev", "graphics", "bevy"]
|
|||||||
license = "MIT OR Apache-2.0"
|
license = "MIT OR Apache-2.0"
|
||||||
repository = "https://github.com/bevyengine/bevy"
|
repository = "https://github.com/bevyengine/bevy"
|
||||||
documentation = "https://docs.rs/bevy"
|
documentation = "https://docs.rs/bevy"
|
||||||
rust-version = "1.83.0"
|
rust-version = "1.85.0"
|
||||||
|
|
||||||
[workspace]
|
[workspace]
|
||||||
resolver = "2"
|
resolver = "2"
|
||||||
@ -19,9 +19,15 @@ members = [
|
|||||||
"crates/*",
|
"crates/*",
|
||||||
# Several crates with macros have "compile fail" tests nested inside them, also known as UI
|
# Several crates with macros have "compile fail" tests nested inside them, also known as UI
|
||||||
# tests, that verify diagnostic output does not accidentally change.
|
# tests, that verify diagnostic output does not accidentally change.
|
||||||
"crates/*/compile_fail",
|
# TODO: Use a glob pattern once they are fixed in `dependabot-core`
|
||||||
|
# TODO: See https://github.com/bevyengine/bevy/issues/17876 for context.
|
||||||
|
"crates/bevy_derive/compile_fail",
|
||||||
|
"crates/bevy_ecs/compile_fail",
|
||||||
|
"crates/bevy_reflect/compile_fail",
|
||||||
# Examples of compiling Bevy for mobile platforms.
|
# Examples of compiling Bevy for mobile platforms.
|
||||||
"examples/mobile",
|
"examples/mobile",
|
||||||
|
# Examples of using Bevy on no_std platforms.
|
||||||
|
"examples/no_std/*",
|
||||||
# Benchmarks
|
# Benchmarks
|
||||||
"benches",
|
"benches",
|
||||||
# Internal tools that are not published.
|
# Internal tools that are not published.
|
||||||
@ -46,6 +52,9 @@ undocumented_unsafe_blocks = "warn"
|
|||||||
unwrap_or_default = "warn"
|
unwrap_or_default = "warn"
|
||||||
needless_lifetimes = "allow"
|
needless_lifetimes = "allow"
|
||||||
too_many_arguments = "allow"
|
too_many_arguments = "allow"
|
||||||
|
nonstandard_macro_braces = "warn"
|
||||||
|
print_stdout = "warn"
|
||||||
|
print_stderr = "warn"
|
||||||
|
|
||||||
ptr_as_ptr = "warn"
|
ptr_as_ptr = "warn"
|
||||||
ptr_cast_constness = "warn"
|
ptr_cast_constness = "warn"
|
||||||
@ -91,6 +100,7 @@ undocumented_unsafe_blocks = "warn"
|
|||||||
unwrap_or_default = "warn"
|
unwrap_or_default = "warn"
|
||||||
needless_lifetimes = "allow"
|
needless_lifetimes = "allow"
|
||||||
too_many_arguments = "allow"
|
too_many_arguments = "allow"
|
||||||
|
nonstandard_macro_braces = "warn"
|
||||||
|
|
||||||
ptr_as_ptr = "warn"
|
ptr_as_ptr = "warn"
|
||||||
ptr_cast_constness = "warn"
|
ptr_cast_constness = "warn"
|
||||||
@ -114,6 +124,8 @@ unused_qualifications = "warn"
|
|||||||
|
|
||||||
[features]
|
[features]
|
||||||
default = [
|
default = [
|
||||||
|
"std",
|
||||||
|
"async_executor",
|
||||||
"android-game-activity",
|
"android-game-activity",
|
||||||
"android_shared_stdcxx",
|
"android_shared_stdcxx",
|
||||||
"animation",
|
"animation",
|
||||||
@ -121,9 +133,12 @@ default = [
|
|||||||
"bevy_audio",
|
"bevy_audio",
|
||||||
"bevy_color",
|
"bevy_color",
|
||||||
"bevy_core_pipeline",
|
"bevy_core_pipeline",
|
||||||
|
"bevy_anti_aliasing",
|
||||||
"bevy_gilrs",
|
"bevy_gilrs",
|
||||||
"bevy_gizmos",
|
"bevy_gizmos",
|
||||||
"bevy_gltf",
|
"bevy_gltf",
|
||||||
|
"bevy_input_focus",
|
||||||
|
"bevy_log",
|
||||||
"bevy_mesh_picking_backend",
|
"bevy_mesh_picking_backend",
|
||||||
"bevy_pbr",
|
"bevy_pbr",
|
||||||
"bevy_picking",
|
"bevy_picking",
|
||||||
@ -150,6 +165,9 @@ default = [
|
|||||||
"x11",
|
"x11",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
# Recommended defaults for no_std applications
|
||||||
|
default_no_std = ["libm", "critical-section", "bevy_color", "bevy_state"]
|
||||||
|
|
||||||
# Provides an implementation for picking meshes
|
# Provides an implementation for picking meshes
|
||||||
bevy_mesh_picking_backend = [
|
bevy_mesh_picking_backend = [
|
||||||
"bevy_picking",
|
"bevy_picking",
|
||||||
@ -196,6 +214,13 @@ bevy_core_pipeline = [
|
|||||||
"bevy_render",
|
"bevy_render",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
# Provides various anti aliasing solutions
|
||||||
|
bevy_anti_aliasing = [
|
||||||
|
"bevy_internal/bevy_anti_aliasing",
|
||||||
|
"bevy_asset",
|
||||||
|
"bevy_render",
|
||||||
|
]
|
||||||
|
|
||||||
# Adds gamepad support
|
# Adds gamepad support
bevy_gilrs = ["bevy_internal/bevy_gilrs"]

@ -208,6 +233,7 @@ bevy_pbr = [
"bevy_asset",
"bevy_render",
"bevy_core_pipeline",
+"bevy_anti_aliasing",
]

# Provides picking functionality
@ -225,6 +251,7 @@ bevy_sprite = [
"bevy_render",
"bevy_core_pipeline",
"bevy_color",
+"bevy_anti_aliasing",
]

# Provides text functionality
@ -237,6 +264,7 @@ bevy_ui = [
"bevy_text",
"bevy_sprite",
"bevy_color",
+"bevy_anti_aliasing",
]

# Windowing layer
@ -257,6 +285,15 @@ bevy_dev_tools = ["bevy_internal/bevy_dev_tools"]
# Enable the Bevy Remote Protocol
bevy_remote = ["bevy_internal/bevy_remote"]

+# Enable integration with `tracing` and `log`
+bevy_log = ["bevy_internal/bevy_log"]
+
+# Enable input focus subsystem
+bevy_input_focus = ["bevy_internal/bevy_input_focus"]
+
+# Use the configurable global error handler as the default error handler.
+configurable_error_handler = ["bevy_internal/configurable_error_handler"]
+
# Enable passthrough loading for SPIR-V shaders (Only supported on Vulkan, shader capabilities and extensions must agree with the platform implementation)
spirv_shader_passthrough = ["bevy_internal/spirv_shader_passthrough"]

@ -277,7 +314,7 @@ trace_tracy_memory = [
]

# Tracing support
-trace = ["bevy_internal/trace"]
+trace = ["bevy_internal/trace", "dep:tracing"]

# Basis Universal compressed texture support
basis-universal = ["bevy_internal/basis-universal"]

@ -423,6 +460,9 @@ shader_format_glsl = ["bevy_internal/shader_format_glsl"]
# Enable support for shaders in SPIR-V
shader_format_spirv = ["bevy_internal/shader_format_spirv"]

+# Enable support for shaders in WESL
+shader_format_wesl = ["bevy_internal/shader_format_wesl"]
+
# Enable support for transmission-related textures in the `StandardMaterial`, at the risk of blowing past the global, per-shader texture limit on older/lower-end GPUs
pbr_transmission_textures = ["bevy_internal/pbr_transmission_textures"]

@ -470,9 +510,6 @@ meshlet = ["bevy_internal/meshlet"]
# Enables processing meshes into meshlet meshes for bevy_pbr
meshlet_processor = ["bevy_internal/meshlet_processor"]

-# Enable support for the ios_simulator by downgrading some rendering capabilities
-ios_simulator = ["bevy_internal/ios_simulator"]
-
# Enable built in global state machines
bevy_state = ["bevy_internal/bevy_state"]

@ -482,14 +519,33 @@ track_location = ["bevy_internal/track_location"]
# Enable function reflection
reflect_functions = ["bevy_internal/reflect_functions"]

+# Enable documentation reflection
+reflect_documentation = ["bevy_internal/reflect_documentation"]
+
# Enable winit custom cursor support
custom_cursor = ["bevy_internal/custom_cursor"]

# Experimental support for nodes that are ignored for UI layouting
ghost_nodes = ["bevy_internal/ghost_nodes"]

+# Uses `async-executor` as a task execution backend.
+async_executor = ["std", "bevy_internal/async_executor"]
+
+# Allows access to the `std` crate.
+std = ["bevy_internal/std"]
+
+# `critical-section` provides the building blocks for synchronization primitives on all platforms, including `no_std`.
+critical-section = ["bevy_internal/critical-section"]
+
+# Uses the `libm` maths library instead of the one provided in `std` and `core`.
+libm = ["bevy_internal/libm"]
+
+# Enables use of browser APIs. Note this is currently only applicable on `wasm32` architectures.
+web = ["bevy_internal/web"]
+
[dependencies]
bevy_internal = { path = "crates/bevy_internal", version = "0.16.0-dev", default-features = false }
+tracing = { version = "0.1", default-features = false, optional = true }

# Wasm does not support dynamic linking.
[target.'cfg(not(target_family = "wasm"))'.dependencies]
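The `trace` feature above now forwards an optional `tracing` dependency (`dep:tracing`). As a hedged illustration of what that wiring enables downstream, the sketch below gates span creation behind the same feature name; the function and span names are hypothetical and not part of this diff.

```rust
// Illustrative sketch only: gate tracing spans behind the `trace` feature that
// the Cargo.toml change above wires up. `run_expensive_step` and the span name
// are placeholders, not Bevy code.
#[cfg(feature = "trace")]
use tracing::info_span;

fn run_expensive_step() {
    // Only pay for span creation when the `trace` feature is enabled.
    #[cfg(feature = "trace")]
    let _span = info_span!("run_expensive_step").entered();

    // ... actual work would go here ...
}

fn main() {
    run_expensive_step();
}
```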
@ -522,7 +578,8 @@ hyper = { version = "1", features = ["server", "http1"] }
http-body-util = "0.1"
anyhow = "1"
macro_rules_attribute = "0.2"
-accesskit = "0.17"
+accesskit = "0.18"
+nonmax = "0.5"

[target.'cfg(not(target_family = "wasm"))'.dev-dependencies]
smol = "2"

@ -572,7 +629,7 @@ doc-scrape-examples = true

[package.metadata.example.2d_viewport_to_world]
name = "2D Viewport To World"
-description = "Demonstrates how to use the `Camera::viewport_to_world_2d` method"
+description = "Demonstrates how to use the `Camera::viewport_to_world_2d` method with a dynamic viewport and camera."
category = "2D Rendering"
wasm = true
@ -785,6 +842,17 @@ description = "Used to test alpha modes with mesh2d"
|
|||||||
category = "2D Rendering"
|
category = "2D Rendering"
|
||||||
wasm = true
|
wasm = true
|
||||||
|
|
||||||
|
[[example]]
|
||||||
|
name = "mesh2d_repeated_texture"
|
||||||
|
path = "examples/2d/mesh2d_repeated_texture.rs"
|
||||||
|
doc-scrape-examples = true
|
||||||
|
|
||||||
|
[package.metadata.example.mesh2d_repeated_texture]
|
||||||
|
name = "Mesh2d Repeated Texture"
|
||||||
|
description = "Showcase of using `uv_transform` on the `ColorMaterial` of a `Mesh2d`"
|
||||||
|
category = "2D Rendering"
|
||||||
|
wasm = true
|
||||||
|
|
||||||
[[example]]
|
[[example]]
|
||||||
name = "pixel_grid_snap"
|
name = "pixel_grid_snap"
|
||||||
path = "examples/2d/pixel_grid_snap.rs"
|
path = "examples/2d/pixel_grid_snap.rs"
|
||||||
@ -1530,6 +1598,7 @@ wasm = true
|
|||||||
name = "headless"
|
name = "headless"
|
||||||
path = "examples/app/headless.rs"
|
path = "examples/app/headless.rs"
|
||||||
doc-scrape-examples = true
|
doc-scrape-examples = true
|
||||||
|
required-features = ["bevy_log"]
|
||||||
|
|
||||||
[package.metadata.example.headless]
|
[package.metadata.example.headless]
|
||||||
name = "Headless"
|
name = "Headless"
|
||||||
@ -1803,7 +1872,7 @@ path = "examples/asset/multi_asset_sync.rs"
doc-scrape-examples = true

[package.metadata.example.multi_asset_sync]
-name = "Mult-asset synchronization"
+name = "Multi-asset synchronization"
description = "Demonstrates how to wait for multiple assets to be loaded."
category = "Assets"
wasm = true
@ -2164,6 +2233,7 @@ wasm = false
name = "fallible_params"
path = "examples/ecs/fallible_params.rs"
doc-scrape-examples = true
+required-features = ["configurable_error_handler"]

[package.metadata.example.fallible_params]
name = "Fallible System Parameters"

@ -2172,13 +2242,14 @@ category = "ECS (Entity Component System)"
wasm = false

[[example]]
-name = "fallible_systems"
+name = "error_handling"
-path = "examples/ecs/fallible_systems.rs"
+path = "examples/ecs/error_handling.rs"
doc-scrape-examples = true
+required-features = ["bevy_mesh_picking_backend", "configurable_error_handler"]

-[package.metadata.example.fallible_systems]
+[package.metadata.example.error_handling]
-name = "Fallible Systems"
+name = "Error handling"
-description = "Systems that return results to handle errors"
+description = "How to return and handle errors across the ECS"
category = "ECS (Entity Component System)"
wasm = false
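For context on the renamed `error_handling` example: its new description ("How to return and handle errors across the ECS") refers to systems written as fallible functions. The snippet below is only a plain-Rust sketch of that return-a-`Result`-and-handle-it pattern; it assumes nothing about Bevy's exact system signatures, and `parse_config_value` is a made-up helper.

```rust
// Plain-Rust sketch of the fallible pattern the renamed example demonstrates:
// return errors with `?` instead of panicking, and let one place decide how to
// react. `parse_config_value` is a hypothetical helper, not Bevy API.
use std::num::ParseIntError;

fn parse_config_value(raw: &str) -> Result<u32, ParseIntError> {
    // `?` bubbles the error up to the caller (in Bevy, a central error handler
    // would play this role when the relevant feature is enabled).
    Ok(raw.trim().parse()?)
}

fn main() {
    match parse_config_value("not a number") {
        Ok(v) => println!("parsed {v}"),
        Err(e) => eprintln!("handled error: {e}"),
    }
}
```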
@ -2734,6 +2805,18 @@ description = "A shader that uses the GLSL shading language"
|
|||||||
category = "Shaders"
|
category = "Shaders"
|
||||||
wasm = true
|
wasm = true
|
||||||
|
|
||||||
|
[[example]]
|
||||||
|
name = "shader_material_wesl"
|
||||||
|
path = "examples/shader/shader_material_wesl.rs"
|
||||||
|
doc-scrape-examples = true
|
||||||
|
required-features = ["shader_format_wesl"]
|
||||||
|
|
||||||
|
[package.metadata.example.shader_material_wesl]
|
||||||
|
name = "Material - WESL"
|
||||||
|
description = "A shader that uses WESL"
|
||||||
|
category = "Shaders"
|
||||||
|
wasm = true
|
||||||
|
|
||||||
[[example]]
|
[[example]]
|
||||||
name = "custom_shader_instancing"
|
name = "custom_shader_instancing"
|
||||||
path = "examples/shader/custom_shader_instancing.rs"
|
path = "examples/shader/custom_shader_instancing.rs"
|
||||||
@ -2745,6 +2828,18 @@ description = "A shader that renders a mesh multiple times in one draw call usin
|
|||||||
category = "Shaders"
|
category = "Shaders"
|
||||||
wasm = true
|
wasm = true
|
||||||
|
|
||||||
|
[[example]]
|
||||||
|
name = "custom_render_phase"
|
||||||
|
path = "examples/shader/custom_render_phase.rs"
|
||||||
|
doc-scrape-examples = true
|
||||||
|
|
||||||
|
[package.metadata.example.custom_render_phase]
|
||||||
|
name = "Custom Render Phase"
|
||||||
|
description = "Shows how to make a complete render phase"
|
||||||
|
category = "Shaders"
|
||||||
|
wasm = true
|
||||||
|
|
||||||
|
|
||||||
[[example]]
|
[[example]]
|
||||||
name = "automatic_instancing"
|
name = "automatic_instancing"
|
||||||
path = "examples/shader/automatic_instancing.rs"
|
path = "examples/shader/automatic_instancing.rs"
|
||||||
@ -4006,7 +4101,6 @@ name = "Sprite Picking"
description = "Demonstrates picking sprites and sprite atlases"
category = "Picking"
wasm = true
-required-features = ["bevy_sprite_picking_backend"]

[[example]]
name = "debug_picking"
@ -4106,7 +4200,15 @@ panic = "abort"
# for details on why this is needed. Since dependencies don't expect to be built
# with `--cfg docsrs` (and thus fail to compile) we use a different cfg.
rustc-args = ["--cfg", "docsrs_dep"]
-rustdoc-args = ["-Zunstable-options", "--generate-link-to-definition"]
+rustdoc-args = [
+    "-Zunstable-options",
+    "--generate-link-to-definition",
+    # Embed tags to the top of documentation pages for common Bevy traits
+    # that are implemented by the current type, like `Component` or `Resource`.
+    # This makes it easier to see at a glance what types are used for.
+    "--html-after-content",
+    "docs-rs/trait-tags.html",
+]
all-features = true
cargo-args = ["-Zunstable-options", "-Zrustdoc-scrape-examples"]
@ -4147,11 +4249,11 @@ doc-scrape-examples = true
hidden = true

[[example]]
-name = "testbed_ui_layout_rounding"
+name = "testbed_full_ui"
-path = "examples/testbed/ui_layout_rounding.rs"
+path = "examples/testbed/full_ui.rs"
doc-scrape-examples = true

-[package.metadata.example.testbed_ui_layout_rounding]
+[package.metadata.example.testbed_full_ui]
hidden = true

[[example]]
@ -4197,3 +4299,50 @@ name = "Occlusion Culling"
|
|||||||
description = "Demonstration of Occlusion Culling"
|
description = "Demonstration of Occlusion Culling"
|
||||||
category = "3D Rendering"
|
category = "3D Rendering"
|
||||||
wasm = false
|
wasm = false
|
||||||
|
|
||||||
|
[[example]]
|
||||||
|
name = "camera_controller"
|
||||||
|
path = "examples/helpers/camera_controller.rs"
|
||||||
|
doc-scrape-examples = true
|
||||||
|
crate-type = ["lib"]
|
||||||
|
|
||||||
|
[package.metadata.example.camera_controller]
|
||||||
|
name = "Camera Controller"
|
||||||
|
description = "Example Free-Cam Styled Camera Controller"
|
||||||
|
category = "Helpers"
|
||||||
|
wasm = true
|
||||||
|
|
||||||
|
[[example]]
|
||||||
|
name = "widgets"
|
||||||
|
path = "examples/helpers/widgets.rs"
|
||||||
|
doc-scrape-examples = true
|
||||||
|
crate-type = ["lib"]
|
||||||
|
|
||||||
|
[package.metadata.example.widgets]
|
||||||
|
name = "Widgets"
|
||||||
|
description = "Example UI Widgets"
|
||||||
|
category = "Helpers"
|
||||||
|
wasm = true
|
||||||
|
|
||||||
|
[[example]]
|
||||||
|
name = "no_std_library"
|
||||||
|
path = "examples/no_std/library/src/lib.rs"
|
||||||
|
doc-scrape-examples = true
|
||||||
|
crate-type = ["lib"]
|
||||||
|
|
||||||
|
[package.metadata.example.no_std_library]
|
||||||
|
name = "`no_std` Compatible Library"
|
||||||
|
description = "Example library compatible with `std` and `no_std` targets"
|
||||||
|
category = "Embedded"
|
||||||
|
wasm = true
|
||||||
|
|
||||||
|
[[example]]
|
||||||
|
name = "extended_material_bindless"
|
||||||
|
path = "examples/shader/extended_material_bindless.rs"
|
||||||
|
doc-scrape-examples = true
|
||||||
|
|
||||||
|
[package.metadata.example.extended_material_bindless]
|
||||||
|
name = "Extended Bindless Material"
|
||||||
|
description = "Demonstrates bindless `ExtendedMaterial`"
|
||||||
|
category = "Shaders"
|
||||||
|
wasm = false
|
||||||
|
|||||||
Binary file not shown.
@ -7,10 +7,7 @@
entities: {
4294967296: (
components: {
-"bevy_ecs::name::Name": (
-hash: 17588334858059901562,
-name: "joe",
-),
+"bevy_ecs::name::Name": "joe",
"bevy_transform::components::global_transform::GlobalTransform": ((1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0)),
"bevy_transform::components::transform::Transform": (
translation: (0.0, 0.0, 0.0),
|
|||||||
43
assets/shaders/automatic_instancing.wgsl
Normal file
43
assets/shaders/automatic_instancing.wgsl
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
#import bevy_pbr::{
|
||||||
|
mesh_functions,
|
||||||
|
view_transformations::position_world_to_clip
|
||||||
|
}
|
||||||
|
|
||||||
|
@group(2) @binding(0) var texture: texture_2d<f32>;
|
||||||
|
@group(2) @binding(1) var texture_sampler: sampler;
|
||||||
|
|
||||||
|
struct Vertex {
|
||||||
|
@builtin(instance_index) instance_index: u32,
|
||||||
|
@location(0) position: vec3<f32>,
|
||||||
|
};
|
||||||
|
|
||||||
|
struct VertexOutput {
|
||||||
|
@builtin(position) clip_position: vec4<f32>,
|
||||||
|
@location(0) world_position: vec4<f32>,
|
||||||
|
@location(1) color: vec4<f32>,
|
||||||
|
};
|
||||||
|
|
||||||
|
@vertex
|
||||||
|
fn vertex(vertex: Vertex) -> VertexOutput {
|
||||||
|
var out: VertexOutput;
|
||||||
|
|
||||||
|
// Lookup the tag for the given mesh
|
||||||
|
let tag = mesh_functions::get_tag(vertex.instance_index);
|
||||||
|
var world_from_local = mesh_functions::get_world_from_local(vertex.instance_index);
|
||||||
|
out.world_position = mesh_functions::mesh_position_local_to_world(world_from_local, vec4(vertex.position, 1.0));
|
||||||
|
out.clip_position = position_world_to_clip(out.world_position.xyz);
|
||||||
|
|
||||||
|
let tex_dim = textureDimensions(texture);
|
||||||
|
// Find the texel coordinate as derived from the tag
|
||||||
|
let texel_coord = vec2<u32>(tag % tex_dim.x, tag / tex_dim.x);
|
||||||
|
|
||||||
|
out.color = textureLoad(texture, texel_coord, 0);
|
||||||
|
return out;
|
||||||
|
}
|
||||||
|
|
||||||
|
@fragment
|
||||||
|
fn fragment(
|
||||||
|
mesh: VertexOutput,
|
||||||
|
) -> @location(0) vec4<f32> {
|
||||||
|
return mesh.color;
|
||||||
|
}
|
||||||
@ -1,14 +1,22 @@
#import bevy_pbr::forward_io::VertexOutput
#import bevy_pbr::mesh_bindings::mesh
+#import bevy_render::bindless::{bindless_samplers_filtering, bindless_textures_2d}
+
struct Color {
base_color: vec4<f32>,
}

+// This structure is a mapping from bindless index to the index in the
+// appropriate slab
+struct MaterialBindings {
+material: u32, // 0
+color_texture: u32, // 1
+color_texture_sampler: u32, // 2
+}
+
#ifdef BINDLESS
-@group(2) @binding(0) var<storage> material_color: binding_array<Color, 4>;
+@group(2) @binding(0) var<storage> materials: array<MaterialBindings>;
-@group(2) @binding(1) var material_color_texture: binding_array<texture_2d<f32>, 4>;
+@group(2) @binding(10) var<storage> material_color: binding_array<Color>;
-@group(2) @binding(2) var material_color_sampler: binding_array<sampler, 4>;
#else // BINDLESS
@group(2) @binding(0) var<uniform> material_color: Color;
@group(2) @binding(1) var material_color_texture: texture_2d<f32>;

@ -19,15 +27,15 @@ struct Color {
fn fragment(in: VertexOutput) -> @location(0) vec4<f32> {
#ifdef BINDLESS
let slot = mesh[in.instance_index].material_and_lightmap_bind_group_slot & 0xffffu;
-let base_color = material_color[slot].base_color;
+let base_color = material_color[materials[slot].material].base_color;
#else // BINDLESS
let base_color = material_color.base_color;
#endif // BINDLESS

return base_color * textureSampleLevel(
#ifdef BINDLESS
-material_color_texture[slot],
+bindless_textures_2d[materials[slot].color_texture],
-material_color_sampler[slot],
+bindless_samplers_filtering[materials[slot].color_texture_sampler],
#else // BINDLESS
material_color_texture,
material_color_sampler,
|
|||||||
20
assets/shaders/custom_material.wesl
Normal file
20
assets/shaders/custom_material.wesl
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
import super::util::make_polka_dots;
|
||||||
|
|
||||||
|
struct VertexOutput {
|
||||||
|
@builtin(position) position: vec4<f32>,
|
||||||
|
@location(2) uv: vec2<f32>,
|
||||||
|
}
|
||||||
|
|
||||||
|
struct CustomMaterial {
|
||||||
|
// Needed for 16-bit alignment on WebGL2
|
||||||
|
time: vec4<f32>,
|
||||||
|
}
|
||||||
|
|
||||||
|
@group(2) @binding(0) var<uniform> material: CustomMaterial;
|
||||||
|
|
||||||
|
@fragment
|
||||||
|
fn fragment(
|
||||||
|
mesh: VertexOutput,
|
||||||
|
) -> @location(0) vec4<f32> {
|
||||||
|
return make_polka_dots(mesh.uv, material.time.x);
|
||||||
|
}
|
||||||
41
assets/shaders/custom_stencil.wgsl
Normal file
41
assets/shaders/custom_stencil.wgsl
Normal file
@ -0,0 +1,41 @@
|
|||||||
|
//! A shader showing how to use the vertex position data to output the
|
||||||
|
//! stencil in the right position
|
||||||
|
|
||||||
|
// First we import everything we need from bevy_pbr
|
||||||
|
// A 2d shader would be vevry similar but import from bevy_sprite instead
|
||||||
|
#import bevy_pbr::{
|
||||||
|
mesh_functions,
|
||||||
|
view_transformations::position_world_to_clip
|
||||||
|
}
|
||||||
|
|
||||||
|
struct Vertex {
|
||||||
|
// This is needed if you are using batching and/or gpu preprocessing
|
||||||
|
// It's a built in so you don't need to define it in the vertex layout
|
||||||
|
@builtin(instance_index) instance_index: u32,
|
||||||
|
// Like we defined for the vertex layout
|
||||||
|
// position is at location 0
|
||||||
|
@location(0) position: vec3<f32>,
|
||||||
|
};
|
||||||
|
|
||||||
|
// This is the output of the vertex shader and we also use it as the input for the fragment shader
|
||||||
|
struct VertexOutput {
|
||||||
|
@builtin(position) clip_position: vec4<f32>,
|
||||||
|
@location(0) world_position: vec4<f32>,
|
||||||
|
};
|
||||||
|
|
||||||
|
@vertex
|
||||||
|
fn vertex(vertex: Vertex) -> VertexOutput {
|
||||||
|
var out: VertexOutput;
|
||||||
|
// This is how bevy computes the world position
|
||||||
|
// The vertex.instance_index is very important. Especially if you are using batching and gpu preprocessing
|
||||||
|
var world_from_local = mesh_functions::get_world_from_local(vertex.instance_index);
|
||||||
|
out.world_position = mesh_functions::mesh_position_local_to_world(world_from_local, vec4(vertex.position, 1.0));
|
||||||
|
out.clip_position = position_world_to_clip(out.world_position.xyz);
|
||||||
|
return out;
|
||||||
|
}
|
||||||
|
|
||||||
|
@fragment
|
||||||
|
fn fragment(in: VertexOutput) -> @location(0) vec4<f32> {
|
||||||
|
// Output a red color to represent the stencil of the mesh
|
||||||
|
return vec4(1.0, 0.0, 0.0, 1.0);
|
||||||
|
}
|
||||||
@ -2,7 +2,7 @@
#import bevy_ui::ui_vertex_output::UiVertexOutput

@group(1) @binding(0) var<uniform> color: vec4<f32>;
-@group(1) @binding(1) var<uniform> slider: f32;
+@group(1) @binding(1) var<uniform> slider: vec4<f32>;
@group(1) @binding(2) var material_color_texture: texture_2d<f32>;
@group(1) @binding(3) var material_color_sampler: sampler;
@group(1) @binding(4) var<uniform> border_color: vec4<f32>;

@ -50,7 +50,7 @@ fn fragment(in: UiVertexOutput) -> @location(0) vec4<f32> {

// sample the texture at this position if it's to the left of the slider value
// otherwise return a fully transparent color
-if in.uv.x < slider {
+if in.uv.x < slider.x {
let output_color = textureSample(material_color_texture, material_color_sampler, in.uv) * color;
return output_color;
} else {
|
|||||||
107
assets/shaders/extended_material_bindless.wgsl
Normal file
107
assets/shaders/extended_material_bindless.wgsl
Normal file
@ -0,0 +1,107 @@
|
|||||||
|
// The shader that goes with `extended_material_bindless.rs`.
|
||||||
|
//
|
||||||
|
// This code demonstrates how to write shaders that are compatible with both
|
||||||
|
// bindless and non-bindless mode. See the `#ifdef BINDLESS` blocks.
|
||||||
|
|
||||||
|
#import bevy_pbr::{
|
||||||
|
forward_io::{FragmentOutput, VertexOutput},
|
||||||
|
mesh_bindings::mesh,
|
||||||
|
pbr_fragment::pbr_input_from_standard_material,
|
||||||
|
pbr_functions::{apply_pbr_lighting, main_pass_post_lighting_processing},
|
||||||
|
}
|
||||||
|
#import bevy_render::bindless::{bindless_samplers_filtering, bindless_textures_2d}
|
||||||
|
|
||||||
|
#ifdef BINDLESS
|
||||||
|
#import bevy_pbr::pbr_bindings::{material_array, material_indices}
|
||||||
|
#else // BINDLESS
|
||||||
|
#import bevy_pbr::pbr_bindings::material
|
||||||
|
#endif // BINDLESS
|
||||||
|
|
||||||
|
// Stores the indices of the bindless resources in the bindless resource arrays,
|
||||||
|
// for the `ExampleBindlessExtension` fields.
|
||||||
|
struct ExampleBindlessExtendedMaterialIndices {
|
||||||
|
// The index of the `ExampleBindlessExtendedMaterial` data in
|
||||||
|
// `example_extended_material`.
|
||||||
|
material: u32,
|
||||||
|
// The index of the texture we're going to modulate the base color with in
|
||||||
|
// the `bindless_textures_2d` array.
|
||||||
|
modulate_texture: u32,
|
||||||
|
// The index of the sampler we're going to sample the modulated texture with
|
||||||
|
// in the `bindless_samplers_filtering` array.
|
||||||
|
modulate_texture_sampler: u32,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Plain data associated with this example material.
|
||||||
|
struct ExampleBindlessExtendedMaterial {
|
||||||
|
// The color that we multiply the base color, base color texture, and
|
||||||
|
// modulated texture with.
|
||||||
|
modulate_color: vec4<f32>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#ifdef BINDLESS
|
||||||
|
|
||||||
|
// The indices of the bindless resources in the bindless resource arrays, for
|
||||||
|
// the `ExampleBindlessExtension` fields.
|
||||||
|
@group(2) @binding(100) var<storage> example_extended_material_indices:
|
||||||
|
array<ExampleBindlessExtendedMaterialIndices>;
|
||||||
|
// An array that holds the `ExampleBindlessExtendedMaterial` plain old data,
|
||||||
|
// indexed by `ExampleBindlessExtendedMaterialIndices.material`.
|
||||||
|
@group(2) @binding(101) var<storage> example_extended_material:
|
||||||
|
array<ExampleBindlessExtendedMaterial>;
|
||||||
|
|
||||||
|
#else // BINDLESS
|
||||||
|
|
||||||
|
// In non-bindless mode, we simply use a uniform for the plain old data.
|
||||||
|
@group(2) @binding(50) var<uniform> example_extended_material: ExampleBindlessExtendedMaterial;
|
||||||
|
@group(2) @binding(51) var modulate_texture: texture_2d<f32>;
|
||||||
|
@group(2) @binding(52) var modulate_sampler: sampler;
|
||||||
|
|
||||||
|
#endif // BINDLESS
|
||||||
|
|
||||||
|
@fragment
|
||||||
|
fn fragment(
|
||||||
|
in: VertexOutput,
|
||||||
|
@builtin(front_facing) is_front: bool,
|
||||||
|
) -> FragmentOutput {
|
||||||
|
#ifdef BINDLESS
|
||||||
|
// Fetch the material slot. We'll use this in turn to fetch the bindless
|
||||||
|
// indices from `example_extended_material_indices`.
|
||||||
|
let slot = mesh[in.instance_index].material_and_lightmap_bind_group_slot & 0xffffu;
|
||||||
|
#endif // BINDLESS
|
||||||
|
|
||||||
|
// Generate a `PbrInput` struct from the `StandardMaterial` bindings.
|
||||||
|
var pbr_input = pbr_input_from_standard_material(in, is_front);
|
||||||
|
|
||||||
|
// Calculate the UV for the texture we're about to sample.
|
||||||
|
#ifdef BINDLESS
|
||||||
|
let uv_transform = material_array[material_indices[slot].material].uv_transform;
|
||||||
|
#else // BINDLESS
|
||||||
|
let uv_transform = material.uv_transform;
|
||||||
|
#endif // BINDLESS
|
||||||
|
let uv = (uv_transform * vec3(in.uv, 1.0)).xy;
|
||||||
|
|
||||||
|
// Multiply the base color by the `modulate_texture` and `modulate_color`.
|
||||||
|
#ifdef BINDLESS
|
||||||
|
// Notice how we fetch the texture, sampler, and plain extended material
|
||||||
|
// data from the appropriate arrays.
|
||||||
|
pbr_input.material.base_color *= textureSample(
|
||||||
|
bindless_textures_2d[example_extended_material_indices[slot].modulate_texture],
|
||||||
|
bindless_samplers_filtering[
|
||||||
|
example_extended_material_indices[slot].modulate_texture_sampler
|
||||||
|
],
|
||||||
|
uv
|
||||||
|
) * example_extended_material[example_extended_material_indices[slot].material].modulate_color;
|
||||||
|
#else // BINDLESS
|
||||||
|
pbr_input.material.base_color *= textureSample(modulate_texture, modulate_sampler, uv) *
|
||||||
|
example_extended_material.modulate_color;
|
||||||
|
#endif // BINDLESS
|
||||||
|
|
||||||
|
var out: FragmentOutput;
|
||||||
|
// Apply lighting.
|
||||||
|
out.color = apply_pbr_lighting(pbr_input);
|
||||||
|
// Apply in-shader post processing (fog, alpha-premultiply, and also
|
||||||
|
// tonemapping, debanding if the camera is non-HDR). Note this does not
|
||||||
|
// include fullscreen postprocessing effects like bloom.
|
||||||
|
out.color = main_pass_post_lighting_processing(pbr_input, out.color);
|
||||||
|
return out;
|
||||||
|
}
|
||||||
@ -2,7 +2,7 @@
//! between the vertex and fragment shader. Also shows the custom vertex layout.

// First we import everything we need from bevy_pbr
-// A 2d shader would be vevry similar but import from bevy_sprite instead
+// A 2D shader would be very similar but import from bevy_sprite instead
#import bevy_pbr::{
mesh_functions,
view_transformations::position_world_to_clip
@ -4,7 +4,6 @@
}

@group(2) @binding(0) var<storage, read> colors: array<vec4<f32>, 5>;
-@group(2) @binding(1) var<uniform> color_id: u32;

struct Vertex {
@builtin(instance_index) instance_index: u32,

@ -20,11 +19,12 @@ struct VertexOutput {
@vertex
fn vertex(vertex: Vertex) -> VertexOutput {
var out: VertexOutput;
+let tag = mesh_functions::get_tag(vertex.instance_index);
var world_from_local = mesh_functions::get_world_from_local(vertex.instance_index);
out.world_position = mesh_functions::mesh_position_local_to_world(world_from_local, vec4(vertex.position, 1.0));
out.clip_position = position_world_to_clip(out.world_position.xyz);

-out.color = colors[color_id];
+out.color = colors[tag];
return out;
}
|
|||||||
44
assets/shaders/util.wesl
Normal file
44
assets/shaders/util.wesl
Normal file
@ -0,0 +1,44 @@
|
|||||||
|
fn make_polka_dots(pos: vec2<f32>, time: f32) -> vec4<f32> {
|
||||||
|
let scaled_pos = pos * 6.0;
|
||||||
|
let cell = vec2<f32>(fract(scaled_pos.x), fract(scaled_pos.y));
|
||||||
|
var dist_from_center = distance(cell, vec2<f32>(0.5));
|
||||||
|
|
||||||
|
let is_even = (floor(scaled_pos.x) + floor(scaled_pos.y)) % 2.0;
|
||||||
|
|
||||||
|
var dot_color = vec3<f32>(0.0);
|
||||||
|
var is_dot = 0.0;
|
||||||
|
|
||||||
|
@if(!PARTY_MODE) {
|
||||||
|
let color1 = vec3<f32>(1.0, 0.4, 0.8); // pink
|
||||||
|
let color2 = vec3<f32>(0.6, 0.2, 1.0); // purple
|
||||||
|
dot_color = mix(color1, color2, is_even);
|
||||||
|
is_dot = step(dist_from_center, 0.3);
|
||||||
|
} @else {
|
||||||
|
let grid_x = floor(scaled_pos.x);
|
||||||
|
let grid_y = floor(scaled_pos.y);
|
||||||
|
let wave_speed = 3.0;
|
||||||
|
let wave_phase = time * wave_speed;
|
||||||
|
|
||||||
|
let diagonal_pos = (grid_x + grid_y) * 0.5;
|
||||||
|
let wave_value = sin(diagonal_pos + wave_phase);
|
||||||
|
|
||||||
|
let wave_normalized = (wave_value + 1.0) * 0.5;
|
||||||
|
|
||||||
|
let color1 = vec3<f32>(1.0, 0.3, 0.7);
|
||||||
|
let color2 = vec3<f32>(0.5, 0.1, 1.0);
|
||||||
|
let intense_color1 = vec3<f32>(1.0, 0.1, 0.9);
|
||||||
|
let intense_color2 = vec3<f32>(0.8, 0.0, 1.0);
|
||||||
|
|
||||||
|
let animated_color1 = mix(color1, intense_color1, wave_normalized);
|
||||||
|
let animated_color2 = mix(color2, intense_color2, wave_normalized);
|
||||||
|
|
||||||
|
dot_color = mix(animated_color1, animated_color2, is_even);
|
||||||
|
|
||||||
|
let size_mod = 0.15 * wave_value;
|
||||||
|
dist_from_center = dist_from_center * (1.0 - size_mod);
|
||||||
|
// Animate whether something is a dot by position but also time
|
||||||
|
is_dot = step(dist_from_center, 0.3 + wave_normalized * 0.2);
|
||||||
|
}
|
||||||
|
|
||||||
|
return vec4<f32>(dot_color * is_dot, 1.0);
|
||||||
|
}
|
||||||
@ -1,6 +1,6 @@
[package]
name = "benches"
-edition = "2021"
+edition = "2024"
description = "Benchmarks that test Bevy's performance"
publish = false
license = "MIT OR Apache-2.0"

@ -24,7 +24,7 @@ bevy_reflect = { path = "../crates/bevy_reflect", features = ["functions"] }
bevy_render = { path = "../crates/bevy_render" }
bevy_tasks = { path = "../crates/bevy_tasks" }
bevy_utils = { path = "../crates/bevy_utils" }
-bevy_platform_support = { path = "../crates/bevy_platform_support", default-features = false, features = [
+bevy_platform = { path = "../crates/bevy_platform", default-features = false, features = [
"std",
] }

@ -50,6 +50,7 @@ undocumented_unsafe_blocks = "warn"
unwrap_or_default = "warn"
needless_lifetimes = "allow"
too_many_arguments = "allow"
+nonstandard_macro_braces = "warn"

ptr_as_ptr = "warn"
ptr_cast_constness = "warn"
@ -25,10 +25,10 @@ cargo bench -p benches -- name_fragment
cargo bench -p benches -- --list

# Save a baseline to be compared against later.
-cargo bench -p benches --save-baseline before
+cargo bench -p benches -- --save-baseline before

# Compare the current benchmarks against a baseline to find performance gains and regressions.
-cargo bench -p benches --baseline before
+cargo bench -p benches -- --baseline before
```

## Criterion
|
|||||||
@ -95,7 +95,7 @@ fn all_added_detection_generic<T: Component + Default>(group: &mut BenchGroup, e
|
|||||||
let query = generic_filter_query::<Added<T>>(&mut world);
|
let query = generic_filter_query::<Added<T>>(&mut world);
|
||||||
(world, query)
|
(world, query)
|
||||||
},
|
},
|
||||||
|(ref mut world, ref mut query)| {
|
|(world, query)| {
|
||||||
let mut count = 0;
|
let mut count = 0;
|
||||||
for entity in query.iter(world) {
|
for entity in query.iter(world) {
|
||||||
black_box(entity);
|
black_box(entity);
|
||||||
@ -143,7 +143,7 @@ fn all_changed_detection_generic<T: Component<Mutability = Mutable> + Default +
|
|||||||
let query = generic_filter_query::<Changed<T>>(&mut world);
|
let query = generic_filter_query::<Changed<T>>(&mut world);
|
||||||
(world, query)
|
(world, query)
|
||||||
},
|
},
|
||||||
|(ref mut world, ref mut query)| {
|
|(world, query)| {
|
||||||
let mut count = 0;
|
let mut count = 0;
|
||||||
for entity in query.iter(world) {
|
for entity in query.iter(world) {
|
||||||
black_box(entity);
|
black_box(entity);
|
||||||
@ -196,7 +196,7 @@ fn few_changed_detection_generic<T: Component<Mutability = Mutable> + Default +
|
|||||||
let query = generic_filter_query::<Changed<T>>(&mut world);
|
let query = generic_filter_query::<Changed<T>>(&mut world);
|
||||||
(world, query)
|
(world, query)
|
||||||
},
|
},
|
||||||
|(ref mut world, ref mut query)| {
|
|(world, query)| {
|
||||||
for entity in query.iter(world) {
|
for entity in query.iter(world) {
|
||||||
black_box(entity);
|
black_box(entity);
|
||||||
}
|
}
|
||||||
@ -237,7 +237,7 @@ fn none_changed_detection_generic<T: Component<Mutability = Mutable> + Default>(
|
|||||||
let query = generic_filter_query::<Changed<T>>(&mut world);
|
let query = generic_filter_query::<Changed<T>>(&mut world);
|
||||||
(world, query)
|
(world, query)
|
||||||
},
|
},
|
||||||
|(ref mut world, ref mut query)| {
|
|(world, query)| {
|
||||||
let mut count = 0;
|
let mut count = 0;
|
||||||
for entity in query.iter(world) {
|
for entity in query.iter(world) {
|
||||||
black_box(entity);
|
black_box(entity);
|
||||||
@ -343,7 +343,7 @@ fn multiple_archetype_none_changed_detection_generic<
|
|||||||
let query = generic_filter_query::<Changed<T>>(&mut world);
|
let query = generic_filter_query::<Changed<T>>(&mut world);
|
||||||
(world, query)
|
(world, query)
|
||||||
},
|
},
|
||||||
|(ref mut world, ref mut query)| {
|
|(world, query)| {
|
||||||
let mut count = 0;
|
let mut count = 0;
|
||||||
for entity in query.iter(world) {
|
for entity in query.iter(world) {
|
||||||
black_box(entity);
|
black_box(entity);
|
||||||
|
|||||||
@ -12,7 +12,7 @@ impl Benchmark {
|
|||||||
let mut world = World::default();
|
let mut world = World::default();
|
||||||
|
|
||||||
let entities = world
|
let entities = world
|
||||||
.spawn_batch(core::iter::repeat(A(0.)).take(10000))
|
.spawn_batch(core::iter::repeat_n(A(0.), 10_000))
|
||||||
.collect();
|
.collect();
|
||||||
Self(world, entities)
|
Self(world, entities)
|
||||||
}
|
}
|
||||||
|
|||||||
@ -55,7 +55,7 @@ type ComplexBundle = (C1, C2, C3, C4, C5, C6, C7, C8, C9, C10);
|
|||||||
/// use the [`Reflect`] trait instead of [`Clone`].
|
/// use the [`Reflect`] trait instead of [`Clone`].
|
||||||
fn reflection_cloner<B: Bundle + GetTypeRegistration>(
|
fn reflection_cloner<B: Bundle + GetTypeRegistration>(
|
||||||
world: &mut World,
|
world: &mut World,
|
||||||
recursive: bool,
|
linked_cloning: bool,
|
||||||
) -> EntityCloner {
|
) -> EntityCloner {
|
||||||
// Get mutable access to the type registry, creating it if it does not exist yet.
|
// Get mutable access to the type registry, creating it if it does not exist yet.
|
||||||
let registry = world.get_resource_or_init::<AppTypeRegistry>();
|
let registry = world.get_resource_or_init::<AppTypeRegistry>();
|
||||||
@ -77,7 +77,7 @@ fn reflection_cloner<B: Bundle + GetTypeRegistration>(
|
|||||||
for component in component_ids {
|
for component in component_ids {
|
||||||
builder.override_clone_behavior_with_id(component, ComponentCloneBehavior::reflect());
|
builder.override_clone_behavior_with_id(component, ComponentCloneBehavior::reflect());
|
||||||
}
|
}
|
||||||
builder.recursive(recursive);
|
builder.linked_cloning(linked_cloning);
|
||||||
|
|
||||||
builder.finish()
|
builder.finish()
|
||||||
}
|
}
|
||||||
@ -136,7 +136,7 @@ fn bench_clone_hierarchy<B: Bundle + Default + GetTypeRegistration>(
|
|||||||
reflection_cloner::<B>(&mut world, true)
|
reflection_cloner::<B>(&mut world, true)
|
||||||
} else {
|
} else {
|
||||||
let mut builder = EntityCloner::build(&mut world);
|
let mut builder = EntityCloner::build(&mut world);
|
||||||
builder.recursive(true);
|
builder.linked_cloning(true);
|
||||||
builder.finish()
|
builder.finish()
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -153,9 +153,9 @@ fn bench_clone_hierarchy<B: Bundle + Default + GetTypeRegistration>(
|
|||||||
|
|
||||||
hierarchy_level.clear();
|
hierarchy_level.clear();
|
||||||
|
|
||||||
for parent_id in current_hierarchy_level {
|
for parent in current_hierarchy_level {
|
||||||
for _ in 0..children {
|
for _ in 0..children {
|
||||||
let child_id = world.spawn((B::default(), ChildOf(parent_id))).id();
|
let child_id = world.spawn((B::default(), ChildOf(parent))).id();
|
||||||
hierarchy_level.push(child_id);
|
hierarchy_level.push(child_id);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -19,15 +19,15 @@ impl<'w> Benchmark<'w> {
|
|||||||
pub fn new() -> Self {
|
pub fn new() -> Self {
|
||||||
let mut world = World::new();
|
let mut world = World::new();
|
||||||
|
|
||||||
world.spawn_batch(
|
world.spawn_batch(core::iter::repeat_n(
|
||||||
core::iter::repeat((
|
(
|
||||||
Transform(Mat4::from_scale(Vec3::ONE)),
|
Transform(Mat4::from_scale(Vec3::ONE)),
|
||||||
Position(Vec3::X),
|
Position(Vec3::X),
|
||||||
Rotation(Vec3::X),
|
Rotation(Vec3::X),
|
||||||
Velocity(Vec3::X),
|
Velocity(Vec3::X),
|
||||||
))
|
),
|
||||||
.take(10_000),
|
10_000,
|
||||||
);
|
));
|
||||||
|
|
||||||
let query = world.query::<(&Velocity, &mut Position)>();
|
let query = world.query::<(&Velocity, &mut Position)>();
|
||||||
Self(world, query)
|
Self(world, query)
|
||||||
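The benchmark hunks above and below repeat the same mechanical change: `core::iter::repeat(x).take(n)` becomes `core::iter::repeat_n(x, n)`. A minimal sketch of the equivalence, with placeholder values:

```rust
// Minimal sketch: `repeat_n(value, n)` yields the same items as
// `repeat(value).take(n)`, but states the count up front.
fn main() {
    let old_style: Vec<u32> = core::iter::repeat(7).take(5).collect();
    let new_style: Vec<u32> = core::iter::repeat_n(7, 5).collect();
    assert_eq!(old_style, new_style);
    println!("{new_style:?}"); // [7, 7, 7, 7, 7]
}
```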
|
|||||||
@ -19,15 +19,15 @@ impl<'w> Benchmark<'w> {
|
|||||||
pub fn new() -> Self {
|
pub fn new() -> Self {
|
||||||
let mut world = World::new();
|
let mut world = World::new();
|
||||||
|
|
||||||
world.spawn_batch(
|
world.spawn_batch(core::iter::repeat_n(
|
||||||
core::iter::repeat((
|
(
|
||||||
Transform(Mat4::from_scale(Vec3::ONE)),
|
Transform(Mat4::from_scale(Vec3::ONE)),
|
||||||
Position(Vec3::X),
|
Position(Vec3::X),
|
||||||
Rotation(Vec3::X),
|
Rotation(Vec3::X),
|
||||||
Velocity(Vec3::X),
|
Velocity(Vec3::X),
|
||||||
))
|
),
|
||||||
.take(10_000),
|
10_000,
|
||||||
);
|
));
|
||||||
|
|
||||||
let query = world.query::<(&Velocity, &mut Position)>();
|
let query = world.query::<(&Velocity, &mut Position)>();
|
||||||
Self(world, query)
|
Self(world, query)
|
||||||
|
|||||||
@ -21,15 +21,15 @@ impl<'w> Benchmark<'w> {
|
|||||||
pub fn new() -> Self {
|
pub fn new() -> Self {
|
||||||
let mut world = World::new();
|
let mut world = World::new();
|
||||||
|
|
||||||
world.spawn_batch(
|
world.spawn_batch(core::iter::repeat_n(
|
||||||
core::iter::repeat((
|
(
|
||||||
Transform(Mat4::from_scale(Vec3::ONE)),
|
Transform(Mat4::from_scale(Vec3::ONE)),
|
||||||
Position(Vec3::X),
|
Position(Vec3::X),
|
||||||
Rotation(Vec3::X),
|
Rotation(Vec3::X),
|
||||||
Velocity(Vec3::X),
|
Velocity(Vec3::X),
|
||||||
))
|
),
|
||||||
.take(10_000),
|
10_000,
|
||||||
);
|
));
|
||||||
|
|
||||||
let query = world.query::<(&Velocity, &mut Position)>();
|
let query = world.query::<(&Velocity, &mut Position)>();
|
||||||
Self(world, query)
|
Self(world, query)
|
||||||
|
|||||||
@ -33,8 +33,8 @@ impl<'w> Benchmark<'w> {
|
|||||||
pub fn new() -> Self {
|
pub fn new() -> Self {
|
||||||
let mut world = World::new();
|
let mut world = World::new();
|
||||||
|
|
||||||
world.spawn_batch(
|
world.spawn_batch(core::iter::repeat_n(
|
||||||
core::iter::repeat((
|
(
|
||||||
Transform(Mat4::from_scale(Vec3::ONE)),
|
Transform(Mat4::from_scale(Vec3::ONE)),
|
||||||
Rotation(Vec3::X),
|
Rotation(Vec3::X),
|
||||||
Position::<0>(Vec3::X),
|
Position::<0>(Vec3::X),
|
||||||
@ -47,9 +47,9 @@ impl<'w> Benchmark<'w> {
|
|||||||
Velocity::<3>(Vec3::X),
|
Velocity::<3>(Vec3::X),
|
||||||
Position::<4>(Vec3::X),
|
Position::<4>(Vec3::X),
|
||||||
Velocity::<4>(Vec3::X),
|
Velocity::<4>(Vec3::X),
|
||||||
))
|
),
|
||||||
.take(10_000),
|
10_000,
|
||||||
);
|
));
|
||||||
|
|
||||||
let query = world.query();
|
let query = world.query();
|
||||||
Self(world, query)
|
Self(world, query)
|
||||||
|
|||||||
@ -35,8 +35,8 @@ impl<'w> Benchmark<'w> {
|
|||||||
pub fn new() -> Self {
|
pub fn new() -> Self {
|
||||||
let mut world = World::new();
|
let mut world = World::new();
|
||||||
|
|
||||||
world.spawn_batch(
|
world.spawn_batch(core::iter::repeat_n(
|
||||||
core::iter::repeat((
|
(
|
||||||
Transform(Mat4::from_scale(Vec3::ONE)),
|
Transform(Mat4::from_scale(Vec3::ONE)),
|
||||||
Rotation(Vec3::X),
|
Rotation(Vec3::X),
|
||||||
Position::<0>(Vec3::X),
|
Position::<0>(Vec3::X),
|
||||||
@ -49,9 +49,9 @@ impl<'w> Benchmark<'w> {
|
|||||||
Velocity::<3>(Vec3::X),
|
Velocity::<3>(Vec3::X),
|
||||||
Position::<4>(Vec3::X),
|
Position::<4>(Vec3::X),
|
||||||
Velocity::<4>(Vec3::X),
|
Velocity::<4>(Vec3::X),
|
||||||
))
|
),
|
||||||
.take(10_000),
|
10_000,
|
||||||
);
|
));
|
||||||
|
|
||||||
let query = world.query();
|
let query = world.query();
|
||||||
Self(world, query)
|
Self(world, query)
|
||||||
|
|||||||
@ -21,15 +21,15 @@ impl<'w> Benchmark<'w> {
|
|||||||
pub fn new() -> Self {
|
pub fn new() -> Self {
|
||||||
let mut world = World::new();
|
let mut world = World::new();
|
||||||
|
|
||||||
world.spawn_batch(
|
world.spawn_batch(core::iter::repeat_n(
|
||||||
core::iter::repeat((
|
(
|
||||||
Transform(Mat4::from_scale(Vec3::ONE)),
|
Transform(Mat4::from_scale(Vec3::ONE)),
|
||||||
Position(Vec3::X),
|
Position(Vec3::X),
|
||||||
Rotation(Vec3::X),
|
Rotation(Vec3::X),
|
||||||
Velocity(Vec3::X),
|
Velocity(Vec3::X),
|
||||||
))
|
),
|
||||||
.take(10_000),
|
10_000,
|
||||||
);
|
));
|
||||||
|
|
||||||
let query = world.query::<(&Velocity, &mut Position)>();
|
let query = world.query::<(&Velocity, &mut Position)>();
|
||||||
Self(world, query)
|
Self(world, query)
|
||||||
|
|||||||
@ -19,15 +19,15 @@ impl Benchmark {
|
|||||||
pub fn new() -> Self {
|
pub fn new() -> Self {
|
||||||
let mut world = World::new();
|
let mut world = World::new();
|
||||||
|
|
||||||
world.spawn_batch(
|
world.spawn_batch(core::iter::repeat_n(
|
||||||
core::iter::repeat((
|
(
|
||||||
Transform(Mat4::from_scale(Vec3::ONE)),
|
Transform(Mat4::from_scale(Vec3::ONE)),
|
||||||
Position(Vec3::X),
|
Position(Vec3::X),
|
||||||
Rotation(Vec3::X),
|
Rotation(Vec3::X),
|
||||||
Velocity(Vec3::X),
|
Velocity(Vec3::X),
|
||||||
))
|
),
|
||||||
.take(10_000),
|
10_000,
|
||||||
);
|
));
|
||||||
|
|
||||||
fn query_system(mut query: Query<(&Velocity, &mut Position)>) {
|
fn query_system(mut query: Query<(&Velocity, &mut Position)>) {
|
||||||
for (velocity, mut position) in &mut query {
|
for (velocity, mut position) in &mut query {
|
||||||
|
|||||||
@ -33,8 +33,8 @@ impl<'w> Benchmark<'w> {
|
|||||||
pub fn new() -> Self {
|
pub fn new() -> Self {
|
||||||
let mut world = World::new();
|
let mut world = World::new();
|
||||||
|
|
||||||
world.spawn_batch(
|
world.spawn_batch(core::iter::repeat_n(
|
||||||
core::iter::repeat((
|
(
|
||||||
Transform(Mat4::from_scale(Vec3::ONE)),
|
Transform(Mat4::from_scale(Vec3::ONE)),
|
||||||
Rotation(Vec3::X),
|
Rotation(Vec3::X),
|
||||||
Position::<0>(Vec3::X),
|
Position::<0>(Vec3::X),
|
||||||
@ -47,9 +47,9 @@ impl<'w> Benchmark<'w> {
|
|||||||
Velocity::<3>(Vec3::X),
|
Velocity::<3>(Vec3::X),
|
||||||
Position::<4>(Vec3::X),
|
Position::<4>(Vec3::X),
|
||||||
Velocity::<4>(Vec3::X),
|
Velocity::<4>(Vec3::X),
|
||||||
))
|
),
|
||||||
.take(10_000),
|
10_000,
|
||||||
);
|
));
|
||||||
|
|
||||||
let query = world.query();
|
let query = world.query();
|
||||||
Self(world, query)
|
Self(world, query)
|
||||||
|
|||||||
@ -35,8 +35,8 @@ impl<'w> Benchmark<'w> {
|
|||||||
pub fn new() -> Self {
|
pub fn new() -> Self {
|
||||||
let mut world = World::new();
|
let mut world = World::new();
|
||||||
|
|
||||||
world.spawn_batch(
|
world.spawn_batch(core::iter::repeat_n(
|
||||||
core::iter::repeat((
|
(
|
||||||
Transform(Mat4::from_scale(Vec3::ONE)),
|
Transform(Mat4::from_scale(Vec3::ONE)),
|
||||||
Rotation(Vec3::X),
|
Rotation(Vec3::X),
|
||||||
Position::<0>(Vec3::X),
|
Position::<0>(Vec3::X),
|
||||||
@ -49,9 +49,9 @@ impl<'w> Benchmark<'w> {
|
|||||||
Velocity::<3>(Vec3::X),
|
Velocity::<3>(Vec3::X),
|
||||||
Position::<4>(Vec3::X),
|
Position::<4>(Vec3::X),
|
||||||
Velocity::<4>(Vec3::X),
|
Velocity::<4>(Vec3::X),
|
||||||
))
|
),
|
||||||
.take(10_000),
|
10_000,
|
||||||
);
|
));
|
||||||
|
|
||||||
let query = world.query();
|
let query = world.query();
|
||||||
Self(world, query)
|
Self(world, query)
|
||||||
|
|||||||
@ -30,15 +30,15 @@ impl<'w> Benchmark<'w> {
|
|||||||
|
|
||||||
let mut world = World::new();
|
let mut world = World::new();
|
||||||
|
|
||||||
let iter = world.spawn_batch(
|
let iter = world.spawn_batch(core::iter::repeat_n(
|
||||||
core::iter::repeat((
|
(
|
||||||
Transform(Mat4::from_scale(Vec3::ONE)),
|
Transform(Mat4::from_scale(Vec3::ONE)),
|
||||||
Position(Vec3::X),
|
Position(Vec3::X),
|
||||||
Rotation(Vec3::X),
|
Rotation(Vec3::X),
|
||||||
Velocity(Vec3::X),
|
Velocity(Vec3::X),
|
||||||
))
|
),
|
||||||
.take(100_000),
|
100_000,
|
||||||
);
|
));
|
||||||
let entities = iter.into_iter().collect::<Vec<Entity>>();
|
let entities = iter.into_iter().collect::<Vec<Entity>>();
|
||||||
for i in 0..fragment {
|
for i in 0..fragment {
|
||||||
let mut e = world.entity_mut(entities[i as usize]);
|
let mut e = world.entity_mut(entities[i as usize]);
|
||||||
|
|||||||
@ -1,6 +1,10 @@
|
|||||||
use core::hint::black_box;
|
use core::hint::black_box;
|
||||||
|
|
||||||
use bevy_ecs::{entity::Entity, event::Event, observer::Trigger, world::World};
|
use bevy_ecs::{
|
||||||
|
event::Event,
|
||||||
|
observer::{Trigger, TriggerTargets},
|
||||||
|
world::World,
|
||||||
|
};
|
||||||
|
|
||||||
use criterion::Criterion;
|
use criterion::Criterion;
|
||||||
use rand::{prelude::SliceRandom, SeedableRng};
|
use rand::{prelude::SliceRandom, SeedableRng};
|
||||||
@ -46,6 +50,6 @@ fn empty_listener_base(trigger: Trigger<EventBase>) {
|
|||||||
black_box(trigger);
|
black_box(trigger);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn send_base_event(world: &mut World, entities: &Vec<Entity>) {
|
fn send_base_event(world: &mut World, entities: impl TriggerTargets) {
|
||||||
world.trigger_targets(EventBase, entities);
|
world.trigger_targets(EventBase, entities);
|
||||||
}
|
}
|
||||||
|
|||||||
@ -79,7 +79,7 @@ pub fn build_schedule(criterion: &mut Criterion) {
|
|||||||
// Benchmark graphs of different sizes.
|
// Benchmark graphs of different sizes.
|
||||||
for graph_size in [100, 500, 1000] {
|
for graph_size in [100, 500, 1000] {
|
||||||
// Basic benchmark without constraints.
|
// Basic benchmark without constraints.
|
||||||
group.bench_function(format!("{graph_size}_schedule_noconstraints"), |bencher| {
|
group.bench_function(format!("{graph_size}_schedule_no_constraints"), |bencher| {
|
||||||
bencher.iter(|| {
|
bencher.iter(|| {
|
||||||
let mut app = App::new();
|
let mut app = App::new();
|
||||||
for _ in 0..graph_size {
|
for _ in 0..graph_size {
|
||||||
|
|||||||
@ -106,6 +106,10 @@ pub fn insert_commands(criterion: &mut Criterion) {
|
|||||||
for entity in &entities {
|
for entity in &entities {
|
||||||
values.push((*entity, (Matrix::default(), Vec3::default())));
|
values.push((*entity, (Matrix::default(), Vec3::default())));
|
||||||
}
|
}
|
||||||
|
#[expect(
|
||||||
|
deprecated,
|
||||||
|
reason = "This needs to be supported for now, and therefore still needs the benchmark."
|
||||||
|
)]
|
||||||
commands.insert_or_spawn_batch(values);
|
commands.insert_or_spawn_batch(values);
|
||||||
command_queue.apply(&mut world);
|
command_queue.apply(&mut world);
|
||||||
});
|
});
|
||||||
|
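The hunk above opts into the still-supported deprecated API with `#[expect(deprecated, reason = ...)]`. A small standalone sketch of why `expect` is preferred over `allow` here (the functions are hypothetical, not Bevy code):

```rust
// Unlike `#[allow]`, `#[expect]` warns if the expected lint stops firing, so
// the suppression cannot silently outlive the deprecated call it covers.
#[deprecated = "use new_api instead"]
fn old_api() -> u32 {
    1
}

fn new_api() -> u32 {
    1
}

fn main() {
    #[expect(deprecated, reason = "still exercised until the old path is removed")]
    let value = old_api();
    assert_eq!(value, new_api());
}
```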
|||||||
@ -1,5 +1,5 @@
|
|||||||
use bevy_ecs::prelude::*;
|
use bevy_ecs::prelude::*;
|
||||||
use criterion::Criterion;
|
use criterion::{BatchSize, Criterion};
|
||||||
use glam::*;
|
use glam::*;
|
||||||
|
|
||||||
#[derive(Component)]
|
#[derive(Component)]
|
||||||
@ -13,18 +13,23 @@ pub fn world_despawn(criterion: &mut Criterion) {
|
|||||||
group.measurement_time(core::time::Duration::from_secs(4));
|
group.measurement_time(core::time::Duration::from_secs(4));
|
||||||
|
|
||||||
for entity_count in (0..5).map(|i| 10_u32.pow(i)) {
|
for entity_count in (0..5).map(|i| 10_u32.pow(i)) {
|
||||||
|
group.bench_function(format!("{}_entities", entity_count), |bencher| {
|
||||||
|
bencher.iter_batched_ref(
|
||||||
|
|| {
|
||||||
let mut world = World::default();
|
let mut world = World::default();
|
||||||
for _ in 0..entity_count {
|
for _ in 0..entity_count {
|
||||||
world.spawn((A(Mat4::default()), B(Vec4::default())));
|
world.spawn((A(Mat4::default()), B(Vec4::default())));
|
||||||
}
|
}
|
||||||
|
|
||||||
let ents = world.iter_entities().map(|e| e.id()).collect::<Vec<_>>();
|
let ents = world.iter_entities().map(|e| e.id()).collect::<Vec<_>>();
|
||||||
group.bench_function(format!("{}_entities", entity_count), |bencher| {
|
(world, ents)
|
||||||
bencher.iter(|| {
|
},
|
||||||
|
|(world, ents)| {
|
||||||
ents.iter().for_each(|e| {
|
ents.iter().for_each(|e| {
|
||||||
world.despawn(*e);
|
world.despawn(*e);
|
||||||
});
|
});
|
||||||
});
|
},
|
||||||
|
BatchSize::SmallInput,
|
||||||
|
);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
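The despawn benchmarks above and below are rewritten around Criterion's `iter_batched_ref`, so world setup happens in a setup closure and only the despawning is timed. A self-contained sketch of that pattern, with a plain `Vec` standing in for the `World` (names are placeholders):

```rust
// Sketch of the `iter_batched_ref` pattern: setup runs outside the timed
// section, the routine gets `&mut` access to the setup output, and only the
// routine is measured. `bench_clear` / "clear_vec" are placeholder names.
use criterion::{BatchSize, Criterion};

fn bench_clear(c: &mut Criterion) {
    c.bench_function("clear_vec", |bencher| {
        bencher.iter_batched_ref(
            // Setup: build fresh input for every batch (not timed).
            || (0..10_000).collect::<Vec<u32>>(),
            // Routine: only this closure is timed.
            |values| values.clear(),
            BatchSize::SmallInput,
        );
    });
}

criterion::criterion_group!(benches, bench_clear);
criterion::criterion_main!(benches);
```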
|||||||
@ -1,5 +1,5 @@
|
|||||||
use bevy_ecs::prelude::*;
|
use bevy_ecs::prelude::*;
|
||||||
use criterion::Criterion;
|
use criterion::{BatchSize, Criterion};
|
||||||
use glam::*;
|
use glam::*;
|
||||||
|
|
||||||
#[derive(Component)]
|
#[derive(Component)]
|
||||||
@ -13,22 +13,30 @@ pub fn world_despawn_recursive(criterion: &mut Criterion) {
|
|||||||
group.measurement_time(core::time::Duration::from_secs(4));
|
group.measurement_time(core::time::Duration::from_secs(4));
|
||||||
|
|
||||||
for entity_count in (0..5).map(|i| 10_u32.pow(i)) {
|
for entity_count in (0..5).map(|i| 10_u32.pow(i)) {
|
||||||
|
group.bench_function(format!("{}_entities", entity_count), |bencher| {
|
||||||
|
bencher.iter_batched_ref(
|
||||||
|
|| {
|
||||||
let mut world = World::default();
|
let mut world = World::default();
|
||||||
for _ in 0..entity_count {
|
let parent_ents = (0..entity_count)
|
||||||
|
.map(|_| {
|
||||||
world
|
world
|
||||||
.spawn((A(Mat4::default()), B(Vec4::default())))
|
.spawn((A(Mat4::default()), B(Vec4::default())))
|
||||||
.with_children(|parent| {
|
.with_children(|parent| {
|
||||||
parent.spawn((A(Mat4::default()), B(Vec4::default())));
|
parent.spawn((A(Mat4::default()), B(Vec4::default())));
|
||||||
});
|
})
|
||||||
}
|
.id()
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
let ents = world.iter_entities().map(|e| e.id()).collect::<Vec<_>>();
|
(world, parent_ents)
|
||||||
group.bench_function(format!("{}_entities", entity_count), |bencher| {
|
},
|
||||||
bencher.iter(|| {
|
|(world, parent_ents)| {
|
||||||
ents.iter().for_each(|e| {
|
parent_ents.iter().for_each(|e| {
|
||||||
world.entity_mut(*e).despawn();
|
world.despawn(*e);
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
},
|
||||||
|
BatchSize::SmallInput,
|
||||||
|
);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -1,4 +1,4 @@
-use bevy_ecs::entity::{hash_set::EntityHashSet, Entity};
+use bevy_ecs::entity::{Entity, EntityHashSet};
use criterion::{BenchmarkId, Criterion, Throughput};
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha8Rng;

@ -11,16 +11,16 @@ fn make_entity(rng: &mut impl Rng, size: usize) -> Entity {
// * For ids, half are in [0, size), half are unboundedly larger.
// * For generations, half are in [1, 3), half are unboundedly larger.

-let x: f64 = rng.gen();
+let x: f64 = rng.r#gen();
let id = -(1.0 - x).log2() * (size as f64);
-let x: f64 = rng.gen();
+let x: f64 = rng.r#gen();
-let gen = 1.0 + -(1.0 - x).log2() * 2.0;
+let generation = 1.0 + -(1.0 - x).log2() * 2.0;

// this is not reliable, but we're internal so a hack is ok
-let bits = ((gen as u64) << 32) | (id as u64);
+let bits = ((generation as u64) << 32) | (id as u64);
let e = Entity::from_bits(bits);
assert_eq!(e.index(), id as u32);
-assert_eq!(e.generation(), gen as u32);
+assert_eq!(e.generation(), generation as u32);
e
}
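The `rng.gen()` to `rng.r#gen()` and `gen` to `generation` edits above follow from the benches crate moving to the 2024 edition earlier in this diff: `gen` is a reserved keyword there, so the identifier must be renamed or written in raw form. A tiny std-only illustration:

```rust
// `gen` is reserved in the Rust 2024 edition, so an item or method with that
// name has to be spelled as a raw identifier (`r#gen`) or renamed.
fn r#gen() -> u32 {
    42
}

fn main() {
    let generation = r#gen();
    assert_eq!(generation, 42);
}
```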
@ -8,7 +8,10 @@ use criterion::{
criterion_group!(benches, segment_ease, curve_position, curve_iter_positions);

fn segment_ease(c: &mut Criterion) {
-let segment = black_box(CubicSegment::new_bezier(vec2(0.25, 0.1), vec2(0.25, 1.0)));
+let segment = black_box(CubicSegment::new_bezier_easing(
+    vec2(0.25, 0.1),
+    vec2(0.25, 1.0),
+));

c.bench_function(bench!("segment_ease"), |b| {
let mut t = 0;
@@ -10,7 +10,7 @@ use criterion::{
 criterion_group!(
     benches,
     concrete_list_apply,
-    concrete_list_clone_dynamic,
+    concrete_list_to_dynamic_list,
     dynamic_list_apply,
     dynamic_list_push
 );
@@ -75,26 +75,26 @@ fn concrete_list_apply(criterion: &mut Criterion) {
     let mut group = create_group(criterion, bench!("concrete_list_apply"));

     let empty_base = |_: usize| Vec::<u64>::new;
-    let full_base = |size: usize| move || iter::repeat(0).take(size).collect::<Vec<u64>>();
-    let patch = |size: usize| iter::repeat(1).take(size).collect::<Vec<u64>>();
+    let full_base = |size: usize| move || iter::repeat_n(0, size).collect::<Vec<u64>>();
+    let patch = |size: usize| iter::repeat_n(1, size).collect::<Vec<u64>>();

     list_apply(&mut group, "empty_base_concrete_patch", empty_base, patch);

     list_apply(&mut group, "empty_base_dynamic_patch", empty_base, |size| {
-        patch(size).clone_dynamic()
+        patch(size).to_dynamic_list()
     });

     list_apply(&mut group, "same_len_concrete_patch", full_base, patch);

     list_apply(&mut group, "same_len_dynamic_patch", full_base, |size| {
-        patch(size).clone_dynamic()
+        patch(size).to_dynamic_list()
     });

     group.finish();
 }

-fn concrete_list_clone_dynamic(criterion: &mut Criterion) {
-    let mut group = create_group(criterion, bench!("concrete_list_clone_dynamic"));
+fn concrete_list_to_dynamic_list(criterion: &mut Criterion) {
+    let mut group = create_group(criterion, bench!("concrete_list_to_dynamic_list"));

     for size in SIZES {
         group.throughput(Throughput::Elements(size as u64));
@@ -103,9 +103,9 @@ fn concrete_list_to_dynamic_list(criterion: &mut Criterion) {
             BenchmarkId::from_parameter(size),
             &size,
             |bencher, &size| {
-                let v = iter::repeat(0).take(size).collect::<Vec<_>>();
+                let v = iter::repeat_n(0, size).collect::<Vec<_>>();

-                bencher.iter(|| black_box(&v).clone_dynamic());
+                bencher.iter(|| black_box(&v).to_dynamic_list());
             },
         );
     }
@@ -123,11 +123,11 @@ fn dynamic_list_push(criterion: &mut Criterion) {
             BenchmarkId::from_parameter(size),
             &size,
             |bencher, &size| {
-                let src = iter::repeat(()).take(size).collect::<Vec<_>>();
+                let src = iter::repeat_n((), size).collect::<Vec<_>>();
                 let dst = DynamicList::default();

                 bencher.iter_batched(
-                    || (src.clone(), dst.clone_dynamic()),
+                    || (src.clone(), dst.to_dynamic_list()),
                     |(src, mut dst)| {
                         for item in src {
                             dst.push(item);
@@ -145,20 +145,20 @@ fn dynamic_list_push(criterion: &mut Criterion) {
 fn dynamic_list_apply(criterion: &mut Criterion) {
     let mut group = create_group(criterion, bench!("dynamic_list_apply"));

-    let empty_base = |_: usize| || Vec::<u64>::new().clone_dynamic();
-    let full_base = |size: usize| move || iter::repeat(0).take(size).collect::<Vec<u64>>();
-    let patch = |size: usize| iter::repeat(1).take(size).collect::<Vec<u64>>();
+    let empty_base = |_: usize| || Vec::<u64>::new().to_dynamic_list();
+    let full_base = |size: usize| move || iter::repeat_n(0, size).collect::<Vec<u64>>();
+    let patch = |size: usize| iter::repeat_n(1, size).collect::<Vec<u64>>();

     list_apply(&mut group, "empty_base_concrete_patch", empty_base, patch);

     list_apply(&mut group, "empty_base_dynamic_patch", empty_base, |size| {
-        patch(size).clone_dynamic()
+        patch(size).to_dynamic_list()
     });

     list_apply(&mut group, "same_len_concrete_patch", full_base, patch);

     list_apply(&mut group, "same_len_dynamic_patch", full_base, |size| {
-        patch(size).clone_dynamic()
+        patch(size).to_dynamic_list()
     });

     group.finish();
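Two renames recur throughout these reflection benchmarks: `iter::repeat(x).take(n)` becomes the dedicated `iter::repeat_n(x, n)`, and bevy_reflect's `clone_dynamic` on lists becomes `to_dynamic_list` (with matching `to_dynamic_map`/`to_dynamic_struct` below). A rough sketch of the updated call pattern, assuming the method names introduced by this merge:

```rust
use core::iter;
use bevy_reflect::{DynamicList, List};

fn main() {
    // std's repeat_n replaces the repeat(..).take(..) idiom.
    let concrete: Vec<u64> = iter::repeat_n(1, 4).collect();

    // The renamed conversion builds a DynamicList from any concrete List impl.
    let dynamic: DynamicList = concrete.to_dynamic_list();
    assert_eq!(dynamic.len(), 4);
}
```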
@@ -1,7 +1,7 @@
 use core::{fmt::Write, hint::black_box, iter, time::Duration};

 use benches::bench;
-use bevy_platform_support::collections::HashMap;
+use bevy_platform::collections::HashMap;
 use bevy_reflect::{DynamicMap, Map};
 use criterion::{
     criterion_group, measurement::Measurement, AxisScale, BatchSize, BenchmarkGroup, BenchmarkId,
@@ -108,7 +108,7 @@ fn concrete_map_apply(criterion: &mut Criterion) {
     );

     map_apply(&mut group, "empty_base_dynamic_patch", empty_base, |size| {
-        key_range_patch(size).clone_dynamic()
+        key_range_patch(size).to_dynamic_map()
     });

     map_apply(
@@ -122,7 +122,7 @@ fn concrete_map_apply(criterion: &mut Criterion) {
         &mut group,
         "same_keys_dynamic_patch",
         key_range_base,
-        |size| key_range_patch(size).clone_dynamic(),
+        |size| key_range_patch(size).to_dynamic_map(),
     );

     map_apply(
@@ -136,7 +136,7 @@ fn concrete_map_apply(criterion: &mut Criterion) {
         &mut group,
         "disjoint_keys_dynamic_patch",
         key_range_base,
-        |size| disjoint_patch(size).clone_dynamic(),
+        |size| disjoint_patch(size).to_dynamic_map(),
     );
 }

@@ -145,7 +145,7 @@ fn u64_to_n_byte_key(k: u64, n: usize) -> String {
     write!(&mut key, "{}", k).unwrap();

     // Pad key to n bytes.
-    key.extend(iter::repeat('\0').take(n - key.len()));
+    key.extend(iter::repeat_n('\0', n - key.len()));
     key
 }

@@ -159,7 +159,7 @@ fn dynamic_map_apply(criterion: &mut Criterion) {
             (0..size as u64)
                 .zip(iter::repeat(0))
                 .collect::<HashMap<u64, u64>>()
-                .clone_dynamic()
+                .to_dynamic_map()
         }
     };

@@ -183,7 +183,7 @@ fn dynamic_map_apply(criterion: &mut Criterion) {
     );

     map_apply(&mut group, "empty_base_dynamic_patch", empty_base, |size| {
-        key_range_patch(size).clone_dynamic()
+        key_range_patch(size).to_dynamic_map()
     });

     map_apply(
@@ -197,7 +197,7 @@ fn dynamic_map_apply(criterion: &mut Criterion) {
         &mut group,
         "same_keys_dynamic_patch",
         key_range_base,
-        |size| key_range_patch(size).clone_dynamic(),
+        |size| key_range_patch(size).to_dynamic_map(),
     );

     map_apply(
@@ -211,7 +211,7 @@ fn dynamic_map_apply(criterion: &mut Criterion) {
         &mut group,
         "disjoint_keys_dynamic_patch",
         key_range_base,
-        |size| disjoint_patch(size).clone_dynamic(),
+        |size| disjoint_patch(size).to_dynamic_map(),
     );
 }
@ -12,8 +12,8 @@ criterion_group!(
|
|||||||
concrete_struct_apply,
|
concrete_struct_apply,
|
||||||
concrete_struct_field,
|
concrete_struct_field,
|
||||||
concrete_struct_type_info,
|
concrete_struct_type_info,
|
||||||
concrete_struct_clone,
|
concrete_struct_to_dynamic_struct,
|
||||||
dynamic_struct_clone,
|
dynamic_struct_to_dynamic_struct,
|
||||||
dynamic_struct_apply,
|
dynamic_struct_apply,
|
||||||
dynamic_struct_get_field,
|
dynamic_struct_get_field,
|
||||||
dynamic_struct_insert,
|
dynamic_struct_insert,
|
||||||
@ -113,7 +113,7 @@ fn concrete_struct_apply(criterion: &mut Criterion) {
|
|||||||
bencher.iter_batched(
|
bencher.iter_batched(
|
||||||
|| {
|
|| {
|
||||||
let (obj, _) = input();
|
let (obj, _) = input();
|
||||||
let patch = obj.clone_dynamic();
|
let patch = obj.to_dynamic_struct();
|
||||||
(obj, patch)
|
(obj, patch)
|
||||||
},
|
},
|
||||||
|(mut obj, patch)| obj.apply(black_box(&patch)),
|
|(mut obj, patch)| obj.apply(black_box(&patch)),
|
||||||
@ -170,8 +170,8 @@ fn concrete_struct_type_info(criterion: &mut Criterion) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn concrete_struct_clone(criterion: &mut Criterion) {
|
fn concrete_struct_to_dynamic_struct(criterion: &mut Criterion) {
|
||||||
let mut group = create_group(criterion, bench!("concrete_struct_clone"));
|
let mut group = create_group(criterion, bench!("concrete_struct_to_dynamic_struct"));
|
||||||
|
|
||||||
let structs: [(Box<dyn Struct>, Box<dyn Struct>); 5] = [
|
let structs: [(Box<dyn Struct>, Box<dyn Struct>); 5] = [
|
||||||
(
|
(
|
||||||
@ -203,28 +203,28 @@ fn concrete_struct_clone(criterion: &mut Criterion) {
|
|||||||
BenchmarkId::new("NonGeneric", field_count),
|
BenchmarkId::new("NonGeneric", field_count),
|
||||||
&standard,
|
&standard,
|
||||||
|bencher, s| {
|
|bencher, s| {
|
||||||
bencher.iter(|| s.clone_dynamic());
|
bencher.iter(|| s.to_dynamic_struct());
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
group.bench_with_input(
|
group.bench_with_input(
|
||||||
BenchmarkId::new("Generic", field_count),
|
BenchmarkId::new("Generic", field_count),
|
||||||
&generic,
|
&generic,
|
||||||
|bencher, s| {
|
|bencher, s| {
|
||||||
bencher.iter(|| s.clone_dynamic());
|
bencher.iter(|| s.to_dynamic_struct());
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn dynamic_struct_clone(criterion: &mut Criterion) {
|
fn dynamic_struct_to_dynamic_struct(criterion: &mut Criterion) {
|
||||||
let mut group = create_group(criterion, bench!("dynamic_struct_clone"));
|
let mut group = create_group(criterion, bench!("dynamic_struct_to_dynamic_struct"));
|
||||||
|
|
||||||
let structs: [Box<dyn Struct>; 5] = [
|
let structs: [Box<dyn Struct>; 5] = [
|
||||||
Box::new(Struct1::default().clone_dynamic()),
|
Box::new(Struct1::default().to_dynamic_struct()),
|
||||||
Box::new(Struct16::default().clone_dynamic()),
|
Box::new(Struct16::default().to_dynamic_struct()),
|
||||||
Box::new(Struct32::default().clone_dynamic()),
|
Box::new(Struct32::default().to_dynamic_struct()),
|
||||||
Box::new(Struct64::default().clone_dynamic()),
|
Box::new(Struct64::default().to_dynamic_struct()),
|
||||||
Box::new(Struct128::default().clone_dynamic()),
|
Box::new(Struct128::default().to_dynamic_struct()),
|
||||||
];
|
];
|
||||||
|
|
||||||
for s in structs {
|
for s in structs {
|
||||||
@ -234,7 +234,7 @@ fn dynamic_struct_clone(criterion: &mut Criterion) {
|
|||||||
BenchmarkId::from_parameter(field_count),
|
BenchmarkId::from_parameter(field_count),
|
||||||
&s,
|
&s,
|
||||||
|bencher, s| {
|
|bencher, s| {
|
||||||
bencher.iter(|| s.clone_dynamic());
|
bencher.iter(|| s.to_dynamic_struct());
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@ -265,7 +265,7 @@ fn dynamic_struct_apply(criterion: &mut Criterion) {
|
|||||||
&patch,
|
&patch,
|
||||||
|bencher, patch| {
|
|bencher, patch| {
|
||||||
bencher.iter_batched(
|
bencher.iter_batched(
|
||||||
|| (base.clone_dynamic(), patch()),
|
|| (base.to_dynamic_struct(), patch()),
|
||||||
|(mut base, patch)| base.apply(black_box(&*patch)),
|
|(mut base, patch)| base.apply(black_box(&*patch)),
|
||||||
BatchSize::SmallInput,
|
BatchSize::SmallInput,
|
||||||
);
|
);
|
||||||
@ -289,7 +289,7 @@ fn dynamic_struct_apply(criterion: &mut Criterion) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
bencher.iter_batched(
|
bencher.iter_batched(
|
||||||
|| base.clone_dynamic(),
|
|| base.to_dynamic_struct(),
|
||||||
|mut base| base.apply(black_box(&patch)),
|
|mut base| base.apply(black_box(&patch)),
|
||||||
BatchSize::SmallInput,
|
BatchSize::SmallInput,
|
||||||
);
|
);
|
||||||
@ -315,7 +315,7 @@ fn dynamic_struct_insert(criterion: &mut Criterion) {
|
|||||||
|
|
||||||
let field = format!("field_{}", field_count);
|
let field = format!("field_{}", field_count);
|
||||||
bencher.iter_batched(
|
bencher.iter_batched(
|
||||||
|| s.clone_dynamic(),
|
|| s.to_dynamic_struct(),
|
||||||
|mut s| {
|
|mut s| {
|
||||||
s.insert(black_box(&field), ());
|
s.insert(black_box(&field), ());
|
||||||
},
|
},
|
||||||
|
benches/benches/bevy_render/compute_normals.rs (new file, 96 lines)
@@ -0,0 +1,96 @@
+use core::hint::black_box;
+
+use criterion::{criterion_group, Criterion};
+use rand::random;
+use std::time::{Duration, Instant};
+
+use bevy_render::{
+    mesh::{Indices, Mesh, PrimitiveTopology},
+    render_asset::RenderAssetUsages,
+};
+
+const GRID_SIZE: usize = 256;
+
+fn compute_normals(c: &mut Criterion) {
+    let indices = Indices::U32(
+        (0..GRID_SIZE - 1)
+            .flat_map(|i| std::iter::repeat(i).zip(0..GRID_SIZE - 1))
+            .flat_map(|(i, j)| {
+                let tl = ((GRID_SIZE * j) + i) as u32;
+                let tr = tl + 1;
+                let bl = ((GRID_SIZE * (j + 1)) + i) as u32;
+                let br = bl + 1;
+                [tl, bl, tr, tr, bl, br]
+            })
+            .collect(),
+    );
+
+    let new_mesh = || {
+        let positions = (0..GRID_SIZE)
+            .flat_map(|i| std::iter::repeat(i).zip(0..GRID_SIZE))
+            .map(|(i, j)| [i as f32, j as f32, random::<f32>()])
+            .collect::<Vec<_>>();
+        Mesh::new(
+            PrimitiveTopology::TriangleList,
+            RenderAssetUsages::MAIN_WORLD,
+        )
+        .with_inserted_attribute(Mesh::ATTRIBUTE_POSITION, positions)
+        .with_inserted_indices(indices.clone())
+    };
+
+    c.bench_function("smooth_normals", |b| {
+        b.iter_custom(|iters| {
+            let mut total = Duration::default();
+            for _ in 0..iters {
+                let mut mesh = new_mesh();
+                black_box(mesh.attribute(Mesh::ATTRIBUTE_NORMAL));
+                let start = Instant::now();
+                mesh.compute_smooth_normals();
+                let end = Instant::now();
+                black_box(mesh.attribute(Mesh::ATTRIBUTE_NORMAL));
+                total += end.duration_since(start);
+            }
+            total
+        });
+    });
+
+    c.bench_function("face_weighted_normals", |b| {
+        b.iter_custom(|iters| {
+            let mut total = Duration::default();
+            for _ in 0..iters {
+                let mut mesh = new_mesh();
+                black_box(mesh.attribute(Mesh::ATTRIBUTE_NORMAL));
+                let start = Instant::now();
+                mesh.compute_smooth_normals();
+                let end = Instant::now();
+                black_box(mesh.attribute(Mesh::ATTRIBUTE_NORMAL));
+                total += end.duration_since(start);
+            }
+            total
+        });
+    });
+
+    let new_mesh = || {
+        new_mesh()
+            .with_duplicated_vertices()
+            .with_computed_flat_normals()
+    };
+
+    c.bench_function("flat_normals", |b| {
+        b.iter_custom(|iters| {
+            let mut total = Duration::default();
+            for _ in 0..iters {
+                let mut mesh = new_mesh();
+                black_box(mesh.attribute(Mesh::ATTRIBUTE_NORMAL));
+                let start = Instant::now();
+                mesh.compute_flat_normals();
+                let end = Instant::now();
+                black_box(mesh.attribute(Mesh::ATTRIBUTE_NORMAL));
+                total += end.duration_since(start);
+            }
+            total
+        });
+    });
+}
+
+criterion_group!(benches, compute_normals);
@@ -1,6 +1,11 @@
 use criterion::criterion_main;

+mod compute_normals;
 mod render_layers;
 mod torus;

-criterion_main!(render_layers::benches, torus::benches);
+criterion_main!(
+    render_layers::benches,
+    compute_normals::benches,
+    torus::benches
+);
@ -43,3 +43,6 @@ disallowed-methods = [
|
|||||||
{ path = "f32::atanh", reason = "use bevy_math::ops::atanh instead for libm determinism" },
|
{ path = "f32::atanh", reason = "use bevy_math::ops::atanh instead for libm determinism" },
|
||||||
{ path = "criterion::black_box", reason = "use core::hint::black_box instead" },
|
{ path = "criterion::black_box", reason = "use core::hint::black_box instead" },
|
||||||
]
|
]
|
||||||
|
|
||||||
|
# Require `bevy_ecs::children!` to use `[]` braces, instead of `()` or `{}`.
|
||||||
|
standard-macro-braces = [{ name = "children", brace = "[" }]
|
||||||
|
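The new `standard-macro-braces` entry makes clippy flag `children!` invocations that do not use square brackets. A hedged sketch of the enforced style, assuming the bevy_ecs 0.16 `children!` and `Name` APIs (the names are illustrative only):

```rust
use bevy_ecs::{children, name::Name, prelude::*};

fn main() {
    let mut world = World::new();
    // Preferred form: `children!` with `[]` braces, as clippy.toml now requires.
    world.spawn((Name::new("parent"), children![Name::new("a"), Name::new("b")]));
    // `children!(...)` or `children!{...}` would trip the macro-braces lint.
}
```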
@@ -1,7 +1,7 @@
 [package]
 name = "bevy_a11y"
 version = "0.16.0-dev"
-edition = "2021"
+edition = "2024"
 description = "Provides accessibility support for Bevy Engine"
 homepage = "https://bevyengine.org"
 repository = "https://github.com/bevyengine/bevy"
@@ -18,28 +18,17 @@ bevy_reflect = [
   "dep:bevy_reflect",
   "bevy_app/bevy_reflect",
   "bevy_ecs/bevy_reflect",
-  "bevy_input_focus/bevy_reflect",
 ]

 ## Adds serialization support through `serde`.
-serialize = [
-  "dep:serde",
-  "bevy_ecs/serialize",
-  "bevy_input_focus/serialize",
-  "accesskit/serde",
-]
+serialize = ["dep:serde", "bevy_ecs/serialize", "accesskit/serde"]

 # Platform Compatibility

 ## Allows access to the `std` crate. Enabling this feature will prevent compilation
 ## on `no_std` targets, but provides access to certain additional features on
 ## supported platforms.
-std = [
-  "bevy_app/std",
-  "bevy_ecs/std",
-  "bevy_reflect/std",
-  "bevy_input_focus/std",
-]
+std = ["bevy_app/std", "bevy_ecs/std", "bevy_reflect/std"]

 ## `critical-section` provides the building blocks for synchronization primitives
 ## on all platforms, including `no_std`.
@@ -47,31 +36,17 @@ critical-section = [
   "bevy_app/critical-section",
   "bevy_ecs/critical-section",
   "bevy_reflect?/critical-section",
-  "bevy_input_focus/critical-section",
 ]

-## `portable-atomic` provides additional platform support for atomic types and
-## operations, even on targets without native support.
-portable-atomic = [
-  "bevy_app/portable-atomic",
-  "bevy_ecs/portable-atomic",
-  "bevy_reflect?/portable-atomic",
-  "bevy_input_focus/portable-atomic",
-]
-
-## Uses the `libm` maths library instead of the one provided in `std` and `core`.
-libm = ["bevy_input_focus/libm"]
-
 [dependencies]
 # bevy
 bevy_app = { path = "../bevy_app", version = "0.16.0-dev", default-features = false }
 bevy_derive = { path = "../bevy_derive", version = "0.16.0-dev" }
 bevy_ecs = { path = "../bevy_ecs", version = "0.16.0-dev", default-features = false }
 bevy_reflect = { path = "../bevy_reflect", version = "0.16.0-dev", default-features = false, optional = true }
-bevy_input_focus = { path = "../bevy_input_focus", version = "0.16.0-dev", default-features = false }

 # other
-accesskit = { version = "0.17", default-features = false }
+accesskit = { version = "0.18", default-features = false }
 serde = { version = "1", default-features = false, features = [
   "alloc",
 ], optional = true }
@@ -54,7 +54,11 @@ pub struct ActionRequest(pub accesskit::ActionRequest);
 /// Useful if a third-party plugin needs to conditionally integrate with
 /// `AccessKit`
 #[derive(Resource, Default, Clone, Debug, Deref, DerefMut)]
-#[cfg_attr(feature = "bevy_reflect", derive(Reflect), reflect(Default, Resource))]
+#[cfg_attr(
+    feature = "bevy_reflect",
+    derive(Reflect),
+    reflect(Default, Clone, Resource)
+)]
 pub struct AccessibilityRequested(Arc<AtomicBool>);

 impl AccessibilityRequested {
@@ -78,7 +82,11 @@ impl AccessibilityRequested {
 /// will generate conflicting updates.
 #[derive(Resource, Clone, Debug, Deref, DerefMut)]
 #[cfg_attr(feature = "serialize", derive(Serialize, Deserialize))]
-#[cfg_attr(feature = "bevy_reflect", derive(Reflect), reflect(Resource))]
+#[cfg_attr(
+    feature = "bevy_reflect",
+    derive(Reflect),
+    reflect(Resource, Clone, Default)
+)]
 #[cfg_attr(
     all(feature = "bevy_reflect", feature = "serialize"),
     reflect(Serialize, Deserialize)
@@ -127,7 +135,7 @@ impl From<Node> for AccessibilityNode {
 #[cfg_attr(feature = "bevy_reflect", derive(Reflect))]
 #[cfg_attr(
     all(feature = "bevy_reflect", feature = "serialize"),
-    reflect(Serialize, Deserialize)
+    reflect(Serialize, Deserialize, Clone)
 )]
 pub enum AccessibilitySystem {
     /// Update the accessibility tree
@@ -1,7 +1,7 @@
 [package]
 name = "bevy_animation"
 version = "0.16.0-dev"
-edition = "2021"
+edition = "2024"
 description = "Provides animation functionality for Bevy Engine"
 homepage = "https://bevyengine.org"
 repository = "https://github.com/bevyengine/bevy"
@@ -16,8 +16,8 @@ bevy_color = { path = "../bevy_color", version = "0.16.0-dev" }
 bevy_derive = { path = "../bevy_derive", version = "0.16.0-dev" }
 bevy_log = { path = "../bevy_log", version = "0.16.0-dev" }
 bevy_math = { path = "../bevy_math", version = "0.16.0-dev" }
+bevy_mesh = { path = "../bevy_mesh", version = "0.16.0-dev" }
 bevy_reflect = { path = "../bevy_reflect", version = "0.16.0-dev", features = [
-  "bevy",
   "petgraph",
 ] }
 bevy_render = { path = "../bevy_render", version = "0.16.0-dev" }
@@ -25,13 +25,13 @@ bevy_time = { path = "../bevy_time", version = "0.16.0-dev" }
 bevy_utils = { path = "../bevy_utils", version = "0.16.0-dev" }
 bevy_ecs = { path = "../bevy_ecs", version = "0.16.0-dev" }
 bevy_transform = { path = "../bevy_transform", version = "0.16.0-dev" }
-bevy_platform_support = { path = "../bevy_platform_support", version = "0.16.0-dev", default-features = false, features = [
+bevy_platform = { path = "../bevy_platform", version = "0.16.0-dev", default-features = false, features = [
   "std",
   "serialize",
 ] }

 # other
-petgraph = { version = "0.6", features = ["serde-1"] }
+petgraph = { version = "0.7", features = ["serde-1"] }
 ron = "0.8"
 serde = "1"
 blake3 = { version = "1.0" }
@@ -45,6 +45,7 @@ smallvec = "1"
 tracing = { version = "0.1", default-features = false, features = ["std"] }

 [target.'cfg(target_arch = "wasm32")'.dependencies]
+# TODO: Assuming all wasm builds are for the browser. Require `no_std` support to break assumption.
 uuid = { version = "1.13.1", default-features = false, features = ["js"] }

 [lints]
@@ -100,43 +100,48 @@ use bevy_math::curve::{
     iterable::IterableCurve,
     Curve, Interval,
 };
-use bevy_platform_support::hash::Hashed;
+use bevy_mesh::morph::MorphWeights;
+use bevy_platform::hash::Hashed;
 use bevy_reflect::{FromReflect, Reflect, Reflectable, TypeInfo, Typed};
-use bevy_render::mesh::morph::MorphWeights;
 use downcast_rs::{impl_downcast, Downcast};

-/// A value on a component that Bevy can animate.
-///
-/// You can implement this trait on a unit struct in order to support animating
-/// custom components other than transforms and morph weights. Use that type in
-/// conjunction with [`AnimatableCurve`] (and perhaps [`AnimatableKeyframeCurve`]
-/// to define the animation itself).
-/// For example, in order to animate field of view, you might use:
-///
-/// # use bevy_animation::{prelude::AnimatableProperty, AnimationEntityMut, AnimationEvaluationError, animation_curves::EvaluatorId};
-/// # use bevy_reflect::Reflect;
-/// # use std::any::TypeId;
-/// # use bevy_render::camera::{Projection, PerspectiveProjection};
-/// #[derive(Reflect)]
-/// struct FieldOfViewProperty;
-///
-/// impl AnimatableProperty for FieldOfViewProperty {
-///     type Property = f32;
-///     fn get_mut<'a>(&self, entity: &'a mut AnimationEntityMut) -> Result<&'a mut Self::Property, AnimationEvaluationError> {
-///         let component = entity
-///             .get_mut::<Projection>()
-///             .ok_or(AnimationEvaluationError::ComponentNotPresent(TypeId::of::<
-///                 Projection,
-///             >(
-///             )))?
-///             .into_inner();
-///         match component {
-///             Projection::Perspective(perspective) => Ok(&mut perspective.fov),
-///             _ => Err(AnimationEvaluationError::PropertyNotPresent(TypeId::of::<
-///                 PerspectiveProjection,
-///             >(
-///             ))),
-///         }
+/// A trait for exposing a value in an entity so that it can be animated.
+///
+/// `AnimatableProperty` allows any value contained in an entity to be animated
+/// as long as it can be obtained by mutable reference. This makes it more
+/// flexible than [`animated_field`].
+///
+/// [`animated_field`]: crate::animated_field
+///
+/// Here, `AnimatableProperty` is used to animate a value inside an `Option`,
+/// returning an error if the option is `None`.
+///
+/// # use bevy_animation::{prelude::AnimatableProperty, AnimationEntityMut, AnimationEvaluationError, animation_curves::EvaluatorId};
+/// # use bevy_ecs::component::Component;
+/// # use std::any::TypeId;
+/// #[derive(Component)]
+/// struct ExampleComponent {
+///     power_level: Option<f32>
+/// }
+///
+/// #[derive(Clone)]
+/// struct PowerLevelProperty;
+///
+/// impl AnimatableProperty for PowerLevelProperty {
+///     type Property = f32;
+///     fn get_mut<'a>(
+///         &self,
+///         entity: &'a mut AnimationEntityMut
+///     ) -> Result<&'a mut Self::Property, AnimationEvaluationError> {
+///         let component = entity
+///             .get_mut::<ExampleComponent>()
+///             .ok_or(AnimationEvaluationError::ComponentNotPresent(
+///                 TypeId::of::<ExampleComponent>()
+///             ))?
+///             .into_inner();
+///         component.power_level.as_mut().ok_or(AnimationEvaluationError::PropertyNotPresent(
+///             TypeId::of::<Option<f32>>()
+///         ))
 ///     }
 ///
 ///     fn evaluator_id(&self) -> EvaluatorId {
@@ -144,58 +149,44 @@ use downcast_rs::{impl_downcast, Downcast};
 ///     }
 /// }
 ///
-/// You can then create an [`AnimationClip`] to animate this property like so:
 ///
-/// # use bevy_animation::{AnimationClip, AnimationTargetId, VariableCurve, AnimationEntityMut, AnimationEvaluationError, animation_curves::EvaluatorId};
+/// You can then create an [`AnimatableCurve`] to animate this property like so:
+///
+/// # use bevy_animation::{VariableCurve, AnimationEntityMut, AnimationEvaluationError, animation_curves::EvaluatorId};
 /// # use bevy_animation::prelude::{AnimatableProperty, AnimatableKeyframeCurve, AnimatableCurve};
-/// # use bevy_ecs::name::Name;
-/// # use bevy_reflect::Reflect;
-/// # use bevy_render::camera::{Projection, PerspectiveProjection};
+/// # use bevy_ecs::{name::Name, component::Component};
 /// # use std::any::TypeId;
-/// # let animation_target_id = AnimationTargetId::from(&Name::new("Test"));
-/// # #[derive(Reflect, Clone)]
-/// # struct FieldOfViewProperty;
-/// # impl AnimatableProperty for FieldOfViewProperty {
+/// # #[derive(Component)]
+/// # struct ExampleComponent { power_level: Option<f32> }
+/// # #[derive(Clone)]
+/// # struct PowerLevelProperty;
+/// # impl AnimatableProperty for PowerLevelProperty {
 /// #     type Property = f32;
-/// #     fn get_mut<'a>(&self, entity: &'a mut AnimationEntityMut) -> Result<&'a mut Self::Property, AnimationEvaluationError> {
+/// #     fn get_mut<'a>(
+/// #         &self,
+/// #         entity: &'a mut AnimationEntityMut
+/// #     ) -> Result<&'a mut Self::Property, AnimationEvaluationError> {
 /// #         let component = entity
-/// #             .get_mut::<Projection>()
-/// #             .ok_or(AnimationEvaluationError::ComponentNotPresent(TypeId::of::<
-/// #                 Projection,
-/// #             >(
-/// #             )))?
+/// #             .get_mut::<ExampleComponent>()
+/// #             .ok_or(AnimationEvaluationError::ComponentNotPresent(
+/// #                 TypeId::of::<ExampleComponent>()
+/// #             ))?
 /// #             .into_inner();
-/// #         match component {
-/// #             Projection::Perspective(perspective) => Ok(&mut perspective.fov),
-/// #             _ => Err(AnimationEvaluationError::PropertyNotPresent(TypeId::of::<
-/// #                 PerspectiveProjection,
-/// #             >(
-/// #             ))),
-/// #         }
+/// #         component.power_level.as_mut().ok_or(AnimationEvaluationError::PropertyNotPresent(
+/// #             TypeId::of::<Option<f32>>()
+/// #         ))
 /// #     }
 /// #     fn evaluator_id(&self) -> EvaluatorId {
 /// #         EvaluatorId::Type(TypeId::of::<Self>())
 /// #     }
 /// # }
-/// let mut animation_clip = AnimationClip::default();
-/// animation_clip.add_curve_to_target(
-///     animation_target_id,
-///     AnimatableCurve::new(
-///         FieldOfViewProperty,
-///         AnimatableKeyframeCurve::new([
-///             (0.0, core::f32::consts::PI / 4.0),
-///             (1.0, core::f32::consts::PI / 3.0),
-///         ]).expect("Failed to create font size curve")
-///     )
+/// AnimatableCurve::new(
+///     PowerLevelProperty,
+///     AnimatableKeyframeCurve::new([
+///         (0.0, 0.0),
+///         (1.0, 9001.0),
+///     ]).expect("Failed to create power level curve")
 /// );
-///
-/// Here, the use of [`AnimatableKeyframeCurve`] creates a curve out of the given keyframe time-value
-/// pairs, using the [`Animatable`] implementation of `f32` to interpolate between them. The
-/// invocation of [`AnimatableCurve::new`] with `FieldOfViewProperty` indicates that the `f32`
-/// output from that curve is to be used to animate the font size of a `PerspectiveProjection` component (as
-/// configured above).
-///
-/// [`AnimationClip`]: crate::AnimationClip
 pub trait AnimatableProperty: Send + Sync + 'static {
     /// The animated property type.
     type Property: Animatable;
@@ -111,6 +111,7 @@ impl<T> CubicKeyframeCurve<T> {
 /// A keyframe-defined curve that uses cubic spline interpolation, special-cased for quaternions
 /// since it uses `Vec4` internally.
 #[derive(Debug, Clone, Reflect)]
+#[reflect(Clone)]
 pub struct CubicRotationCurve {
     // Note: The sample width here should be 3.
     core: ChunkedUnevenCore<Vec4>,
@@ -372,8 +373,9 @@ impl<T> WideCubicKeyframeCurve<T> {
 /// recommended to use its implementation of the [`IterableCurve`] trait, which allows iterating
 /// directly over information derived from the curve without allocating.
 ///
-/// [`MorphWeights`]: bevy_render::prelude::MorphWeights
+/// [`MorphWeights`]: bevy_mesh::morph::MorphWeights
 #[derive(Debug, Clone, Reflect)]
+#[reflect(Clone)]
 pub enum WeightsCurve {
     /// A curve which takes a constant value over its domain. Notably, this is how animations with
     /// only a single keyframe are interpreted.
@@ -17,7 +17,7 @@ use bevy_ecs::{
     resource::Resource,
     system::{Res, ResMut},
 };
-use bevy_platform_support::collections::HashMap;
+use bevy_platform::collections::HashMap;
 use bevy_reflect::{prelude::ReflectDefault, Reflect, ReflectSerialize};
 use derive_more::derive::From;
 use petgraph::{
@@ -108,7 +108,7 @@ use crate::{AnimationClip, AnimationTargetId};
 ///
 /// [RFC 51]: https://github.com/bevyengine/rfcs/blob/main/rfcs/51-animation-composition.md
 #[derive(Asset, Reflect, Clone, Debug, Serialize)]
-#[reflect(Serialize, Debug)]
+#[reflect(Serialize, Debug, Clone)]
 #[serde(into = "SerializedAnimationGraph")]
 pub struct AnimationGraph {
     /// The `petgraph` data structure that defines the animation graph.
@@ -131,7 +131,7 @@ pub struct AnimationGraph {

 /// A [`Handle`] to the [`AnimationGraph`] to be used by the [`AnimationPlayer`](crate::AnimationPlayer) on the same entity.
 #[derive(Component, Clone, Debug, Default, Deref, DerefMut, Reflect, PartialEq, Eq, From)]
-#[reflect(Component, Default)]
+#[reflect(Component, Default, Clone)]
 pub struct AnimationGraphHandle(pub Handle<AnimationGraph>);

 impl From<AnimationGraphHandle> for AssetId<AnimationGraph> {
@@ -164,6 +164,7 @@ pub type AnimationNodeIndex = NodeIndex<u32>;
 /// of the graph, contain animation clips to play. Blend and add nodes describe
 /// how to combine their children to produce a final animation.
 #[derive(Clone, Reflect, Debug)]
+#[reflect(Clone)]
 pub struct AnimationGraphNode {
     /// Animation node data specific to the type of node (clip, blend, or add).
     ///
@@ -205,6 +206,7 @@ pub struct AnimationGraphNode {
 /// In the case of clip nodes, this contains the actual animation clip
 /// associated with the node.
 #[derive(Clone, Default, Reflect, Debug)]
+#[reflect(Clone)]
 pub enum AnimationNodeType {
     /// A *clip node*, which plays an animation clip.
     ///
@@ -884,10 +886,10 @@ impl ThreadedAnimationGraph {

         self.sorted_edge_ranges.clear();
         self.sorted_edge_ranges
-            .extend(iter::repeat(0..0).take(node_count));
+            .extend(iter::repeat_n(0..0, node_count));

         self.computed_masks.clear();
-        self.computed_masks.extend(iter::repeat(0).take(node_count));
+        self.computed_masks.extend(iter::repeat_n(0, node_count));
     }

     /// Recursively constructs the [`ThreadedAnimationGraph`] for the subtree
@@ -35,7 +35,7 @@ use bevy_app::{Animation, App, Plugin, PostUpdate};
 use bevy_asset::{Asset, AssetApp, AssetEvents, Assets};
 use bevy_ecs::{prelude::*, world::EntityMutExcept};
 use bevy_math::FloatOrd;
-use bevy_platform_support::{collections::HashMap, hash::NoOpHash};
+use bevy_platform::{collections::HashMap, hash::NoOpHash};
 use bevy_reflect::{prelude::ReflectDefault, Reflect, TypePath};
 use bevy_time::Time;
 use bevy_transform::TransformSystem;
@@ -96,23 +96,26 @@ impl VariableCurve {
 /// Because animation clips refer to targets by UUID, they can target any
 /// [`AnimationTarget`] with that ID.
 #[derive(Asset, Reflect, Clone, Debug, Default)]
+#[reflect(Clone, Default)]
 pub struct AnimationClip {
     // This field is ignored by reflection because AnimationCurves can contain things that are not reflect-able
-    #[reflect(ignore)]
+    #[reflect(ignore, clone)]
     curves: AnimationCurves,
     events: AnimationEvents,
     duration: f32,
 }

 #[derive(Reflect, Debug, Clone)]
+#[reflect(Clone)]
 struct TimedAnimationEvent {
     time: f32,
     event: AnimationEvent,
 }

 #[derive(Reflect, Debug, Clone)]
+#[reflect(Clone)]
 struct AnimationEvent {
-    #[reflect(ignore)]
+    #[reflect(ignore, clone)]
     trigger: AnimationEventFn,
 }

@@ -124,6 +127,7 @@ impl AnimationEvent {

 #[derive(Reflect, Clone)]
 #[reflect(opaque)]
+#[reflect(Clone, Default, Debug)]
 struct AnimationEventFn(Arc<dyn Fn(&mut Commands, Entity, f32, f32) + Send + Sync>);

 impl Default for AnimationEventFn {
@@ -139,6 +143,7 @@ impl Debug for AnimationEventFn {
 }

 #[derive(Reflect, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone)]
+#[reflect(Clone)]
 enum AnimationEventTarget {
     Root,
     Node(AnimationTargetId),
@@ -172,6 +177,7 @@ pub type AnimationCurves = HashMap<AnimationTargetId, Vec<VariableCurve>, NoOpHa
 ///
 /// [UUID]: https://en.wikipedia.org/wiki/Universally_unique_identifier
 #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Reflect, Debug, Serialize, Deserialize)]
+#[reflect(Clone)]
 pub struct AnimationTargetId(pub Uuid);

 impl Hash for AnimationTargetId {
@@ -203,7 +209,7 @@ impl Hash for AnimationTargetId {
 /// time. However, you can change [`AnimationTarget`]'s `player` property at
 /// runtime to change which player is responsible for animating the entity.
 #[derive(Clone, Copy, Component, Reflect)]
-#[reflect(Component)]
+#[reflect(Component, Clone)]
 pub struct AnimationTarget {
     /// The ID of this animation target.
     ///
@@ -425,6 +431,7 @@ impl AnimationClip {

 /// Repetition behavior of an animation.
 #[derive(Reflect, Debug, PartialEq, Eq, Copy, Clone, Default)]
+#[reflect(Clone, Default)]
 pub enum RepeatAnimation {
     /// The animation will finish after running once.
     #[default]
@@ -462,6 +469,7 @@ pub enum AnimationEvaluationError {
 ///
 /// A stopped animation is considered no longer active.
 #[derive(Debug, Clone, Copy, Reflect)]
+#[reflect(Clone, Default)]
 pub struct ActiveAnimation {
     /// The factor by which the weight from the [`AnimationGraph`] is multiplied.
     weight: f32,
@@ -674,10 +682,9 @@ impl ActiveAnimation {
 /// Automatically added to any root animations of a scene when it is
 /// spawned.
 #[derive(Component, Default, Reflect)]
-#[reflect(Component, Default)]
+#[reflect(Component, Default, Clone)]
 pub struct AnimationPlayer {
     active_animations: HashMap<AnimationNodeIndex, ActiveAnimation>,
-    blend_weights: HashMap<AnimationNodeIndex, f32>,
 }

 // This is needed since `#[derive(Clone)]` does not generate optimized `clone_from`.
@@ -685,13 +692,11 @@ impl Clone for AnimationPlayer {
     fn clone(&self) -> Self {
         Self {
             active_animations: self.active_animations.clone(),
-            blend_weights: self.blend_weights.clone(),
         }
     }

     fn clone_from(&mut self, source: &Self) {
         self.active_animations.clone_from(&source.active_animations);
-        self.blend_weights.clone_from(&source.blend_weights);
     }
 }

@@ -750,10 +755,10 @@ impl AnimationCurveEvaluators {
                 .component_property_curve_evaluators
                 .get_or_insert_with(component_property, func),
             EvaluatorId::Type(type_id) => match self.type_id_curve_evaluators.entry(type_id) {
-                bevy_platform_support::collections::hash_map::Entry::Occupied(occupied_entry) => {
+                bevy_platform::collections::hash_map::Entry::Occupied(occupied_entry) => {
                     &mut **occupied_entry.into_mut()
                 }
-                bevy_platform_support::collections::hash_map::Entry::Vacant(vacant_entry) => {
+                bevy_platform::collections::hash_map::Entry::Vacant(vacant_entry) => {
                     &mut **vacant_entry.insert(func())
                 }
             },
@@ -1525,6 +1530,8 @@ impl<'a> Iterator for TriggeredEventsIter<'a> {

 #[cfg(test)]
 mod tests {
+    use bevy_reflect::{DynamicMap, Map};
+
     use super::*;

     #[derive(Event, Reflect, Clone)]
@@ -1656,4 +1663,13 @@ mod tests {
         active_animation.update(clip.duration, clip.duration); // 0.3 : 0.0
         assert_triggered_events_with(&active_animation, &clip, [0.3, 0.2]);
     }

+    #[test]
+    fn test_animation_node_index_as_key_of_dynamic_map() {
+        let mut map = DynamicMap::default();
+        map.insert_boxed(
+            Box::new(AnimationNodeIndex::new(0)),
+            Box::new(ActiveAnimation::default()),
+        );
+    }
 }
@@ -29,7 +29,7 @@ use crate::{graph::AnimationNodeIndex, ActiveAnimation, AnimationPlayer};
 /// component to get confused about which animation is the "main" animation, and
 /// transitions will usually be incorrect as a result.
 #[derive(Component, Default, Reflect)]
-#[reflect(Component, Default)]
+#[reflect(Component, Default, Clone)]
 pub struct AnimationTransitions {
     main_animation: Option<AnimationNodeIndex>,
     transitions: Vec<AnimationTransition>,
@@ -52,6 +52,7 @@ impl Clone for AnimationTransitions {

 /// An animation that is being faded out as part of a transition
 #[derive(Debug, Clone, Copy, Reflect)]
+#[reflect(Clone)]
 pub struct AnimationTransition {
     /// The current weight. Starts at 1.0 and goes to 0.0 during the fade-out.
     current_weight: f32,
@@ -117,8 +118,9 @@ pub fn advance_transitions(
     // is divided between all the other layers, eventually culminating in the
     // currently-playing animation receiving whatever's left. This results in a
     // nicely normalized weight.
-    let mut remaining_weight = 1.0;
     for (mut animation_transitions, mut player) in query.iter_mut() {
+        let mut remaining_weight = 1.0;
+
         for transition in &mut animation_transitions.transitions.iter_mut().rev() {
             // Decrease weight.
             transition.current_weight = (transition.current_weight
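The last hunk fixes a weight-normalization bug: `remaining_weight` was initialized once outside the player loop, so every player after the first started from whatever budget the previous player left over. A schematic sketch of the corrected structure only; the names and types are simplified and are not the actual system signature:

```rust
// Schematic: each "player" gets its own weight budget to divide among its
// fading transitions, mirroring the corrected per-player reset above.
fn distribute_weights(players: &mut [Vec<f32>]) {
    for transitions in players.iter_mut() {
        // Reset per player, not once for the whole query.
        let mut remaining_weight = 1.0_f32;
        for weight in transitions.iter_mut().rev() {
            *weight = (*weight - 0.1).max(0.0).min(remaining_weight);
            remaining_weight -= *weight;
        }
    }
}

fn main() {
    let mut players = vec![vec![1.0, 0.5], vec![1.0]];
    distribute_weights(&mut players);
    assert!(players[1][0] > 0.0); // the second player is no longer starved
}
```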
39
crates/bevy_anti_aliasing/Cargo.toml
Normal file
39
crates/bevy_anti_aliasing/Cargo.toml
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
[package]
|
||||||
|
name = "bevy_anti_aliasing"
|
||||||
|
version = "0.16.0-dev"
|
||||||
|
edition = "2024"
|
||||||
|
description = "Provides various anti aliasing implementations for Bevy Engine"
|
||||||
|
homepage = "https://bevyengine.org"
|
||||||
|
repository = "https://github.com/bevyengine/bevy"
|
||||||
|
license = "MIT OR Apache-2.0"
|
||||||
|
keywords = ["bevy"]
|
||||||
|
|
||||||
|
[features]
|
||||||
|
trace = []
|
||||||
|
webgl = []
|
||||||
|
webgpu = []
|
||||||
|
smaa_luts = ["bevy_render/ktx2", "bevy_image/ktx2", "bevy_image/zstd"]
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
# bevy
|
||||||
|
bevy_asset = { path = "../bevy_asset", version = "0.16.0-dev" }
|
||||||
|
bevy_reflect = { path = "../bevy_reflect", version = "0.16.0-dev" }
|
||||||
|
bevy_render = { path = "../bevy_render", version = "0.16.0-dev" }
|
||||||
|
bevy_math = { path = "../bevy_math", version = "0.16.0-dev" }
|
||||||
|
bevy_utils = { path = "../bevy_utils", version = "0.16.0-dev" }
|
||||||
|
bevy_app = { path = "../bevy_app", version = "0.16.0-dev" }
|
||||||
|
bevy_image = { path = "../bevy_image", version = "0.16.0-dev" }
|
||||||
|
bevy_derive = { path = "../bevy_derive", version = "0.16.0-dev" }
|
||||||
|
bevy_ecs = { path = "../bevy_ecs", version = "0.16.0-dev" }
|
||||||
|
bevy_core_pipeline = { path = "../bevy_core_pipeline", version = "0.16.0-dev" }
|
||||||
|
bevy_diagnostic = { path = "../bevy_diagnostic", version = "0.16.0-dev" }
|
||||||
|
|
||||||
|
# other
|
||||||
|
tracing = { version = "0.1", default-features = false, features = ["std"] }
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
||||||
|
[package.metadata.docs.rs]
|
||||||
|
rustdoc-args = ["-Zunstable-options", "--generate-link-to-definition"]
|
||||||
|
all-features = true
|
||||||
crates/bevy_anti_aliasing/README.md (new file, 7 lines)
@@ -0,0 +1,7 @@
+# Bevy Anti Aliasing
+
+[](https://github.com/bevyengine/bevy#license)
+[](https://crates.io/crates/bevy_core_pipeline)
+[](https://crates.io/crates/bevy_core_pipeline)
+[](https://docs.rs/bevy_core_pipeline/latest/bevy_core_pipeline/)
+[](https://discord.gg/bevy)
@@ -1,10 +1,10 @@
-use crate::{
+use bevy_app::prelude::*;
+use bevy_asset::{load_internal_asset, weak_handle, Handle};
+use bevy_core_pipeline::{
     core_2d::graph::{Core2d, Node2d},
     core_3d::graph::{Core3d, Node3d},
     fullscreen_vertex_shader::fullscreen_shader_vertex_state,
 };
-use bevy_app::prelude::*;
-use bevy_asset::{load_internal_asset, weak_handle, Handle};
 use bevy_ecs::{prelude::*, query::QueryItem};
 use bevy_image::BevyDefault as _;
 use bevy_reflect::{std_traits::ReflectDefault, Reflect};
@@ -36,7 +36,7 @@ pub use node::CasNode;
 ///
 /// To use this, add the [`ContrastAdaptiveSharpening`] component to a 2D or 3D camera.
 #[derive(Component, Reflect, Clone)]
-#[reflect(Component, Default)]
+#[reflect(Component, Default, Clone)]
 pub struct ContrastAdaptiveSharpening {
     /// Enable or disable sharpening.
     pub enabled: bool,
@@ -65,7 +65,7 @@ impl Default for ContrastAdaptiveSharpening {
 }
 
 #[derive(Component, Default, Reflect, Clone)]
-#[reflect(Component, Default)]
+#[reflect(Component, Default, Clone)]
 pub struct DenoiseCas(bool);
 
 /// The uniform struct extracted from [`ContrastAdaptiveSharpening`] attached to a [`Camera`].
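
The doc comment above keeps the usage contract unchanged: sharpening is enabled by attaching the component to a camera. A hedged sketch of what that looks like once the component lives in the new crate (the `bevy::anti_aliasing` path assumes the usual `bevy` facade re-export; adjust it to how you depend on the crates):

```rust
use bevy::prelude::*;
use bevy::anti_aliasing::contrast_adaptive_sharpening::ContrastAdaptiveSharpening;

// Sharpening is opt-in per camera; default settings come from the
// `impl Default for ContrastAdaptiveSharpening` referenced in the hunk above.
fn setup(mut commands: Commands) {
    commands.spawn((
        Camera3d::default(),
        ContrastAdaptiveSharpening::default(),
    ));
}

fn main() {
    App::new()
        .add_plugins(DefaultPlugins)
        .add_systems(Startup, setup)
        .run();
}
```
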
crates/bevy_anti_aliasing/src/experimental/mod.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
+//! Experimental rendering features.
+//!
+//! Experimental features are features with known problems, missing features,
+//! compatibility issues, low performance, and/or future breaking changes, but
+//! are included nonetheless for testing purposes.
+
+pub mod taa {
+    pub use crate::taa::{TemporalAntiAliasNode, TemporalAntiAliasPlugin, TemporalAntiAliasing};
+}
@@ -1,10 +1,10 @@
-use crate::{
+use bevy_app::prelude::*;
+use bevy_asset::{load_internal_asset, weak_handle, Handle};
+use bevy_core_pipeline::{
     core_2d::graph::{Core2d, Node2d},
     core_3d::graph::{Core3d, Node3d},
     fullscreen_vertex_shader::fullscreen_shader_vertex_state,
 };
-use bevy_app::prelude::*;
-use bevy_asset::{load_internal_asset, weak_handle, Handle};
 use bevy_ecs::prelude::*;
 use bevy_image::BevyDefault as _;
 use bevy_reflect::{std_traits::ReflectDefault, Reflect};
@@ -27,7 +27,7 @@ mod node;
 pub use node::FxaaNode;
 
 #[derive(Debug, Reflect, Eq, PartialEq, Hash, Clone, Copy)]
-#[reflect(PartialEq, Hash)]
+#[reflect(PartialEq, Hash, Clone)]
 pub enum Sensitivity {
     Low,
     Medium,
@@ -51,7 +51,7 @@ impl Sensitivity {
 /// A component for enabling Fast Approximate Anti-Aliasing (FXAA)
 /// for a [`bevy_render::camera::Camera`].
 #[derive(Reflect, Component, Clone, ExtractComponent)]
-#[reflect(Component, Default)]
+#[reflect(Component, Default, Clone)]
 #[extract_component_filter(With<Camera>)]
 #[doc(alias = "FastApproximateAntiAliasing")]
 pub struct Fxaa {
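
As the doc comment says, FXAA is a per-camera component. A short hedged sketch (same caveat about the re-export path as above; `Msaa::Off` is shown because FXAA is normally used instead of MSAA on that camera):

```rust
use bevy::prelude::*;
use bevy::anti_aliasing::fxaa::Fxaa;

fn setup(mut commands: Commands) {
    // FXAA replaces MSAA on this camera, so MSAA is turned off explicitly.
    commands.spawn((Camera3d::default(), Msaa::Off, Fxaa::default()));
}
```
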
crates/bevy_anti_aliasing/src/lib.rs (new file, 27 lines)
@@ -0,0 +1,27 @@
+#![expect(missing_docs, reason = "Not all docs are written yet, see #3492.")]
+#![forbid(unsafe_code)]
+#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+#![doc(
+    html_logo_url = "https://bevyengine.org/assets/icon.png",
+    html_favicon_url = "https://bevyengine.org/assets/icon.png"
+)]
+
+use bevy_app::Plugin;
+use contrast_adaptive_sharpening::CasPlugin;
+use fxaa::FxaaPlugin;
+use smaa::SmaaPlugin;
+
+pub mod contrast_adaptive_sharpening;
+pub mod experimental;
+pub mod fxaa;
+pub mod smaa;
+
+mod taa;
+
+#[derive(Default)]
+pub struct AntiAliasingPlugin;
+impl Plugin for AntiAliasingPlugin {
+    fn build(&self, app: &mut bevy_app::App) {
+        app.add_plugins((FxaaPlugin, CasPlugin, SmaaPlugin));
+    }
+}
@@ -29,16 +29,16 @@
 //! * Compatibility with SSAA and MSAA.
 //!
 //! [SMAA]: https://www.iryoku.com/smaa/
-#[cfg(not(feature = "smaa_luts"))]
-use crate::tonemapping::lut_placeholder;
-use crate::{
-    core_2d::graph::{Core2d, Node2d},
-    core_3d::graph::{Core3d, Node3d},
-};
 use bevy_app::{App, Plugin};
 #[cfg(feature = "smaa_luts")]
 use bevy_asset::load_internal_binary_asset;
 use bevy_asset::{load_internal_asset, weak_handle, Handle};
+#[cfg(not(feature = "smaa_luts"))]
+use bevy_core_pipeline::tonemapping::lut_placeholder;
+use bevy_core_pipeline::{
+    core_2d::graph::{Core2d, Node2d},
+    core_3d::graph::{Core3d, Node3d},
+};
 use bevy_derive::{Deref, DerefMut};
 use bevy_ecs::{
     component::Component,
@@ -46,7 +46,7 @@ use bevy_ecs::{
     query::{QueryItem, With},
     reflect::ReflectComponent,
     resource::Resource,
-    schedule::IntoSystemConfigs as _,
+    schedule::IntoScheduleConfigs as _,
     system::{lifetimeless::Read, Commands, Query, Res, ResMut},
     world::{FromWorld, World},
 };
@@ -95,7 +95,7 @@ pub struct SmaaPlugin;
 /// A component for enabling Subpixel Morphological Anti-Aliasing (SMAA)
 /// for a [`bevy_render::camera::Camera`].
 #[derive(Clone, Copy, Default, Component, Reflect, ExtractComponent)]
-#[reflect(Component, Default)]
+#[reflect(Component, Default, Clone)]
 #[doc(alias = "SubpixelMorphologicalAntiAliasing")]
 pub struct Smaa {
     /// A predefined set of SMAA parameters: i.e. a quality level.
@@ -110,7 +110,7 @@ pub struct Smaa {
 ///
 /// The default value is *high*.
 #[derive(Clone, Copy, Reflect, Default, PartialEq, Eq, Hash)]
-#[reflect(Default)]
+#[reflect(Default, Clone, PartialEq, Hash)]
 pub enum SmaaPreset {
     /// Four search steps; no diagonal or corner detection.
     Low,
@@ -297,8 +297,6 @@ impl Plugin for SmaaPlugin {
             SMAA_AREA_LUT_TEXTURE_HANDLE,
             "SMAAAreaLUT.ktx2",
             |bytes, _: String| Image::from_buffer(
-                #[cfg(all(debug_assertions, feature = "dds"))]
-                "SMAAAreaLUT".to_owned(),
                 bytes,
                 bevy_image::ImageType::Format(bevy_image::ImageFormat::Ktx2),
                 bevy_image::CompressedImageFormats::NONE,
@@ -315,8 +313,6 @@ impl Plugin for SmaaPlugin {
             SMAA_SEARCH_LUT_TEXTURE_HANDLE,
             "SMAASearchLUT.ktx2",
             |bytes, _: String| Image::from_buffer(
-                #[cfg(all(debug_assertions, feature = "dds"))]
-                "SMAASearchLUT".to_owned(),
                 bytes,
                 bevy_image::ImageType::Format(bevy_image::ImageFormat::Ktx2),
                 bevy_image::CompressedImageFormats::NONE,
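
SMAA follows the same per-camera pattern; the `preset` field picks a quality level and, per the doc comment, defaults to the high preset. A hedged sketch (the `High` variant is assumed from that comment; only `Low` appears in the hunk itself):

```rust
use bevy::prelude::*;
use bevy::anti_aliasing::smaa::{Smaa, SmaaPreset};

fn setup(mut commands: Commands) {
    // The preset selects a quality level; `High` is assumed to exist based on
    // the "default value is high" doc comment above.
    commands.spawn((
        Camera3d::default(),
        Smaa { preset: SmaaPreset::High },
    ));
}
```
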
@@ -1,17 +1,17 @@
-use crate::{
+use bevy_app::{App, Plugin};
+use bevy_asset::{load_internal_asset, weak_handle, Handle};
+use bevy_core_pipeline::{
     core_3d::graph::{Core3d, Node3d},
     fullscreen_vertex_shader::fullscreen_shader_vertex_state,
     prelude::Camera3d,
     prepass::{DepthPrepass, MotionVectorPrepass, ViewPrepassTextures},
 };
-use bevy_app::{App, Plugin};
-use bevy_asset::{load_internal_asset, weak_handle, Handle};
 use bevy_diagnostic::FrameCount;
 use bevy_ecs::{
-    prelude::{require, Component, Entity, ReflectComponent},
+    prelude::{Component, Entity, ReflectComponent},
     query::{QueryItem, With},
     resource::Resource,
-    schedule::IntoSystemConfigs,
+    schedule::IntoScheduleConfigs,
     system::{Commands, Query, Res, ResMut},
     world::{FromWorld, World},
 };
@@ -131,7 +131,7 @@ impl Plugin for TemporalAntiAliasPlugin {
 ///
 /// If no [`MipBias`] component is attached to the camera, TAA will add a `MipBias(-1.0)` component.
 #[derive(Component, Reflect, Clone)]
-#[reflect(Component, Default)]
+#[reflect(Component, Default, Clone)]
 #[require(TemporalJitter, DepthPrepass, MotionVectorPrepass)]
 #[doc(alias = "Taa")]
 pub struct TemporalAntiAliasing {
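
Because of the `#[require(TemporalJitter, DepthPrepass, MotionVectorPrepass)]` attribute, inserting the TAA component pulls in its prerequisites automatically. A hedged sketch using the `experimental` re-export added earlier in this diff (registering `TemporalAntiAliasPlugin` is still required and not shown here):

```rust
use bevy::prelude::*;
use bevy::anti_aliasing::experimental::taa::TemporalAntiAliasing;

fn setup(mut commands: Commands) {
    // TemporalJitter, DepthPrepass and MotionVectorPrepass are inserted for us
    // by the required-components machinery; only the TAA component is named.
    commands.spawn((Camera3d::default(), TemporalAntiAliasing::default()));
}
```
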
@@ -1,7 +1,7 @@
 [package]
 name = "bevy_app"
 version = "0.16.0-dev"
-edition = "2021"
+edition = "2024"
 description = "Provides core App functionality for Bevy Engine"
 homepage = "https://bevyengine.org"
 repository = "https://github.com/bevyengine/bevy"
@@ -9,7 +9,7 @@ license = "MIT OR Apache-2.0"
 keywords = ["bevy"]
 
 [features]
-default = ["std", "bevy_reflect", "bevy_tasks", "bevy_ecs/default"]
+default = ["std", "bevy_reflect", "bevy_ecs/default", "error_panic_hook"]
 
 # Functionality
 
@@ -23,9 +23,6 @@ reflect_functions = [
   "bevy_ecs/reflect_functions",
 ]
 
-## Adds support for running async background tasks
-bevy_tasks = ["dep:bevy_tasks"]
-
 # Debugging Features
 
 ## Enables `tracing` integration, allowing spans and other metrics to be reported
@@ -36,6 +33,10 @@ trace = ["dep:tracing"]
 ## other debug operations which can help with diagnosing certain behaviors.
 bevy_debug_stepping = []
 
+## Will set the BevyError panic hook, which gives cleaner filtered backtraces when
+## a BevyError is hit.
+error_panic_hook = []
+
 # Platform Compatibility
 
 ## Allows access to the `std` crate. Enabling this feature will prevent compilation
@@ -47,26 +48,28 @@ std = [
   "dep:ctrlc",
   "downcast-rs/std",
   "bevy_utils/std",
-  "bevy_tasks?/std",
-  "bevy_platform_support/std",
+  "bevy_tasks/std",
+  "bevy_platform/std",
 ]
 
 ## `critical-section` provides the building blocks for synchronization primitives
 ## on all platforms, including `no_std`.
 critical-section = [
-  "bevy_tasks?/critical-section",
+  "bevy_tasks/critical-section",
   "bevy_ecs/critical-section",
-  "bevy_platform_support/critical-section",
+  "bevy_platform/critical-section",
   "bevy_reflect?/critical-section",
 ]
 
-## `portable-atomic` provides additional platform support for atomic types and
-## operations, even on targets without native support.
-portable-atomic = [
-  "bevy_tasks?/portable-atomic",
-  "bevy_ecs/portable-atomic",
-  "bevy_platform_support/portable-atomic",
-  "bevy_reflect?/portable-atomic",
+## Enables use of browser APIs.
+## Note this is currently only applicable on `wasm32` architectures.
+web = [
+  "bevy_platform/web",
+  "bevy_tasks/web",
+  "bevy_reflect?/web",
+  "dep:wasm-bindgen",
+  "dep:web-sys",
+  "dep:console_error_panic_hook",
 ]
 
 [dependencies]
@@ -77,8 +80,8 @@ bevy_reflect = { path = "../bevy_reflect", version = "0.16.0-dev", default-featu
 bevy_utils = { path = "../bevy_utils", version = "0.16.0-dev", default-features = false, features = [
   "alloc",
 ] }
-bevy_tasks = { path = "../bevy_tasks", version = "0.16.0-dev", default-features = false, optional = true }
-bevy_platform_support = { path = "../bevy_platform_support", version = "0.16.0-dev", default-features = false }
+bevy_tasks = { path = "../bevy_tasks", version = "0.16.0-dev", default-features = false }
+bevy_platform = { path = "../bevy_platform", version = "0.16.0-dev", default-features = false }
 
 # other
 downcast-rs = { version = "2", default-features = false }
@@ -86,14 +89,15 @@ thiserror = { version = "2", default-features = false }
 variadics_please = "1.1"
 tracing = { version = "0.1", default-features = false, optional = true }
 log = { version = "0.4", default-features = false }
+cfg-if = "1.0.0"
 
 [target.'cfg(any(unix, windows))'.dependencies]
 ctrlc = { version = "3.4.4", optional = true }
 
 [target.'cfg(target_arch = "wasm32")'.dependencies]
-wasm-bindgen = { version = "0.2" }
-web-sys = { version = "0.3", features = ["Window"] }
-console_error_panic_hook = "0.1.6"
+wasm-bindgen = { version = "0.2", optional = true }
+web-sys = { version = "0.3", features = ["Window"], optional = true }
+console_error_panic_hook = { version = "0.1.6", optional = true }
 
 [dev-dependencies]
 crossbeam-channel = "0.5.0"
@@ -13,13 +13,12 @@ use bevy_ecs::{
     event::{event_update_system, EventCursor},
     intern::Interned,
     prelude::*,
-    schedule::{ScheduleBuildSettings, ScheduleLabel},
-    system::{IntoObserverSystem, SystemId, SystemInput},
+    schedule::{InternedSystemSet, ScheduleBuildSettings, ScheduleLabel},
+    system::{IntoObserverSystem, ScheduleSystem, SystemId, SystemInput},
 };
-use bevy_platform_support::collections::HashMap;
+use bevy_platform::collections::HashMap;
 use core::{fmt::Debug, num::NonZero, panic::AssertUnwindSafe};
 use log::debug;
-use thiserror::Error;
 
 #[cfg(feature = "trace")]
 use tracing::info_span;
@@ -44,7 +43,7 @@ pub use bevy_ecs::label::DynEq;
 /// A shorthand for `Interned<dyn AppLabel>`.
 pub type InternedAppLabel = Interned<dyn AppLabel>;
 
-#[derive(Debug, Error)]
+#[derive(Debug, thiserror::Error)]
 pub(crate) enum AppError {
     #[error("duplicate plugin {plugin_name:?}")]
     DuplicatePlugin { plugin_name: String },
@@ -302,7 +301,7 @@ impl App {
     pub fn add_systems<M>(
         &mut self,
         schedule: impl ScheduleLabel,
-        systems: impl IntoSystemConfigs<M>,
+        systems: impl IntoScheduleConfigs<ScheduleSystem, M>,
     ) -> &mut Self {
         self.main_mut().add_systems(schedule, systems);
         self
@@ -330,10 +329,10 @@ impl App {
 
     /// Configures a collection of system sets in the provided schedule, adding any sets that do not exist.
     #[track_caller]
-    pub fn configure_sets(
+    pub fn configure_sets<M>(
        &mut self,
        schedule: impl ScheduleLabel,
-       sets: impl IntoSystemSetConfigs,
+       sets: impl IntoScheduleConfigs<InternedSystemSet, M>,
     ) -> &mut Self {
         self.main_mut().configure_sets(schedule, sets);
         self
@@ -1034,6 +1033,17 @@ impl App {
             .try_register_required_components_with::<T, R>(constructor)
     }
 
+    /// Registers a component type as "disabling",
+    /// using [default query filters](bevy_ecs::entity_disabling::DefaultQueryFilters) to exclude entities with the component from queries.
+    ///
+    /// # Warning
+    ///
+    /// As discussed in the [module docs](bevy_ecs::entity_disabling), this can have performance implications,
+    /// as well as create interoperability issues, and should be used with caution.
+    pub fn register_disabling_component<C: Component>(&mut self) {
+        self.world_mut().register_disabling_component::<C>();
+    }
+
     /// Returns a reference to the main [`SubApp`]'s [`World`]. This is the same as calling
     /// [`app.main().world()`].
     ///
@@ -1330,7 +1340,7 @@ type RunnerFn = Box<dyn FnOnce(App) -> AppExit>;
 
 fn run_once(mut app: App) -> AppExit {
     while app.plugins_state() == PluginsState::Adding {
-        #[cfg(all(not(target_arch = "wasm32"), feature = "bevy_tasks"))]
+        #[cfg(not(all(target_arch = "wasm32", feature = "web")))]
         bevy_tasks::tick_global_task_pools_on_main_thread();
     }
     app.finish();
@@ -1394,7 +1404,6 @@ impl AppExit {
 }
 
 impl From<u8> for AppExit {
-    #[must_use]
     fn from(value: u8) -> Self {
         Self::from_code(value)
     }
@@ -1413,7 +1422,7 @@ impl Termination for AppExit {
 
 #[cfg(test)]
 mod tests {
-    use core::{iter, marker::PhantomData};
+    use core::marker::PhantomData;
     use std::sync::Mutex;
 
     use bevy_ecs::{
@@ -1424,7 +1433,7 @@ mod tests {
         query::With,
         removal_detection::RemovedComponents,
         resource::Resource,
-        schedule::{IntoSystemConfigs, ScheduleLabel},
+        schedule::{IntoScheduleConfigs, ScheduleLabel},
         system::{Commands, Query},
         world::{FromWorld, World},
     };
@@ -1637,7 +1646,7 @@ mod tests {
         struct Foo;
 
         let mut app = App::new();
-        app.world_mut().spawn_batch(iter::repeat(Foo).take(5));
+        app.world_mut().spawn_batch(core::iter::repeat_n(Foo, 5));
 
         fn despawn_one_foo(mut commands: Commands, foos: Query<Entity, With<Foo>>) {
             if let Some(e) = foos.iter().next() {
@@ -1691,9 +1700,9 @@ mod tests {
         fn raise_exits(mut exits: EventWriter<AppExit>) {
             // Exit codes chosen by a fair dice roll.
             // Unlikely to overlap with default values.
-            exits.send(AppExit::Success);
-            exits.send(AppExit::from_code(4));
-            exits.send(AppExit::from_code(73));
+            exits.write(AppExit::Success);
+            exits.write(AppExit::from_code(4));
+            exits.write(AppExit::from_code(73));
         }
 
         let exit = App::new().add_systems(Update, raise_exits).run();
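
For downstream code, the visible changes in this file are the schedule-config trait rename and `EventWriter::send` becoming `EventWriter::write`. A hedged sketch of caller-side code after the rename (the system set and system names are illustrative, not from the crate):

```rust
use bevy::prelude::*;

#[derive(SystemSet, Debug, Clone, PartialEq, Eq, Hash)]
struct InputSet;

fn read_input() {}
fn apply_input() {}

fn send_exit(mut exits: EventWriter<AppExit>) {
    // `EventWriter::send` was renamed to `write` in this release.
    exits.write(AppExit::Success);
}

fn main() {
    App::new()
        // `add_systems` and `configure_sets` now accept `IntoScheduleConfigs`
        // impls, but tuples, `.chain()` and `.in_set()` work exactly as before.
        .configure_sets(Update, InputSet)
        .add_systems(Update, (read_input, apply_input).chain().in_set(InputSet))
        .add_systems(Update, send_exit)
        .run();
}
```
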
@@ -30,7 +30,6 @@ mod plugin;
 mod plugin_group;
 mod schedule_runner;
 mod sub_app;
-#[cfg(feature = "bevy_tasks")]
 mod task_pool_plugin;
 #[cfg(all(any(unix, windows), feature = "std"))]
 mod terminal_ctrl_c_handler;
@@ -42,7 +41,6 @@ pub use plugin::*;
 pub use plugin_group::*;
 pub use schedule_runner::*;
 pub use sub_app::*;
-#[cfg(feature = "bevy_tasks")]
 pub use task_pool_plugin::*;
 #[cfg(all(any(unix, windows), feature = "std"))]
 pub use terminal_ctrl_c_handler::*;
@@ -60,10 +58,6 @@ pub mod prelude {
             RunFixedMainLoopSystem, SpawnScene, Startup, Update,
         },
         sub_app::SubApp,
-        Plugin, PluginGroup,
+        Plugin, PluginGroup, TaskPoolOptions, TaskPoolPlugin,
     };
-
-    #[cfg(feature = "bevy_tasks")]
-    #[doc(hidden)]
-    pub use crate::{NonSendMarker, TaskPoolOptions, TaskPoolPlugin};
 }
@@ -3,7 +3,7 @@ use alloc::{vec, vec::Vec};
 use bevy_ecs::{
     resource::Resource,
     schedule::{
-        ExecutorKind, InternedScheduleLabel, IntoSystemSetConfigs, Schedule, ScheduleLabel,
+        ExecutorKind, InternedScheduleLabel, IntoScheduleConfigs, Schedule, ScheduleLabel,
         SystemSet,
     },
     system::Local,
@@ -15,6 +15,13 @@ use bevy_ecs::{
 /// By default, it will run the following schedules in the given order:
 ///
 /// On the first run of the schedule (and only on the first run), it will run:
+/// * [`StateTransition`] [^1]
+///   * This means that [`OnEnter(MyState::Foo)`] will be called *before* [`PreStartup`]
+///     if `MyState` was added to the app with `MyState::Foo` as the initial state,
+///     as well as [`OnEnter(MyComputedState)`] if it `compute`s to `Some(Self)` in `MyState::Foo`.
+///   * If you want to run systems before any state transitions, regardless of which state is the starting state,
+///     for example, for registering required components, you can add your own custom startup schedule
+///     before [`StateTransition`]. See [`MainScheduleOrder::insert_startup_before`] for more details.
 /// * [`PreStartup`]
 /// * [`Startup`]
 /// * [`PostStartup`]
@@ -22,7 +29,7 @@ use bevy_ecs::{
 /// Then it will run:
 /// * [`First`]
 /// * [`PreUpdate`]
-/// * [`StateTransition`]
+/// * [`StateTransition`] [^1]
 /// * [`RunFixedMainLoop`]
 ///     * This will run [`FixedMain`] zero to many times, based on how much time has elapsed.
 /// * [`Update`]
@@ -37,35 +44,39 @@ use bevy_ecs::{
 ///
 /// See [`RenderPlugin`] and [`PipelinedRenderingPlugin`] for more details.
 ///
+/// [^1]: [`StateTransition`] is inserted only if you have `bevy_state` feature enabled. It is enabled in `default` features.
+///
 /// [`StateTransition`]: https://docs.rs/bevy/latest/bevy/prelude/struct.StateTransition.html
+/// [`OnEnter(MyState::Foo)`]: https://docs.rs/bevy/latest/bevy/prelude/struct.OnEnter.html
+/// [`OnEnter(MyComputedState)`]: https://docs.rs/bevy/latest/bevy/prelude/struct.OnEnter.html
 /// [`RenderPlugin`]: https://docs.rs/bevy/latest/bevy/render/struct.RenderPlugin.html
 /// [`PipelinedRenderingPlugin`]: https://docs.rs/bevy/latest/bevy/render/pipelined_rendering/struct.PipelinedRenderingPlugin.html
 /// [`SubApp`]: crate::SubApp
-#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash, Default)]
 pub struct Main;
 
 /// The schedule that runs before [`Startup`].
 ///
 /// See the [`Main`] schedule for some details about how schedules are run.
-#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash, Default)]
 pub struct PreStartup;
 
 /// The schedule that runs once when the app starts.
 ///
 /// See the [`Main`] schedule for some details about how schedules are run.
-#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash, Default)]
 pub struct Startup;
 
 /// The schedule that runs once after [`Startup`].
 ///
 /// See the [`Main`] schedule for some details about how schedules are run.
-#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash, Default)]
 pub struct PostStartup;
 
 /// Runs first in the schedule.
 ///
 /// See the [`Main`] schedule for some details about how schedules are run.
-#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash, Default)]
 pub struct First;
 
 /// The schedule that contains logic that must run before [`Update`]. For example, a system that reads raw keyboard
@@ -76,7 +87,7 @@ pub struct First;
 /// [`PreUpdate`] abstracts out "pre work implementation details".
 ///
 /// See the [`Main`] schedule for some details about how schedules are run.
-#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash, Default)]
 pub struct PreUpdate;
 
 /// Runs the [`FixedMain`] schedule in a loop according until all relevant elapsed time has been "consumed".
@@ -88,21 +99,21 @@ pub struct PreUpdate;
 /// [`RunFixedMainLoop`] will *not* be parallelized between each other.
 ///
 /// See the [`Main`] schedule for some details about how schedules are run.
-#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash, Default)]
 pub struct RunFixedMainLoop;
 
 /// Runs first in the [`FixedMain`] schedule.
 ///
 /// See the [`FixedMain`] schedule for details on how fixed updates work.
 /// See the [`Main`] schedule for some details about how schedules are run.
-#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash, Default)]
 pub struct FixedFirst;
 
 /// The schedule that contains logic that must run before [`FixedUpdate`].
 ///
 /// See the [`FixedMain`] schedule for details on how fixed updates work.
 /// See the [`Main`] schedule for some details about how schedules are run.
-#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash, Default)]
 pub struct FixedPreUpdate;
 
 /// The schedule that contains most gameplay logic, which runs at a fixed rate rather than every render frame.
@@ -117,7 +128,7 @@ pub struct FixedPreUpdate;
 /// See the [`Update`] schedule for examples of systems that *should not* use this schedule.
 /// See the [`FixedMain`] schedule for details on how fixed updates work.
 /// See the [`Main`] schedule for some details about how schedules are run.
-#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash, Default)]
 pub struct FixedUpdate;
 
 /// The schedule that runs after the [`FixedUpdate`] schedule, for reacting
@@ -125,14 +136,14 @@ pub struct FixedUpdate;
 ///
 /// See the [`FixedMain`] schedule for details on how fixed updates work.
 /// See the [`Main`] schedule for some details about how schedules are run.
-#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash, Default)]
 pub struct FixedPostUpdate;
 
 /// The schedule that runs last in [`FixedMain`]
 ///
 /// See the [`FixedMain`] schedule for details on how fixed updates work.
 /// See the [`Main`] schedule for some details about how schedules are run.
-#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash, Default)]
 pub struct FixedLast;
 
 /// The schedule that contains systems which only run after a fixed period of time has elapsed.
@@ -144,7 +155,7 @@ pub struct FixedLast;
 /// See [this example](https://github.com/bevyengine/bevy/blob/latest/examples/time/time.rs).
 ///
 /// See the [`Main`] schedule for some details about how schedules are run.
-#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash, Default)]
 pub struct FixedMain;
 
 /// The schedule that contains any app logic that must run once per render frame.
@@ -157,13 +168,13 @@ pub struct FixedMain;
 ///
 /// See the [`FixedUpdate`] schedule for examples of systems that *should not* use this schedule.
 /// See the [`Main`] schedule for some details about how schedules are run.
-#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash, Default)]
 pub struct Update;
 
 /// The schedule that contains scene spawning.
 ///
 /// See the [`Main`] schedule for some details about how schedules are run.
-#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash, Default)]
 pub struct SpawnScene;
 
 /// The schedule that contains logic that must run after [`Update`]. For example, synchronizing "local transforms" in a hierarchy
@@ -174,13 +185,13 @@ pub struct SpawnScene;
 /// [`PostUpdate`] abstracts out "implementation details" from users defining systems in [`Update`].
 ///
 /// See the [`Main`] schedule for some details about how schedules are run.
-#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash, Default)]
 pub struct PostUpdate;
 
 /// Runs last in the schedule.
 ///
 /// See the [`Main`] schedule for some details about how schedules are run.
-#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash, Default)]
 pub struct Last;
 
 /// Animation system set. This exists in [`PostUpdate`].
@@ -316,7 +327,7 @@ impl Plugin for MainSchedulePlugin {
 
         #[cfg(feature = "bevy_debug_stepping")]
         {
-            use bevy_ecs::schedule::{IntoSystemConfigs, Stepping};
+            use bevy_ecs::schedule::{IntoScheduleConfigs, Stepping};
            app.add_systems(Main, Stepping::begin_frame.before(Main::run_main));
         }
     }
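
The new doc text points at `MainScheduleOrder::insert_startup_before` for running work ahead of the very first state transition. A hedged sketch of that call (the schedule label is made up, and the argument order is assumed by analogy with the existing `insert_after`-style API):

```rust
use bevy::prelude::*;
use bevy::app::MainScheduleOrder;
use bevy::ecs::schedule::ScheduleLabel;

// Illustrative label for setup that must run before any `OnEnter` systems.
#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
struct PreStateSetup;

fn main() {
    let mut app = App::new();
    app.add_plugins(DefaultPlugins);
    app.init_schedule(PreStateSetup);
    app.world_mut()
        .resource_mut::<MainScheduleOrder>()
        .insert_startup_before(StateTransition, PreStateSetup);
}
```
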
@@ -39,13 +39,23 @@ pub struct PanicHandlerPlugin;
 
 impl Plugin for PanicHandlerPlugin {
     fn build(&self, _app: &mut App) {
-        #[cfg(target_arch = "wasm32")]
+        #[cfg(feature = "std")]
         {
-            console_error_panic_hook::set_once();
-        }
-        #[cfg(not(target_arch = "wasm32"))]
-        {
-            // Use the default target panic hook - Do nothing.
+            static SET_HOOK: std::sync::Once = std::sync::Once::new();
+            SET_HOOK.call_once(|| {
+                cfg_if::cfg_if! {
+                    if #[cfg(all(target_arch = "wasm32", feature = "web"))] {
+                        // This provides better panic handling in JS engines (displays the panic message and improves the backtrace).
+                        std::panic::set_hook(alloc::boxed::Box::new(console_error_panic_hook::hook));
+                    } else if #[cfg(feature = "error_panic_hook")] {
+                        let current_hook = std::panic::take_hook();
+                        std::panic::set_hook(alloc::boxed::Box::new(
+                            bevy_ecs::error::bevy_error_panic_hook(current_hook),
+                        ));
+                    }
+                    // Otherwise use the default target panic hook - Do nothing.
+                }
+            });
         }
     }
 }
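
The new `build` body chains hooks instead of replacing them outright: it takes the current hook and wraps it. A standalone sketch of that take-and-wrap pattern in plain `std`, without any Bevy types:

```rust
fn main() {
    // Take whatever hook is currently installed (the default printer, here) ...
    let previous_hook = std::panic::take_hook();
    // ... and install a wrapper that adds context before delegating to it.
    std::panic::set_hook(Box::new(move |info| {
        eprintln!("something went wrong, details follow:");
        previous_hook(info);
    }));

    // Trigger it once to show both messages.
    let _ = std::panic::catch_unwind(|| panic!("boom"));
}
```
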
@@ -4,7 +4,7 @@ use alloc::{
     string::{String, ToString},
     vec::Vec,
 };
-use bevy_platform_support::collections::hash_map::Entry;
+use bevy_platform::collections::hash_map::Entry;
 use bevy_utils::TypeIdMap;
 use core::any::TypeId;
 use log::{debug, warn};
@@ -3,10 +3,10 @@ use crate::{
     plugin::Plugin,
     PluginsState,
 };
-use bevy_platform_support::time::Instant;
+use bevy_platform::time::Instant;
 use core::time::Duration;
 
-#[cfg(target_arch = "wasm32")]
+#[cfg(all(target_arch = "wasm32", feature = "web"))]
 use {
     alloc::{boxed::Box, rc::Rc},
     core::cell::RefCell,
@@ -77,7 +77,7 @@ impl Plugin for ScheduleRunnerPlugin {
         let plugins_state = app.plugins_state();
         if plugins_state != PluginsState::Cleaned {
             while app.plugins_state() == PluginsState::Adding {
-                #[cfg(all(not(target_arch = "wasm32"), feature = "bevy_tasks"))]
+                #[cfg(not(all(target_arch = "wasm32", feature = "web")))]
                 bevy_tasks::tick_global_task_pools_on_main_thread();
             }
             app.finish();
@@ -118,22 +118,8 @@ impl Plugin for ScheduleRunnerPlugin {
                 Ok(None)
             };
 
-            #[cfg(not(target_arch = "wasm32"))]
-            {
-                loop {
-                    match tick(&mut app, wait) {
-                        Ok(Some(_delay)) => {
-                            #[cfg(feature = "std")]
-                            std::thread::sleep(_delay);
-                        }
-                        Ok(None) => continue,
-                        Err(exit) => return exit,
-                    }
-                }
-            }
-
-            #[cfg(target_arch = "wasm32")]
-            {
+            cfg_if::cfg_if! {
+                if #[cfg(all(target_arch = "wasm32", feature = "web"))] {
                 fn set_timeout(callback: &Closure<dyn FnMut()>, dur: Duration) {
                     web_sys::window()
                         .unwrap()
@@ -170,6 +156,17 @@ impl Plugin for ScheduleRunnerPlugin {
                 set_timeout(base_tick_closure.borrow().as_ref().unwrap(), asap);
 
                 exit.take()
+                } else {
+                    loop {
+                        match tick(&mut app, wait) {
+                            Ok(Some(delay)) => {
+                                bevy_platform::thread::sleep(delay);
+                            }
+                            Ok(None) => continue,
+                            Err(exit) => return exit,
+                        }
+                    }
+                }
             }
         }
     }
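
The runner now picks between the browser and native loops with `cfg_if`, which keeps both branches in one expression instead of two attribute-gated blocks. A standalone illustration of the macro (the helper function is made up, not the runner itself, and assumes `cfg-if` is listed as a dependency):

```rust
// Exactly one branch is compiled, so each arm may use platform-specific APIs freely.
fn sleep_briefly(duration: core::time::Duration) {
    cfg_if::cfg_if! {
        if #[cfg(target_arch = "wasm32")] {
            // On the web there is no blocking sleep; a real app would schedule a timeout instead.
            let _ = duration;
        } else {
            std::thread::sleep(duration);
        }
    }
}

fn main() {
    sleep_briefly(core::time::Duration::from_millis(10));
}
```
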
@@ -3,10 +3,10 @@ use alloc::{boxed::Box, string::String, vec::Vec};
 use bevy_ecs::{
     event::EventRegistry,
     prelude::*,
-    schedule::{InternedScheduleLabel, ScheduleBuildSettings, ScheduleLabel},
-    system::{SystemId, SystemInput},
+    schedule::{InternedScheduleLabel, InternedSystemSet, ScheduleBuildSettings, ScheduleLabel},
+    system::{ScheduleSystem, SystemId, SystemInput},
 };
-use bevy_platform_support::collections::{HashMap, HashSet};
+use bevy_platform::collections::{HashMap, HashSet};
 use core::fmt::Debug;
 
 #[cfg(feature = "trace")]
@@ -211,7 +211,7 @@ impl SubApp {
     pub fn add_systems<M>(
         &mut self,
         schedule: impl ScheduleLabel,
-        systems: impl IntoSystemConfigs<M>,
+        systems: impl IntoScheduleConfigs<ScheduleSystem, M>,
     ) -> &mut Self {
         let mut schedules = self.world.resource_mut::<Schedules>();
         schedules.add_systems(schedule, systems);
@@ -233,10 +233,10 @@ impl SubApp {
 
     /// See [`App::configure_sets`].
     #[track_caller]
-    pub fn configure_sets(
+    pub fn configure_sets<M>(
         &mut self,
         schedule: impl ScheduleLabel,
-        sets: impl IntoSystemSetConfigs,
+        sets: impl IntoScheduleConfigs<InternedSystemSet, M>,
     ) -> &mut Self {
         let mut schedules = self.world.resource_mut::<Schedules>();
         schedules.configure_sets(schedule, sets);
@@ -1,24 +1,25 @@
-#![cfg_attr(
-    feature = "portable-atomic",
-    expect(
-        clippy::redundant_closure,
-        reason = "bevy_platform_support::sync::Arc has subtly different implicit behavior"
-    )
-)]
-
 use crate::{App, Plugin};
 
 use alloc::string::ToString;
-use bevy_platform_support::sync::Arc;
+use bevy_platform::sync::Arc;
 use bevy_tasks::{AsyncComputeTaskPool, ComputeTaskPool, IoTaskPool, TaskPoolBuilder};
-use core::{fmt::Debug, marker::PhantomData};
+use core::fmt::Debug;
 use log::trace;
 
-#[cfg(not(target_arch = "wasm32"))]
-use {crate::Last, bevy_ecs::prelude::NonSend};
-
-#[cfg(not(target_arch = "wasm32"))]
-use bevy_tasks::tick_global_task_pools_on_main_thread;
+cfg_if::cfg_if! {
+    if #[cfg(not(all(target_arch = "wasm32", feature = "web")))] {
+        use {crate::Last, bevy_tasks::tick_global_task_pools_on_main_thread};
+        use bevy_ecs::system::NonSendMarker;
+
+        /// A system used to check and advanced our task pools.
+        ///
+        /// Calls [`tick_global_task_pools_on_main_thread`],
+        /// and uses [`NonSendMarker`] to ensure that this system runs on the main thread
+        fn tick_global_task_pools(_main_thread_marker: NonSendMarker) {
+            tick_global_task_pools_on_main_thread();
+        }
+    }
+}
 
 /// Setup of default task pools: [`AsyncComputeTaskPool`], [`ComputeTaskPool`], [`IoTaskPool`].
 #[derive(Default)]
@@ -32,21 +33,10 @@ impl Plugin for TaskPoolPlugin {
         // Setup the default bevy task pools
         self.task_pool_options.create_default_pools();
 
-        #[cfg(not(target_arch = "wasm32"))]
+        #[cfg(not(all(target_arch = "wasm32", feature = "web")))]
         _app.add_systems(Last, tick_global_task_pools);
     }
 }
-/// A dummy type that is [`!Send`](Send), to force systems to run on the main thread.
-pub struct NonSendMarker(PhantomData<*mut ()>);
-
-/// A system used to check and advanced our task pools.
-///
-/// Calls [`tick_global_task_pools_on_main_thread`],
-/// and uses [`NonSendMarker`] to ensure that this system runs on the main thread
-#[cfg(not(target_arch = "wasm32"))]
-fn tick_global_task_pools(_main_thread_marker: Option<NonSend<NonSendMarker>>) {
-    tick_global_task_pools_on_main_thread();
-}
-
 /// Defines a simple way to determine how many threads to use given the number of remaining cores
 /// and number of total cores
@@ -184,20 +174,21 @@ impl TaskPoolOptions {
         remaining_threads = remaining_threads.saturating_sub(io_threads);
 
         IoTaskPool::get_or_init(|| {
-            #[cfg_attr(target_arch = "wasm32", expect(unused_mut))]
-            let mut builder = TaskPoolBuilder::default()
+            let builder = TaskPoolBuilder::default()
                 .num_threads(io_threads)
                 .thread_name("IO Task Pool".to_string());
 
-            #[cfg(not(target_arch = "wasm32"))]
-            {
+            #[cfg(not(all(target_arch = "wasm32", feature = "web")))]
+            let builder = {
+                let mut builder = builder;
                 if let Some(f) = self.io.on_thread_spawn.clone() {
                     builder = builder.on_thread_spawn(move || f());
                 }
                 if let Some(f) = self.io.on_thread_destroy.clone() {
                     builder = builder.on_thread_destroy(move || f());
                 }
-            }
+                builder
+            };
 
             builder.build()
         });
@@ -213,20 +204,21 @@ impl TaskPoolOptions {
         remaining_threads = remaining_threads.saturating_sub(async_compute_threads);
 
         AsyncComputeTaskPool::get_or_init(|| {
-            #[cfg_attr(target_arch = "wasm32", expect(unused_mut))]
-            let mut builder = TaskPoolBuilder::default()
+            let builder = TaskPoolBuilder::default()
                 .num_threads(async_compute_threads)
                 .thread_name("Async Compute Task Pool".to_string());
 
-            #[cfg(not(target_arch = "wasm32"))]
-            {
+            #[cfg(not(all(target_arch = "wasm32", feature = "web")))]
+            let builder = {
+                let mut builder = builder;
                 if let Some(f) = self.async_compute.on_thread_spawn.clone() {
                     builder = builder.on_thread_spawn(move || f());
                 }
                 if let Some(f) = self.async_compute.on_thread_destroy.clone() {
                     builder = builder.on_thread_destroy(move || f());
                 }
-            }
+                builder
+            };
 
             builder.build()
         });
@@ -242,20 +234,21 @@ impl TaskPoolOptions {
         trace!("Compute Threads: {}", compute_threads);
 
         ComputeTaskPool::get_or_init(|| {
-            #[cfg_attr(target_arch = "wasm32", expect(unused_mut))]
-            let mut builder = TaskPoolBuilder::default()
+            let builder = TaskPoolBuilder::default()
                .num_threads(compute_threads)
                .thread_name("Compute Task Pool".to_string());
 
-            #[cfg(not(target_arch = "wasm32"))]
-            {
+            #[cfg(not(all(target_arch = "wasm32", feature = "web")))]
+            let builder = {
+                let mut builder = builder;
                 if let Some(f) = self.compute.on_thread_spawn.clone() {
                     builder = builder.on_thread_spawn(move || f());
                 }
                 if let Some(f) = self.compute.on_thread_destroy.clone() {
                     builder = builder.on_thread_destroy(move || f());
                 }
-            }
+                builder
+            };
 
             builder.build()
         });
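
The builder construction was reworked so the base binding stays immutable and only the non-web branch rebinds it mutably inside a block, which removes the `unused_mut`/`expect` juggling on wasm. A small standalone sketch of that shadow-and-rebind pattern, with a toy builder rather than the real `TaskPoolBuilder`:

```rust
#[derive(Default, Debug)]
struct Builder {
    threads: usize,
    name: Option<String>,
}

impl Builder {
    fn threads(mut self, n: usize) -> Self { self.threads = n; self }
    fn name(mut self, s: &str) -> Self { self.name = Some(s.to_string()); self }
}

fn main() {
    // Base configuration: no `mut` needed here.
    let builder = Builder::default().threads(4).name("IO Task Pool");

    // Platform-specific extras live in a block that rebinds the builder mutably
    // and hands back the finished value, so the outer binding stays immutable.
    #[cfg(not(target_arch = "wasm32"))]
    let builder = {
        let mut builder = builder;
        builder.threads += 1; // stand-in for on_thread_spawn / on_thread_destroy hooks
        builder
    };

    println!("{builder:?}");
}
```
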
@@ -50,7 +50,7 @@ impl TerminalCtrlCHandlerPlugin {
     /// Sends a [`AppExit`] event when the user presses `Ctrl+C` on the terminal.
     pub fn exit_on_flag(mut events: EventWriter<AppExit>) {
         if SHOULD_EXIT.load(Ordering::Relaxed) {
-            events.send(AppExit::from_code(130));
+            events.write(AppExit::from_code(130));
         }
     }
 }
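The only functional change here is the event API rename: `EventWriter::send` is now `EventWriter::write` (and, later in this diff, `send_batch` becomes `write_batch`). A short sketch of a system using the renamed method; the system name is made up for illustration.

```rust
use bevy_app::AppExit;
use bevy_ecs::event::EventWriter;

// Illustrative only: any system that previously called `events.send(...)`
// now calls `events.write(...)` with the same arguments.
fn request_clean_exit(mut exit_events: EventWriter<AppExit>) {
    exit_events.write(AppExit::Success);
}
```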
@@ -1,7 +1,7 @@
 [package]
 name = "bevy_asset"
 version = "0.16.0-dev"
-edition = "2021"
+edition = "2024"
 description = "Provides asset functionality for Bevy Engine"
 homepage = "https://bevyengine.org"
 repository = "https://github.com/bevyengine/bevy"
@@ -29,7 +29,7 @@ bevy_reflect = { path = "../bevy_reflect", version = "0.16.0-dev", features = [
 ] }
 bevy_tasks = { path = "../bevy_tasks", version = "0.16.0-dev" }
 bevy_utils = { path = "../bevy_utils", version = "0.16.0-dev" }
-bevy_platform_support = { path = "../bevy_platform_support", version = "0.16.0-dev", default-features = false, features = [
+bevy_platform = { path = "../bevy_platform", version = "0.16.0-dev", default-features = false, features = [
   "std",
 ] }

@@ -58,6 +58,7 @@ tracing = { version = "0.1", default-features = false, features = ["std"] }
 bevy_window = { path = "../bevy_window", version = "0.16.0-dev" }

 [target.'cfg(target_arch = "wasm32")'.dependencies]
+# TODO: Assuming all wasm builds are for the browser. Require `no_std` support to break assumption.
 wasm-bindgen = { version = "0.2" }
 web-sys = { version = "0.3", features = [
   "Window",
@@ -67,6 +68,15 @@ web-sys = { version = "0.3", features = [
 wasm-bindgen-futures = "0.4"
 js-sys = "0.3"
 uuid = { version = "1.13.1", default-features = false, features = ["js"] }
+bevy_app = { path = "../bevy_app", version = "0.16.0-dev", default-features = false, features = [
+  "web",
+] }
+bevy_tasks = { path = "../bevy_tasks", version = "0.16.0-dev", default-features = false, features = [
+  "web",
+] }
+bevy_reflect = { path = "../bevy_reflect", version = "0.16.0-dev", default-features = false, features = [
+  "web",
+] }

 [target.'cfg(not(target_arch = "wasm32"))'.dependencies]
 notify-debouncer-full = { version = "0.5.0", optional = true }
@@ -77,9 +87,6 @@ ureq = { version = "3", optional = true, default-features = false, features = [
 ] }


-[dev-dependencies]
-bevy_log = { path = "../bevy_log", version = "0.16.0-dev" }
-
 [lints]
 workspace = true

@@ -1,7 +1,7 @@
 [package]
 name = "bevy_asset_macros"
 version = "0.16.0-dev"
-edition = "2021"
+edition = "2024"
 description = "Derive implementations for bevy_asset"
 homepage = "https://bevyengine.org"
 repository = "https://github.com/bevyengine/bevy"
@@ -13,7 +13,7 @@ use bevy_ecs::{
     storage::{Table, TableRow},
     world::unsafe_world_cell::UnsafeWorldCell,
 };
-use bevy_platform_support::collections::HashMap;
+use bevy_platform::collections::HashMap;
 use core::marker::PhantomData;
 use disqualified::ShortName;
 use tracing::error;
@@ -281,6 +281,7 @@ unsafe impl<A: AsAssetId> QueryFilter for AssetChanged<A> {
 }

 #[cfg(test)]
+#[expect(clippy::print_stdout, reason = "Allowed in tests.")]
 mod tests {
     use crate::{AssetEvents, AssetPlugin, Handle};
     use alloc::{vec, vec::Vec};
@@ -289,7 +290,7 @@ mod tests {

     use crate::{AssetApp, Assets};
     use bevy_app::{App, AppExit, PostUpdate, Startup, TaskPoolPlugin, Update};
-    use bevy_ecs::schedule::IntoSystemConfigs;
+    use bevy_ecs::schedule::IntoScheduleConfigs;
     use bevy_ecs::{
         component::Component,
         event::EventWriter,
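The trait rename above (`IntoSystemConfigs` to `IntoScheduleConfigs`) only changes the import path; call sites keep working. A small sketch with hypothetical systems, assuming `bevy_app`/`bevy_ecs` as in the imports above:

```rust
use bevy_app::{App, Update};
use bevy_ecs::schedule::IntoScheduleConfigs;

fn produce() {}
fn consume() {}

fn main() {
    // `.chain()` (and `.before()`/`.after()`) now come from `IntoScheduleConfigs`.
    App::new()
        .add_systems(Update, (produce, consume).chain())
        .run();
}
```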
@@ -330,7 +331,7 @@ mod tests {
         _query: Query<&mut MyComponent, AssetChanged<MyComponent>>,
         mut exit: EventWriter<AppExit>,
     ) {
-        exit.send(AppExit::Error(NonZero::<u8>::MIN));
+        exit.write(AppExit::Error(NonZero::<u8>::MIN));
     }
     run_app(compatible_filter);
 }
@@ -6,7 +6,7 @@ use bevy_ecs::{
     resource::Resource,
     system::{Res, ResMut, SystemChangeTick},
 };
-use bevy_platform_support::collections::HashMap;
+use bevy_platform::collections::HashMap;
 use bevy_reflect::{Reflect, TypePath};
 use core::{any::TypeId, iter::Enumerate, marker::PhantomData, sync::atomic::AtomicU32};
 use crossbeam_channel::{Receiver, Sender};
@@ -95,6 +95,7 @@ impl AssetIndexAllocator {
 /// [`AssetPath`]: crate::AssetPath
 #[derive(Asset, TypePath)]
 pub struct LoadedUntypedAsset {
+    /// The handle to the loaded asset.
     #[dependency]
     pub handle: UntypedHandle,
 }
@@ -280,6 +281,8 @@ impl<A: Asset> DenseAssetStorage<A> {
 /// at compile time.
 ///
 /// This tracks (and queues) [`AssetEvent`] events whenever changes to the collection occur.
+/// To check whether the asset used by a given component has changed (due to a change in the handle or the underlying asset)
+/// use the [`AssetChanged`](crate::asset_changed::AssetChanged) query filter.
 #[derive(Resource)]
 pub struct Assets<A: Asset> {
     dense_storage: DenseAssetStorage<A>,
@@ -459,16 +462,22 @@ impl<A: Asset> Assets<A> {
     /// Removes the [`Asset`] with the given `id`.
     pub(crate) fn remove_dropped(&mut self, id: AssetId<A>) {
         match self.duplicate_handles.get_mut(&id) {
-            None | Some(0) => {}
+            None => {}
+            Some(0) => {
+                self.duplicate_handles.remove(&id);
+            }
             Some(value) => {
                 *value -= 1;
                 return;
             }
         }

         let existed = match id {
             AssetId::Index { index, .. } => self.dense_storage.remove_dropped(index).is_some(),
             AssetId::Uuid { uuid } => self.hash_map.remove(&uuid).is_some(),
         };

+        self.queued_events.push(AssetEvent::Unused { id });
         if existed {
             self.queued_events.push(AssetEvent::Removed { id });
         }
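`remove_dropped` now queues the `AssetEvent::Unused` event itself (the next hunk removes the old call site). Downstream code can observe this the usual way; a hypothetical listener, assuming an asset type that has been registered with the app:

```rust
use bevy_asset::{Asset, AssetEvent};
use bevy_ecs::event::EventReader;
use bevy_reflect::TypePath;

#[derive(Asset, TypePath)]
struct MyAsset;

// Hypothetical system: reacts when the last strong handle to a `MyAsset` is gone.
fn log_unused(mut events: EventReader<AssetEvent<MyAsset>>) {
    for event in events.read() {
        if let AssetEvent::Unused { id } = event {
            println!("asset {id:?} is no longer referenced");
        }
    }
}
```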
@@ -550,7 +559,6 @@ impl<A: Asset> Assets<A> {
             }
         }

-        assets.queued_events.push(AssetEvent::Unused { id });
         assets.remove_dropped(id);
     }
 }
@@ -576,7 +584,7 @@ impl<A: Asset> Assets<A> {
                 };
             }
         }
-        events.send_batch(assets.queued_events.drain(..));
+        events.write_batch(assets.queued_events.drain(..));
     }

     /// A run condition for [`asset_events`]. The system will not run if there are no events to
@@ -592,7 +600,7 @@ impl<A: Asset> Assets<A> {
 pub struct AssetsMutIterator<'a, A: Asset> {
     queued_events: &'a mut Vec<AssetEvent<A>>,
     dense_storage: Enumerate<core::slice::IterMut<'a, Entry<A>>>,
-    hash_map: bevy_platform_support::collections::hash_map::IterMut<'a, Uuid, A>,
+    hash_map: bevy_platform::collections::hash_map::IterMut<'a, Uuid, A>,
 }

 impl<'a, A: Asset> Iterator for AssetsMutIterator<'a, A> {
@@ -629,6 +637,7 @@ impl<'a, A: Asset> Iterator for AssetsMutIterator<'a, A> {
     }
 }

+/// An error returned when an [`AssetIndex`] has an invalid generation.
 #[derive(Error, Debug)]
 #[error("AssetIndex {index:?} has an invalid generation. The current generation is: '{current_generation}'.")]
 pub struct InvalidGenerationError {
@@ -5,6 +5,7 @@ use bevy_ecs::world::World;

 use crate::{meta::Settings, Asset, AssetPath, AssetServer, Assets, Handle};

+/// An extension trait for methods for working with assets directly from a [`World`].
 pub trait DirectAssetAccessExt {
     /// Insert an asset similarly to [`Assets::add`].
     fn add_asset<A: Asset>(&mut self, asset: impl Into<A>) -> Handle<A>;
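A hypothetical use of the extension trait documented above, assuming the asset type has been registered (for example via `init_asset`) so that `Assets<Message>` exists as a resource; the type and function names are illustrative:

```rust
use bevy_asset::{Asset, DirectAssetAccessExt, Handle};
use bevy_ecs::world::World;
use bevy_reflect::TypePath;

#[derive(Asset, TypePath)]
struct Message(String);

// Illustrative only: `World::add_asset` inserts into `Assets<Message>` and
// returns a strong handle, much like `Assets::add`.
fn store_message(world: &mut World) -> Handle<Message> {
    world.add_asset(Message("hello".to_string()))
}
```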
@@ -8,6 +8,7 @@ use core::fmt::Debug;
 /// For an untyped equivalent, see [`UntypedAssetLoadFailedEvent`].
 #[derive(Event, Clone, Debug)]
 pub struct AssetLoadFailedEvent<A: Asset> {
+    /// The stable identifier of the asset that failed to load.
     pub id: AssetId<A>,
     /// The asset path that was attempted.
     pub path: AssetPath<'static>,
@@ -25,6 +26,7 @@ impl<A: Asset> AssetLoadFailedEvent<A> {
 /// An untyped version of [`AssetLoadFailedEvent`].
 #[derive(Event, Clone, Debug)]
 pub struct UntypedAssetLoadFailedEvent {
+    /// The stable identifier of the asset that failed to load.
     pub id: UntypedAssetId,
     /// The asset path that was attempted.
     pub path: AssetPath<'static>,
@@ -43,6 +45,7 @@ impl<A: Asset> From<&AssetLoadFailedEvent<A>> for UntypedAssetLoadFailedEvent {
 }

 /// Events that occur for a specific loaded [`Asset`], such as "value changed" events and "dependency" events.
+#[expect(missing_docs, reason = "Documenting the id fields is unhelpful.")]
 #[derive(Event, Reflect)]
 pub enum AssetEvent<A: Asset> {
     /// Emitted whenever an [`Asset`] is added.
@@ -5,9 +5,12 @@ use bevy_reflect::TypePath;

 /// A "loaded folder" containing handles for all assets stored in a given [`AssetPath`].
 ///
+/// This is produced by [`AssetServer::load_folder`](crate::prelude::AssetServer::load_folder).
+///
 /// [`AssetPath`]: crate::AssetPath
 #[derive(Asset, TypePath)]
 pub struct LoadedFolder {
+    /// The handles of all assets stored in the folder.
     #[dependency]
     pub handles: Vec<UntypedHandle>,
 }
@@ -113,16 +113,23 @@ impl core::fmt::Debug for StrongHandle {
     }
 }

-/// A strong or weak handle to a specific [`Asset`]. If a [`Handle`] is [`Handle::Strong`], the [`Asset`] will be kept
+/// A handle to a specific [`Asset`] of type `A`. Handles act as abstract "references" to
+/// assets, whose data are stored in the [`Assets<A>`](crate::prelude::Assets) resource,
+/// avoiding the need to store multiple copies of the same data.
+///
+/// If a [`Handle`] is [`Handle::Strong`], the [`Asset`] will be kept
 /// alive until the [`Handle`] is dropped. If a [`Handle`] is [`Handle::Weak`], it does not necessarily reference a live [`Asset`],
 /// nor will it keep assets alive.
 ///
+/// Modifying a *handle* will change which existing asset is referenced, but modifying the *asset*
+/// (by mutating the [`Assets`](crate::prelude::Assets) resource) will change the asset for all handles referencing it.
+///
 /// [`Handle`] can be cloned. If a [`Handle::Strong`] is cloned, the referenced [`Asset`] will not be freed until _all_ instances
 /// of the [`Handle`] are dropped.
 ///
-/// [`Handle::Strong`] also provides access to useful [`Asset`] metadata, such as the [`AssetPath`] (if it exists).
+/// [`Handle::Strong`], via [`StrongHandle`] also provides access to useful [`Asset`] metadata, such as the [`AssetPath`] (if it exists).
 #[derive(Reflect)]
-#[reflect(Default, Debug, Hash, PartialEq)]
+#[reflect(Default, Debug, Hash, PartialEq, Clone)]
 pub enum Handle<A: Asset> {
     /// A "strong" reference to a live (or loading) [`Asset`]. If a [`Handle`] is [`Handle::Strong`], the [`Asset`] will be kept
     /// alive until the [`Handle`] is dropped. Strong handles also provide access to additional asset metadata.
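The rewritten documentation describes handles as cheap references into `Assets<A>`. A small sketch of that behaviour, using an illustrative asset type that is not part of the diff:

```rust
use bevy_asset::{Asset, Assets, Handle};
use bevy_reflect::TypePath;

#[derive(Asset, TypePath)]
struct Brightness(f32);

fn example(assets: &mut Assets<Brightness>) {
    // Adding an asset returns a strong handle; cloning the handle does not copy the data.
    let handle: Handle<Brightness> = assets.add(Brightness(0.5));
    let alias = handle.clone();

    // Mutating the *asset* through `Assets` is visible through every handle pointing at it.
    if let Some(brightness) = assets.get_mut(&handle) {
        brightness.0 = 1.0;
    }
    assert_eq!(assets.get(&alias).unwrap().0, 1.0);
}
```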
@@ -143,7 +150,10 @@ impl<T: Asset> Clone for Handle<T> {

 impl<A: Asset> Handle<A> {
     /// Create a new [`Handle::Weak`] with the given [`u128`] encoding of a [`Uuid`].
-    #[deprecated = "use the `weak_handle!` macro with a UUID string instead"]
+    #[deprecated(
+        since = "0.16.0",
+        note = "use the `weak_handle!` macro with a UUID string instead"
+    )]
     pub const fn weak_from_u128(value: u128) -> Self {
         Handle::Weak(AssetId::Uuid {
             uuid: Uuid::from_u128(value),
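The deprecation note points at the `weak_handle!` macro. A hedged sketch of the replacement, assuming the macro is exported from `bevy_asset` as the note suggests; the asset type and UUID below are made up for illustration:

```rust
use bevy_asset::{weak_handle, Asset, Handle};
use bevy_reflect::TypePath;

#[derive(Asset, TypePath)]
struct PlaceholderAsset;

// Previously: Handle::weak_from_u128(0x0123_4567_89ab_cdef_0123_4567_89ab_cdef)
const PLACEHOLDER: Handle<PlaceholderAsset> =
    weak_handle!("01234567-89ab-cdef-0123-456789abcdef");
```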
@@ -284,7 +294,9 @@ impl<A: Asset> From<&mut Handle<A>> for UntypedAssetId {
 /// See [`Handle`] for more information.
 #[derive(Clone)]
 pub enum UntypedHandle {
+    /// A strong handle, which will keep the referenced [`Asset`] alive until all strong handles are dropped.
     Strong(Arc<StrongHandle>),
+    /// A weak handle, which does not keep the referenced [`Asset`] alive.
     Weak(UntypedAssetId),
 }

@@ -528,13 +540,18 @@ pub enum UntypedAssetConversionError {
     #[error(
         "This UntypedHandle is for {found:?} and cannot be converted into a Handle<{expected:?}>"
     )]
-    TypeIdMismatch { expected: TypeId, found: TypeId },
+    TypeIdMismatch {
+        /// The expected [`TypeId`] of the [`Handle`] being converted to.
+        expected: TypeId,
+        /// The [`TypeId`] of the [`UntypedHandle`] being converted from.
+        found: TypeId,
+    },
 }

 #[cfg(test)]
 mod tests {
     use alloc::boxed::Box;
-    use bevy_platform_support::hash::FixedHasher;
+    use bevy_platform::hash::FixedHasher;
     use bevy_reflect::PartialReflect;
     use core::hash::BuildHasher;

@@ -644,7 +661,7 @@ mod tests {
         assert_eq!(UntypedHandle::from(typed.clone()), untyped);
     }

-    /// `Reflect::clone_value` should increase the strong count of a strong handle
+    /// `PartialReflect::reflect_clone`/`PartialReflect::to_dynamic` should increase the strong count of a strong handle
     #[test]
     fn strong_handle_reflect_clone() {
         use crate::{AssetApp, AssetPlugin, Assets, VisitAssetDependencies};
@@ -675,7 +692,7 @@ mod tests {
         );

         let reflected: &dyn Reflect = &handle;
-        let cloned_handle: Box<dyn PartialReflect> = reflected.clone_value();
+        let _cloned_handle: Box<dyn Reflect> = reflected.reflect_clone().unwrap();

         assert_eq!(
             Arc::strong_count(strong),
@@ -683,10 +700,18 @@ mod tests {
             "Cloning the handle with reflect should increase the strong count to 2"
         );

-        let from_reflect_handle: Handle<MyAsset> =
-            FromReflect::from_reflect(&*cloned_handle).unwrap();
+        let dynamic_handle: Box<dyn PartialReflect> = reflected.to_dynamic();

-        assert_eq!(Arc::strong_count(strong), 3, "Converting the reflected value back to a handle should increase the strong count to 3");
+        assert_eq!(
+            Arc::strong_count(strong),
+            3,
+            "Converting the handle to a dynamic should increase the strong count to 3"
+        );
+
+        let from_reflect_handle: Handle<MyAsset> =
+            FromReflect::from_reflect(&*dynamic_handle).unwrap();
+
+        assert_eq!(Arc::strong_count(strong), 4, "Converting the reflected value back to a handle should increase the strong count to 4");
         assert!(
             from_reflect_handle.is_strong(),
             "The cloned handle should still be strong"
@@ -1,5 +1,5 @@
 use crate::{Asset, AssetIndex};
-use bevy_reflect::Reflect;
+use bevy_reflect::{std_traits::ReflectDefault, Reflect};
 use serde::{Deserialize, Serialize};
 use uuid::Uuid;

@@ -19,6 +19,7 @@ use thiserror::Error;
 ///
 /// For an "untyped" / "generic-less" id, see [`UntypedAssetId`].
 #[derive(Reflect, Serialize, Deserialize, From)]
+#[reflect(Clone, Default, Debug, PartialEq, Hash)]
 pub enum AssetId<A: Asset> {
     /// A small / efficient runtime identifier that can be used to efficiently look up an asset stored in [`Assets`]. This is
     /// the "default" identifier used for assets. The alternative(s) (ex: [`AssetId::Uuid`]) will only be used if assets are
@@ -26,15 +27,20 @@ pub enum AssetId<A: Asset> {
     ///
     /// [`Assets`]: crate::Assets
     Index {
+        /// The unstable, opaque index of the asset.
         index: AssetIndex,
-        #[reflect(ignore)]
+        /// A marker to store the type information of the asset.
+        #[reflect(ignore, clone)]
         marker: PhantomData<fn() -> A>,
     },
     /// A stable-across-runs / const asset identifier. This will only be used if an asset is explicitly registered in [`Assets`]
     /// with one.
     ///
     /// [`Assets`]: crate::Assets
-    Uuid { uuid: Uuid },
+    Uuid {
+        /// The UUID provided during asset registration.
+        uuid: Uuid,
+    },
 }

 impl<A: Asset> AssetId<A> {
@@ -165,12 +171,22 @@ pub enum UntypedAssetId {
     /// explicitly registered that way.
     ///
     /// [`Assets`]: crate::Assets
-    Index { type_id: TypeId, index: AssetIndex },
+    Index {
+        /// An identifier that records the underlying asset type.
+        type_id: TypeId,
+        /// The unstable, opaque index of the asset.
+        index: AssetIndex,
+    },
     /// A stable-across-runs / const asset identifier. This will only be used if an asset is explicitly registered in [`Assets`]
     /// with one.
     ///
     /// [`Assets`]: crate::Assets
-    Uuid { type_id: TypeId, uuid: Uuid },
+    Uuid {
+        /// An identifier that records the underlying asset type.
+        type_id: TypeId,
+        /// The UUID provided during asset registration.
+        uuid: Uuid,
+    },
 }

 impl UntypedAssetId {
@@ -404,7 +420,12 @@ impl<A: Asset> TryFrom<UntypedAssetId> for AssetId<A> {
 pub enum UntypedAssetIdConversionError {
     /// Caused when trying to convert an [`UntypedAssetId`] into an [`AssetId`] of the wrong type.
     #[error("This UntypedAssetId is for {found:?} and cannot be converted into an AssetId<{expected:?}>")]
-    TypeIdMismatch { expected: TypeId, found: TypeId },
+    TypeIdMismatch {
+        /// The [`TypeId`] of the asset that we are trying to convert to.
+        expected: TypeId,
+        /// The [`TypeId`] of the asset that we are trying to convert from.
+        found: TypeId,
+    },
 }

 #[cfg(test)]
@@ -420,7 +441,7 @@ mod tests {
     fn hash<T: Hash>(data: &T) -> u64 {
         use core::hash::BuildHasher;

-        bevy_platform_support::hash::FixedHasher.hash_one(data)
+        bevy_platform::hash::FixedHasher.hash_one(data)
     }

     /// Typed and Untyped `AssetIds` should be equivalent to each other and themselves
@@ -4,7 +4,7 @@ use crate::io::{
     AssetSourceEvent, AssetWatcher,
 };
 use alloc::{boxed::Box, sync::Arc, vec::Vec};
-use bevy_platform_support::collections::HashMap;
+use bevy_platform::collections::HashMap;
 use core::time::Duration;
 use notify_debouncer_full::{notify::RecommendedWatcher, Debouncer, RecommendedCache};
 use parking_lot::RwLock;
@@ -24,6 +24,7 @@ pub struct EmbeddedWatcher {
 }

 impl EmbeddedWatcher {
+    /// Creates a new `EmbeddedWatcher` that watches for changes to the embedded assets in the given `dir`.
     pub fn new(
         dir: Dir,
         root_paths: Arc<RwLock<HashMap<Box<Path>, PathBuf>>>,
|||||||
@ -15,6 +15,8 @@ use std::path::{Path, PathBuf};
|
|||||||
#[cfg(feature = "embedded_watcher")]
|
#[cfg(feature = "embedded_watcher")]
|
||||||
use alloc::borrow::ToOwned;
|
use alloc::borrow::ToOwned;
|
||||||
|
|
||||||
|
/// The name of the `embedded` [`AssetSource`],
|
||||||
|
/// as stored in the [`AssetSourceBuilders`] resource.
|
||||||
pub const EMBEDDED: &str = "embedded";
|
pub const EMBEDDED: &str = "embedded";
|
||||||
|
|
||||||
/// A [`Resource`] that manages "rust source files" in a virtual in memory [`Dir`], which is intended
|
/// A [`Resource`] that manages "rust source files" in a virtual in memory [`Dir`], which is intended
|
||||||
@@ -27,7 +29,7 @@ pub struct EmbeddedAssetRegistry {
     dir: Dir,
     #[cfg(feature = "embedded_watcher")]
     root_paths: alloc::sync::Arc<
-        parking_lot::RwLock<bevy_platform_support::collections::HashMap<Box<Path>, PathBuf>>,
+        parking_lot::RwLock<bevy_platform::collections::HashMap<Box<Path>, PathBuf>>,
     >,
 }

@@ -77,6 +79,7 @@ impl EmbeddedAssetRegistry {
         self.dir.remove_asset(full_path)
     }

+    /// Registers the [`EMBEDDED`] [`AssetSource`] with the given [`AssetSourceBuilders`].
     pub fn register_source(&self, sources: &mut AssetSourceBuilders) {
         let dir = self.dir.clone();
         let processed_dir = self.dir.clone();
|||||||
@ -18,7 +18,9 @@ use std::path::{Path, PathBuf};
|
|||||||
use tracing::error;
|
use tracing::error;
|
||||||
|
|
||||||
/// An [`AssetWatcher`] that watches the filesystem for changes to asset files in a given root folder and emits [`AssetSourceEvent`]
|
/// An [`AssetWatcher`] that watches the filesystem for changes to asset files in a given root folder and emits [`AssetSourceEvent`]
|
||||||
/// for each relevant change. This uses [`notify_debouncer_full`] to retrieve "debounced" filesystem events.
|
/// for each relevant change.
|
||||||
|
///
|
||||||
|
/// This uses [`notify_debouncer_full`] to retrieve "debounced" filesystem events.
|
||||||
/// "Debouncing" defines a time window to hold on to events and then removes duplicate events that fall into this window.
|
/// "Debouncing" defines a time window to hold on to events and then removes duplicate events that fall into this window.
|
||||||
/// This introduces a small delay in processing events, but it helps reduce event duplicates. A small delay is also necessary
|
/// This introduces a small delay in processing events, but it helps reduce event duplicates. A small delay is also necessary
|
||||||
/// on some systems to avoid processing a change event before it has actually been applied.
|
/// on some systems to avoid processing a change event before it has actually been applied.
|
||||||
@@ -27,12 +29,13 @@ pub struct FileWatcher {
 }

 impl FileWatcher {
+    /// Creates a new [`FileWatcher`] that watches for changes to the asset files in the given `path`.
     pub fn new(
         path: PathBuf,
         sender: Sender<AssetSourceEvent>,
         debounce_wait_time: Duration,
     ) -> Result<Self, notify::Error> {
-        let root = normalize_path(&path);
+        let root = normalize_path(&path).canonicalize()?;
         let watcher = new_asset_event_debouncer(
             path.clone(),
             debounce_wait_time,
@@ -259,7 +262,8 @@ impl FilesystemEventHandler for FileEventHandler {
         self.last_event = None;
     }
     fn get_path(&self, absolute_path: &Path) -> Option<(PathBuf, bool)> {
-        Some(get_asset_path(&self.root, absolute_path))
+        let absolute_path = absolute_path.canonicalize().ok()?;
+        Some(get_asset_path(&self.root, &absolute_path))
     }

     fn handle(&mut self, _absolute_paths: &[PathBuf], event: AssetSourceEvent) {