Update hashbrown to 0.15 (#15801)

Updates dependencies; this is an adopted version of #15696. (Supersedes #15696.)

Long answer: hashbrown no longer uses `ahash` by default, which means we can't
use the default-hasher constructors (such as `HashMap::new`) with `AHasher`
anymore; we have to use the longer-winded `Default`-based versions instead.
This takes the opportunity to switch our default hasher as well, but without
actually enabling hashbrown's `default-hasher` feature, meaning that we'll be
able to change our hasher more easily later at the cost of all of these method
calls being obnoxious forever.
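
For illustration, a minimal sketch of what this forces at call sites (using `bevy_utils`' re-exported `HashMap`, which this PR pins to the new default hasher; names here are illustrative):

```rust
use bevy_utils::HashMap;

fn build_map() -> HashMap<u32, &'static str> {
    // hashbrown 0.14 (with its `ahash` default) allowed `HashMap::new()` here.
    // Without the `default-hasher` feature, `new` is not defined for our
    // hasher, so construction goes through `Default` instead:
    let mut map: HashMap<u32, &'static str> = HashMap::default();
    map.insert(1, "one");
    // When type inference needs help, the fully-qualified form used
    // throughout this diff works too:
    let mut other = <HashMap<u32, &'static str>>::default();
    other.insert(2, "two");
    map
}
```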

One large change from 0.15 is that `insert_unique_unchecked` is now
`unsafe`, and for cases where unsafe code was denied at the crate level,
I replaced it with `insert`.
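
A sketch of the new pattern at the call sites that kept it (the map and key types here are illustrative, not the exact ones from the diff; the safety comment must genuinely hold):

```rust
use bevy_utils::HashMap;

fn register_storage(storages: &mut HashMap<u64, Vec<usize>>, id: u64, storage: Vec<usize>) {
    // SAFETY: `id` is freshly allocated and monotonically increasing, so it
    // has never been inserted into `storages` before and is therefore unique.
    unsafe {
        storages.insert_unique_unchecked(id, storage);
    }
}
```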

## Migration Guide

`bevy_utils` has updated its version of `hashbrown` to 0.15 and now
defaults to `foldhash` instead of `ahash`. This means that if you've
hard-coded your hasher to `bevy_utils::AHasher` or separately used the
`ahash` crate in your code, you may need to switch to `foldhash` to
ensure that everything works like it does in Bevy.
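
For a hard-coded hasher, the typical change looks like this sketch (mirroring the test changes in this commit; `FixedHasher` is the new fixed-state `BuildHasher` exported from `bevy_utils`):

```rust
use bevy_utils::FixedHasher;
use core::hash::{BuildHasher, Hash};

/// Hash a value with Bevy's fixed default hasher.
fn hash_value<T: Hash>(data: &T) -> u64 {
    // Before: let mut hasher = bevy_utils::AHasher::default();
    //         data.hash(&mut hasher);
    //         hasher.finish()
    FixedHasher.hash_one(data)
}
```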
Clar Fon, 2024-12-10 14:45:50 -05:00, committed by GitHub
parent f3974aaaea
commit 711246aa34
78 changed files with 416 additions and 350 deletions

View File

@@ -1,7 +1,9 @@
 //! The animation graph, which allows animations to be blended together.

-use core::iter;
-use core::ops::{Index, IndexMut, Range};
+use core::{
+    iter,
+    ops::{Index, IndexMut, Range},
+};
 use std::io::{self, Write};

 use bevy_asset::{
@@ -420,7 +422,7 @@ impl AnimationGraph {
         Self {
             graph,
             root,
-            mask_groups: HashMap::new(),
+            mask_groups: HashMap::default(),
         }
     }

View File

@@ -45,9 +45,8 @@ use bevy_reflect::{prelude::ReflectDefault, Reflect, TypePath};
 use bevy_time::Time;
 use bevy_transform::TransformSystem;
 use bevy_utils::{
-    hashbrown::HashMap,
     tracing::{trace, warn},
-    NoOpHash, PreHashMap, PreHashMapExt, TypeIdMap,
+    HashMap, NoOpHash, PreHashMap, PreHashMapExt, TypeIdMap,
 };
 use petgraph::graph::NodeIndex;
 use serde::{Deserialize, Serialize};

View File

@@ -124,7 +124,7 @@ impl App {
         Self {
             sub_apps: SubApps {
                 main: SubApp::new(),
-                sub_apps: HashMap::new(),
+                sub_apps: HashMap::default(),
             },
             runner: Box::new(run_once),
         }

View File

@@ -515,6 +515,8 @@ pub enum UntypedAssetConversionError {
 #[cfg(test)]
 mod tests {
     use bevy_reflect::PartialReflect;
+    use bevy_utils::FixedHasher;
+    use core::hash::BuildHasher;

     use super::*;
@@ -525,9 +527,7 @@ mod tests {
     /// Simple utility to directly hash a value using a fixed hasher
     fn hash<T: Hash>(data: &T) -> u64 {
-        let mut hasher = bevy_utils::AHasher::default();
-        data.hash(&mut hasher);
-        hasher.finish()
+        FixedHasher.hash_one(data)
     }

     /// Typed and Untyped `Handles` should be equivalent to each other and themselves

View File

@@ -418,11 +418,9 @@ mod tests {
     /// Simple utility to directly hash a value using a fixed hasher
     fn hash<T: Hash>(data: &T) -> u64 {
-        use core::hash::Hasher;
-        let mut hasher = bevy_utils::AHasher::default();
-        data.hash(&mut hasher);
-        hasher.finish()
+        use core::hash::BuildHasher;
+        bevy_utils::FixedHasher.hash_one(data)
     }

     /// Typed and Untyped `AssetIds` should be equivalent to each other and themselves

View File

@@ -44,7 +44,7 @@ impl<R: AssetReader> GatedReader<R> {
    /// Creates a new [`GatedReader`], which wraps the given `reader`. Also returns a [`GateOpener`] which
    /// can be used to open "path gates" for this [`GatedReader`].
    pub fn new(reader: R) -> (Self, GateOpener) {
-        let gates = Arc::new(RwLock::new(HashMap::new()));
+        let gates = Arc::new(RwLock::new(HashMap::default()));
        (
            Self {
                reader,

View File

@@ -343,7 +343,7 @@ impl AssetSourceBuilders {
    /// Builds a new [`AssetSources`] collection. If `watch` is true, the unprocessed sources will watch for changes.
    /// If `watch_processed` is true, the processed sources will watch for changes.
    pub fn build_sources(&mut self, watch: bool, watch_processed: bool) -> AssetSources {
-        let mut sources = HashMap::new();
+        let mut sources = <HashMap<_, _>>::default();
        for (id, source) in &mut self.sources {
            if let Some(data) = source.build(
                AssetSourceId::Name(id.clone_owned()),

View File

@@ -153,7 +153,7 @@ pub struct LoadedAsset<A: Asset> {
 impl<A: Asset> LoadedAsset<A> {
    /// Create a new loaded asset. This will use [`VisitAssetDependencies`](crate::VisitAssetDependencies) to populate `dependencies`.
    pub fn new_with_dependencies(value: A, meta: Option<Box<dyn AssetMetaDyn>>) -> Self {
-        let mut dependencies = HashSet::new();
+        let mut dependencies = <HashSet<_>>::default();
        value.visit_dependencies(&mut |id| {
            dependencies.insert(id);
        });

View File

@@ -395,10 +395,10 @@ impl AssetInfos {
        loaded_asset.value.insert(loaded_asset_id, world);

        let mut loading_deps = loaded_asset.dependencies;
-        let mut failed_deps = HashSet::new();
+        let mut failed_deps = <HashSet<_>>::default();
        let mut dep_error = None;
        let mut loading_rec_deps = loading_deps.clone();
-        let mut failed_rec_deps = HashSet::new();
+        let mut failed_rec_deps = <HashSet<_>>::default();
        let mut rec_dep_error = None;
        loading_deps.retain(|dep_id| {
            if let Some(dep_info) = self.get_mut(*dep_id) {

View File

@@ -1544,7 +1544,7 @@ pub fn handle_internal_asset_events(world: &mut World) {
            }
        };

-        let mut paths_to_reload = HashSet::new();
+        let mut paths_to_reload = <HashSet<_>>::default();
        let mut handle_event = |source: AssetSourceId<'static>, event: AssetSourceEvent| {
            match event {
                // TODO: if the asset was processed and the processed file was changed, the first modified event

View File

@@ -7,9 +7,9 @@ use alloc::borrow::Cow;
 use bevy_reflect::std_traits::ReflectDefault;
 #[cfg(feature = "bevy_reflect")]
 use bevy_reflect::Reflect;
-use bevy_utils::AHasher;
+use bevy_utils::FixedHasher;
 use core::{
-    hash::{Hash, Hasher},
+    hash::{BuildHasher, Hash, Hasher},
     ops::Deref,
 };
@@ -80,9 +80,7 @@ impl Name {
    }

    fn update_hash(&mut self) {
-        let mut hasher = AHasher::default();
-        self.name.hash(&mut hasher);
-        self.hash = hasher.finish();
+        self.hash = FixedHasher.hash_one(&self.name);
    }
 }

View File

@@ -33,8 +33,9 @@ pub mod graph {
 use core::ops::Range;

 use bevy_asset::UntypedAssetId;
-use bevy_render::batching::gpu_preprocessing::GpuPreprocessingMode;
-use bevy_render::render_phase::PhaseItemBinKey;
+use bevy_render::{
+    batching::gpu_preprocessing::GpuPreprocessingMode, render_phase::PhaseItemBinKey,
+};
 use bevy_utils::HashMap;
 pub use camera_2d::*;
 pub use main_opaque_pass_2d_node::*;
@@ -44,7 +45,6 @@ use crate::{tonemapping::TonemappingNode, upscaling::UpscalingNode};
 use bevy_app::{App, Plugin};
 use bevy_ecs::{entity::EntityHashSet, prelude::*};
 use bevy_math::FloatOrd;
-use bevy_render::sync_world::MainEntity;
 use bevy_render::{
     camera::{Camera, ExtractedCamera},
     extract_component::ExtractComponentPlugin,
@@ -59,7 +59,7 @@ use bevy_render::{
         TextureFormat, TextureUsages,
     },
     renderer::RenderDevice,
-    sync_world::RenderEntity,
+    sync_world::{MainEntity, RenderEntity},
     texture::TextureCache,
     view::{Msaa, ViewDepthTexture},
     Extract, ExtractSchedule, Render, RenderApp, RenderSet,
@@ -423,7 +423,7 @@ pub fn prepare_core_2d_depth_textures(
    opaque_2d_phases: Res<ViewBinnedRenderPhases<Opaque2d>>,
    views_2d: Query<(Entity, &ExtractedCamera, &Msaa), (With<Camera2d>,)>,
 ) {
-    let mut textures = HashMap::default();
+    let mut textures = <HashMap<_, _>>::default();
    for (view, camera, msaa) in &views_2d {
        if !opaque_2d_phases.contains_key(&view) || !transparent_2d_phases.contains_key(&view) {
            continue;

View File

@@ -65,10 +65,12 @@ pub const DEPTH_TEXTURE_SAMPLING_SUPPORTED: bool = true;
 use core::ops::Range;

-use bevy_render::batching::gpu_preprocessing::{GpuPreprocessingMode, GpuPreprocessingSupport};
-use bevy_render::mesh::allocator::SlabId;
-use bevy_render::render_phase::PhaseItemBinKey;
-use bevy_render::view::GpuCulling;
+use bevy_render::{
+    batching::gpu_preprocessing::{GpuPreprocessingMode, GpuPreprocessingSupport},
+    mesh::allocator::SlabId,
+    render_phase::PhaseItemBinKey,
+    view::GpuCulling,
+};
 pub use camera_3d::*;
 pub use main_opaque_pass_3d_node::*;
 pub use main_transparent_pass_3d_node::*;
@@ -79,7 +81,6 @@ use bevy_color::LinearRgba;
 use bevy_ecs::{entity::EntityHashSet, prelude::*};
 use bevy_image::{BevyDefault, Image};
 use bevy_math::FloatOrd;
-use bevy_render::sync_world::MainEntity;
 use bevy_render::{
     camera::{Camera, ExtractedCamera},
     extract_component::ExtractComponentPlugin,
@@ -95,7 +96,7 @@ use bevy_render::{
         TextureDescriptor, TextureDimension, TextureFormat, TextureUsages, TextureView,
     },
     renderer::RenderDevice,
-    sync_world::RenderEntity,
+    sync_world::{MainEntity, RenderEntity},
     texture::{ColorAttachment, TextureCache},
     view::{ExtractedView, ViewDepthTexture, ViewTarget},
     Extract, ExtractSchedule, Render, RenderApp, RenderSet,
@@ -700,7 +701,7 @@ pub fn prepare_core_3d_depth_textures(
        &Msaa,
    )>,
 ) {
-    let mut render_target_usage = HashMap::default();
+    let mut render_target_usage = <HashMap<_, _>>::default();
    for (view, camera, depth_prepass, camera_3d, _msaa) in &views_3d {
        if !opaque_3d_phases.contains_key(&view)
            || !alpha_mask_3d_phases.contains_key(&view)
@@ -722,7 +723,7 @@ pub fn prepare_core_3d_depth_textures(
            .or_insert_with(|| usage);
    }

-    let mut textures = HashMap::default();
+    let mut textures = <HashMap<_, _>>::default();
    for (entity, camera, _, camera_3d, msaa) in &views_3d {
        let Some(physical_target_size) = camera.physical_target_size else {
            continue;
@@ -785,7 +786,7 @@ pub fn prepare_core_3d_transmission_textures(
    transparent_3d_phases: Res<ViewSortedRenderPhases<Transparent3d>>,
    views_3d: Query<(Entity, &ExtractedCamera, &Camera3d, &ExtractedView)>,
 ) {
-    let mut textures = HashMap::default();
+    let mut textures = <HashMap<_, _>>::default();
    for (entity, camera, camera_3d, view) in &views_3d {
        if !opaque_3d_phases.contains_key(&entity)
            || !alpha_mask_3d_phases.contains_key(&entity)
@@ -893,11 +894,11 @@ pub fn prepare_prepass_textures(
        Has<DeferredPrepass>,
    )>,
 ) {
-    let mut depth_textures = HashMap::default();
-    let mut normal_textures = HashMap::default();
-    let mut deferred_textures = HashMap::default();
-    let mut deferred_lighting_id_textures = HashMap::default();
-    let mut motion_vectors_textures = HashMap::default();
+    let mut depth_textures = <HashMap<_, _>>::default();
+    let mut normal_textures = <HashMap<_, _>>::default();
+    let mut deferred_textures = <HashMap<_, _>>::default();
+    let mut deferred_lighting_id_textures = <HashMap<_, _>>::default();
+    let mut motion_vectors_textures = <HashMap<_, _>>::default();
    for (
        entity,
        camera,

View File

@@ -160,7 +160,7 @@ fn configure_depth_texture_usages(
    // Find all the render target that potentially uses OIT
    let primary_window = p.get_single().ok();
-    let mut render_target_has_oit = HashSet::new();
+    let mut render_target_has_oit = <HashSet<_>>::default();
    for (camera, has_oit) in &cameras {
        if has_oit {
            render_target_has_oit.insert(camera.target.normalize(primary_window));

View File

@@ -43,7 +43,7 @@ fn prepare_view_upscaling_pipelines(
    blit_pipeline: Res<BlitPipeline>,
    view_targets: Query<(Entity, &ViewTarget, Option<&ExtractedCamera>)>,
 ) {
-    let mut output_textures = HashSet::new();
+    let mut output_textures = <HashSet<_>>::default();
    for (entity, view_target, camera) in view_targets.iter() {
        let out_texture_id = view_target.out_texture().id();
        let blend_state = if let Some(extracted_camera) = camera {

View File

@@ -52,7 +52,7 @@ struct DrawnLines {
 impl DrawnLines {
    fn new(width: f32) -> Self {
        DrawnLines {
-            lines: HashMap::new(),
+            lines: HashMap::default(),
            width,
        }
    }

View File

@@ -3,7 +3,7 @@ use core::hash::{Hash, Hasher};
 use bevy_app::{App, SubApp};
 use bevy_ecs::system::{Deferred, Res, Resource, SystemBuffer, SystemParam};
-use bevy_utils::{hashbrown::HashMap, Duration, Instant, PassHash};
+use bevy_utils::{Duration, HashMap, Instant, PassHash};
 use const_fnv1a_hash::fnv1a_hash_str_64;

 use crate::DEFAULT_MAX_HISTORY_LENGTH;

View File

@@ -402,7 +402,7 @@ impl Archetype {
                // component in the `table_components` vector
                component_index
                    .entry(component_id)
-                    .or_insert_with(HashMap::new)
+                    .or_default()
                    .insert(id, ArchetypeRecord { column: Some(idx) });
            }
@@ -420,7 +420,7 @@ impl Archetype {
            );
            component_index
                .entry(component_id)
-                .or_insert_with(HashMap::new)
+                .or_default()
                .insert(id, ArchetypeRecord { column: None });
        }
        Self {

View File

@@ -381,7 +381,7 @@ impl BundleInfo {
        if deduped.len() != component_ids.len() {
            // TODO: Replace with `Vec::partition_dedup` once https://github.com/rust-lang/rust/issues/54279 is stabilized
-            let mut seen = HashSet::new();
+            let mut seen = <HashSet<_>>::default();
            let mut dups = Vec::new();
            for id in component_ids {
                if !seen.insert(id) {
@@ -1422,8 +1422,11 @@ impl Bundles {
            .or_insert_with(|| {
                let (id, storages) =
                    initialize_dynamic_bundle(bundle_infos, components, Vec::from(component_ids));
-                self.dynamic_bundle_storages
-                    .insert_unique_unchecked(id, storages);
+                // SAFETY: The ID always increases when new bundles are added, and so, the ID is unique.
+                unsafe {
+                    self.dynamic_bundle_storages
+                        .insert_unique_unchecked(id, storages);
+                }
                (component_ids.into(), id)
            });
        *bundle_id

View File

@@ -28,7 +28,8 @@ impl BuildHasher for EntityHash {
 ///
 /// If you have an unusual case -- say all your indices are multiples of 256
 /// or most of the entities are dead generations -- then you might want also to
-/// try [`AHasher`](bevy_utils::AHasher) for a slower hash computation but fewer lookup conflicts.
+/// try [`DefaultHasher`](bevy_utils::DefaultHasher) for a slower hash
+/// computation but fewer lookup conflicts.
 #[derive(Debug, Default)]
 pub struct EntityHasher {
     hash: u64,

View File

@@ -113,7 +113,7 @@ mod tests {
        let mut entity_map = EntityHashMap::<Entity>::default();
        let mut remapped = Foo {
            ordered: vec![],
-            unordered: HashSet::new(),
+            unordered: HashSet::default(),
            single: Entity::PLACEHOLDER,
            not_an_entity: foo.not_an_entity.clone(),
        };

View File

@@ -164,8 +164,8 @@ impl<T: ?Sized> Default for Interner<T> {
 #[cfg(test)]
 mod tests {
-    use core::hash::{Hash, Hasher};
-    use std::collections::hash_map::DefaultHasher;
+    use bevy_utils::FixedHasher;
+    use core::hash::{BuildHasher, Hash, Hasher};

    use crate::intern::{Internable, Interned, Interner};
@@ -250,13 +250,8 @@ mod tests {
        assert_eq!(a, b);

-        let mut hasher = DefaultHasher::default();
-        a.hash(&mut hasher);
-        let hash_a = hasher.finish();
-        let mut hasher = DefaultHasher::default();
-        b.hash(&mut hasher);
-        let hash_b = hasher.finish();
+        let hash_a = FixedHasher.hash_one(a);
+        let hash_b = FixedHasher.hash_one(b);

        assert_eq!(hash_a, hash_b);
    }

View File

@@ -421,7 +421,7 @@ mod tests {
        let mut world = World::new();
        let e = world.spawn((TableStored("abc"), A(123))).id();
        let f = world.spawn((TableStored("def"), A(456), B(1))).id();
-        let mut results = HashSet::new();
+        let mut results = <HashSet<_>>::default();
        world
            .query::<(Entity, &A)>()
            .iter(&world)
@@ -598,7 +598,9 @@ mod tests {
            .collect::<HashSet<_>>();
        assert_eq!(
            ents,
-            HashSet::from([(e, None, A(123)), (f, Some(SparseStored(1)), A(456))])
+            [(e, None, A(123)), (f, Some(SparseStored(1)), A(456))]
+                .into_iter()
+                .collect::<HashSet<_>>()
        );
    }
@@ -630,7 +632,9 @@ mod tests {
                .iter(&world)
                .map(|(e, &i, &b)| (e, i, b))
                .collect::<HashSet<_>>(),
-            HashSet::from([(e1, A(1), B(3)), (e2, A(2), B(4))])
+            [(e1, A(1), B(3)), (e2, A(2), B(4))]
+                .into_iter()
+                .collect::<HashSet<_>>()
        );
        assert_eq!(world.entity_mut(e1).take::<A>(), Some(A(1)));
        assert_eq!(
@@ -647,7 +651,9 @@ mod tests {
                .iter(&world)
                .map(|(e, &B(b), &TableStored(s))| (e, b, s))
                .collect::<HashSet<_>>(),
-            HashSet::from([(e2, 4, "xyz"), (e1, 3, "abc")])
+            [(e2, 4, "xyz"), (e1, 3, "abc")]
+                .into_iter()
+                .collect::<HashSet<_>>()
        );
        world.entity_mut(e1).insert(A(43));
        assert_eq!(
@@ -656,7 +662,9 @@ mod tests {
                .iter(&world)
                .map(|(e, &i, &b)| (e, i, b))
                .collect::<HashSet<_>>(),
-            HashSet::from([(e2, A(2), B(4)), (e1, A(43), B(3))])
+            [(e2, A(2), B(4)), (e1, A(43), B(3))]
+                .into_iter()
+                .collect::<HashSet<_>>()
        );
        world.entity_mut(e1).insert(C);
        assert_eq!(
@@ -954,7 +962,7 @@ mod tests {
        assert_eq!(
            get_filtered::<Changed<A>>(&mut world),
-            HashSet::from([e1, e3])
+            [e1, e3].into_iter().collect::<HashSet<_>>()
        );

        // ensure changing an entity's archetypes also moves its changed state
@@ -962,7 +970,7 @@ mod tests {
        assert_eq!(
            get_filtered::<Changed<A>>(&mut world),
-            HashSet::from([e3, e1]),
+            [e3, e1].into_iter().collect::<HashSet<_>>(),
            "changed entities list should not change"
        );
@@ -971,7 +979,7 @@ mod tests {
        assert_eq!(
            get_filtered::<Changed<A>>(&mut world),
-            HashSet::from([e3, e1]),
+            [e3, e1].into_iter().collect::<HashSet<_>>(),
            "changed entities list should not change"
        );
@@ -979,7 +987,7 @@ mod tests {
        assert!(world.despawn(e2));
        assert_eq!(
            get_filtered::<Changed<A>>(&mut world),
-            HashSet::from([e3, e1]),
+            [e3, e1].into_iter().collect::<HashSet<_>>(),
            "changed entities list should not change"
        );
@@ -987,7 +995,7 @@ mod tests {
        assert!(world.despawn(e1));
        assert_eq!(
            get_filtered::<Changed<A>>(&mut world),
-            HashSet::from([e3]),
+            [e3].into_iter().collect::<HashSet<_>>(),
            "e1 should no longer be returned"
        );
@@ -998,11 +1006,20 @@ mod tests {
        let e4 = world.spawn_empty().id();

        world.entity_mut(e4).insert(A(0));
-        assert_eq!(get_filtered::<Changed<A>>(&mut world), HashSet::from([e4]));
-        assert_eq!(get_filtered::<Added<A>>(&mut world), HashSet::from([e4]));
+        assert_eq!(
+            get_filtered::<Changed<A>>(&mut world),
+            [e4].into_iter().collect::<HashSet<_>>()
+        );
+        assert_eq!(
+            get_filtered::<Added<A>>(&mut world),
+            [e4].into_iter().collect::<HashSet<_>>()
+        );

        world.entity_mut(e4).insert(A(1));
-        assert_eq!(get_filtered::<Changed<A>>(&mut world), HashSet::from([e4]));
+        assert_eq!(
+            get_filtered::<Changed<A>>(&mut world),
+            [e4].into_iter().collect::<HashSet<_>>()
+        );

        world.clear_trackers();
@@ -1011,9 +1028,18 @@ mod tests {
        world.entity_mut(e4).insert((A(0), B(0)));
        assert!(get_filtered::<Added<A>>(&mut world).is_empty());
-        assert_eq!(get_filtered::<Changed<A>>(&mut world), HashSet::from([e4]));
-        assert_eq!(get_filtered::<Added<B>>(&mut world), HashSet::from([e4]));
-        assert_eq!(get_filtered::<Changed<B>>(&mut world), HashSet::from([e4]));
+        assert_eq!(
+            get_filtered::<Changed<A>>(&mut world),
+            [e4].into_iter().collect::<HashSet<_>>()
+        );
+        assert_eq!(
+            get_filtered::<Added<B>>(&mut world),
+            [e4].into_iter().collect::<HashSet<_>>()
+        );
+        assert_eq!(
+            get_filtered::<Changed<B>>(&mut world),
+            [e4].into_iter().collect::<HashSet<_>>()
+        );
    }

    #[test]
@@ -1045,19 +1071,19 @@ mod tests {
        assert_eq!(
            get_filtered::<Changed<SparseStored>>(&mut world),
-            HashSet::from([e1, e3])
+            [e1, e3].into_iter().collect::<HashSet<_>>()
        );

        // ensure changing an entity's archetypes also moves its changed state
        world.entity_mut(e1).insert(C);
-        assert_eq!(get_filtered::<Changed<SparseStored>>(&mut world), HashSet::from([e3, e1]), "changed entities list should not change (although the order will due to archetype moves)");
+        assert_eq!(get_filtered::<Changed<SparseStored>>(&mut world), [e3, e1].into_iter().collect::<HashSet<_>>(), "changed entities list should not change (although the order will due to archetype moves)");

        // spawning a new SparseStored entity should not change existing changed state
        world.entity_mut(e1).insert(SparseStored(0));
        assert_eq!(
            get_filtered::<Changed<SparseStored>>(&mut world),
-            HashSet::from([e3, e1]),
+            [e3, e1].into_iter().collect::<HashSet<_>>(),
            "changed entities list should not change"
        );
@@ -1065,7 +1091,7 @@ mod tests {
        assert!(world.despawn(e2));
        assert_eq!(
            get_filtered::<Changed<SparseStored>>(&mut world),
-            HashSet::from([e3, e1]),
+            [e3, e1].into_iter().collect::<HashSet<_>>(),
            "changed entities list should not change"
        );
@@ -1073,7 +1099,7 @@ mod tests {
        assert!(world.despawn(e1));
        assert_eq!(
            get_filtered::<Changed<SparseStored>>(&mut world),
-            HashSet::from([e3]),
+            [e3].into_iter().collect::<HashSet<_>>(),
            "e1 should no longer be returned"
        );
@@ -1086,17 +1112,17 @@ mod tests {
        world.entity_mut(e4).insert(SparseStored(0));
        assert_eq!(
            get_filtered::<Changed<SparseStored>>(&mut world),
-            HashSet::from([e4])
+            [e4].into_iter().collect::<HashSet<_>>()
        );
        assert_eq!(
            get_filtered::<Added<SparseStored>>(&mut world),
-            HashSet::from([e4])
+            [e4].into_iter().collect::<HashSet<_>>()
        );

        world.entity_mut(e4).insert(A(1));
        assert_eq!(
            get_filtered::<Changed<SparseStored>>(&mut world),
-            HashSet::from([e4])
+            [e4].into_iter().collect::<HashSet<_>>()
        );

        world.clear_trackers();
@@ -1108,7 +1134,7 @@ mod tests {
        assert!(get_filtered::<Added<SparseStored>>(&mut world).is_empty());
        assert_eq!(
            get_filtered::<Changed<SparseStored>>(&mut world),
-            HashSet::from([e4])
+            [e4].into_iter().collect::<HashSet<_>>()
        );
    }
@@ -1292,7 +1318,12 @@ mod tests {
            .iter(&world)
            .map(|(a, b)| (a.0, b.0))
            .collect::<HashSet<_>>();
-        assert_eq!(results, HashSet::from([(1, "1"), (2, "2"), (3, "3"),]));
+        assert_eq!(
+            results,
+            [(1, "1"), (2, "2"), (3, "3"),]
+                .into_iter()
+                .collect::<HashSet<_>>()
+        );

        let removed_bundle = world.entity_mut(e2).take::<(B, TableStored)>().unwrap();
        assert_eq!(removed_bundle, (B(2), TableStored("2")));
@@ -1301,11 +1332,14 @@ mod tests {
            .iter(&world)
            .map(|(a, b)| (a.0, b.0))
            .collect::<HashSet<_>>();
-        assert_eq!(results, HashSet::from([(1, "1"), (3, "3"),]));
+        assert_eq!(
+            results,
+            [(1, "1"), (3, "3"),].into_iter().collect::<HashSet<_>>()
+        );

        let mut a_query = world.query::<&A>();
        let results = a_query.iter(&world).map(|a| a.0).collect::<HashSet<_>>();
-        assert_eq!(results, HashSet::from([1, 3, 2]));
+        assert_eq!(results, [1, 3, 2].into_iter().collect::<HashSet<_>>());

        let entity_ref = world.entity(e2);
        assert_eq!(

View File

@@ -4,10 +4,10 @@
 //!
 //! [`petgraph`]: https://docs.rs/petgraph/0.6.5/petgraph/

-use bevy_utils::{hashbrown::HashSet, AHasher};
+use bevy_utils::{hashbrown::HashSet, FixedHasher};
 use core::{
     fmt,
-    hash::{BuildHasher, BuildHasherDefault, Hash},
+    hash::{BuildHasher, Hash},
 };
 use indexmap::IndexMap;
 use smallvec::SmallVec;
@@ -20,13 +20,13 @@ use Direction::{Incoming, Outgoing};
 ///
 /// For example, an edge between *1* and *2* is equivalent to an edge between
 /// *2* and *1*.
-pub type UnGraph<S = BuildHasherDefault<AHasher>> = Graph<false, S>;
+pub type UnGraph<S = FixedHasher> = Graph<false, S>;

 /// A `Graph` with directed edges.
 ///
 /// For example, an edge from *1* to *2* is distinct from an edge from *2* to
 /// *1*.
-pub type DiGraph<S = BuildHasherDefault<AHasher>> = Graph<true, S>;
+pub type DiGraph<S = FixedHasher> = Graph<true, S>;

 /// `Graph<DIRECTED>` is a graph datastructure using an associative array
 /// of its node weights `NodeId`.
@@ -45,7 +45,7 @@ pub type DiGraph<S = BuildHasherDefault<AHasher>> = Graph<true, S>;
 ///
 /// `Graph` does not allow parallel edges, but self loops are allowed.
 #[derive(Clone)]
-pub struct Graph<const DIRECTED: bool, S = BuildHasherDefault<AHasher>>
+pub struct Graph<const DIRECTED: bool, S = FixedHasher>
 where
     S: BuildHasher,
 {
@@ -63,14 +63,6 @@ impl<const DIRECTED: bool, S> Graph<DIRECTED, S>
 where
     S: BuildHasher,
 {
-    /// Create a new `Graph`
-    pub(crate) fn new() -> Self
-    where
-        S: Default,
-    {
-        Self::default()
-    }
-
     /// Create a new `Graph` with estimated capacity.
     pub(crate) fn with_capacity(nodes: usize, edges: usize) -> Self
     where
@@ -274,7 +266,7 @@ where
    }
 }

-impl<S: BuildHasher> Graph<true, S> {
+impl<S: BuildHasher> DiGraph<S> {
    /// Iterate over all *Strongly Connected Components* in this graph.
    pub(crate) fn iter_sccs(&self) -> impl Iterator<Item = SmallVec<[NodeId; 4]>> + '_ {
        super::tarjan_scc::new_tarjan_scc(self)
@@ -408,7 +400,7 @@ mod tests {
    fn node_order_preservation() {
        use NodeId::System;

-        let mut graph = Graph::<true>::new();
+        let mut graph = <DiGraph>::default();

        graph.add_node(System(1));
        graph.add_node(System(2));
@@ -450,7 +442,7 @@ mod tests {
    fn strongly_connected_components() {
        use NodeId::System;

-        let mut graph = Graph::<true>::new();
+        let mut graph = <DiGraph>::default();

        graph.add_edge(System(1), System(2));
        graph.add_edge(System(2), System(1));

View File

@@ -1,10 +1,8 @@
-use alloc::vec;
-use alloc::vec::Vec;
+use alloc::{vec, vec::Vec};
 use core::fmt::Debug;
-use core::hash::BuildHasherDefault;
 use smallvec::SmallVec;

-use bevy_utils::{AHasher, HashMap, HashSet};
+use bevy_utils::{HashMap, HashSet};
 use fixedbitset::FixedBitSet;

 use crate::schedule::set::*;
@@ -96,11 +94,11 @@ impl Default for CheckGraphResults {
    fn default() -> Self {
        Self {
            reachable: FixedBitSet::new(),
-            connected: HashSet::new(),
+            connected: HashSet::default(),
            disconnected: Vec::new(),
            transitive_edges: Vec::new(),
-            transitive_reduction: DiGraph::new(),
-            transitive_closure: DiGraph::new(),
+            transitive_reduction: DiGraph::default(),
+            transitive_closure: DiGraph::default(),
        }
    }
 }
@@ -124,8 +122,8 @@ pub(crate) fn check_graph(graph: &DiGraph, topological_order: &[NodeId]) -> Chec
    let n = graph.node_count();

    // build a copy of the graph where the nodes and edges appear in topsorted order
-    let mut map = HashMap::with_capacity(n);
-    let mut topsorted = DiGraph::<BuildHasherDefault<AHasher>>::new();
+    let mut map = <HashMap<_, _>>::with_capacity_and_hasher(n, Default::default());
+    let mut topsorted = <DiGraph>::default();
    // iterate nodes in topological order
    for (i, &node) in topological_order.iter().enumerate() {
        map.insert(node, i);
@@ -137,12 +135,12 @@ pub(crate) fn check_graph(graph: &DiGraph, topological_order: &[NodeId]) -> Chec
    }

    let mut reachable = FixedBitSet::with_capacity(n * n);
-    let mut connected = HashSet::new();
+    let mut connected = <HashSet<_>>::default();
    let mut disconnected = Vec::new();

    let mut transitive_edges = Vec::new();
-    let mut transitive_reduction = DiGraph::new();
-    let mut transitive_closure = DiGraph::new();
+    let mut transitive_reduction = DiGraph::default();
+    let mut transitive_closure = DiGraph::default();

    let mut visited = FixedBitSet::with_capacity(n);
@@ -227,7 +225,7 @@ pub fn simple_cycles_in_component(graph: &DiGraph, scc: &[NodeId]) -> Vec<Vec<No
    while let Some(mut scc) = sccs.pop() {
        // only look at nodes and edges in this strongly-connected component
-        let mut subgraph = DiGraph::<BuildHasherDefault<AHasher>>::new();
+        let mut subgraph = <DiGraph>::default();
        for &node in &scc {
            subgraph.add_node(node);
        }
@@ -243,16 +241,17 @@ pub fn simple_cycles_in_component(graph: &DiGraph, scc: &[NodeId]) -> Vec<Vec<No
        // path of nodes that may form a cycle
        let mut path = Vec::with_capacity(subgraph.node_count());
        // we mark nodes as "blocked" to avoid finding permutations of the same cycles
-        let mut blocked = HashSet::with_capacity(subgraph.node_count());
+        let mut blocked: HashSet<_> =
+            HashSet::with_capacity_and_hasher(subgraph.node_count(), Default::default());
        // connects nodes along path segments that can't be part of a cycle (given current root)
        // those nodes can be unblocked at the same time
        let mut unblock_together: HashMap<NodeId, HashSet<NodeId>> =
-            HashMap::with_capacity(subgraph.node_count());
+            HashMap::with_capacity_and_hasher(subgraph.node_count(), Default::default());
        // stack for unblocking nodes
        let mut unblock_stack = Vec::with_capacity(subgraph.node_count());
        // nodes can be involved in multiple cycles
        let mut maybe_in_more_cycles: HashSet<NodeId> =
-            HashSet::with_capacity(subgraph.node_count());
+            HashSet::with_capacity_and_hasher(subgraph.node_count(), Default::default());
        // stack for DFS
        let mut stack = Vec::with_capacity(subgraph.node_count());

View File

@@ -1,13 +1,12 @@
 use alloc::collections::BTreeSet;
 use core::fmt::{Debug, Write};
-use core::hash::BuildHasherDefault;

 #[cfg(feature = "trace")]
 use bevy_utils::tracing::info_span;
 use bevy_utils::{
     default,
     tracing::{error, info, warn},
-    AHasher, HashMap, HashSet,
+    HashMap, HashSet,
 };
 use disqualified::ShortName;
 use fixedbitset::FixedBitSet;
@@ -39,7 +38,7 @@ impl Schedules {
    /// Constructs an empty `Schedules` with zero initial capacity.
    pub fn new() -> Self {
        Self {
-            inner: HashMap::new(),
+            inner: HashMap::default(),
            ignored_scheduling_ambiguities: BTreeSet::new(),
        }
    }
@@ -516,7 +515,7 @@ impl Schedule {
 #[derive(Default)]
 pub struct Dag {
    /// A directed graph.
-    graph: DiGraph<BuildHasherDefault<AHasher>>,
+    graph: DiGraph,
    /// A cached topological ordering of the graph.
    topsort: Vec<NodeId>,
 }
@@ -524,7 +523,7 @@ pub struct Dag {
 impl Dag {
    fn new() -> Self {
        Self {
-            graph: DiGraph::new(),
+            graph: DiGraph::default(),
            topsort: Vec::new(),
        }
    }
@@ -609,7 +608,7 @@ pub struct ScheduleGraph {
    hierarchy: Dag,
    /// Directed acyclic graph of the dependency (which systems/sets have to run before which other systems/sets)
    dependency: Dag,
-    ambiguous_with: UnGraph<BuildHasherDefault<AHasher>>,
+    ambiguous_with: UnGraph,
    ambiguous_with_all: HashSet<NodeId>,
    conflicting_systems: Vec<(NodeId, NodeId, Vec<ComponentId>)>,
    anonymous_sets: usize,
@@ -628,18 +627,18 @@ impl ScheduleGraph {
            system_conditions: Vec::new(),
            system_sets: Vec::new(),
            system_set_conditions: Vec::new(),
-            system_set_ids: HashMap::new(),
+            system_set_ids: HashMap::default(),
            uninit: Vec::new(),
            hierarchy: Dag::new(),
            dependency: Dag::new(),
-            ambiguous_with: UnGraph::new(),
-            ambiguous_with_all: HashSet::new(),
+            ambiguous_with: UnGraph::default(),
+            ambiguous_with_all: HashSet::default(),
            conflicting_systems: Vec::new(),
            anonymous_sets: 0,
            changed: false,
            settings: default(),
            no_sync_edges: BTreeSet::new(),
-            auto_sync_node_ids: HashMap::new(),
+            auto_sync_node_ids: HashMap::default(),
        }
    }
@@ -1154,7 +1153,8 @@ impl ScheduleGraph {
        // calculate the number of sync points each sync point is from the beginning of the graph
        // use the same sync point if the distance is the same
-        let mut distances: HashMap<usize, Option<u32>> = HashMap::with_capacity(topo.len());
+        let mut distances: HashMap<usize, Option<u32>> =
+            HashMap::with_capacity_and_hasher(topo.len(), Default::default());
        for node in &topo {
            let add_sync_after = self.systems[node.index()].get().unwrap().has_deferred();
@@ -1231,8 +1231,9 @@ impl ScheduleGraph {
        hierarchy_graph: &DiGraph,
    ) -> (HashMap<NodeId, Vec<NodeId>>, HashMap<NodeId, FixedBitSet>) {
        let mut set_systems: HashMap<NodeId, Vec<NodeId>> =
-            HashMap::with_capacity(self.system_sets.len());
-        let mut set_system_bitsets = HashMap::with_capacity(self.system_sets.len());
+            HashMap::with_capacity_and_hasher(self.system_sets.len(), Default::default());
+        let mut set_system_bitsets =
+            HashMap::with_capacity_and_hasher(self.system_sets.len(), Default::default());
        for &id in hierarchy_topsort.iter().rev() {
            if id.is_system() {
                continue;
@@ -1311,7 +1312,7 @@ impl ScheduleGraph {
    }

    fn get_ambiguous_with_flattened(&self, set_systems: &HashMap<NodeId, Vec<NodeId>>) -> UnGraph {
-        let mut ambiguous_with_flattened = UnGraph::new();
+        let mut ambiguous_with_flattened = UnGraph::default();
        for (lhs, rhs) in self.ambiguous_with.all_edges() {
            match (lhs, rhs) {
                (NodeId::System(_), NodeId::System(_)) => {
@@ -1919,7 +1920,7 @@ impl ScheduleGraph {
    }

    fn names_of_sets_containing_node(&self, id: &NodeId) -> Vec<String> {
-        let mut sets = HashSet::new();
+        let mut sets = <HashSet<_>>::default();
        self.traverse_sets_containing_node(*id, &mut |set_id| {
            !self.system_sets[set_id.index()].is_system_type() && sets.insert(set_id)
        });

View File

@@ -728,7 +728,7 @@ impl<'w> EntityMut<'w> {
    /// let mut entity_mut = world.entity_mut(entity);
    /// let mut ptrs = entity_mut.get_mut_by_id(&HashSet::from_iter([x_id, y_id]))
    /// #   .unwrap();
-    /// # let [mut x_ptr, mut y_ptr] = ptrs.get_many_mut([&x_id, &y_id]).unwrap();
+    /// # let [Some(mut x_ptr), Some(mut y_ptr)] = ptrs.get_many_mut([&x_id, &y_id]) else { unreachable!() };
    /// # assert_eq!((unsafe { x_ptr.as_mut().deref_mut::<X>() }, unsafe { y_ptr.as_mut().deref_mut::<Y>() }), (&mut X(42), &mut Y(10)));
    /// ```
    #[inline]
@@ -3656,7 +3656,7 @@ unsafe impl DynamicComponentFetch for &'_ HashSet<ComponentId> {
        self,
        cell: UnsafeEntityCell<'_>,
    ) -> Result<Self::Ref<'_>, EntityComponentError> {
-        let mut ptrs = HashMap::with_capacity(self.len());
+        let mut ptrs = HashMap::with_capacity_and_hasher(self.len(), Default::default());
        for &id in self {
            ptrs.insert(
                id,
@@ -3671,7 +3671,7 @@ unsafe impl DynamicComponentFetch for &'_ HashSet<ComponentId> {
        self,
        cell: UnsafeEntityCell<'_>,
    ) -> Result<Self::Mut<'_>, EntityComponentError> {
-        let mut ptrs = HashMap::with_capacity(self.len());
+        let mut ptrs = HashMap::with_capacity_and_hasher(self.len(), Default::default());
        for &id in self {
            ptrs.insert(
                id,

View File

@@ -1235,8 +1235,7 @@ impl World {
    /// # use bevy_ecs::prelude::*;
    /// # use bevy_ecs::entity::EntityHash;
    /// # use bevy_ecs::entity::EntityHashSet;
-    /// # use bevy_utils::hashbrown::HashSet;
-    /// # use bevy_utils::hashbrown::hash_map::DefaultHashBuilder;
+    /// # use bevy_utils::HashSet;
    /// # let mut world = World::new();
    /// # let id1 = world.spawn_empty().id();
    /// # let id2 = world.spawn_empty().id();
@@ -3462,7 +3461,7 @@ impl World {
    /// // probably use something like `ReflectFromPtr` in a real-world scenario.
    ///
    /// // Create the hash map that will store the closures for each resource type
-    /// let mut closures: HashMap<TypeId, Box<dyn Fn(&Ptr<'_>)>> = HashMap::new();
+    /// let mut closures: HashMap<TypeId, Box<dyn Fn(&Ptr<'_>)>> = HashMap::default();
    ///
    /// // Add closure for `A`
    /// closures.insert(TypeId::of::<A>(), Box::new(|ptr| {
@@ -3539,7 +3538,7 @@ impl World {
    /// // probably use something like `ReflectFromPtr` in a real-world scenario.
    ///
    /// // Create the hash map that will store the mutator closures for each resource type
-    /// let mut mutators: HashMap<TypeId, Box<dyn Fn(&mut MutUntyped<'_>)>> = HashMap::new();
+    /// let mut mutators: HashMap<TypeId, Box<dyn Fn(&mut MutUntyped<'_>)>> = HashMap::default();
    ///
    /// // Add mutator closure for `A`
    /// mutators.insert(TypeId::of::<A>(), Box::new(|mut_untyped| {
@@ -4299,38 +4298,46 @@ mod tests {
        let baz_id = TypeId::of::<Baz>();
        assert_eq!(
            to_type_ids(world.inspect_entity(ent0).collect()),
-            [Some(foo_id), Some(bar_id), Some(baz_id)].into()
+            [Some(foo_id), Some(bar_id), Some(baz_id)]
+                .into_iter()
+                .collect::<HashSet<_>>()
        );
        assert_eq!(
            to_type_ids(world.inspect_entity(ent1).collect()),
-            [Some(foo_id), Some(bar_id)].into()
+            [Some(foo_id), Some(bar_id)]
+                .into_iter()
+                .collect::<HashSet<_>>()
        );
        assert_eq!(
            to_type_ids(world.inspect_entity(ent2).collect()),
-            [Some(bar_id), Some(baz_id)].into()
+            [Some(bar_id), Some(baz_id)]
+                .into_iter()
+                .collect::<HashSet<_>>()
        );
        assert_eq!(
            to_type_ids(world.inspect_entity(ent3).collect()),
-            [Some(foo_id), Some(baz_id)].into()
+            [Some(foo_id), Some(baz_id)]
+                .into_iter()
+                .collect::<HashSet<_>>()
        );
        assert_eq!(
            to_type_ids(world.inspect_entity(ent4).collect()),
-            [Some(foo_id)].into()
+            [Some(foo_id)].into_iter().collect::<HashSet<_>>()
        );
        assert_eq!(
            to_type_ids(world.inspect_entity(ent5).collect()),
-            [Some(bar_id)].into()
+            [Some(bar_id)].into_iter().collect::<HashSet<_>>()
        );
        assert_eq!(
            to_type_ids(world.inspect_entity(ent6).collect()),
-            [Some(baz_id)].into()
+            [Some(baz_id)].into_iter().collect::<HashSet<_>>()
        );
    }

    #[test]
    fn iterate_entities() {
        let mut world = World::new();
-        let mut entity_counters = HashMap::new();
+        let mut entity_counters = <HashMap<_, _>>::default();
        let iterate_and_count_entities = |world: &World, entity_counters: &mut HashMap<_, _>| {
            entity_counters.clear();

View File

@@ -217,7 +217,7 @@ async fn load_gltf<'a, 'b, 'c>(
        .to_string();
    let buffer_data = load_buffers(&gltf, load_context).await?;

-    let mut linear_textures = HashSet::default();
+    let mut linear_textures = <HashSet<_>>::default();

    for material in gltf.materials() {
        if let Some(texture) = material.normal_texture() {
@@ -259,11 +259,11 @@ async fn load_gltf<'a, 'b, 'c>(
    #[cfg(feature = "bevy_animation")]
    let paths = {
-        let mut paths = HashMap::<usize, (usize, Vec<Name>)>::new();
+        let mut paths = HashMap::<usize, (usize, Vec<Name>)>::default();
        for scene in gltf.scenes() {
            for node in scene.nodes() {
                let root_index = node.index();
-                paths_recur(node, &[], &mut paths, root_index, &mut HashSet::new());
+                paths_recur(node, &[], &mut paths, root_index, &mut HashSet::default());
            }
        }
        paths
@@ -272,12 +272,14 @@ async fn load_gltf<'a, 'b, 'c>(
    #[cfg(feature = "bevy_animation")]
    let (animations, named_animations, animation_roots) = {
        use bevy_animation::{animated_field, animation_curves::*, gltf_curves::*, VariableCurve};
-        use bevy_math::curve::{ConstantCurve, Interval, UnevenSampleAutoCurve};
-        use bevy_math::{Quat, Vec4};
+        use bevy_math::{
+            curve::{ConstantCurve, Interval, UnevenSampleAutoCurve},
+            Quat, Vec4,
+        };
        use gltf::animation::util::ReadOutputs;
        let mut animations = vec![];
-        let mut named_animations = HashMap::default();
-        let mut animation_roots = HashSet::default();
+        let mut named_animations = <HashMap<_, _>>::default();
+        let mut animation_roots = <HashSet<_>>::default();
        for animation in gltf.animations() {
            let mut animation_clip = AnimationClip::default();
            for channel in animation.channels() {
@@ -603,7 +605,7 @@ async fn load_gltf<'a, 'b, 'c>(
    }

    let mut materials = vec![];
-    let mut named_materials = HashMap::default();
+    let mut named_materials = <HashMap<_, _>>::default();
    // Only include materials in the output if they're set to be retained in the MAIN_WORLD and/or RENDER_WORLD by the load_materials flag
    if !settings.load_materials.is_empty() {
        // NOTE: materials must be loaded after textures because image load() calls will happen before load_with_settings, preventing is_srgb from being set properly
@@ -616,9 +618,9 @@ async fn load_gltf<'a, 'b, 'c>(
        }
    }
    let mut meshes = vec![];
-    let mut named_meshes = HashMap::default();
-    let mut meshes_on_skinned_nodes = HashSet::default();
-    let mut meshes_on_non_skinned_nodes = HashSet::default();
+    let mut named_meshes = <HashMap<_, _>>::default();
+    let mut meshes_on_skinned_nodes = <HashSet<_>>::default();
+    let mut meshes_on_non_skinned_nodes = <HashSet<_>>::default();
    for gltf_node in gltf.nodes() {
        if gltf_node.skin().is_some() {
            if let Some(mesh) = gltf_node.mesh() {
@@ -783,10 +785,10 @@ async fn load_gltf<'a, 'b, 'c>(
        })
        .collect();

-    let mut nodes = HashMap::<usize, Handle<GltfNode>>::new();
-    let mut named_nodes = HashMap::new();
+    let mut nodes = HashMap::<usize, Handle<GltfNode>>::default();
+    let mut named_nodes = <HashMap<_, _>>::default();
    let mut skins = vec![];
-    let mut named_skins = HashMap::default();
+    let mut named_skins = <HashMap<_, _>>::default();
    for node in GltfTreeIterator::try_new(&gltf)? {
        let skin = node.skin().map(|skin| {
            let joints = skin
@@ -848,12 +850,12 @@ async fn load_gltf<'a, 'b, 'c>(
        .collect();

    let mut scenes = vec![];
-    let mut named_scenes = HashMap::default();
+    let mut named_scenes = <HashMap<_, _>>::default();
    let mut active_camera_found = false;
    for scene in gltf.scenes() {
        let mut err = None;
        let mut world = World::default();
-        let mut node_index_to_entity_map = HashMap::new();
+        let mut node_index_to_entity_map = <HashMap<_, _>>::default();
        let mut entity_to_skin_index_map = EntityHashMap::default();
        let mut scene_load_context = load_context.begin_labeled_asset();
@@ -1904,7 +1906,7 @@ impl<'a> GltfTreeIterator<'a> {
            .collect::<HashMap<_, _>>();

        let mut nodes = Vec::new();
-        let mut warned_about_max_joints = HashSet::new();
+        let mut warned_about_max_joints = <HashSet<_>>::default();
        while let Some(index) = empty_children.pop_front() {
            if let Some(skin) = unprocessed_nodes.get(&index).unwrap().0.skin() {
                if skin.joints().len() > MAX_JOINTS && warned_about_max_joints.insert(skin.index())

View File

@@ -1059,7 +1059,7 @@ mod tests {
    use crate::{Mesh, MeshBuilder, Meshable, VertexAttributeValues};

    fn count_distinct_positions(points: &[[f32; 3]]) -> usize {
-        let mut map = HashSet::new();
+        let mut map = <HashSet<_>>::default();
        for point in points {
            map.insert(point.map(FloatOrd));
        }


@ -24,7 +24,7 @@ use bevy_render::{
sync_world::RenderEntity, sync_world::RenderEntity,
Extract, Extract,
}; };
use bevy_utils::{hashbrown::HashSet, tracing::warn}; use bevy_utils::{tracing::warn, HashSet};
pub(crate) use crate::cluster::assign::assign_objects_to_clusters; pub(crate) use crate::cluster::assign::assign_objects_to_clusters;
use crate::MeshPipeline; use crate::MeshPipeline;


@ -631,7 +631,7 @@ where
fn new() -> RenderViewLightProbes<C> { fn new() -> RenderViewLightProbes<C> {
RenderViewLightProbes { RenderViewLightProbes {
binding_index_to_textures: vec![], binding_index_to_textures: vec![],
cubemap_to_binding_index: HashMap::new(), cubemap_to_binding_index: HashMap::default(),
render_light_probes: vec![], render_light_probes: vec![],
view_light_probe_info: C::ViewLightProbeInfo::default(), view_light_probe_info: C::ViewLightProbeInfo::default(),
} }


@ -23,10 +23,7 @@ use bevy_render::{
texture::FallbackImage, texture::FallbackImage,
}; };
use bevy_utils::{default, tracing::error, HashMap}; use bevy_utils::{default, tracing::error, HashMap};
use core::any; use core::{any, iter, marker::PhantomData, num::NonZero};
use core::iter;
use core::marker::PhantomData;
use core::num::NonZero;
/// An object that creates and stores bind groups for a single material type. /// An object that creates and stores bind groups for a single material type.
/// ///
@ -818,7 +815,7 @@ impl MaterialFallbackBuffers {
render_device: &RenderDevice, render_device: &RenderDevice,
bind_group_layout_entries: &[BindGroupLayoutEntry], bind_group_layout_entries: &[BindGroupLayoutEntry],
) -> MaterialFallbackBuffers { ) -> MaterialFallbackBuffers {
let mut fallback_buffers = HashMap::new(); let mut fallback_buffers = HashMap::default();
for bind_group_layout_entry in bind_group_layout_entries { for bind_group_layout_entry in bind_group_layout_entries {
// Create a dummy buffer of the appropriate size. // Create a dummy buffer of the appropriate size.
let BindingType::Buffer { let BindingType::Buffer {


@ -273,7 +273,7 @@ fn find_connected_meshlets(
} }
// For each meshlet pair, count how many vertices they share // For each meshlet pair, count how many vertices they share
let mut meshlet_pair_to_shared_vertex_count = HashMap::new(); let mut meshlet_pair_to_shared_vertex_count = <HashMap<_, _>>::default();
for vertex_meshlet_ids in vertices_to_meshlets { for vertex_meshlet_ids in vertices_to_meshlets {
for (meshlet_queue_id1, meshlet_queue_id2) in for (meshlet_queue_id1, meshlet_queue_id2) in
vertex_meshlet_ids.into_iter().tuple_combinations() vertex_meshlet_ids.into_iter().tuple_combinations()


@ -76,8 +76,8 @@ impl InstanceManager {
view_instance_visibility: EntityHashMap::default(), view_instance_visibility: EntityHashMap::default(),
next_material_id: 0, next_material_id: 0,
material_id_lookup: HashMap::new(), material_id_lookup: HashMap::default(),
material_ids_present_in_scene: HashSet::new(), material_ids_present_in_scene: HashSet::default(),
} }
} }


@ -47,7 +47,7 @@ impl FromWorld for MeshletMeshManager {
"meshlet_simplification_errors", "meshlet_simplification_errors",
render_device, render_device,
), ),
meshlet_mesh_slices: HashMap::new(), meshlet_mesh_slices: HashMap::default(),
} }
} }
} }


@ -932,11 +932,13 @@ impl RenderMeshInstanceGpuBuilder {
// Write in the new mesh input uniform. // Write in the new mesh input uniform.
current_input_buffer.set(current_uniform_index, mesh_input_uniform); current_input_buffer.set(current_uniform_index, mesh_input_uniform);
occupied_entry.replace_entry(RenderMeshInstanceGpu { occupied_entry.replace_entry_with(|_, _| {
translation: self.world_from_local.translation, Some(RenderMeshInstanceGpu {
shared: self.shared, translation: self.world_from_local.translation,
current_uniform_index: NonMaxU32::new(current_uniform_index) shared: self.shared,
.unwrap_or_default(), current_uniform_index: NonMaxU32::new(current_uniform_index)
.unwrap_or_default(),
})
}); });
} }
@ -1124,7 +1126,7 @@ pub fn extract_meshes_for_cpu_building(
render_mesh_instances.clear(); render_mesh_instances.clear();
for queue in render_mesh_instance_queues.iter_mut() { for queue in render_mesh_instance_queues.iter_mut() {
for (entity, render_mesh_instance) in queue.drain(..) { for (entity, render_mesh_instance) in queue.drain(..) {
render_mesh_instances.insert_unique_unchecked(entity.into(), render_mesh_instance); render_mesh_instances.insert(entity.into(), render_mesh_instance);
} }
} }
} }
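Where the surrounding crate denies unsafe code, the now-`unsafe` `insert_unique_unchecked` becomes a plain `insert`, as above; the safe call simply re-does the duplicate-key lookup. A sketch of both paths:

use bevy_utils::HashMap;

fn fill(map: &mut HashMap<u32, &'static str>) {
    // Safe: performs the usual check for an existing key.
    map.insert(1, "one");
    // Fast path, unsafe in hashbrown 0.15: the caller must guarantee the
    // key is not already present, or the map's invariants break.
    unsafe {
        let _ = map.insert_unique_unchecked(2, "two");
    }
}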


@ -226,7 +226,7 @@ pub fn update_interactions(
// need to be able to insert the interaction component on entities if they do not exist. To do // need to be able to insert the interaction component on entities if they do not exist. To do
// so we need to know the final aggregated interaction state to avoid the scenario where we set // so we need to know the final aggregated interaction state to avoid the scenario where we set
// an entity to `Pressed`, then overwrite that with a lower precedent like `Hovered`. // an entity to `Pressed`, then overwrite that with a lower precedent like `Hovered`.
let mut new_interaction_state = HashMap::<Entity, PickingInteraction>::new(); let mut new_interaction_state = HashMap::<Entity, PickingInteraction>::default();
for (pointer, pointer_press, mut pointer_interaction) in &mut pointers { for (pointer, pointer_press, mut pointer_interaction) in &mut pointers {
if let Some(pointers_hovered_entities) = hover_map.get(pointer) { if let Some(pointers_hovered_entities) = hover_map.get(pointer) {
// Insert a sorted list of hit entities into the pointer's interaction component. // Insert a sorted list of hit entities into the pointer's interaction component.


@ -45,6 +45,9 @@ bevy_utils = { path = "../bevy_utils", version = "0.15.0-dev", default-features
] } ] }
bevy_ptr = { path = "../bevy_ptr", version = "0.15.0-dev" } bevy_ptr = { path = "../bevy_ptr", version = "0.15.0-dev" }
# used by bevy-utils, but it also needs reflect impls
foldhash = { version = "0.1.3", default-features = false }
# other # other
erased-serde = { version = "0.4", default-features = false, features = [ erased-serde = { version = "0.4", default-features = false, features = [
"alloc", "alloc",


@ -24,7 +24,7 @@ impl SerializationDataDef {
fields: &[StructField<'_>], fields: &[StructField<'_>],
bevy_reflect_path: &Path, bevy_reflect_path: &Path,
) -> Result<Option<Self>, syn::Error> { ) -> Result<Option<Self>, syn::Error> {
let mut skipped = HashMap::default(); let mut skipped = <HashMap<_, _>>::default();
for field in fields { for field in fields {
match field.attrs.ignore { match field.attrs.ignore {


@ -755,10 +755,12 @@ mod tests {
assert_eq!( assert_eq!(
result.unwrap_err(), result.unwrap_err(),
FunctionError::NoOverload { FunctionError::NoOverload {
expected: HashSet::from([ expected: [
ArgumentSignature::from_iter(vec![Type::of::<i32>(), Type::of::<i32>()]), ArgumentSignature::from_iter(vec![Type::of::<i32>(), Type::of::<i32>()]),
ArgumentSignature::from_iter(vec![Type::of::<f32>(), Type::of::<f32>()]) ArgumentSignature::from_iter(vec![Type::of::<f32>(), Type::of::<f32>()])
]), ]
.into_iter()
.collect::<HashSet<_>>(),
received: ArgumentSignature::from_iter(vec![Type::of::<u32>(), Type::of::<u32>()]), received: ArgumentSignature::from_iter(vec![Type::of::<u32>(), Type::of::<u32>()]),
} }
); );


@ -2,7 +2,7 @@ use crate::func::args::ArgCount;
use crate::func::signature::{ArgListSignature, ArgumentSignature}; use crate::func::signature::{ArgListSignature, ArgumentSignature};
use crate::func::{ArgList, FunctionError, FunctionInfo, FunctionOverloadError}; use crate::func::{ArgList, FunctionError, FunctionInfo, FunctionOverloadError};
use alloc::borrow::Cow; use alloc::borrow::Cow;
use bevy_utils::hashbrown::HashMap; use bevy_utils::HashMap;
use core::fmt::{Debug, Formatter}; use core::fmt::{Debug, Formatter};
/// An internal structure for storing a function and its corresponding [function information]. /// An internal structure for storing a function and its corresponding [function information].
@ -141,19 +141,19 @@ impl<F> DynamicFunctionInternal<F> {
pub fn merge(&mut self, mut other: Self) -> Result<(), FunctionOverloadError> { pub fn merge(&mut self, mut other: Self) -> Result<(), FunctionOverloadError> {
// Keep a separate map of the new indices to avoid mutating the existing one // Keep a separate map of the new indices to avoid mutating the existing one
// until we can be sure the merge will be successful. // until we can be sure the merge will be successful.
let mut new_signatures = HashMap::new(); let mut new_signatures = <HashMap<_, _>>::default();
for (sig, index) in other.arg_map { for (sig, index) in other.arg_map {
if self.arg_map.contains_key(&sig) { if self.arg_map.contains_key(&sig) {
return Err(FunctionOverloadError::DuplicateSignature(sig)); return Err(FunctionOverloadError::DuplicateSignature(sig));
} }
new_signatures.insert_unique_unchecked(sig, self.functions.len() + index); new_signatures.insert(sig, self.functions.len() + index);
} }
self.arg_map.reserve(new_signatures.len()); self.arg_map.reserve(new_signatures.len());
for (sig, index) in new_signatures { for (sig, index) in new_signatures {
self.arg_map.insert_unique_unchecked(sig, index); self.arg_map.insert(sig, index);
} }
self.functions.append(&mut other.functions); self.functions.append(&mut other.functions);


@ -0,0 +1,8 @@
use crate::{self as bevy_reflect, impl_type_path};
impl_type_path!(::foldhash::fast::FoldHasher);
impl_type_path!(::foldhash::fast::FixedState);
impl_type_path!(::foldhash::fast::RandomState);
impl_type_path!(::foldhash::quality::FoldHasher);
impl_type_path!(::foldhash::quality::FixedState);
impl_type_path!(::foldhash::quality::RandomState);


@ -13,7 +13,13 @@ use crate::{
ReflectFromReflect, ReflectKind, ReflectMut, ReflectOwned, ReflectRef, ReflectSerialize, Set, ReflectFromReflect, ReflectKind, ReflectMut, ReflectOwned, ReflectRef, ReflectSerialize, Set,
SetInfo, TypeInfo, TypeParamInfo, TypePath, TypeRegistration, TypeRegistry, Typed, SetInfo, TypeInfo, TypeParamInfo, TypePath, TypeRegistration, TypeRegistry, Typed,
}; };
use alloc::{borrow::Cow, borrow::ToOwned, boxed::Box, collections::VecDeque, format, vec::Vec}; use alloc::{
borrow::{Cow, ToOwned},
boxed::Box,
collections::VecDeque,
format,
vec::Vec,
};
use bevy_reflect_derive::{impl_reflect, impl_reflect_opaque}; use bevy_reflect_derive::{impl_reflect, impl_reflect_opaque};
use core::{ use core::{
any::Any, any::Any,
@ -832,6 +838,7 @@ macro_rules! impl_reflect_for_hashmap {
#[cfg(feature = "std")] #[cfg(feature = "std")]
impl_reflect_for_hashmap!(::std::collections::HashMap<K, V, S>); impl_reflect_for_hashmap!(::std::collections::HashMap<K, V, S>);
impl_type_path!(::core::hash::BuildHasherDefault<H>);
#[cfg(feature = "std")] #[cfg(feature = "std")]
impl_type_path!(::std::collections::hash_map::RandomState); impl_type_path!(::std::collections::hash_map::RandomState);
#[cfg(feature = "std")] #[cfg(feature = "std")]
@ -846,7 +853,6 @@ crate::func::macros::impl_function_traits!(::std::collections::HashMap<K, V, S>;
); );
impl_reflect_for_hashmap!(bevy_utils::hashbrown::HashMap<K, V, S>); impl_reflect_for_hashmap!(bevy_utils::hashbrown::HashMap<K, V, S>);
impl_type_path!(::bevy_utils::hashbrown::hash_map::DefaultHashBuilder);
impl_type_path!(::bevy_utils::hashbrown::HashMap<K, V, S>); impl_type_path!(::bevy_utils::hashbrown::HashMap<K, V, S>);
#[cfg(feature = "functions")] #[cfg(feature = "functions")]
crate::func::macros::impl_function_traits!(::bevy_utils::hashbrown::HashMap<K, V, S>; crate::func::macros::impl_function_traits!(::bevy_utils::hashbrown::HashMap<K, V, S>;
@ -1060,7 +1066,7 @@ macro_rules! impl_reflect_for_hashset {
} }
impl_type_path!(::bevy_utils::NoOpHash); impl_type_path!(::bevy_utils::NoOpHash);
impl_type_path!(::bevy_utils::FixedState); impl_type_path!(::bevy_utils::FixedHasher);
#[cfg(feature = "std")] #[cfg(feature = "std")]
impl_reflect_for_hashset!(::std::collections::HashSet<V,S>); impl_reflect_for_hashset!(::std::collections::HashSet<V,S>);
@ -2342,10 +2348,10 @@ mod tests {
#[test] #[test]
fn should_partial_eq_hash_map() { fn should_partial_eq_hash_map() {
let mut a = HashMap::new(); let mut a = <HashMap<_, _>>::default();
a.insert(0usize, 1.23_f64); a.insert(0usize, 1.23_f64);
let b = a.clone(); let b = a.clone();
let mut c = HashMap::new(); let mut c = <HashMap<_, _>>::default();
c.insert(0usize, 3.21_f64); c.insert(0usize, 3.21_f64);
let a: &dyn PartialReflect = &a; let a: &dyn PartialReflect = &a;


@ -297,7 +297,7 @@ mod tests {
#[test] #[test]
fn should_cast_mut() { fn should_cast_mut() {
let mut value: HashSet<i32> = HashSet::new(); let mut value: HashSet<i32> = HashSet::default();
let result = value.reflect_mut().as_set(); let result = value.reflect_mut().as_set();
assert!(result.is_ok()); assert!(result.is_ok());


@ -578,6 +578,7 @@ mod type_path;
mod type_registry; mod type_registry;
mod impls { mod impls {
mod foldhash;
mod std; mod std;
#[cfg(feature = "glam")] #[cfg(feature = "glam")]
@ -1158,11 +1159,11 @@ mod tests {
#[derive(Reflect, Eq, PartialEq, Debug)] #[derive(Reflect, Eq, PartialEq, Debug)]
struct Baz(String); struct Baz(String);
let mut hash_map = HashMap::default(); let mut hash_map = <HashMap<_, _>>::default();
hash_map.insert(1, 1); hash_map.insert(1, 1);
hash_map.insert(2, 2); hash_map.insert(2, 2);
let mut hash_map_baz = HashMap::default(); let mut hash_map_baz = <HashMap<_, _>>::default();
hash_map_baz.insert(1, Bar { x: 0 }); hash_map_baz.insert(1, Bar { x: 0 });
let mut foo = Foo { let mut foo = Foo {
@ -1227,12 +1228,12 @@ mod tests {
foo.apply(&foo_patch); foo.apply(&foo_patch);
let mut hash_map = HashMap::default(); let mut hash_map = <HashMap<_, _>>::default();
hash_map.insert(1, 1); hash_map.insert(1, 1);
hash_map.insert(2, 3); hash_map.insert(2, 3);
hash_map.insert(3, 4); hash_map.insert(3, 4);
let mut hash_map_baz = HashMap::default(); let mut hash_map_baz = <HashMap<_, _>>::default();
hash_map_baz.insert(1, Bar { x: 7 }); hash_map_baz.insert(1, Bar { x: 7 });
let expected_foo = Foo { let expected_foo = Foo {
@ -1251,7 +1252,7 @@ mod tests {
let new_foo = Foo::from_reflect(&foo_patch) let new_foo = Foo::from_reflect(&foo_patch)
.expect("error while creating a concrete type from a dynamic type"); .expect("error while creating a concrete type from a dynamic type");
let mut hash_map = HashMap::default(); let mut hash_map = <HashMap<_, _>>::default();
hash_map.insert(2, 3); hash_map.insert(2, 3);
hash_map.insert(3, 4); hash_map.insert(3, 4);
@ -1408,7 +1409,7 @@ mod tests {
x: u32, x: u32,
} }
let mut hash_map = HashMap::default(); let mut hash_map = <HashMap<_, _>>::default();
hash_map.insert(1, 1); hash_map.insert(1, 1);
hash_map.insert(2, 2); hash_map.insert(2, 2);
let foo = Foo { let foo = Foo {
@ -1497,7 +1498,8 @@ mod tests {
assert!(fields[0].reflect_partial_eq(&123_i32).unwrap_or_default()); assert!(fields[0].reflect_partial_eq(&123_i32).unwrap_or_default());
assert!(fields[1].reflect_partial_eq(&321_i32).unwrap_or_default()); assert!(fields[1].reflect_partial_eq(&321_i32).unwrap_or_default());
let mut map_value: Box<dyn Map> = Box::new(HashMap::from([(123_i32, 321_i32)])); let mut map_value: Box<dyn Map> =
Box::new([(123_i32, 321_i32)].into_iter().collect::<HashMap<_, _>>());
let fields = map_value.drain(); let fields = map_value.drain();
assert!(fields[0].0.reflect_partial_eq(&123_i32).unwrap_or_default()); assert!(fields[0].0.reflect_partial_eq(&123_i32).unwrap_or_default());
assert!(fields[0].1.reflect_partial_eq(&321_i32).unwrap_or_default()); assert!(fields[0].1.reflect_partial_eq(&321_i32).unwrap_or_default());
@ -1861,7 +1863,7 @@ mod tests {
assert_eq!(usize::type_path(), info.key_ty().path()); assert_eq!(usize::type_path(), info.key_ty().path());
assert_eq!(f32::type_path(), info.value_ty().path()); assert_eq!(f32::type_path(), info.value_ty().path());
let value: &dyn Reflect = &MyMap::new(); let value: &dyn Reflect = &MyMap::default();
let info = value.reflect_type_info(); let info = value.reflect_type_info();
assert!(info.is::<MyMap>()); assert!(info.is::<MyMap>());
@ -2160,7 +2162,7 @@ mod tests {
} }
} }
let mut map = HashMap::new(); let mut map = <HashMap<_, _>>::default();
map.insert(123, 1.23); map.insert(123, 1.23);
let test = Test { let test = Test {
@ -2474,7 +2476,7 @@ bevy_reflect::tests::Test {
// test reflected value // test reflected value
value: u32, value: u32,
} }
let mut map = HashMap::new(); let mut map = <HashMap<_, _>>::default();
map.insert(9, 10); map.insert(9, 10);
let mut test_struct: DynamicStruct = TestStruct { let mut test_struct: DynamicStruct = TestStruct {
tuple: (0, 1), tuple: (0, 1),


@ -3,11 +3,10 @@ use core::fmt::{Debug, Formatter};
use bevy_reflect_derive::impl_type_path; use bevy_reflect_derive::impl_type_path;
use bevy_utils::hashbrown::HashTable; use bevy_utils::hashbrown::HashTable;
use crate::generics::impl_generic_info_methods;
use crate::{ use crate::{
self as bevy_reflect, type_info::impl_type_methods, ApplyError, Generics, MaybeTyped, self as bevy_reflect, generics::impl_generic_info_methods, type_info::impl_type_methods,
PartialReflect, Reflect, ReflectKind, ReflectMut, ReflectOwned, ReflectRef, Type, TypeInfo, ApplyError, Generics, MaybeTyped, PartialReflect, Reflect, ReflectKind, ReflectMut,
TypePath, ReflectOwned, ReflectRef, Type, TypeInfo, TypePath,
}; };
use alloc::{boxed::Box, format, vec::Vec}; use alloc::{boxed::Box, format, vec::Vec};
@ -31,7 +30,7 @@ use alloc::{boxed::Box, format, vec::Vec};
/// ///
/// ``` /// ```
/// use bevy_reflect::{PartialReflect, Reflect, Map}; /// use bevy_reflect::{PartialReflect, Reflect, Map};
/// use bevy_utils::HashMap; /// use std::collections::HashMap;
/// ///
/// ///
/// let foo: &mut dyn Map = &mut HashMap::<u32, bool>::new(); /// let foo: &mut dyn Map = &mut HashMap::<u32, bool>::new();
@ -569,7 +568,7 @@ pub fn map_partial_eq<M: Map + ?Sized>(a: &M, b: &dyn PartialReflect) -> Option<
/// ///
/// # Example /// # Example
/// ``` /// ```
/// # use bevy_utils::HashMap; /// # use std::collections::HashMap;
/// use bevy_reflect::Reflect; /// use bevy_reflect::Reflect;
/// ///
/// let mut my_map = HashMap::new(); /// let mut my_map = HashMap::new();


@ -26,18 +26,20 @@ mod tuples;
mod tests { mod tests {
use bincode::Options; use bincode::Options;
use core::{any::TypeId, f32::consts::PI, ops::RangeInclusive}; use core::{any::TypeId, f32::consts::PI, ops::RangeInclusive};
use serde::de::IgnoredAny; use serde::{de::IgnoredAny, Deserializer};
use serde::Deserializer;
use serde::{de::DeserializeSeed, Deserialize}; use serde::{de::DeserializeSeed, Deserialize};
use bevy_utils::{HashMap, HashSet}; use bevy_utils::{HashMap, HashSet};
use crate::serde::ReflectDeserializerProcessor;
use crate::{self as bevy_reflect, TypeRegistration};
use crate::{ use crate::{
serde::{ReflectDeserializer, ReflectSerializer, TypedReflectDeserializer}, self as bevy_reflect,
DynamicEnum, FromReflect, PartialReflect, Reflect, ReflectDeserialize, TypeRegistry, serde::{
ReflectDeserializer, ReflectDeserializerProcessor, ReflectSerializer,
TypedReflectDeserializer,
},
DynamicEnum, FromReflect, PartialReflect, Reflect, ReflectDeserialize, TypeRegistration,
TypeRegistry,
}; };
#[derive(Reflect, Debug, PartialEq)] #[derive(Reflect, Debug, PartialEq)]
@ -148,10 +150,10 @@ mod tests {
} }
fn get_my_struct() -> MyStruct { fn get_my_struct() -> MyStruct {
let mut map = HashMap::new(); let mut map = <HashMap<_, _>>::default();
map.insert(64, 32); map.insert(64, 32);
let mut set = HashSet::new(); let mut set = <HashSet<_>>::default();
set.insert(64); set.insert(64);
MyStruct { MyStruct {


@ -26,8 +26,7 @@ mod tests {
PartialReflect, Reflect, ReflectSerialize, Struct, TypeRegistry, PartialReflect, Reflect, ReflectSerialize, Struct, TypeRegistry,
}; };
use bevy_utils::{HashMap, HashSet}; use bevy_utils::{HashMap, HashSet};
use core::any::TypeId; use core::{any::TypeId, f32::consts::PI, ops::RangeInclusive};
use core::{f32::consts::PI, ops::RangeInclusive};
use ron::{extensions::Extensions, ser::PrettyConfig}; use ron::{extensions::Extensions, ser::PrettyConfig};
use serde::{Serialize, Serializer}; use serde::{Serialize, Serializer};
@ -128,10 +127,10 @@ mod tests {
} }
fn get_my_struct() -> MyStruct { fn get_my_struct() -> MyStruct {
let mut map = HashMap::new(); let mut map = <HashMap<_, _>>::default();
map.insert(64, 32); map.insert(64, 32);
let mut set = HashSet::new(); let mut set = <HashSet<_>>::default();
set.insert(64); set.insert(64);
MyStruct { MyStruct {


@ -4,11 +4,10 @@ use core::fmt::{Debug, Formatter};
use bevy_reflect_derive::impl_type_path; use bevy_reflect_derive::impl_type_path;
use bevy_utils::hashbrown::{hash_table::OccupiedEntry as HashTableOccupiedEntry, HashTable}; use bevy_utils::hashbrown::{hash_table::OccupiedEntry as HashTableOccupiedEntry, HashTable};
use crate::generics::impl_generic_info_methods;
use crate::{ use crate::{
self as bevy_reflect, hash_error, type_info::impl_type_methods, ApplyError, Generics, self as bevy_reflect, generics::impl_generic_info_methods, hash_error,
PartialReflect, Reflect, ReflectKind, ReflectMut, ReflectOwned, ReflectRef, Type, TypeInfo, type_info::impl_type_methods, ApplyError, Generics, PartialReflect, Reflect, ReflectKind,
TypePath, ReflectMut, ReflectOwned, ReflectRef, Type, TypeInfo, TypePath,
}; };
/// A trait used to power [set-like] operations via [reflection]. /// A trait used to power [set-like] operations via [reflection].
@ -31,7 +30,7 @@ use crate::{
/// ///
/// ``` /// ```
/// use bevy_reflect::{PartialReflect, Set}; /// use bevy_reflect::{PartialReflect, Set};
/// use bevy_utils::HashSet; /// use std::collections::HashSet;
/// ///
/// ///
/// let foo: &mut dyn Set = &mut HashSet::<u32>::new(); /// let foo: &mut dyn Set = &mut HashSet::<u32>::new();
@ -432,7 +431,7 @@ pub fn set_partial_eq<M: Set>(a: &M, b: &dyn PartialReflect) -> Option<bool> {
/// ///
/// # Example /// # Example
/// ``` /// ```
/// # use bevy_utils::HashSet; /// # use std::collections::HashSet;
/// use bevy_reflect::Reflect; /// use bevy_reflect::Reflect;
/// ///
/// let mut my_set = HashSet::new(); /// let mut my_set = HashSet::new();


@ -2,7 +2,7 @@
use crate::TypeInfo; use crate::TypeInfo;
use alloc::boxed::Box; use alloc::boxed::Box;
use bevy_utils::{FixedState, NoOpHash, TypeIdMap}; use bevy_utils::{DefaultHasher, FixedHasher, NoOpHash, TypeIdMap};
use core::{ use core::{
any::{Any, TypeId}, any::{Any, TypeId},
hash::BuildHasher, hash::BuildHasher,
@ -315,6 +315,6 @@ impl<T: TypedProperty> Default for GenericTypeCell<T> {
/// ///
/// [`Reflect::reflect_hash`]: crate::Reflect /// [`Reflect::reflect_hash`]: crate::Reflect
#[inline] #[inline]
pub fn reflect_hasher() -> bevy_utils::AHasher { pub fn reflect_hasher() -> DefaultHasher {
FixedState.build_hasher() FixedHasher.build_hasher()
} }
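A small usage sketch of the updated helper (assuming the public `bevy_reflect::utility` path), mirroring how `reflect_hash` implementations drive it:

use bevy_reflect::utility::reflect_hasher;
use core::hash::{Hash, Hasher};

fn hash_reflected<T: Hash>(value: &T) -> u64 {
    // Deterministic: the hasher is built from the fixed-seed state above.
    let mut hasher = reflect_hasher();
    value.hash(&mut hasher);
    hasher.finish()
}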


@ -364,7 +364,7 @@ pub fn process_remote_get_watching_request(
let mut changed = Vec::new(); let mut changed = Vec::new();
let mut removed = Vec::new(); let mut removed = Vec::new();
let mut errors = HashMap::new(); let mut errors = <HashMap<_, _>>::default();
'component_loop: for component_path in components { 'component_loop: for component_path in components {
let Ok(type_registration) = let Ok(type_registration) =
@ -847,7 +847,7 @@ fn build_components_map<'a>(
paths_and_reflect_components: impl Iterator<Item = (&'a str, &'a ReflectComponent)>, paths_and_reflect_components: impl Iterator<Item = (&'a str, &'a ReflectComponent)>,
type_registry: &TypeRegistry, type_registry: &TypeRegistry,
) -> AnyhowResult<HashMap<String, Value>> { ) -> AnyhowResult<HashMap<String, Value>> {
let mut serialized_components_map = HashMap::new(); let mut serialized_components_map = <HashMap<_, _>>::default();
for (type_path, reflect_component) in paths_and_reflect_components { for (type_path, reflect_component) in paths_and_reflect_components {
let Some(reflected) = reflect_component.reflect(entity_ref.clone()) else { let Some(reflected) = reflect_component.reflect(entity_ref.clone()) else {
@ -873,7 +873,7 @@ fn build_has_map<'a>(
entity_ref: FilteredEntityRef, entity_ref: FilteredEntityRef,
paths_and_reflect_components: impl Iterator<Item = (&'a str, &'a ReflectComponent)>, paths_and_reflect_components: impl Iterator<Item = (&'a str, &'a ReflectComponent)>,
) -> HashMap<String, Value> { ) -> HashMap<String, Value> {
let mut has_map = HashMap::new(); let mut has_map = <HashMap<_, _>>::default();
for (type_path, reflect_component) in paths_and_reflect_components { for (type_path, reflect_component) in paths_and_reflect_components {
let has = reflect_component.contains(entity_ref.clone()); let has = reflect_component.contains(entity_ref.clone());


@ -17,23 +17,25 @@ use async_io::Async;
use bevy_app::{App, Plugin, Startup}; use bevy_app::{App, Plugin, Startup};
use bevy_ecs::system::{Res, Resource}; use bevy_ecs::system::{Res, Resource};
use bevy_tasks::{futures_lite::StreamExt, IoTaskPool}; use bevy_tasks::{futures_lite::StreamExt, IoTaskPool};
use core::net::{IpAddr, Ipv4Addr};
use core::{ use core::{
convert::Infallible, convert::Infallible,
net::{IpAddr, Ipv4Addr},
pin::Pin, pin::Pin,
task::{Context, Poll}, task::{Context, Poll},
}; };
use http_body_util::{BodyExt as _, Full}; use http_body_util::{BodyExt as _, Full};
use hyper::header::{HeaderName, HeaderValue};
use hyper::{ use hyper::{
body::{Body, Bytes, Frame, Incoming}, body::{Body, Bytes, Frame, Incoming},
header::{HeaderName, HeaderValue},
server::conn::http1, server::conn::http1,
service, Request, Response, service, Request, Response,
}; };
use serde_json::Value; use serde_json::Value;
use smol_hyper::rt::{FuturesIo, SmolTimer}; use smol_hyper::rt::{FuturesIo, SmolTimer};
use std::collections::HashMap; use std::{
use std::net::{TcpListener, TcpStream}; collections::HashMap,
net::{TcpListener, TcpStream},
};
/// The default port that Bevy will listen on. /// The default port that Bevy will listen on.
/// ///
@ -57,7 +59,7 @@ impl Headers {
/// Create a new instance of `Headers`. /// Create a new instance of `Headers`.
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
headers: HashMap::new(), headers: HashMap::default(),
} }
} }


@ -887,7 +887,7 @@ pub fn camera_system<T: CameraProjection + Component<Mutability = Mutable>>(
) { ) {
let primary_window = primary_window.iter().next(); let primary_window = primary_window.iter().next();
let mut changed_window_ids = HashSet::new(); let mut changed_window_ids = <HashSet<_>>::default();
changed_window_ids.extend(window_created_events.read().map(|event| event.window)); changed_window_ids.extend(window_created_events.read().map(|event| event.window));
changed_window_ids.extend(window_resized_events.read().map(|event| event.window)); changed_window_ids.extend(window_resized_events.read().map(|event| event.window));
let scale_factor_changed_window_ids: HashSet<_> = window_scale_factor_changed_events let scale_factor_changed_window_ids: HashSet<_> = window_scale_factor_changed_events
@ -926,7 +926,9 @@ pub fn camera_system<T: CameraProjection + Component<Mutability = Mutable>>(
// This can happen when the window is moved between monitors with different DPIs. // This can happen when the window is moved between monitors with different DPIs.
// Without this, the viewport will take a smaller portion of the window moved to // Without this, the viewport will take a smaller portion of the window moved to
// a higher DPI monitor. // a higher DPI monitor.
if normalized_target.is_changed(&scale_factor_changed_window_ids, &HashSet::new()) { if normalized_target
.is_changed(&scale_factor_changed_window_ids, &HashSet::default())
{
if let (Some(new_scale_factor), Some(old_scale_factor)) = ( if let (Some(new_scale_factor), Some(old_scale_factor)) = (
new_computed_target_info new_computed_target_info
.as_ref() .as_ref()
@ -1199,8 +1201,8 @@ pub fn sort_cameras(
ord => ord, ord => ord,
}); });
let mut previous_order_target = None; let mut previous_order_target = None;
let mut ambiguities = HashSet::new(); let mut ambiguities = <HashSet<_>>::default();
let mut target_counts = HashMap::new(); let mut target_counts = <HashMap<_, _>>::default();
for sorted_camera in &mut sorted_cameras.0 { for sorted_camera in &mut sorted_cameras.0 {
let new_order_target = (sorted_camera.order, sorted_camera.target.clone()); let new_order_target = (sorted_camera.order, sorted_camera.target.clone());
if let Some(previous_order_target) = previous_order_target { if let Some(previous_order_target) = previous_order_target {


@ -32,7 +32,7 @@ impl Node for CameraDriverNode {
) -> Result<(), NodeRunError> { ) -> Result<(), NodeRunError> {
let sorted_cameras = world.resource::<SortedCameras>(); let sorted_cameras = world.resource::<SortedCameras>();
let windows = world.resource::<ExtractedWindows>(); let windows = world.resource::<ExtractedWindows>();
let mut camera_windows = HashSet::new(); let mut camera_windows = <HashSet<_>>::default();
for sorted_camera in &sorted_cameras.0 { for sorted_camera in &sorted_cameras.0 {
let Ok(camera) = self.cameras.get_manual(world, sorted_camera.entity) else { let Ok(camera) = self.cameras.get_manual(world, sorted_camera.entity) else {
continue; continue;


@ -15,11 +15,7 @@ use bevy_ecs::{
system::{Res, ResMut, Resource}, system::{Res, ResMut, Resource},
world::{FromWorld, World}, world::{FromWorld, World},
}; };
use bevy_utils::{ use bevy_utils::{default, tracing::error, HashMap, HashSet};
default,
hashbrown::{HashMap, HashSet},
tracing::error,
};
use offset_allocator::{Allocation, Allocator}; use offset_allocator::{Allocation, Allocator};
use wgpu::{ use wgpu::{
BufferDescriptor, BufferSize, BufferUsages, CommandEncoderDescriptor, DownlevelFlags, BufferDescriptor, BufferSize, BufferUsages, CommandEncoderDescriptor, DownlevelFlags,
@ -329,10 +325,10 @@ impl FromWorld for MeshAllocator {
.contains(DownlevelFlags::BASE_VERTEX); .contains(DownlevelFlags::BASE_VERTEX);
Self { Self {
slabs: HashMap::new(), slabs: HashMap::default(),
slab_layouts: HashMap::new(), slab_layouts: HashMap::default(),
mesh_id_to_vertex_slab: HashMap::new(), mesh_id_to_vertex_slab: HashMap::default(),
mesh_id_to_index_slab: HashMap::new(), mesh_id_to_index_slab: HashMap::default(),
next_slab_id: default(), next_slab_id: default(),
general_vertex_slabs_supported, general_vertex_slabs_supported,
} }
@ -600,7 +596,7 @@ impl MeshAllocator {
} }
fn free_meshes(&mut self, extracted_meshes: &ExtractedAssets<RenderMesh>) { fn free_meshes(&mut self, extracted_meshes: &ExtractedAssets<RenderMesh>) {
let mut empty_slabs = HashSet::new(); let mut empty_slabs = <HashSet<_>>::default();
for mesh_id in &extracted_meshes.removed { for mesh_id in &extracted_meshes.removed {
if let Some(slab_id) = self.mesh_id_to_vertex_slab.remove(mesh_id) { if let Some(slab_id) = self.mesh_id_to_vertex_slab.remove(mesh_id) {
self.free_allocation_in_slab(mesh_id, slab_id, &mut empty_slabs); self.free_allocation_in_slab(mesh_id, slab_id, &mut empty_slabs);
@ -881,8 +877,8 @@ impl GeneralSlab {
let mut new_slab = GeneralSlab { let mut new_slab = GeneralSlab {
allocator: Allocator::new(slab_slot_capacity), allocator: Allocator::new(slab_slot_capacity),
buffer: None, buffer: None,
resident_allocations: HashMap::new(), resident_allocations: HashMap::default(),
pending_allocations: HashMap::new(), pending_allocations: HashMap::default(),
element_layout: layout, element_layout: layout,
slot_capacity: slab_slot_capacity, slot_capacity: slab_slot_capacity,
}; };


@ -233,8 +233,8 @@ pub(crate) fn extract_render_asset<A: RenderAsset>(
|world, mut cached_state: Mut<CachedExtractRenderAssetSystemState<A>>| { |world, mut cached_state: Mut<CachedExtractRenderAssetSystemState<A>>| {
let (mut events, mut assets) = cached_state.state.get_mut(world); let (mut events, mut assets) = cached_state.state.get_mut(world);
let mut changed_assets = HashSet::default(); let mut changed_assets = <HashSet<_>>::default();
let mut removed = HashSet::default(); let mut removed = <HashSet<_>>::default();
for event in events.read() { for event in events.read() {
#[allow(clippy::match_same_arms)] #[allow(clippy::match_same_arms)]
@ -254,7 +254,7 @@ pub(crate) fn extract_render_asset<A: RenderAsset>(
} }
let mut extracted_assets = Vec::new(); let mut extracted_assets = Vec::new();
let mut added = HashSet::new(); let mut added = <HashSet<_>>::default();
for id in changed_assets.drain() { for id in changed_assets.drain() {
if let Some(asset) = assets.get(id) { if let Some(asset) = assets.get(id) {
let asset_usage = A::asset_usage(asset); let asset_usage = A::asset_usage(asset);


@ -10,7 +10,7 @@ use bevy_utils::{
default, default,
hashbrown::hash_map::{RawEntryMut, VacantEntry}, hashbrown::hash_map::{RawEntryMut, VacantEntry},
tracing::error, tracing::error,
Entry, HashMap, Entry, FixedHasher, HashMap,
}; };
use core::{fmt::Debug, hash::Hash}; use core::{fmt::Debug, hash::Hash};
use thiserror::Error; use thiserror::Error;
@ -132,7 +132,11 @@ impl<S: SpecializedMeshPipeline> SpecializedMeshPipelines<S> {
specialize_pipeline: &S, specialize_pipeline: &S,
key: S::Key, key: S::Key,
layout: &MeshVertexBufferLayoutRef, layout: &MeshVertexBufferLayoutRef,
entry: VacantEntry<(MeshVertexBufferLayoutRef, S::Key), CachedRenderPipelineId>, entry: VacantEntry<
(MeshVertexBufferLayoutRef, S::Key),
CachedRenderPipelineId,
FixedHasher,
>,
) -> Result<CachedRenderPipelineId, SpecializedMeshPipelineError> ) -> Result<CachedRenderPipelineId, SpecializedMeshPipelineError>
where where
S: SpecializedMeshPipeline, S: SpecializedMeshPipeline,
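The extra `FixedHasher` parameter is needed because hashbrown's entry types name the map's hasher, and bevy_utils no longer leans on hashbrown's built-in default. A hypothetical signature for illustration:

use bevy_utils::hashbrown::hash_map::VacantEntry;
use bevy_utils::FixedHasher;

// Without the third parameter this would not type-check against an
// entry taken from a `bevy_utils::HashMap`.
fn fill_vacant(entry: VacantEntry<String, u32, FixedHasher>) -> u32 {
    *entry.insert(42)
}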


@ -923,7 +923,7 @@ pub fn prepare_view_targets(
)>, )>,
view_target_attachments: Res<ViewTargetAttachments>, view_target_attachments: Res<ViewTargetAttachments>,
) { ) {
let mut textures = HashMap::default(); let mut textures = <HashMap<_, _>>::default();
for (entity, camera, view, texture_usage, msaa) in cameras.iter() { for (entity, camera, view, texture_usage, msaa) in cameras.iter() {
let (Some(target_size), Some(target)) = (camera.physical_target_size, &camera.target) let (Some(target_size), Some(target)) = (camera.physical_target_size, &camera.target)
else { else {


@ -47,7 +47,7 @@ impl SceneFilter {
/// ///
/// [`Denylist`]: SceneFilter::Denylist /// [`Denylist`]: SceneFilter::Denylist
pub fn allow_all() -> Self { pub fn allow_all() -> Self {
Self::Denylist(HashSet::new()) Self::Denylist(HashSet::default())
} }
/// Creates a filter where all types are denied. /// Creates a filter where all types are denied.
@ -56,7 +56,7 @@ impl SceneFilter {
/// ///
/// [`Allowlist`]: SceneFilter::Allowlist /// [`Allowlist`]: SceneFilter::Allowlist
pub fn deny_all() -> Self { pub fn deny_all() -> Self {
Self::Allowlist(HashSet::new()) Self::Allowlist(HashSet::default())
} }
/// Allow the given type, `T`. /// Allow the given type, `T`.
@ -88,7 +88,7 @@ impl SceneFilter {
pub fn allow_by_id(mut self, type_id: TypeId) -> Self { pub fn allow_by_id(mut self, type_id: TypeId) -> Self {
match &mut self { match &mut self {
Self::Unset => { Self::Unset => {
self = Self::Allowlist(HashSet::from([type_id])); self = Self::Allowlist([type_id].into_iter().collect());
} }
Self::Allowlist(list) => { Self::Allowlist(list) => {
list.insert(type_id); list.insert(type_id);
@ -128,7 +128,7 @@ impl SceneFilter {
#[must_use] #[must_use]
pub fn deny_by_id(mut self, type_id: TypeId) -> Self { pub fn deny_by_id(mut self, type_id: TypeId) -> Self {
match &mut self { match &mut self {
Self::Unset => self = Self::Denylist(HashSet::from([type_id])), Self::Unset => self = Self::Denylist([type_id].into_iter().collect()),
Self::Allowlist(list) => { Self::Allowlist(list) => {
list.remove(&type_id); list.remove(&type_id);
} }
@ -222,7 +222,7 @@ impl IntoIterator for SceneFilter {
fn into_iter(self) -> Self::IntoIter { fn into_iter(self) -> Self::IntoIter {
match self { match self {
Self::Unset => HashSet::new().into_iter(), Self::Unset => Default::default(),
Self::Allowlist(list) | Self::Denylist(list) => list.into_iter(), Self::Allowlist(list) | Self::Denylist(list) => list.into_iter(),
} }
} }
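The `HashSet::from([…])` constructor is tied to a default hasher that Bevy no longer enables, so sets built on `FixedHasher` collect from an iterator instead, as above. A minimal sketch:

use bevy_utils::HashSet;
use core::any::TypeId;

fn single(type_id: TypeId) -> HashSet<TypeId> {
    // `collect` only needs `FixedHasher: Default`.
    [type_id].into_iter().collect()
}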


@ -319,7 +319,7 @@ impl SceneSpawner {
let spawned = self let spawned = self
.spawned_dynamic_scenes .spawned_dynamic_scenes
.entry(handle.id()) .entry(handle.id())
.or_insert_with(HashSet::new); .or_insert_with(HashSet::default);
spawned.insert(instance_id); spawned.insert(instance_id);
// Scenes with parents need more setup before they are ready. // Scenes with parents need more setup before they are ready.
@ -426,7 +426,7 @@ impl SceneSpawner {
pub fn scene_spawner_system(world: &mut World) { pub fn scene_spawner_system(world: &mut World) {
world.resource_scope(|world, mut scene_spawner: Mut<SceneSpawner>| { world.resource_scope(|world, mut scene_spawner: Mut<SceneSpawner>| {
// remove any loading instances where parent is deleted // remove any loading instances where parent is deleted
let mut dead_instances = HashSet::default(); let mut dead_instances = <HashSet<_>>::default();
scene_spawner scene_spawner
.scenes_with_parent .scenes_with_parent
.retain(|(instance, parent)| { .retain(|(instance, parent)| {


@ -476,7 +476,7 @@ impl<'a, 'de> Visitor<'de> for SceneMapVisitor<'a> {
where where
A: MapAccess<'de>, A: MapAccess<'de>,
{ {
let mut added = HashSet::new(); let mut added = <HashSet<_>>::default();
let mut entries = Vec::new(); let mut entries = Vec::new();
while let Some(registration) = while let Some(registration) =
map.next_key_seed(TypeRegistrationDeserializer::new(self.registry))? map.next_key_seed(TypeRegistrationDeserializer::new(self.registry))?


@ -271,7 +271,7 @@ impl<'a> TextureAtlasBuilder<'a> {
let rect_placements = rect_placements.ok_or(TextureAtlasBuilderError::NotEnoughSpace)?; let rect_placements = rect_placements.ok_or(TextureAtlasBuilderError::NotEnoughSpace)?;
let mut texture_rects = Vec::with_capacity(rect_placements.packed_locations().len()); let mut texture_rects = Vec::with_capacity(rect_placements.packed_locations().len());
let mut texture_ids = HashMap::default(); let mut texture_ids = <HashMap<_, _>>::default();
// We iterate through the textures to place to respect the insertion order for the texture indices // We iterate through the textures to place to respect the insertion order for the texture indices
for (index, (image_id, texture)) in self.textures_to_place.iter().enumerate() { for (index, (image_id, texture)) in self.textures_to_place.iter().enumerate() {
let (_, packed_location) = rect_placements.packed_locations().get(&index).unwrap(); let (_, packed_location) = rect_placements.packed_locations().get(&index).unwrap();


@ -710,7 +710,7 @@ mod tests {
ui_child_entities.len() ui_child_entities.len()
); );
let child_node_map = HashMap::from_iter( let child_node_map = <HashMap<_, _>>::from_iter(
ui_child_entities ui_child_entities
.iter() .iter()
.map(|child_entity| (*child_entity, ui_surface.entity_to_taffy[child_entity])), .map(|child_entity| (*child_entity, ui_surface.entity_to_taffy[child_entity])),


@ -29,7 +29,7 @@ use bevy_ecs::{prelude::*, query::QueryData};
use bevy_math::{Rect, Vec2}; use bevy_math::{Rect, Vec2};
use bevy_render::prelude::*; use bevy_render::prelude::*;
use bevy_transform::prelude::*; use bevy_transform::prelude::*;
use bevy_utils::hashbrown::HashMap; use bevy_utils::HashMap;
use bevy_window::PrimaryWindow; use bevy_window::PrimaryWindow;
use bevy_picking::backend::prelude::*; use bevy_picking::backend::prelude::*;
@ -70,7 +70,7 @@ pub fn ui_picking(
mut output: EventWriter<PointerHits>, mut output: EventWriter<PointerHits>,
) { ) {
// For each camera, the pointer and its position // For each camera, the pointer and its position
let mut pointer_pos_by_camera = HashMap::<Entity, HashMap<PointerId, Vec2>>::new(); let mut pointer_pos_by_camera = HashMap::<Entity, HashMap<PointerId, Vec2>>::default();
for (pointer_id, pointer_location) in for (pointer_id, pointer_location) in
pointers.iter().filter_map(|(pointer, pointer_location)| { pointers.iter().filter_map(|(pointer, pointer_location)| {
@ -107,7 +107,7 @@ pub fn ui_picking(
} }
// The list of node entities hovered for each (camera, pointer) combo // The list of node entities hovered for each (camera, pointer) combo
let mut hit_nodes = HashMap::<(Entity, PointerId), Vec<Entity>>::new(); let mut hit_nodes = HashMap::<(Entity, PointerId), Vec<Entity>>::default();
// prepare an iterator that contains all the nodes that have the cursor in their rect, // prepare an iterator that contains all the nodes that have the cursor in their rect,
// from the top node to the bottom one. this will also reset the interaction to `None` // from the top node to the bottom one. this will also reset the interaction to `None`


@ -62,7 +62,7 @@ pub fn ui_stack_system(
maybe_zindex.map(|zindex| zindex.0).unwrap_or(0), maybe_zindex.map(|zindex| zindex.0).unwrap_or(0),
), ),
)); ));
visited_root_nodes.insert_unique_unchecked(id); visited_root_nodes.insert(id);
} }
for (id, global_zindex, maybe_zindex) in zindex_global_node_query.iter() { for (id, global_zindex, maybe_zindex) in zindex_global_node_query.iter() {


@ -146,7 +146,7 @@ pub fn update_target_camera_system(
) { ) {
// Track updated entities to prevent redundant updates, as `Commands` changes are deferred, // Track updated entities to prevent redundant updates, as `Commands` changes are deferred,
// and updates done for changed_children_query can overlap with itself or with root_node_query // and updates done for changed_children_query can overlap with itself or with root_node_query
let mut updated_entities = HashSet::new(); let mut updated_entities = <HashSet<_>>::default();
// Assuming that TargetCamera is manually set on the root node only, // Assuming that TargetCamera is manually set on the root node only,
// update root nodes first, since it implies the biggest change // update root nodes first, since it implies the biggest change


@ -10,22 +10,18 @@ keywords = ["bevy"]
[features] [features]
default = ["std", "serde"] default = ["std", "serde"]
std = [ std = ["alloc", "tracing/std", "foldhash/std", "dep:thread_local"]
"alloc", alloc = ["hashbrown"]
"tracing/std", detailed_trace = []
"ahash/std",
"dep:thread_local",
"ahash/runtime-rng",
]
alloc = ["hashbrown/default"]
serde = ["hashbrown/serde"] serde = ["hashbrown/serde"]
[dependencies] [dependencies]
ahash = { version = "0.8.7", default-features = false, features = [ foldhash = { version = "0.1.3", default-features = false }
"compile-time-rng",
] }
tracing = { version = "0.1", default-features = false } tracing = { version = "0.1", default-features = false }
hashbrown = { version = "0.14.2", default-features = false } hashbrown = { version = "0.15.1", features = [
"equivalent",
"raw-entry",
], optional = true, default-features = false }
thread_local = { version = "1.0", optional = true } thread_local = { version = "1.0", optional = true }
[dev-dependencies] [dev-dependencies]


@ -35,8 +35,28 @@ mod once;
mod parallel_queue; mod parallel_queue;
mod time; mod time;
pub use ahash::{AHasher, RandomState}; /// For when you want a deterministic hasher.
///
/// Seed was randomly generated with a fair dice roll. Guaranteed to be random:
/// <https://github.com/bevyengine/bevy/pull/1268/files#r560918426>
const FIXED_HASHER: FixedState =
FixedState::with_seed(0b1001010111101110000001001100010000000011001001101011001001111000);
/// Deterministic hasher based upon a random but fixed state.
#[derive(Copy, Clone, Default, Debug)]
pub struct FixedHasher;
impl BuildHasher for FixedHasher {
type Hasher = DefaultHasher;
#[inline]
fn build_hasher(&self) -> Self::Hasher {
FIXED_HASHER.build_hasher()
}
}
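A quick sanity check of the determinism claim, using the `BuildHasher::hash_one` convenience from core:

use bevy_utils::FixedHasher;
use core::hash::BuildHasher;

fn main() {
    // Two independently built hashers agree, within and across runs
    // of the same Bevy version on the same device.
    assert_eq!(FixedHasher.hash_one("bevy"), FixedHasher.hash_one("bevy"));
}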
pub use default::default; pub use default::default;
pub use foldhash::fast::{FixedState, FoldHasher as DefaultHasher, RandomState};
#[cfg(feature = "alloc")]
pub use hashbrown; pub use hashbrown;
#[cfg(feature = "std")] #[cfg(feature = "std")]
pub use parallel_queue::*; pub use parallel_queue::*;
@ -46,15 +66,15 @@ pub use tracing;
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
use alloc::boxed::Box; use alloc::boxed::Box;
#[cfg(feature = "alloc")]
use core::any::TypeId;
use core::{ use core::{
any::TypeId,
fmt::Debug, fmt::Debug,
hash::{BuildHasher, BuildHasherDefault, Hash, Hasher}, hash::{BuildHasher, Hash, Hasher},
marker::PhantomData, marker::PhantomData,
mem::ManuallyDrop, mem::ManuallyDrop,
ops::Deref, ops::Deref,
}; };
use hashbrown::hash_map::RawEntryMut;
#[cfg(not(target_arch = "wasm32"))] #[cfg(not(target_arch = "wasm32"))]
mod conditional_send { mod conditional_send {
@ -83,70 +103,60 @@ impl<T: core::future::Future + ConditionalSend> ConditionalSendFuture for T {}
pub type BoxedFuture<'a, T> = core::pin::Pin<Box<dyn ConditionalSendFuture<Output = T> + 'a>>; pub type BoxedFuture<'a, T> = core::pin::Pin<Box<dyn ConditionalSendFuture<Output = T> + 'a>>;
/// A shortcut alias for [`hashbrown::hash_map::Entry`]. /// A shortcut alias for [`hashbrown::hash_map::Entry`].
pub type Entry<'a, K, V, S = BuildHasherDefault<AHasher>> = hashbrown::hash_map::Entry<'a, K, V, S>; #[cfg(feature = "alloc")]
pub type Entry<'a, K, V, S = FixedHasher> = hashbrown::hash_map::Entry<'a, K, V, S>;
/// A hasher builder that will create a fixed hasher. /// A [`HashMap`][hashbrown::HashMap] implementing a high
#[derive(Debug, Clone, Default)]
pub struct FixedState;
impl BuildHasher for FixedState {
type Hasher = AHasher;
#[inline]
fn build_hasher(&self) -> AHasher {
RandomState::with_seeds(
0b10010101111011100000010011000100,
0b00000011001001101011001001111000,
0b11001111011010110111100010110101,
0b00000100001111100011010011010101,
)
.build_hasher()
}
}
/// A [`HashMap`][hashbrown::HashMap] implementing aHash, a high
/// speed keyed hashing algorithm intended for use in in-memory hashmaps. /// speed keyed hashing algorithm intended for use in in-memory hashmaps.
/// ///
/// aHash is designed for performance and is NOT cryptographically secure. /// The hashing algorithm is designed for performance
/// and is NOT cryptographically secure.
/// ///
/// Within the same execution of the program iteration order of different /// Within the same execution of the program iteration order of different
/// `HashMap`s only depends on the order of insertions and deletions, /// `HashMap`s only depends on the order of insertions and deletions,
/// but it will not be stable between multiple executions of the program. /// but it will not be stable between multiple executions of the program.
pub type HashMap<K, V> = hashbrown::HashMap<K, V, BuildHasherDefault<AHasher>>; #[cfg(feature = "alloc")]
pub type HashMap<K, V, S = FixedHasher> = hashbrown::HashMap<K, V, S>;
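Because the alias now takes `S = FixedHasher` as a defaulted parameter, existing code keeps compiling while the hasher stays swappable at the type level. A short sketch:

use bevy_utils::{HashMap, PassHash};

fn demo() {
    // Default hasher comes from the alias.
    let mut plain: HashMap<u32, &str> = HashMap::default();
    plain.insert(1, "one");

    // Same alias with an explicit hasher; `PassHash` expects keys that
    // already are high-quality hashes, such as a `u64`.
    let mut pass: HashMap<u64, &str, PassHash> = HashMap::default();
    pass.insert(0xDEAD_BEEF, "pre-hashed");
}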
/// A stable hash map implementing aHash, a high speed keyed hashing algorithm /// A stable hash map implementing a high speed keyed hashing algorithm
/// intended for use in in-memory hashmaps. /// intended for use in in-memory hashmaps.
/// ///
/// Unlike [`HashMap`] the iteration order stability extends between executions /// Unlike [`HashMap`] the iteration order stability extends between executions
/// using the same Bevy version on the same device. /// using the same Bevy version on the same device.
/// ///
/// aHash is designed for performance and is NOT cryptographically secure. /// The hashing algorithm is designed for performance
/// and is NOT cryptographically secure.
#[deprecated( #[deprecated(
note = "Will be required to use the hash library of your choice. Alias for: hashbrown::HashMap<K, V, FixedState>" note = "Will be required to use the hash library of your choice. Alias for: hashbrown::HashMap<K, V, FixedHasher>"
)] )]
pub type StableHashMap<K, V> = hashbrown::HashMap<K, V, FixedState>; #[cfg(feature = "alloc")]
pub type StableHashMap<K, V> = hashbrown::HashMap<K, V, FixedHasher>;
/// A [`HashSet`][hashbrown::HashSet] implementing aHash, a high /// A [`HashSet`][hashbrown::HashSet] implementing a high
/// speed keyed hashing algorithm intended for use in in-memory hashmaps. /// speed keyed hashing algorithm intended for use in in-memory hashmaps.
/// ///
/// aHash is designed for performance and is NOT cryptographically secure. /// The hashing algorithm is designed for performance
/// and is NOT cryptographically secure.
/// ///
/// Within the same execution of the program iteration order of different /// Within the same execution of the program iteration order of different
/// `HashSet`s only depends on the order of insertions and deletions, /// `HashSet`s only depends on the order of insertions and deletions,
/// but it will not be stable between multiple executions of the program. /// but it will not be stable between multiple executions of the program.
pub type HashSet<K> = hashbrown::HashSet<K, BuildHasherDefault<AHasher>>; #[cfg(feature = "alloc")]
pub type HashSet<K, S = FixedHasher> = hashbrown::HashSet<K, S>;
/// A stable hash set implementing aHash, a high speed keyed hashing algorithm /// A stable hash set using a high speed keyed hashing algorithm
/// intended for use in in-memory hashmaps. /// intended for use in in-memory hashmaps.
/// ///
/// Unlike [`HashMap`] the iteration order stability extends between executions /// Unlike [`HashMap`] the iteration order stability extends between executions
/// using the same Bevy version on the same device. /// using the same Bevy version on the same device.
/// ///
/// aHash is designed for performance and is NOT cryptographically secure. /// The hashing algorithm is designed for performance
/// and is NOT cryptographically secure.
#[deprecated( #[deprecated(
note = "Will be required to use the hash library of your choice. Alias for: hashbrown::HashSet<K, FixedState>" note = "Will be required to use the hash library of your choice. Alias for: hashbrown::HashSet<K, FixedHasher>"
)] )]
pub type StableHashSet<K> = hashbrown::HashSet<K, FixedState>; #[cfg(feature = "alloc")]
pub type StableHashSet<K> = hashbrown::HashSet<K, FixedHasher>;
/// A pre-hashed value of a specific type. Pre-hashing enables memoization of hashes that are expensive to compute. /// A pre-hashed value of a specific type. Pre-hashing enables memoization of hashes that are expensive to compute.
/// ///
@ -154,10 +164,10 @@ pub type StableHashSet<K> = hashbrown::HashSet<K, FixedState>;
/// See [`PassHash`] and [`PassHasher`] for a "pass through" [`BuildHasher`] and [`Hasher`] implementation /// See [`PassHash`] and [`PassHasher`] for a "pass through" [`BuildHasher`] and [`Hasher`] implementation
/// designed to work with [`Hashed`] /// designed to work with [`Hashed`]
/// See [`PreHashMap`] for a hashmap pre-configured to use [`Hashed`] keys. /// See [`PreHashMap`] for a hashmap pre-configured to use [`Hashed`] keys.
pub struct Hashed<V, H = FixedState> { pub struct Hashed<V, S = FixedHasher> {
hash: u64, hash: u64,
value: V, value: V,
marker: PhantomData<H>, marker: PhantomData<S>,
} }
impl<V: Hash, H: BuildHasher + Default> Hashed<V, H> { impl<V: Hash, H: BuildHasher + Default> Hashed<V, H> {
@ -263,9 +273,11 @@ impl Hasher for PassHasher {
/// A [`HashMap`] pre-configured to use [`Hashed`] keys and [`PassHash`] passthrough hashing. /// A [`HashMap`] pre-configured to use [`Hashed`] keys and [`PassHash`] passthrough hashing.
/// Iteration order only depends on the order of insertions and deletions. /// Iteration order only depends on the order of insertions and deletions.
#[cfg(feature = "alloc")]
pub type PreHashMap<K, V> = hashbrown::HashMap<Hashed<K>, V, PassHash>; pub type PreHashMap<K, V> = hashbrown::HashMap<Hashed<K>, V, PassHash>;
/// Extension methods intended to add functionality to [`PreHashMap`]. /// Extension methods intended to add functionality to [`PreHashMap`].
#[cfg(feature = "alloc")]
pub trait PreHashMapExt<K, V> { pub trait PreHashMapExt<K, V> {
/// Tries to get or insert the value for the given `key` using the pre-computed hash first. /// Tries to get or insert the value for the given `key` using the pre-computed hash first.
/// If the [`PreHashMap`] does not already contain the `key`, it will clone it and insert /// If the [`PreHashMap`] does not already contain the `key`, it will clone it and insert
@ -273,9 +285,11 @@ pub trait PreHashMapExt<K, V> {
fn get_or_insert_with<F: FnOnce() -> V>(&mut self, key: &Hashed<K>, func: F) -> &mut V; fn get_or_insert_with<F: FnOnce() -> V>(&mut self, key: &Hashed<K>, func: F) -> &mut V;
} }
#[cfg(feature = "alloc")]
impl<K: Hash + Eq + PartialEq + Clone, V> PreHashMapExt<K, V> for PreHashMap<K, V> { impl<K: Hash + Eq + PartialEq + Clone, V> PreHashMapExt<K, V> for PreHashMap<K, V> {
#[inline] #[inline]
fn get_or_insert_with<F: FnOnce() -> V>(&mut self, key: &Hashed<K>, func: F) -> &mut V { fn get_or_insert_with<F: FnOnce() -> V>(&mut self, key: &Hashed<K>, func: F) -> &mut V {
use hashbrown::hash_map::RawEntryMut;
let entry = self let entry = self
.raw_entry_mut() .raw_entry_mut()
.from_key_hashed_nocheck(key.hash(), key); .from_key_hashed_nocheck(key.hash(), key);
@ -291,6 +305,7 @@ impl<K: Hash + Eq + PartialEq + Clone, V> PreHashMapExt<K, V> for PreHashMap<K,
/// A specialized hashmap type with Key of [`TypeId`] /// A specialized hashmap type with Key of [`TypeId`]
/// Iteration order only depends on the order of insertions and deletions. /// Iteration order only depends on the order of insertions and deletions.
#[cfg(feature = "alloc")]
pub type TypeIdMap<V> = hashbrown::HashMap<TypeId, V, NoOpHash>; pub type TypeIdMap<V> = hashbrown::HashMap<TypeId, V, NoOpHash>;
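A brief sketch of the pre-hashing flow these types enable: the key's hash is computed once at construction, and `PassHash` reuses it on every lookup (names here are illustrative):

use bevy_utils::{Hashed, PreHashMap, PreHashMapExt};

fn demo() {
    let mut layouts: PreHashMap<String, usize> = PreHashMap::default();
    // The hash is computed exactly once, here.
    let key = Hashed::new("mesh_layout".to_string());
    // Lookups reuse the stored hash instead of re-hashing the string.
    *layouts.get_or_insert_with(&key, || 0) += 1;
}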
/// [`BuildHasher`] for types that already contain a high-quality hash. /// [`BuildHasher`] for types that already contain a high-quality hash.
@ -448,8 +463,8 @@ mod tests {
fn stable_hash_within_same_program_execution() { fn stable_hash_within_same_program_execution() {
use alloc::vec::Vec; use alloc::vec::Vec;
let mut map_1 = HashMap::new(); let mut map_1 = <HashMap<_, _>>::default();
let mut map_2 = HashMap::new(); let mut map_2 = <HashMap<_, _>>::default();
for i in 1..10 { for i in 1..10 {
map_1.insert(i, i); map_1.insert(i, i);
map_2.insert(i, i); map_2.insert(i, i);


@ -572,7 +572,7 @@ impl PerMethodSettings {
impl Default for PerMethodSettings { impl Default for PerMethodSettings {
fn default() -> Self { fn default() -> Self {
let mut settings = HashMap::new(); let mut settings = <HashMap<_, _>>::default();
for method in [ for method in [
Tonemapping::None, Tonemapping::None,


@ -4,8 +4,8 @@ use bevy::{
animation::{AnimationTarget, AnimationTargetId}, animation::{AnimationTarget, AnimationTargetId},
color::palettes::css::{LIGHT_GRAY, WHITE}, color::palettes::css::{LIGHT_GRAY, WHITE},
prelude::*, prelude::*,
utils::hashbrown::HashSet,
}; };
use std::collections::HashSet;
// IDs of the mask groups we define for the running fox model. // IDs of the mask groups we define for the running fox model.
// //


@ -3,7 +3,7 @@
//! This example show how you can create components dynamically, spawn entities with those components //! This example show how you can create components dynamically, spawn entities with those components
//! as well as query for entities with those components. //! as well as query for entities with those components.
use std::{alloc::Layout, io::Write, ptr::NonNull}; use std::{alloc::Layout, collections::HashMap, io::Write, ptr::NonNull};
use bevy::{ use bevy::{
ecs::{ ecs::{
@ -13,7 +13,6 @@ use bevy::{
}, },
prelude::*, prelude::*,
ptr::{Aligned, OwningPtr}, ptr::{Aligned, OwningPtr},
utils::HashMap,
}; };
const PROMPT: &str = " const PROMPT: &str = "


@ -1,9 +1,10 @@
//! This example displays each contributor to the bevy source code as a bouncing bevy-ball. //! This example displays each contributor to the bevy source code as a bouncing bevy-ball.
use bevy::{math::bounding::Aabb2d, prelude::*, utils::HashMap}; use bevy::{math::bounding::Aabb2d, prelude::*};
use rand::{Rng, SeedableRng}; use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha8Rng; use rand_chacha::ChaCha8Rng;
use std::{ use std::{
collections::HashMap,
env::VarError, env::VarError,
hash::{DefaultHasher, Hash, Hasher}, hash::{DefaultHasher, Hash, Hasher},
io::{self, BufRead, BufReader}, io::{self, BufRead, BufReader},


@ -195,7 +195,7 @@ fn main() {
dynamic_set.remove(&"y"); dynamic_set.remove(&"y");
let mut my_set: HashSet<&str> = HashSet::new(); let mut my_set: HashSet<&str> = HashSet::default();
my_set.apply(&dynamic_set); my_set.apply(&dynamic_set);
assert_eq!(my_set, HashSet::from_iter(["x", "z"])); assert_eq!(my_set, HashSet::from_iter(["x", "z"]));
} }
@ -204,7 +204,7 @@ fn main() {
{ {
let dynamic_map = DynamicMap::from_iter([("x", 1u32), ("y", 2u32), ("z", 3u32)]); let dynamic_map = DynamicMap::from_iter([("x", 1u32), ("y", 2u32), ("z", 3u32)]);
let mut my_map: HashMap<&str, u32> = HashMap::new(); let mut my_map: HashMap<&str, u32> = HashMap::default();
my_map.apply(&dynamic_map); my_map.apply(&dynamic_map);
assert_eq!(my_map.get("x"), Some(&1)); assert_eq!(my_map.get("x"), Some(&1));
assert_eq!(my_map.get("y"), Some(&2)); assert_eq!(my_map.get("y"), Some(&2));


@ -68,7 +68,7 @@ enum F {
} }
fn setup() { fn setup() {
let mut z = HashMap::default(); let mut z = <HashMap<_, _>>::default();
z.insert("Hello".to_string(), 1.0); z.insert("Hello".to_string(), 1.0);
let value: Box<dyn Reflect> = Box::new(A { let value: Box<dyn Reflect> = Box::new(A {
x: 1, x: 1,


@ -71,7 +71,7 @@ fn configure_ambiguity_detection(sub_app: &mut SubApp) {
/// Returns the number of conflicting systems per schedule. /// Returns the number of conflicting systems per schedule.
fn count_ambiguities(sub_app: &SubApp) -> AmbiguitiesCount { fn count_ambiguities(sub_app: &SubApp) -> AmbiguitiesCount {
let schedules = sub_app.world().resource::<Schedules>(); let schedules = sub_app.world().resource::<Schedules>();
let mut ambiguities = HashMap::new(); let mut ambiguities = <HashMap<_, _>>::default();
for (_, schedule) in schedules.iter() { for (_, schedule) in schedules.iter() {
let ambiguities_in_schedule = schedule.graph().conflicting_systems().len(); let ambiguities_in_schedule = schedule.graph().conflicting_systems().len();
ambiguities.insert(schedule.label(), ambiguities_in_schedule); ambiguities.insert(schedule.label(), ambiguities_in_schedule);


@ -12,7 +12,7 @@ toml_edit = { version = "0.22.7", default-features = false, features = [
tera = "1.15" tera = "1.15"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
bitflags = "2.3" bitflags = "2.3"
hashbrown = { version = "0.14", features = ["serde"] } hashbrown = { version = "0.15", features = ["serde"] }
[lints] [lints]
workspace = true workspace = true


@ -13,4 +13,4 @@ index 104384086..6e3c8dd83 100644
+ Self::default() + Self::default()
} }
/// Returns the current [`UpdateMode`]. /// Default settings for mobile.