upgrade legion

parent 7fe2288814
commit 75ab0e21e5
@@ -95,6 +95,9 @@ impl FilterResult for Option<bool> {
 pub trait Filter<T: Copy>: Send + Sync + Sized {
     type Iter: Iterator + Send + Sync;

+    // Called when a query is about to begin execution.
+    fn init(&self) {}
+
     /// Pulls iterator data out of the source.
     fn collect(&self, source: T) -> Self::Iter;

@@ -168,6 +171,9 @@ pub trait EntityFilter: Send + Clone {
     type ChunksetFilter: for<'a> Filter<ChunksetFilterData<'a>> + Clone;
     type ChunkFilter: for<'a> Filter<ChunkFilterData<'a>> + Clone;

+    /// Initializes the entity filter for iteration.
+    fn init(&self);
+
     /// Gets mutable references to both inner filters.
     fn filters(
         &self,
@@ -239,6 +245,12 @@ where
     type ChunksetFilter = S;
     type ChunkFilter = C;

+    fn init(&self) {
+        self.arch_filter.init();
+        self.chunkset_filter.init();
+        self.chunk_filter.init();
+    }
+
     fn filters(
         &self,
     ) -> (
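
The three hunks above introduce the same hook everywhere: `Filter::init` gets an empty default body, `EntityFilter::init` is required, and the tuple filter simply fans the call out to its archetype, chunkset and chunk filters. A minimal standalone sketch of that default-method pattern, assuming hypothetical `Passthrough`-like and `Counting` types rather than legion's real traits:

    use std::cell::Cell;

    // A provided no-op `init` lets stateless filters ignore the hook, while
    // stateful filters override it to refresh per-query-run bookkeeping.
    trait Filter {
        // Called once before each query execution; the default does nothing.
        fn init(&self) {}
    }

    struct Passthrough;
    impl Filter for Passthrough {} // inherits the no-op

    struct Counting {
        runs: Cell<u32>,
    }

    impl Filter for Counting {
        fn init(&self) {
            // Stateful filter: refresh per-run state here.
            self.runs.set(self.runs.get() + 1);
        }
    }

    fn main() {
        let counting = Counting { runs: Cell::new(0) };
        Passthrough.init(); // no-op
        counting.init();
        assert_eq!(counting.runs.get(), 1);
    }
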
@@ -380,7 +392,7 @@ impl<'a, 'b, F: Filter<ArchetypeFilterData<'a>>> Iterator for FilterArchIter<'a,
     }
 }

-/// An iterator which yields the index of chunks that match a filter.
+/// An iterator which yields the index of chuinks that match a filter.
 pub struct FilterChunkIter<'a, 'b, F: Filter<ChunksetFilterData<'a>>> {
     filter: &'b F,
     chunks: Enumerate<F::Iter>,
@@ -458,6 +470,9 @@ pub struct Passthrough;
 impl<'a> Filter<ArchetypeFilterData<'a>> for Passthrough {
     type Iter = Take<Repeat<()>>;

+    #[inline]
+    fn init(&self) {}
+
     #[inline]
     fn collect(&self, arch: ArchetypeFilterData<'a>) -> Self::Iter {
         std::iter::repeat(()).take(arch.component_types.len())
@@ -470,6 +485,9 @@ impl<'a> Filter<ArchetypeFilterData<'a>> for Passthrough {
 impl<'a> Filter<ChunksetFilterData<'a>> for Passthrough {
     type Iter = Take<Repeat<()>>;

+    #[inline]
+    fn init(&self) {}
+
     #[inline]
     fn collect(&self, sets: ChunksetFilterData<'a>) -> Self::Iter {
         std::iter::repeat(()).take(sets.archetype_data.len())
@@ -482,6 +500,9 @@ impl<'a> Filter<ChunksetFilterData<'a>> for Passthrough {
 impl<'a> Filter<ChunkFilterData<'a>> for Passthrough {
     type Iter = Take<Repeat<()>>;

+    #[inline]
+    fn init(&self) {}
+
     #[inline]
     fn collect(&self, chunk: ChunkFilterData<'a>) -> Self::Iter {
         std::iter::repeat(()).take(chunk.chunks.len())
@@ -520,6 +541,9 @@ impl ActiveFilter for Any {}
 impl<'a> Filter<ArchetypeFilterData<'a>> for Any {
     type Iter = Take<Repeat<()>>;

+    #[inline]
+    fn init(&self) {}
+
     #[inline]
     fn collect(&self, arch: ArchetypeFilterData<'a>) -> Self::Iter {
         std::iter::repeat(()).take(arch.component_types.len())
@@ -532,6 +556,9 @@ impl<'a> Filter<ArchetypeFilterData<'a>> for Any {
 impl<'a> Filter<ChunksetFilterData<'a>> for Any {
     type Iter = Take<Repeat<()>>;

+    #[inline]
+    fn init(&self) {}
+
     #[inline]
     fn collect(&self, sets: ChunksetFilterData<'a>) -> Self::Iter {
         std::iter::repeat(()).take(sets.archetype_data.len())
@@ -544,6 +571,9 @@ impl<'a> Filter<ChunksetFilterData<'a>> for Any {
 impl<'a> Filter<ChunkFilterData<'a>> for Any {
     type Iter = Take<Repeat<()>>;

+    #[inline]
+    fn init(&self) {}
+
     #[inline]
     fn collect(&self, chunk: ChunkFilterData<'a>) -> Self::Iter {
         std::iter::repeat(()).take(chunk.chunks.len())
@@ -592,6 +622,9 @@ impl<F> ActiveFilter for Not<F> {}
 impl<'a, T: Copy, F: Filter<T>> Filter<T> for Not<F> {
     type Iter = F::Iter;

+    #[inline]
+    fn init(&self) { self.filter.init(); }
+
     #[inline]
     fn collect(&self, source: T) -> Self::Iter { self.filter.collect(source) }

@@ -648,6 +681,9 @@ impl<T> ActiveFilter for And<(T,)> {}
 impl<'a, T: Copy, F: Filter<T>> Filter<T> for And<(F,)> {
     type Iter = F::Iter;

+    #[inline]
+    fn init(&self) { self.filters.0.init(); }
+
     #[inline]
     fn collect(&self, source: T) -> Self::Iter { self.filters.0.collect(source) }

@@ -717,6 +753,13 @@ macro_rules! impl_and_filter {
             // type Iter = crate::zip::Zip<( $( $ty::Iter ),* )>;
             type Iter = recursive_zip!(@type $($ty::Iter),*);

+            #[inline]
+            fn init(&self) {
+                #![allow(non_snake_case)]
+                let ($( $ty, )*) = &self.filters;
+                $( $ty.init(); )*
+            }
+
             fn collect(&self, source: T) -> Self::Iter {
                 #![allow(non_snake_case)]
                 let ($( $ty, )*) = &self.filters;
@@ -818,6 +861,13 @@ macro_rules! impl_or_filter {
             // type Iter = crate::zip::Zip<( $( $ty::Iter ),* )>;
             type Iter = recursive_zip!(@type $($ty::Iter),*);

+            #[inline]
+            fn init(&self) {
+                #![allow(non_snake_case)]
+                let ($( $ty, )*) = &self.filters;
+                $( $ty.init(); )*
+            }
+
             fn collect(&self, source: T) -> Self::Iter {
                 #![allow(non_snake_case)]
                 let ($( $ty, )*) = &self.filters;
@@ -905,7 +955,7 @@ impl_or_filter!(A => a, B => b, C => c, D => d, E => e, F => f, G => g, H => h,
 impl_or_filter!(A => a, B => b, C => c, D => d, E => e, F => f, G => g, H => h, I => i, J => j, K => k);
 impl_or_filter!(A => a, B => b, C => c, D => d, E => e, F => f, G => g, H => h, I => i, J => j, K => k, L => l);

-/// A filter which requires that all chunks contain entity data components of type `T`.
+/// A filter qhich requires that all chunks contain entity data components of type `T`.
 #[derive(Debug)]
 pub struct ComponentFilter<T>(PhantomData<T>);

@@ -923,6 +973,9 @@ impl<T> Clone for ComponentFilter<T> {
 impl<'a, T: Component> Filter<ArchetypeFilterData<'a>> for ComponentFilter<T> {
     type Iter = SliceVecIter<'a, ComponentTypeId>;

+    #[inline]
+    fn init(&self) {}
+
     #[inline]
     fn collect(&self, source: ArchetypeFilterData<'a>) -> Self::Iter {
         source.component_types.iter()
@@ -995,6 +1048,9 @@ impl<T> Clone for TagFilter<T> {
 impl<'a, T: Tag> Filter<ArchetypeFilterData<'a>> for TagFilter<T> {
     type Iter = SliceVecIter<'a, TagTypeId>;

+    #[inline]
+    fn init(&self) {}
+
     #[inline]
     fn collect(&self, source: ArchetypeFilterData<'a>) -> Self::Iter { source.tag_types.iter() }

@@ -1067,6 +1123,9 @@ impl<'a, T> Clone for TagValueFilter<'a, T> {
 impl<'a, 'b, T: Tag> Filter<ChunksetFilterData<'a>> for TagValueFilter<'b, T> {
     type Iter = Iter<'a, T>;

+    #[inline]
+    fn init(&self) {}
+
     fn collect(&self, source: ChunksetFilterData<'a>) -> Self::Iter {
         unsafe {
             source
@@ -1132,14 +1191,16 @@ impl<'a, T> std::ops::BitOr<Passthrough> for TagValueFilter<'a, T> {
 /// chunk since the last time the filter was executed.
 #[derive(Debug)]
 pub struct ComponentChangedFilter<T: Component> {
-    last_read_version: AtomicU64,
+    high_water_mark: AtomicU64,
+    version_threshold: AtomicU64,
     phantom: PhantomData<T>,
 }

 impl<T: Component> ComponentChangedFilter<T> {
     fn new() -> ComponentChangedFilter<T> {
         ComponentChangedFilter {
-            last_read_version: AtomicU64::new(0),
+            high_water_mark: AtomicU64::new(0),
+            version_threshold: AtomicU64::new(0),
             phantom: PhantomData,
         }
     }
@@ -1150,29 +1211,63 @@ impl<T: Component> ActiveFilter for ComponentChangedFilter<T> {}
 impl<T: Component> Clone for ComponentChangedFilter<T> {
     fn clone(&self) -> Self {
         Self {
-            last_read_version: AtomicU64::new(self.last_read_version.load(Ordering::Relaxed)),
+            high_water_mark: AtomicU64::new(self.high_water_mark.load(Ordering::Relaxed)),
+            version_threshold: AtomicU64::new(self.version_threshold.load(Ordering::Relaxed)),
             phantom: PhantomData,
         }
     }
 }

 impl<'a, T: Component> Filter<ChunkFilterData<'a>> for ComponentChangedFilter<T> {
-    type Iter = Iter<'a, ComponentStorage>;
+    type Iter = ComponentChangedState<'a, ComponentStorage>;

-    fn collect(&self, source: ChunkFilterData<'a>) -> Self::Iter { source.chunks.iter() }
+    #[inline]
+    fn init(&self) {
+        let version = self.high_water_mark.load(Ordering::Relaxed);
+        let mut threshold = self.version_threshold.load(Ordering::Relaxed);
+        if threshold < version {
+            loop {
+                match self.version_threshold.compare_exchange_weak(
+                    threshold,
+                    version,
+                    Ordering::Relaxed,
+                    Ordering::Relaxed,
+                ) {
+                    Ok(_) => break,
+                    Err(stored_last_read) => {
+                        threshold = stored_last_read;
+                        if threshold >= version {
+                            // matched version is already considered visited, update no longer needed
+                            break;
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    fn collect(&self, source: ChunkFilterData<'a>) -> Self::Iter {
+        let compare_version = self.version_threshold.load(Ordering::Relaxed);
+        ComponentChangedState {
+            iter: source.chunks.iter(),
+            version_threshold: compare_version,
+        }
+    }

     #[inline]
     fn is_match(&self, item: &<Self::Iter as Iterator>::Item) -> Option<bool> {
-        let components = item.components(ComponentTypeId::of::<T>());
+        let (version_threshold, storage) = item;
+
+        let components = storage.components(ComponentTypeId::of::<T>());
         if components.is_none() {
             return Some(false);
         }

         let version = components.unwrap().version();
-        let mut last_read = self.last_read_version.load(Ordering::Relaxed);
+        let mut last_read = self.high_water_mark.load(Ordering::Relaxed);
         if last_read < version {
             loop {
-                match self.last_read_version.compare_exchange_weak(
+                match self.high_water_mark.compare_exchange_weak(
                     last_read,
                     version,
                     Ordering::Relaxed,
@@ -1181,13 +1276,16 @@ impl<'a, T: Component> Filter<ChunkFilterData<'a>> for ComponentChangedFilter<T>
                     Ok(_) => break,
                     Err(stored_last_read) => {
                         last_read = stored_last_read;
-                        if last_read < version {
+                        if last_read >= version {
                             // matched version is already considered visited, update no longer needed
                             break;
                         }
                     }
                 }
             }
+        }
+
+        if version > *version_threshold {
             Some(true)
         } else {
             Some(false)
@@ -1195,6 +1293,19 @@ impl<'a, T: Component> Filter<ChunkFilterData<'a>> for ComponentChangedFilter<T>
     }
 }

+pub struct ComponentChangedState<'a, T: Component> {
+    iter: Iter<'a, T>,
+    version_threshold: u64,
+}
+
+impl<'a, T: Component> Iterator for ComponentChangedState<'a, T> {
+    type Item = (u64, &'a T);
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.iter.next().map(|c| (self.version_threshold, c))
+    }
+}
+
 impl<'a, T: Component> std::ops::Not for ComponentChangedFilter<T> {
     type Output = Not<Self>;

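
Taken together, the hunks above move change detection from a single `last_read_version` counter to two: `is_match` keeps raising `high_water_mark` to the newest component version it sees, `init` promotes that mark into `version_threshold` at the start of the next query run, and `collect` snapshots the threshold into `ComponentChangedState` so every chunk in one run is compared against the same frozen value. A standalone sketch of the idea, assuming a bare `u64` chunk version and using `fetch_max` in place of the diff's compare-exchange loops (hypothetical `ChangedFilter` type, not legion's API):

    use std::sync::atomic::{AtomicU64, Ordering};

    // Two-counter change detection: `high_water_mark` tracks the newest version
    // ever observed; `threshold` is that mark frozen at the start of the current
    // query run. A chunk counts as changed if its version exceeds the threshold.
    struct ChangedFilter {
        high_water_mark: AtomicU64,
        threshold: AtomicU64,
    }

    impl ChangedFilter {
        fn new() -> Self {
            Self { high_water_mark: AtomicU64::new(0), threshold: AtomicU64::new(0) }
        }

        // Analogous in spirit to the new `init`: promote everything seen during
        // previous runs into the comparison threshold.
        fn init(&self) {
            let seen = self.high_water_mark.load(Ordering::Relaxed);
            self.threshold.fetch_max(seen, Ordering::Relaxed);
        }

        // Analogous in spirit to `is_match`: record the version, then compare it
        // against the threshold captured for this run.
        fn is_match(&self, chunk_version: u64) -> bool {
            self.high_water_mark.fetch_max(chunk_version, Ordering::Relaxed);
            chunk_version > self.threshold.load(Ordering::Relaxed)
        }
    }

    fn main() {
        let filter = ChangedFilter::new();
        filter.init();
        assert!(filter.is_match(1));  // version 1 is newer than threshold 0
        assert!(filter.is_match(1));  // still matches within the same run
        filter.init();                // next run: threshold becomes 1
        assert!(!filter.is_match(1)); // unchanged chunk no longer matches
        assert!(filter.is_match(2));  // a fresh write matches again
    }
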
@@ -1241,6 +1352,7 @@ impl<'a, T: Component> std::ops::BitOr<Passthrough> for ComponentChangedFilter<T
 #[cfg(test)]
 mod test {
     use super::filter_fns::*;
+    use crate::prelude::*;

     #[test]
     pub fn create() {
@@ -1249,4 +1361,40 @@ mod test {
         let filter = component::<usize>() | tag_value(&5isize);
         tracing::trace!(?filter);
     }
+
+    #[test]
+    fn component_changed_filter() {
+        let _ = tracing_subscriber::fmt::try_init();
+
+        let universe = Universe::new();
+        let mut world = universe.create_world();
+
+        let entity1 = world.insert((), vec![(1usize,)])[0];
+        let entity2 = world.insert((), vec![(2usize, false)])[0];
+
+        let query = <Read<usize>>::query().filter(changed::<usize>());
+
+        assert_eq!(2, query.iter_chunks(&world).collect::<Vec<_>>().len());
+
+        *world.get_component_mut::<usize>(entity1).unwrap() = 3usize;
+
+        assert_eq!(1, query.iter_chunks(&world).collect::<Vec<_>>().len());
+
+        *world.get_component_mut::<usize>(entity1).unwrap() = 4usize;
+        *world.get_component_mut::<usize>(entity2).unwrap() = 5usize;
+
+        assert_eq!(2, query.iter_chunks(&world).collect::<Vec<_>>().len());
+
+        *world.get_component_mut::<usize>(entity1).unwrap() = 6usize;
+        *world.get_component_mut::<usize>(entity1).unwrap() = 7usize;
+        *world.get_component_mut::<usize>(entity2).unwrap() = 8usize;
+
+        assert_eq!(2, query.iter_chunks(&world).collect::<Vec<_>>().len());
+
+        *world.get_component_mut::<usize>(entity2).unwrap() = 6usize;
+        *world.get_component_mut::<usize>(entity2).unwrap() = 7usize;
+        *world.get_component_mut::<usize>(entity1).unwrap() = 8usize;
+
+        assert_eq!(2, query.iter_chunks(&world).collect::<Vec<_>>().len());
+    }
 }

@@ -949,6 +949,7 @@ where
         &'a self,
         world: &'data World,
     ) -> ChunkViewIter<'data, 'a, V, F::ArchetypeFilter, F::ChunksetFilter, F::ChunkFilter> {
+        self.filter.init();
         let (arch_filter, chunkset_filter, chunk_filter) = self.filter.filters();
         let storage = world.storage();
         let archetypes = arch_filter
@@ -1190,6 +1191,7 @@ where
         <F::ChunksetFilter as Filter<ChunksetFilterData<'data>>>::Iter: FissileIterator,
         <F::ChunkFilter as Filter<ChunkFilterData<'data>>>::Iter: FissileIterator,
     {
+        self.filter.init();
         let (arch_filter, chunkset_filter, chunk_filter) = self.filter.filters();
         let storage = world.storage();
         let archetypes = FissileEnumerate::new(arch_filter.collect(ArchetypeFilterData {

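
Both query entry points above now call `self.filter.init()` before splitting the filter and collecting archetype data, so any per-run filter state is refreshed exactly once per iteration pass. A standalone sketch of that ordering (hypothetical `ChunkFilter` trait and `run_query` helper, not legion's API):

    // Hypothetical sketch of the ordering the query methods now follow:
    // initialize the filter once per run, then match every candidate.
    trait ChunkFilter {
        fn init(&self) {}
        fn is_match(&self, chunk_version: u64) -> bool;
    }

    struct NewerThan(u64);

    impl ChunkFilter for NewerThan {
        fn is_match(&self, chunk_version: u64) -> bool {
            chunk_version > self.0
        }
    }

    fn run_query<F: ChunkFilter>(filter: &F, chunk_versions: &[u64]) -> Vec<usize> {
        filter.init(); // per-run state is refreshed before any matching happens
        chunk_versions
            .iter()
            .enumerate()
            .filter(|&(_, &v)| filter.is_match(v))
            .map(|(i, _)| i)
            .collect()
    }

    fn main() {
        let hits = run_query(&NewerThan(3), &[1, 4, 2, 7]);
        assert_eq!(hits, vec![1, 3]);
    }
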
@@ -1425,7 +1425,10 @@ impl ComponentStorage {
         // free component memory
         unsafe {
             let ptr = self.component_data.take().unwrap();
-            std::alloc::dealloc(ptr.as_ptr(), self.component_layout);
+
+            if self.component_layout.size() > 0 {
+                std::alloc::dealloc(ptr.as_ptr(), self.component_layout);
+            }
         }

         self.update_mem_gauge();
@@ -1446,13 +1449,18 @@ impl ComponentStorage {

         unsafe {
             // allocating backing store
-            let ptr = std::alloc::alloc(self.component_layout);
-            self.component_data = Some(NonNull::new_unchecked(ptr));
+            if self.component_layout.size() > 0 {
+                let ptr = std::alloc::alloc(self.component_layout);
+                self.component_data = Some(NonNull::new_unchecked(ptr));

                 // update accessor pointers
                 for (type_id, component) in (&mut *self.component_info.get()).iter_mut() {
                     let &offset = self.component_offsets.get(type_id).unwrap();
                     *component.ptr.get_mut() = ptr.add(offset);
+                }
+            } else {
+                self.component_data =
+                    Some(NonNull::new(self.component_layout.align() as *mut u8).unwrap());
             }
         }

@@ -1516,8 +1524,10 @@ impl Drop for ComponentStorage {
             self.update_count_gauge();

             // free the chunk's memory
-            unsafe {
-                std::alloc::dealloc(ptr.as_ptr(), self.component_layout);
+            if self.component_layout.size() > 0 {
+                unsafe {
+                    std::alloc::dealloc(ptr.as_ptr(), self.component_layout);
+                }
             }
         }
     }
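
All three storage hunks above guard the raw allocation calls with `self.component_layout.size() > 0`: `std::alloc::alloc` and `std::alloc::dealloc` may not be called with a zero-sized layout, so when the layout is empty the code substitutes a dangling, suitably aligned pointer and never frees it. A standalone sketch of that pattern (hypothetical `RawBuffer` type, not legion's `ComponentStorage`):

    use std::alloc::{alloc, dealloc, Layout};
    use std::ptr::NonNull;

    // Zero-sized layouts never touch the global allocator; they get a dangling
    // pointer aligned to the layout instead, mirroring the guard added above.
    struct RawBuffer {
        ptr: NonNull<u8>,
        layout: Layout,
    }

    impl RawBuffer {
        fn new(layout: Layout) -> Self {
            let ptr = if layout.size() > 0 {
                // Safety: the layout has non-zero size.
                NonNull::new(unsafe { alloc(layout) }).expect("allocation failed")
            } else {
                // Zero-sized: a dangling but correctly aligned pointer is enough.
                NonNull::new(layout.align() as *mut u8).unwrap()
            };
            Self { ptr, layout }
        }
    }

    impl Drop for RawBuffer {
        fn drop(&mut self) {
            if self.layout.size() > 0 {
                // Safety: the pointer came from `alloc` with this same layout.
                unsafe { dealloc(self.ptr.as_ptr(), self.layout) };
            }
            // Nothing to free in the zero-sized case.
        }
    }

    fn main() {
        let _empty = RawBuffer::new(Layout::from_size_align(0, 8).unwrap());
        let _full = RawBuffer::new(Layout::from_size_align(64, 8).unwrap());
    }
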
@@ -462,8 +462,6 @@ impl World {
     /// This function has the overhead of moving the entity to either an existing or new archetype,
     /// causing a memory copy of the entity to a new location. This function should not be used
     /// multiple times in successive order.
-    ///
-    /// `World::add_components` should be used for adding multiple omponents to an entity at once.
     pub fn add_component<T: Component>(
         &mut self,
         entity: Entity,

@@ -168,8 +168,13 @@ impl Executor {
                 64,
                 Default::default(),
             );
-            let mut component_mutated =
-                FxHashMap::<ComponentTypeId, Vec<usize>>::with_capacity_and_hasher(
+            let mut component_last_mutated =
+                FxHashMap::<ComponentTypeId, usize>::with_capacity_and_hasher(
+                    64,
+                    Default::default(),
+                );
+            let mut component_last_read =
+                FxHashMap::<ComponentTypeId, usize>::with_capacity_and_hasher(
                     64,
                     Default::default(),
                 );
@@ -220,23 +225,28 @@ impl Executor {

             // find component access dependencies
             let mut comp_dependencies = FxHashSet::default();
-            for comp in read_comp {
-                if let Some(ns) = component_mutated.get(comp) {
-                    for n in ns {
-                        comp_dependencies.insert(*n);
-                    }
-                }
-            }
             for comp in write_comp {
-                if let Some(ns) = component_mutated.get(comp) {
-                    for n in ns {
-                        comp_dependencies.insert(*n);
-                    }
+                // Writes have to be exclusive, so we are dependent on reads too
+                trace!(component = ?comp, "Write component");
+                if let Some(n) = component_last_read.get(comp) {
+                    trace!(system_index = n, "Added read dependency");
+                    comp_dependencies.insert(*n);
                 }
-                component_mutated
-                    .entry(*comp)
-                    .or_insert_with(Vec::new)
-                    .push(i);
+                if let Some(n) = component_last_mutated.get(comp) {
+                    trace!(system_index = n, "Added write dependency");
+                    comp_dependencies.insert(*n);
+                }
+                component_last_mutated.insert(*comp, i);
+            }
+
+            // Do reads after writes to ensure we don't overwrite last_read
+            for comp in read_comp {
+                trace!(component = ?comp, "Read component");
+                if let Some(n) = component_last_mutated.get(comp) {
+                    trace!(system_index = n, "Added write dependency");
+                    comp_dependencies.insert(*n);
+                }
+                component_last_read.insert(*comp, i);
             }

             trace!(depentants = ?comp_dependencies, "Computed dynamic dependants");
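
The scheduler hunks above replace the per-component list of every writing system with two "last seen" maps: a writer depends on the component's last reader and last writer, a reader depends only on the last writer, and reads are registered after writes so a system's write pass never picks up its own read of the same component. A standalone sketch of that bookkeeping (hypothetical `CompId` and `dependencies` helper using `std` hash maps; legion keys the real maps by `ComponentTypeId` with `FxHashMap`):

    use std::collections::{HashMap, HashSet};

    // Hypothetical component id; legion keys these maps by ComponentTypeId.
    type CompId = u32;

    // For system `i`, compute which earlier systems it must wait on, given the
    // last reader/writer recorded so far for each component.
    fn dependencies(
        i: usize,
        reads: &[CompId],
        writes: &[CompId],
        last_read: &mut HashMap<CompId, usize>,
        last_mutated: &mut HashMap<CompId, usize>,
    ) -> HashSet<usize> {
        let mut deps = HashSet::new();
        for comp in writes {
            // Writes are exclusive: depend on the last reader and the last writer.
            if let Some(&n) = last_read.get(comp) {
                deps.insert(n);
            }
            if let Some(&n) = last_mutated.get(comp) {
                deps.insert(n);
            }
            last_mutated.insert(*comp, i);
        }
        // Reads are registered after writes so the write pass above never sees
        // this same system as the "last reader" of a component it also writes.
        for comp in reads {
            if let Some(&n) = last_mutated.get(comp) {
                deps.insert(n);
            }
            last_read.insert(*comp, i);
        }
        deps
    }

    fn main() {
        let (mut lr, mut lm) = (HashMap::new(), HashMap::new());
        let d0 = dependencies(0, &[], &[1], &mut lr, &mut lm); // system 0 writes comp 1
        let d1 = dependencies(1, &[1], &[], &mut lr, &mut lm); // system 1 reads comp 1
        let d2 = dependencies(2, &[], &[1], &mut lr, &mut lm); // system 2 writes comp 1
        assert!(d0.is_empty());
        assert_eq!(d1, HashSet::from([0]));
        assert_eq!(d2, HashSet::from([0, 1]));
    }
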
@@ -1627,11 +1627,11 @@ mod tests {
         struct Comp2(f32, f32, f32);

         let components = vec![
-            (Pos(69., 69., 69.), Vel(69., 69., 69.)),
-            (Pos(69., 69., 69.), Vel(69., 69., 69.)),
+            (Comp1(69., 69., 69.), Comp2(69., 69., 69.)),
+            (Comp1(69., 69., 69.), Comp2(69., 69., 69.)),
         ];

-        let mut expected = HashMap::<Entity, (Pos, Vel)>::new();
+        let mut expected = HashMap::<Entity, (Comp1, Comp2)>::new();

         for (i, e) in world.insert((), components.clone()).iter().enumerate() {
             if let Some((pos, rot)) = components.get(i) {
@@ -1670,7 +1670,7 @@ mod tests {
         let system3 = SystemBuilder::<()>::new("TestSystem3")
             .with_query(<(Write<Comp1>, Write<Comp2>)>::query())
             .build(move |_, world, _, query| {
-                query.iter_mut(world).for_each(|(mut one, two)| {
+                query.iter_mut(world).for_each(|(mut one, mut two)| {
                     assert_eq!(one.0, 456.);
                     assert_eq!(one.1, 456.);
                     assert_eq!(one.2, 456.);
@@ -1683,9 +1683,9 @@ mod tests {
                     one.1 = 789.;
                     one.2 = 789.;

-                    one.0 = 789.;
-                    one.1 = 789.;
-                    one.2 = 789.;
+                    two.0 = 789.;
+                    two.1 = 789.;
+                    two.2 = 789.;
                 });
             });
