commit ee68bf7a1a
Merge branch 'ui_clipping_override_picking_fix' of https://github.com/ickshonpe/bevy into ui_clipping_override_picking_fix
@@ -21,7 +21,7 @@ mod core_radio;
 mod core_scrollbar;
 mod core_slider;

-use bevy_app::{App, Plugin};
+use bevy_app::{PluginGroup, PluginGroupBuilder};

 pub use callback::{Callback, Notify};
 pub use core_button::{CoreButton, CoreButtonPlugin};
@@ -36,18 +36,17 @@ pub use core_slider::{
     SliderRange, SliderStep, SliderValue, TrackClick,
 };

-/// A plugin that registers the observers for all of the core widgets. If you don't want to
+/// A plugin group that registers the observers for all of the core widgets. If you don't want to
 /// use all of the widgets, you can import the individual widget plugins instead.
-pub struct CoreWidgetsPlugin;
+pub struct CoreWidgetsPlugins;

-impl Plugin for CoreWidgetsPlugin {
-    fn build(&self, app: &mut App) {
-        app.add_plugins((
-            CoreButtonPlugin,
-            CoreCheckboxPlugin,
-            CoreRadioGroupPlugin,
-            CoreScrollbarPlugin,
-            CoreSliderPlugin,
-        ));
+impl PluginGroup for CoreWidgetsPlugins {
+    fn build(self) -> PluginGroupBuilder {
+        PluginGroupBuilder::start::<Self>()
+            .add(CoreButtonPlugin)
+            .add(CoreCheckboxPlugin)
+            .add(CoreRadioGroupPlugin)
+            .add(CoreScrollbarPlugin)
+            .add(CoreSliderPlugin)
     }
 }
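For reference, a minimal sketch of registering the new `CoreWidgetsPlugins` group. This snippet is illustrative and not part of the commit; it mirrors the example updates later in this diff, and `DefaultPlugins` plus the `PluginGroup::build`/`disable` calls are the standard Bevy plugin-group API.

```rust
use bevy::core_widgets::CoreWidgetsPlugins;
use bevy::prelude::*;

fn main() {
    App::new()
        // One call registers the observers for every core widget. Because it is
        // now a `PluginGroup`, individual members can also be disabled, e.g.
        // `CoreWidgetsPlugins.build().disable::<CoreSliderPlugin>()`.
        .add_plugins((DefaultPlugins, CoreWidgetsPlugins))
        .run();
}
```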
@@ -563,10 +563,21 @@ mod tests {
     use super::*;

     #[test]
-    #[should_panic]
     fn dependency_loop() {
         let mut schedule = Schedule::default();
         schedule.configure_sets(TestSystems::X.after(TestSystems::X));
+        let mut world = World::new();
+        let result = schedule.initialize(&mut world);
+        assert!(matches!(result, Err(ScheduleBuildError::DependencyLoop(_))));
+    }
+
+    #[test]
+    fn dependency_loop_from_chain() {
+        let mut schedule = Schedule::default();
+        schedule.configure_sets((TestSystems::X, TestSystems::X).chain());
+        let mut world = World::new();
+        let result = schedule.initialize(&mut world);
+        assert!(matches!(result, Err(ScheduleBuildError::DependencyLoop(_))));
     }

     #[test]
@@ -598,10 +609,12 @@ mod tests {
     }

     #[test]
-    #[should_panic]
     fn hierarchy_loop() {
         let mut schedule = Schedule::default();
         schedule.configure_sets(TestSystems::X.in_set(TestSystems::X));
+        let mut world = World::new();
+        let result = schedule.initialize(&mut world);
+        assert!(matches!(result, Err(ScheduleBuildError::HierarchyLoop(_))));
     }

     #[test]
@@ -390,14 +390,14 @@ impl Schedule {
         let a = a.into_system_set();
         let b = b.into_system_set();

-        let Some(&a_id) = self.graph.system_set_ids.get(&a.intern()) else {
+        let Some(&a_id) = self.graph.system_sets.ids.get(&a.intern()) else {
             panic!(
                 "Could not mark system as ambiguous, `{:?}` was not found in the schedule.
                 Did you try to call `ambiguous_with` before adding the system to the world?",
                 a
             );
         };
-        let Some(&b_id) = self.graph.system_set_ids.get(&b.intern()) else {
+        let Some(&b_id) = self.graph.system_sets.ids.get(&b.intern()) else {
             panic!(
                 "Could not mark system as ambiguous, `{:?}` was not found in the schedule.
                 Did you try to call `ambiguous_with` before adding the system to the world?",
@@ -760,6 +760,27 @@ enum UninitializedId {
     },
 }

+/// Metadata for system sets in a schedule.
+#[derive(Default)]
+struct SystemSets {
+    /// List of system sets in the schedule
+    sets: SlotMap<SystemSetKey, SystemSetNode>,
+    /// List of conditions for each system set, in the same order as `system_sets`
+    conditions: SecondaryMap<SystemSetKey, Vec<ConditionWithAccess>>,
+    /// Map from system set to node id
+    ids: HashMap<InternedSystemSet, SystemSetKey>,
+}
+
+impl SystemSets {
+    fn get_or_add_set(&mut self, set: InternedSystemSet) -> SystemSetKey {
+        *self.ids.entry(set).or_insert_with(|| {
+            let key = self.sets.insert(SystemSetNode::new(set));
+            self.conditions.insert(key, Vec::new());
+            key
+        })
+    }
+}
+
 /// Metadata for a [`Schedule`].
 ///
 /// The order isn't optimized; calling `ScheduleGraph::build_schedule` will return a
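The new `SystemSets` container keeps three maps keyed by the same `SystemSetKey`, with `get_or_add_set` as the single insert-or-lookup entry point. A self-contained sketch of that slotmap interning pattern, using the `slotmap` crate directly with placeholder data rather than Bevy's internal types:

```rust
use slotmap::{new_key_type, SecondaryMap, SlotMap};
use std::collections::HashMap;

new_key_type! { struct SetKey; }

#[derive(Default)]
struct Sets {
    nodes: SlotMap<SetKey, &'static str>,          // the sets themselves
    conditions: SecondaryMap<SetKey, Vec<String>>, // side data in the same key space
    ids: HashMap<&'static str, SetKey>,            // interning: name -> key
}

impl Sets {
    /// Insert-or-lookup: callers always get back a valid key for the set.
    fn get_or_add(&mut self, name: &'static str) -> SetKey {
        *self.ids.entry(name).or_insert_with(|| {
            let key = self.nodes.insert(name);
            self.conditions.insert(key, Vec::new());
            key
        })
    }
}

fn main() {
    let mut sets = Sets::default();
    let a = sets.get_or_add("Update");
    let b = sets.get_or_add("Update"); // second call is a lookup, not an insert
    assert_eq!(a, b);
}
```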
@@ -770,12 +791,8 @@ pub struct ScheduleGraph {
     pub systems: SlotMap<SystemKey, SystemNode>,
     /// List of conditions for each system, in the same order as `systems`
     pub system_conditions: SecondaryMap<SystemKey, Vec<ConditionWithAccess>>,
-    /// List of system sets in the schedule
-    system_sets: SlotMap<SystemSetKey, SystemSetNode>,
-    /// List of conditions for each system set, in the same order as `system_sets`
-    system_set_conditions: SecondaryMap<SystemSetKey, Vec<ConditionWithAccess>>,
-    /// Map from system set to node id
-    system_set_ids: HashMap<InternedSystemSet, SystemSetKey>,
+    /// Data about system sets in the schedule
+    system_sets: SystemSets,
     /// Systems that have not been initialized yet; for system sets, we store the index of the first uninitialized condition
     /// (all the conditions after that index still need to be initialized)
     uninit: Vec<UninitializedId>,
@@ -800,9 +817,7 @@ impl ScheduleGraph {
         Self {
             systems: SlotMap::with_key(),
             system_conditions: SecondaryMap::new(),
-            system_sets: SlotMap::with_key(),
-            system_set_conditions: SecondaryMap::new(),
-            system_set_ids: HashMap::default(),
+            system_sets: SystemSets::default(),
             uninit: Vec::new(),
             hierarchy: Dag::new(),
             dependency: Dag::new(),
@@ -826,7 +841,7 @@ impl ScheduleGraph {

     /// Returns `true` if the given system set is part of the graph. Otherwise, returns `false`.
     pub fn contains_set(&self, set: impl SystemSet) -> bool {
-        self.system_set_ids.contains_key(&set.intern())
+        self.system_sets.ids.contains_key(&set.intern())
     }

     /// Returns the system at the given [`NodeId`].
@@ -840,7 +855,7 @@ impl ScheduleGraph {

     /// Returns the set at the given [`NodeId`], if it exists.
     pub fn get_set_at(&self, key: SystemSetKey) -> Option<&dyn SystemSet> {
-        self.system_sets.get(key).map(|set| &*set.inner)
+        self.system_sets.sets.get(key).map(|set| &*set.inner)
     }

     /// Returns the set at the given [`NodeId`].
@@ -854,7 +869,7 @@ impl ScheduleGraph {

     /// Returns the conditions for the set at the given [`SystemSetKey`], if it exists.
     pub fn get_set_conditions_at(&self, key: SystemSetKey) -> Option<&[ConditionWithAccess]> {
-        self.system_set_conditions.get(key).map(Vec::as_slice)
+        self.system_sets.conditions.get(key).map(Vec::as_slice)
     }

     /// Returns the conditions for the set at the given [`SystemSetKey`].
@@ -882,9 +897,9 @@ impl ScheduleGraph {
     pub fn system_sets(
         &self,
     ) -> impl Iterator<Item = (SystemSetKey, &dyn SystemSet, &[ConditionWithAccess])> {
-        self.system_sets.iter().filter_map(|(key, set_node)| {
+        self.system_sets.sets.iter().filter_map(|(key, set_node)| {
             let set = &*set_node.inner;
-            let conditions = self.system_set_conditions.get(key)?.as_slice();
+            let conditions = self.system_sets.conditions.get(key)?.as_slice();
             Some((key, set, conditions))
         })
     }
@@ -946,7 +961,7 @@ impl ScheduleGraph {
                 }
                 let mut set_config = InternedSystemSet::into_config(set.intern());
                 set_config.conditions.extend(collective_conditions);
-                self.configure_set_inner(set_config).unwrap();
+                self.configure_set_inner(set_config);
             }
         }
     }
@@ -1047,10 +1062,7 @@ impl ScheduleGraph {
     }

     /// Add a [`ScheduleConfig`] to the graph, including its dependencies and conditions.
-    fn add_system_inner(
-        &mut self,
-        config: ScheduleConfig<ScheduleSystem>,
-    ) -> Result<NodeId, ScheduleBuildError> {
+    fn add_system_inner(&mut self, config: ScheduleConfig<ScheduleSystem>) -> SystemKey {
         let key = self.systems.insert(SystemNode::new(config.node));
         self.system_conditions.insert(
             key,
@@ -1064,9 +1076,9 @@ impl ScheduleGraph {
         self.uninit.push(UninitializedId::System(key));

         // graph updates are immediate
-        self.update_graphs(NodeId::System(key), config.metadata)?;
+        self.update_graphs(NodeId::System(key), config.metadata);

-        Ok(NodeId::System(key))
+        key
     }

     #[track_caller]
@@ -1075,39 +1087,26 @@ impl ScheduleGraph {
     }

     /// Add a single `ScheduleConfig` to the graph, including its dependencies and conditions.
-    fn configure_set_inner(
-        &mut self,
-        set: ScheduleConfig<InternedSystemSet>,
-    ) -> Result<NodeId, ScheduleBuildError> {
+    fn configure_set_inner(&mut self, set: ScheduleConfig<InternedSystemSet>) -> SystemSetKey {
         let ScheduleConfig {
             node: set,
             metadata,
             conditions,
         } = set;

-        let key = match self.system_set_ids.get(&set) {
-            Some(&id) => id,
-            None => self.add_set(set),
-        };
+        let key = self.system_sets.get_or_add_set(set);

         // graph updates are immediate
-        self.update_graphs(NodeId::Set(key), metadata)?;
+        self.update_graphs(NodeId::Set(key), metadata);

         // system init has to be deferred (need `&mut World`)
-        let system_set_conditions = self.system_set_conditions.entry(key).unwrap().or_default();
+        let system_set_conditions = self.system_sets.conditions.entry(key).unwrap().or_default();
         self.uninit.push(UninitializedId::Set {
             key,
             first_uninit_condition: system_set_conditions.len(),
         });
         system_set_conditions.extend(conditions.into_iter().map(ConditionWithAccess::new));

-        Ok(NodeId::Set(key))
-    }
-
-    fn add_set(&mut self, set: InternedSystemSet) -> SystemSetKey {
-        let key = self.system_sets.insert(SystemSetNode::new(set));
-        self.system_set_conditions.insert(key, Vec::new());
-        self.system_set_ids.insert(set, key);
         key
     }

@@ -1117,78 +1116,8 @@ impl ScheduleGraph {
         AnonymousSet::new(id)
     }

-    /// Check that no set is included in itself.
-    /// Add all the sets from the [`GraphInfo`]'s hierarchy to the graph.
-    fn check_hierarchy_sets(
-        &mut self,
-        id: NodeId,
-        graph_info: &GraphInfo,
-    ) -> Result<(), ScheduleBuildError> {
-        for &set in &graph_info.hierarchy {
-            if let Some(&set_id) = self.system_set_ids.get(&set) {
-                if let NodeId::Set(key) = id
-                    && set_id == key
-                {
-                    {
-                        return Err(ScheduleBuildError::HierarchyLoop(
-                            self.get_node_name(&NodeId::Set(key)),
-                        ));
-                    }
-                }
-            } else {
-                // If the set is not in the graph, we add it
-                self.add_set(set);
-            }
-        }
-
-        Ok(())
-    }
-
-    /// Checks that no system set is dependent on itself.
-    /// Add all the sets from the [`GraphInfo`]'s dependencies to the graph.
-    fn check_edges(
-        &mut self,
-        id: NodeId,
-        graph_info: &GraphInfo,
-    ) -> Result<(), ScheduleBuildError> {
-        for Dependency { set, .. } in &graph_info.dependencies {
-            if let Some(&set_id) = self.system_set_ids.get(set) {
-                if let NodeId::Set(key) = id
-                    && set_id == key
-                {
-                    return Err(ScheduleBuildError::DependencyLoop(
-                        self.get_node_name(&NodeId::Set(key)),
-                    ));
-                }
-            } else {
-                // If the set is not in the graph, we add it
-                self.add_set(*set);
-            }
-        }
-
-        Ok(())
-    }
-
-    /// Add all the sets from the [`GraphInfo`]'s ambiguity to the graph.
-    fn add_ambiguities(&mut self, graph_info: &GraphInfo) {
-        if let Ambiguity::IgnoreWithSet(ambiguous_with) = &graph_info.ambiguous_with {
-            for set in ambiguous_with {
-                if !self.system_set_ids.contains_key(set) {
-                    self.add_set(*set);
-                }
-            }
-        }
-    }
-
     /// Update the internal graphs (hierarchy, dependency, ambiguity) by adding a single [`GraphInfo`]
-    fn update_graphs(
-        &mut self,
-        id: NodeId,
-        graph_info: GraphInfo,
-    ) -> Result<(), ScheduleBuildError> {
-        self.check_hierarchy_sets(id, &graph_info)?;
-        self.check_edges(id, &graph_info)?;
-        self.add_ambiguities(&graph_info);
+    fn update_graphs(&mut self, id: NodeId, graph_info: GraphInfo) {
         self.changed = true;

         let GraphInfo {
@@ -1201,16 +1130,22 @@ impl ScheduleGraph {
         self.hierarchy.graph.add_node(id);
         self.dependency.graph.add_node(id);

-        for key in sets.into_iter().map(|set| self.system_set_ids[&set]) {
+        for key in sets
+            .into_iter()
+            .map(|set| self.system_sets.get_or_add_set(set))
+        {
             self.hierarchy.graph.add_edge(NodeId::Set(key), id);

             // ensure set also appears in dependency graph
             self.dependency.graph.add_node(NodeId::Set(key));
         }

-        for (kind, key, options) in dependencies
-            .into_iter()
-            .map(|Dependency { kind, set, options }| (kind, self.system_set_ids[&set], options))
+        for (kind, key, options) in
+            dependencies
+                .into_iter()
+                .map(|Dependency { kind, set, options }| {
+                    (kind, self.system_sets.get_or_add_set(set), options)
+                })
         {
             let (lhs, rhs) = match kind {
                 DependencyKind::Before => (id, NodeId::Set(key)),
@@ -1230,7 +1165,7 @@ impl ScheduleGraph {
             Ambiguity::IgnoreWithSet(ambiguous_with) => {
                 for key in ambiguous_with
                     .into_iter()
-                    .map(|set| self.system_set_ids[&set])
+                    .map(|set| self.system_sets.get_or_add_set(set))
                 {
                     self.ambiguous_with.add_edge(id, NodeId::Set(key));
                 }
@@ -1239,8 +1174,6 @@ impl ScheduleGraph {
                 self.ambiguous_with_all.insert(id);
             }
         }
-
-        Ok(())
     }

     /// Initializes any newly-added systems and conditions by calling [`System::initialize`](crate::system::System)
@@ -1258,7 +1191,7 @@ impl ScheduleGraph {
                 key,
                 first_uninit_condition,
             } => {
-                for condition in self.system_set_conditions[key]
+                for condition in self.system_sets.conditions[key]
                     .iter_mut()
                     .skip(first_uninit_condition)
                 {
@@ -1358,9 +1291,9 @@ impl ScheduleGraph {
         HashMap<SystemSetKey, HashSet<SystemKey>>,
     ) {
         let mut set_systems: HashMap<SystemSetKey, Vec<SystemKey>> =
-            HashMap::with_capacity_and_hasher(self.system_sets.len(), Default::default());
+            HashMap::with_capacity_and_hasher(self.system_sets.sets.len(), Default::default());
         let mut set_system_sets: HashMap<SystemSetKey, HashSet<SystemKey>> =
-            HashMap::with_capacity_and_hasher(self.system_sets.len(), Default::default());
+            HashMap::with_capacity_and_hasher(self.system_sets.sets.len(), Default::default());
         for &id in hierarchy_topsort.iter().rev() {
             let NodeId::Set(set_key) = id else {
                 continue;
@@ -1559,7 +1492,7 @@ impl ScheduleGraph {
                 // ignore system sets that have no conditions
                 // ignore system type sets (already covered, they don't have conditions)
                 let key = id.as_set()?;
-                (!self.system_set_conditions[key].is_empty()).then_some((i, key))
+                (!self.system_sets.conditions[key].is_empty()).then_some((i, key))
             })
             .unzip();

@@ -1659,7 +1592,7 @@ impl ScheduleGraph {
             .drain(..)
             .zip(schedule.set_conditions.drain(..))
         {
-            self.system_set_conditions[key] = conditions;
+            self.system_sets.conditions[key] = conditions;
         }

         *schedule = self.build_schedule(world, schedule_label, ignored_ambiguities)?;
@@ -1673,7 +1606,7 @@ impl ScheduleGraph {
         }

         for &key in &schedule.set_ids {
-            let conditions = core::mem::take(&mut self.system_set_conditions[key]);
+            let conditions = core::mem::take(&mut self.system_sets.conditions[key]);
             schedule.set_conditions.push(conditions);
         }

@@ -1700,13 +1633,13 @@ trait ProcessScheduleConfig: Schedulable + Sized {

 impl ProcessScheduleConfig for ScheduleSystem {
     fn process_config(schedule_graph: &mut ScheduleGraph, config: ScheduleConfig<Self>) -> NodeId {
-        schedule_graph.add_system_inner(config).unwrap()
+        NodeId::System(schedule_graph.add_system_inner(config))
     }
 }

 impl ProcessScheduleConfig for InternedSystemSet {
     fn process_config(schedule_graph: &mut ScheduleGraph, config: ScheduleConfig<Self>) -> NodeId {
-        schedule_graph.configure_set_inner(config).unwrap()
+        NodeId::Set(schedule_graph.configure_set_inner(config))
     }
 }

@@ -1748,7 +1681,7 @@ impl ScheduleGraph {
                 }
             }
             NodeId::Set(key) => {
-                let set = &self.system_sets[key];
+                let set = &self.system_sets.sets[key];
                 if set.is_anonymous() {
                     self.anonymous_set_name(id)
                 } else {
@@ -1833,6 +1766,17 @@ impl ScheduleGraph {
         graph: &DiGraph,
         report: ReportCycles,
     ) -> Result<Vec<NodeId>, ScheduleBuildError> {
+        // Check explicitly for self-edges.
+        // `iter_sccs` won't report them as cycles because they still form components of one node.
+        if let Some((node, _)) = graph.all_edges().find(|(left, right)| left == right) {
+            let name = self.get_node_name(&node);
+            let error = match report {
+                ReportCycles::Hierarchy => ScheduleBuildError::HierarchyLoop(name),
+                ReportCycles::Dependency => ScheduleBuildError::DependencyLoop(name),
+            };
+            return Err(error);
+        }
+
         // Tarjan's SCC algorithm returns elements in *reverse* topological order.
         let mut top_sorted_nodes = Vec::with_capacity(graph.node_count());
         let mut sccs_with_cycles = Vec::new();
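The self-edge check added above runs before the SCC pass because a node with an edge to itself is still a one-node strongly connected component, so Tarjan's algorithm alone would not flag it as a cycle. A standalone sketch of the same idea on a plain edge list (hypothetical helper, not Bevy's graph API):

```rust
/// Returns the first node that has an edge to itself, if any.
/// A one-node SCC is not reported as a cycle by Tarjan's algorithm,
/// so self-loops have to be caught separately, as in the hunk above.
fn find_self_loop(edges: &[(u32, u32)]) -> Option<u32> {
    edges
        .iter()
        .find(|(from, to)| from == to)
        .map(|&(node, _)| node)
}

fn main() {
    let edges = [(0, 1), (1, 2), (2, 2)];
    assert_eq!(find_self_loop(&edges), Some(2));
    assert_eq!(find_self_loop(&[(0, 1)]), None);
}
```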
@@ -1963,7 +1907,7 @@ impl ScheduleGraph {
         set_systems: &HashMap<SystemSetKey, Vec<SystemKey>>,
     ) -> Result<(), ScheduleBuildError> {
         for (&key, systems) in set_systems {
-            let set = &self.system_sets[key];
+            let set = &self.system_sets.sets[key];
             if set.is_system_type() {
                 let instances = systems.len();
                 let ambiguous_with = self.ambiguous_with.edges(NodeId::Set(key));
@@ -2070,7 +2014,7 @@ impl ScheduleGraph {
     fn names_of_sets_containing_node(&self, id: &NodeId) -> Vec<String> {
         let mut sets = <HashSet<_>>::default();
         self.traverse_sets_containing_node(*id, &mut |key| {
-            !self.system_sets[key].is_system_type() && sets.insert(key)
+            !self.system_sets.sets[key].is_system_type() && sets.insert(key)
         });
         let mut sets: Vec<_> = sets
             .into_iter()
@@ -2,17 +2,13 @@
 use {
     super::{Measured2d, Triangle2d},
     alloc::{collections::BTreeMap, vec::Vec},
+    core::cmp::Ordering,
 };

-use core::cmp::Ordering;
-
 use crate::Vec2;

-#[cfg_attr(
-    not(feature = "alloc"),
-    expect(dead_code, reason = "this type is only used with the alloc feature")
-)]
 #[derive(Debug, Clone, Copy)]
+#[cfg(feature = "alloc")]
 enum Endpoint {
     Left,
     Right,
@@ -24,22 +20,16 @@ enum Endpoint {
 /// If `e1.position().x == e2.position().x` the events are ordered from bottom to top.
 ///
 /// This is the order expected by the [`SweepLine`].
+#[cfg(feature = "alloc")]
 #[derive(Debug, Clone, Copy)]
-#[cfg_attr(
-    not(feature = "alloc"),
-    allow(dead_code, reason = "this type is only used with the alloc feature")
-)]
 struct SweepLineEvent {
     segment: Segment,
     /// Type of the vertex (left or right)
     endpoint: Endpoint,
 }

+#[cfg(feature = "alloc")]
 impl SweepLineEvent {
-    #[cfg_attr(
-        not(feature = "alloc"),
-        allow(dead_code, reason = "this type is only used with the alloc feature")
-    )]
     fn position(&self) -> Vec2 {
         match self.endpoint {
             Endpoint::Left => self.segment.left,
@@ -48,20 +38,24 @@ impl SweepLineEvent {
     }
 }

+#[cfg(feature = "alloc")]
 impl PartialEq for SweepLineEvent {
     fn eq(&self, other: &Self) -> bool {
         self.position() == other.position()
     }
 }

+#[cfg(feature = "alloc")]
 impl Eq for SweepLineEvent {}

+#[cfg(feature = "alloc")]
 impl PartialOrd for SweepLineEvent {
     fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
         Some(self.cmp(other))
     }
 }

+#[cfg(feature = "alloc")]
 impl Ord for SweepLineEvent {
     fn cmp(&self, other: &Self) -> Ordering {
         xy_order(self.position(), other.position())
@@ -69,10 +63,7 @@ impl Ord for SweepLineEvent {
 }

 /// Orders 2D points according to the order expected by the sweep line and event queue from -X to +X and then -Y to Y.
-#[cfg_attr(
-    not(feature = "alloc"),
-    allow(dead_code, reason = "this type is only used with the alloc feature")
-)]
+#[cfg(feature = "alloc")]
 fn xy_order(a: Vec2, b: Vec2) -> Ordering {
     a.x.total_cmp(&b.x).then_with(|| a.y.total_cmp(&b.y))
 }
@@ -129,26 +120,31 @@ impl EventQueue {
 /// Segments are ordered from bottom to top based on their left vertices if possible.
 /// If their y values are identical, the segments are ordered based on the y values of their right vertices.
 #[derive(Debug, Clone, Copy)]
+#[cfg(feature = "alloc")]
 struct Segment {
     edge_index: usize,
     left: Vec2,
     right: Vec2,
 }

+#[cfg(feature = "alloc")]
 impl PartialEq for Segment {
     fn eq(&self, other: &Self) -> bool {
         self.edge_index == other.edge_index
     }
 }

+#[cfg(feature = "alloc")]
 impl Eq for Segment {}

+#[cfg(feature = "alloc")]
 impl PartialOrd for Segment {
     fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
         Some(self.cmp(other))
     }
 }

+#[cfg(feature = "alloc")]
 impl Ord for Segment {
     fn cmp(&self, other: &Self) -> Ordering {
         self.left
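The sweep-line types above order floating-point endpoints with `total_cmp`, which yields a total order even though `f32` only implements `PartialOrd`. A small standalone sketch of that ordering rule, mirroring `xy_order` with plain `std` types:

```rust
use std::cmp::Ordering;

/// Orders 2D points from -X to +X, then -Y to +Y: the order in which a sweep
/// line moving along +X wants to process endpoints.
fn xy_order(a: (f32, f32), b: (f32, f32)) -> Ordering {
    a.0.total_cmp(&b.0).then_with(|| a.1.total_cmp(&b.1))
}

fn main() {
    let mut points = vec![(1.0, 2.0), (0.5, 3.0), (1.0, -1.0)];
    points.sort_by(|a, b| xy_order(*a, *b));
    assert_eq!(points, vec![(0.5, 3.0), (1.0, -1.0), (1.0, 2.0)]);
}
```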
@@ -159,10 +155,7 @@ impl Ord for Segment {
 }

 /// Holds information about which segment is above and which is below a given [`Segment`]
-#[cfg_attr(
-    not(feature = "alloc"),
-    expect(dead_code, reason = "this type is only used with the alloc feature")
-)]
+#[cfg(feature = "alloc")]
 #[derive(Debug, Clone, Copy)]
 struct SegmentOrder {
     above: Option<usize>,
@@ -173,8 +166,8 @@ struct SegmentOrder {
 ///
 /// It can be thought of as a vertical line sweeping from -X to +X across the polygon that keeps track of the order of the segments
 /// the sweep line is intersecting at any given moment.
-#[cfg(feature = "alloc")]
 #[derive(Debug, Clone)]
+#[cfg(feature = "alloc")]
 struct SweepLine<'a> {
     vertices: &'a [Vec2],
     tree: BTreeMap<Segment, SegmentOrder>,
@@ -746,11 +746,11 @@ fn early_sweep_material_instances<M>(
 /// preparation for a new frame.
 pub(crate) fn late_sweep_material_instances(
     mut material_instances: ResMut<RenderMaterialInstances>,
-    mut removed_visibilities_query: Extract<RemovedComponents<ViewVisibility>>,
+    mut removed_meshes_query: Extract<RemovedComponents<Mesh3d>>,
 ) {
     let last_change_tick = material_instances.current_change_tick;

-    for entity in removed_visibilities_query.read() {
+    for entity in removed_meshes_query.read() {
         if let Entry::Occupied(occupied_entry) = material_instances.instances.entry(entity.into()) {
             // Only sweep the entry if it wasn't updated this frame. It's
             // possible that a `ViewVisibility` component was removed and
@@ -1452,8 +1452,6 @@ pub fn extract_meshes_for_gpu_building(
         >,
     >,
     all_meshes_query: Extract<Query<GpuMeshExtractionQuery>>,
-    mut removed_visibilities_query: Extract<RemovedComponents<ViewVisibility>>,
-    mut removed_global_transforms_query: Extract<RemovedComponents<GlobalTransform>>,
     mut removed_meshes_query: Extract<RemovedComponents<Mesh3d>>,
     gpu_culling_query: Extract<Query<(), (With<Camera>, Without<NoIndirectDrawing>)>>,
     meshes_to_reextract_next_frame: ResMut<MeshesToReextractNextFrame>,
@@ -1509,11 +1507,7 @@ pub fn extract_meshes_for_gpu_building(
     }

     // Also record info about each mesh that became invisible.
-    for entity in removed_visibilities_query
-        .read()
-        .chain(removed_global_transforms_query.read())
-        .chain(removed_meshes_query.read())
-    {
+    for entity in removed_meshes_query.read() {
         // Only queue a mesh for removal if we didn't pick it up above.
         // It's possible that a necessary component was removed and re-added in
         // the same frame.
@@ -62,6 +62,22 @@ fn fetch_point_shadow(light_id: u32, frag_position: vec4<f32>, surface_normal: v
     return sample_shadow_cubemap(frag_ls * flip_z, distance_to_light, depth, light_id);
 }

+// this method of constructing a basis from a vec3 is used by glam::Vec3::any_orthonormal_pair
+// so we reproduce it here to avoid a mismatch if glam changes. we also switch the handedness
+// the construction of the orthonormal basis up and right vectors needs to precisely mirror the code
+// in bevy_light/spot_light.rs:spot_light_world_from_view
+fn spot_light_world_from_view(fwd: vec3<f32>) -> mat3x3<f32> {
+    var sign = -1.0;
+    if (fwd.z >= 0.0) {
+        sign = 1.0;
+    }
+    let a = -1.0 / (fwd.z + sign);
+    let b = fwd.x * fwd.y * a;
+    let up_dir = vec3<f32>(1.0 + sign * fwd.x * fwd.x * a, sign * b, -sign * fwd.x);
+    let right_dir = vec3<f32>(-b, -sign - fwd.y * fwd.y * a, fwd.y);
+    return mat3x3<f32>(right_dir, up_dir, fwd);
+}
+
 fn fetch_spot_shadow(
     light_id: u32,
     frag_position: vec4<f32>,
@@ -88,17 +104,7 @@ fn fetch_spot_shadow(
         + ((*light).shadow_depth_bias * normalize(surface_to_light))
         + (surface_normal.xyz * (*light).shadow_normal_bias) * distance_to_light;

-    // the construction of the up and right vectors needs to precisely mirror the code
-    // in render/light.rs:spot_light_view_matrix
-    var sign = -1.0;
-    if (fwd.z >= 0.0) {
-        sign = 1.0;
-    }
-    let a = -1.0 / (fwd.z + sign);
-    let b = fwd.x * fwd.y * a;
-    let up_dir = vec3<f32>(1.0 + sign * fwd.x * fwd.x * a, sign * b, -sign * fwd.x);
-    let right_dir = vec3<f32>(-b, -sign - fwd.y * fwd.y * a, fwd.y);
-    let light_inv_rot = mat3x3<f32>(right_dir, up_dir, fwd);
+    let light_inv_rot = spot_light_world_from_view(fwd);

     // because the matrix is a pure rotation matrix, the inverse is just the transpose, and to calculate
     // the product of the transpose with a vector we can just post-multiply instead of pre-multiplying.
@@ -309,7 +309,6 @@ pub fn extract_skins(
     skinned_mesh_inverse_bindposes: Extract<Res<Assets<SkinnedMeshInverseBindposes>>>,
     changed_transforms: Extract<Query<(Entity, &GlobalTransform), Changed<GlobalTransform>>>,
     joints: Extract<Query<&GlobalTransform>>,
-    mut removed_visibilities_query: Extract<RemovedComponents<ViewVisibility>>,
     mut removed_skinned_meshes_query: Extract<RemovedComponents<SkinnedMesh>>,
 ) {
     let skin_uniforms = skin_uniforms.into_inner();
@@ -335,10 +334,7 @@ pub fn extract_skins(
     );

     // Delete skins that became invisible.
-    for skinned_mesh_entity in removed_visibilities_query
-        .read()
-        .chain(removed_skinned_meshes_query.read())
-    {
+    for skinned_mesh_entity in removed_skinned_meshes_query.read() {
         // Only remove a skin if we didn't pick it up in `add_or_delete_skins`.
         // It's possible that a necessary component was removed and re-added in
         // the same frame.
@@ -30,6 +30,7 @@ bevy_platform = { path = "../bevy_platform", version = "0.17.0-dev", default-fea
   "serialize",
 ] }
 bevy_asset = { path = "../bevy_asset", version = "0.17.0-dev", optional = true }
+bevy_log = { path = "../bevy_log", version = "0.17.0-dev" }

 # other
 anyhow = "1"
@@ -38,7 +39,6 @@ serde = { version = "1", features = ["derive"] }
 serde_json = "1.0.140"
 http-body-util = "0.1"
 async-channel = "2"
-bevy_log = { version = "0.17.0-dev", path = "../bevy_log" }

 # dependencies that will not compile on wasm
 [target.'cfg(not(target_family = "wasm"))'.dependencies]
@@ -14,7 +14,7 @@ proc-macro = true
 [dependencies]
 bevy_macro_utils = { path = "../../bevy_macro_utils", version = "0.17.0-dev" }

-syn = "2.0"
+syn = { version = "2.0", features = ["full"] }
 proc-macro2 = "1.0"
 quote = "1.0"

@@ -331,7 +331,6 @@ pub fn extract_mesh_materials_2d<M: Material2d>(
             Or<(Changed<ViewVisibility>, Changed<MeshMaterial2d<M>>)>,
         >,
     >,
-    mut removed_visibilities_query: Extract<RemovedComponents<ViewVisibility>>,
     mut removed_materials_query: Extract<RemovedComponents<MeshMaterial2d<M>>>,
 ) {
     for (entity, view_visibility, material) in &changed_meshes_query {
@@ -342,10 +341,7 @@ pub fn extract_mesh_materials_2d<M: Material2d>(
         }
     }

-    for entity in removed_visibilities_query
-        .read()
-        .chain(removed_materials_query.read())
-    {
+    for entity in removed_materials_query.read() {
         // Only queue a mesh for removal if we didn't pick it up above.
         // It's possible that a necessary component was removed and re-added in
         // the same frame.
@@ -4,7 +4,7 @@ use bevy::{
     color::palettes::basic::*,
     core_widgets::{
         Callback, CoreButton, CoreCheckbox, CoreRadio, CoreRadioGroup, CoreSlider,
-        CoreSliderDragState, CoreSliderThumb, CoreWidgetsPlugin, SliderRange, SliderValue,
+        CoreSliderDragState, CoreSliderThumb, CoreWidgetsPlugins, SliderRange, SliderValue,
         TrackClick,
     },
     input_focus::{
@@ -21,7 +21,7 @@ fn main() {
     App::new()
         .add_plugins((
             DefaultPlugins,
-            CoreWidgetsPlugin,
+            CoreWidgetsPlugins,
             InputDispatchPlugin,
             TabNavigationPlugin,
         ))
@@ -3,7 +3,7 @@
 use bevy::{
     color::palettes::basic::*,
     core_widgets::{
-        Callback, CoreButton, CoreCheckbox, CoreSlider, CoreSliderThumb, CoreWidgetsPlugin,
+        Callback, CoreButton, CoreCheckbox, CoreSlider, CoreSliderThumb, CoreWidgetsPlugins,
         SliderRange, SliderValue,
     },
     ecs::system::SystemId,
@@ -21,7 +21,7 @@ fn main() {
     App::new()
         .add_plugins((
             DefaultPlugins,
-            CoreWidgetsPlugin,
+            CoreWidgetsPlugins,
             InputDispatchPlugin,
             TabNavigationPlugin,
         ))
@@ -1,7 +1,7 @@
 //! This example shows off the various Bevy Feathers widgets.

 use bevy::{
-    core_widgets::{Callback, CoreRadio, CoreRadioGroup, CoreWidgetsPlugin, SliderStep},
+    core_widgets::{Callback, CoreRadio, CoreRadioGroup, CoreWidgetsPlugins, SliderStep},
     feathers::{
         controls::{
             button, checkbox, radio, slider, toggle_switch, ButtonProps, ButtonVariant,
@@ -25,7 +25,7 @@ fn main() {
     App::new()
         .add_plugins((
             DefaultPlugins,
-            CoreWidgetsPlugin,
+            CoreWidgetsPlugins,
             InputDispatchPlugin,
             TabNavigationPlugin,
             FeathersPlugin,
@@ -1,7 +1,7 @@
 ---
 title: Headless Widgets
 authors: ["@viridia", "@ickshonpe", "@alice-i-cecile"]
-pull_requests: [19366, 19584, 19665, 19778, 19803]
+pull_requests: [19366, 19584, 19665, 19778, 19803, 20036]
 ---

 Bevy's `Button` and `Interaction` components have been around for a long time. Unfortunately