Fix the clippy::explicit_iter_loop lint (#9834)
# Objective
Replace instances of
```rust
for x in collection.iter{_mut}() {
```
with
```rust
for x in &{mut} collection {
```
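As a minimal sketch of why the replacement is equivalent (using a plain `Vec<i32>` for illustration, not code from this commit): `&collection` and `&mut collection` can be used directly in a `for` loop because `&Vec<T>` and `&mut Vec<T>` implement `IntoIterator`, yielding `&T` and `&mut T` respectively.
```rust
fn main() {
    let mut values = vec![1, 2, 3];

    // Flagged by clippy::explicit_iter_loop when the lint is enabled:
    for v in values.iter_mut() {
        *v += 1;
    }

    // Preferred form after this change; iterates by `&mut i32` just the same:
    for v in &mut values {
        *v += 1;
    }

    assert_eq!(values, vec![3, 4, 5]);
}
```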
This also changes CI so that it no longer suppresses this lint. Note that because this lint is part of clippy's pedantic group and is therefore allow-by-default, explicitly suppressing it was probably unnecessary in the first place.
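As a hedged sketch of the CI side of this: lint levels like the ones in the `CLIPPY_FLAGS` array are typically forwarded to clippy after `--` on the `cargo clippy` command line. This is not Bevy's actual CI tool, only an illustration of the mechanism, and the flag values below are a subset chosen for the example.
```rust
use std::process::Command;

fn main() {
    // Hypothetical flag list for illustration; the real list is the
    // CLIPPY_FLAGS array edited in this commit.
    let clippy_flags = [
        "-Aclippy::type_complexity",
        "-Wclippy::doc_markdown",
        "-Wclippy::redundant_else",
    ];

    // Everything after `--` is forwarded from cargo to clippy itself,
    // so `-A`/`-W` adjust lint levels for the whole workspace build.
    let status = Command::new("cargo")
        .args(["clippy", "--workspace", "--all-targets"])
        .arg("--")
        .args(clippy_flags)
        .status()
        .expect("failed to run cargo clippy");

    assert!(status.success(), "clippy reported errors");
}
```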
parent 07f61a1146
commit d5d355ae1f

```diff
@@ -243,7 +243,7 @@ impl App {
             let _bevy_main_update_span = info_span!("main app").entered();
             self.world.run_schedule(&*self.main_schedule_label);
         }
-        for (_label, sub_app) in self.sub_apps.iter_mut() {
+        for (_label, sub_app) in &mut self.sub_apps {
             #[cfg(feature = "trace")]
             let _sub_app_span = info_span!("sub app", name = ?_label).entered();
             sub_app.extract(&mut self.world);
@@ -43,7 +43,7 @@ fn derive_dependency_visitor_internal(
 ) -> Result<proc_macro2::TokenStream, syn::Error> {
     let mut field_visitors = Vec::new();
     if let Data::Struct(data_struct) = &ast.data {
-        for field in data_struct.fields.iter() {
+        for field in &data_struct.fields {
             if field
                 .attrs
                 .iter()
@@ -233,7 +233,7 @@ impl VisitAssetDependencies for Option<UntypedHandle> {
 
 impl<A: Asset> VisitAssetDependencies for Vec<Handle<A>> {
     fn visit_dependencies(&self, visit: &mut impl FnMut(UntypedAssetId)) {
-        for dependency in self.iter() {
+        for dependency in self {
             visit(dependency.id().untyped());
         }
     }
@@ -241,7 +241,7 @@ impl<A: Asset> VisitAssetDependencies for Vec<Handle<A>> {
 
 impl VisitAssetDependencies for Vec<UntypedHandle> {
     fn visit_dependencies(&self, visit: &mut impl FnMut(UntypedAssetId)) {
-        for dependency in self.iter() {
+        for dependency in self {
             visit(dependency.id());
         }
     }
@@ -241,7 +241,7 @@ impl<'a> ProcessContext<'a> {
                 true,
             )
             .await?;
-        for (path, full_hash) in loaded_asset.loader_dependencies.iter() {
+        for (path, full_hash) in &loaded_asset.loader_dependencies {
             self.new_processed_info
                 .process_dependencies
                 .push(ProcessDependencyInfo {
@@ -37,7 +37,7 @@ pub fn derive_bundle(input: TokenStream) -> TokenStream {
 
     let mut field_kind = Vec::with_capacity(named_fields.len());
 
-    for field in named_fields.iter() {
+    for field in named_fields {
         for attr in field
             .attrs
             .iter()
@@ -95,7 +95,7 @@ impl Schedules {
         let _all_span = info_span!("check stored schedule ticks").entered();
         // label used when trace feature is enabled
         #[allow(unused_variables)]
-        for (label, schedule) in self.inner.iter_mut() {
+        for (label, schedule) in &mut self.inner {
             #[cfg(feature = "trace")]
             let name = format!("{label:?}");
             #[cfg(feature = "trace")]
@@ -106,7 +106,7 @@ impl Schedules {
 
     /// Applies the provided [`ScheduleBuildSettings`] to all schedules.
     pub fn configure_schedules(&mut self, schedule_build_settings: ScheduleBuildSettings) {
-        for (_, schedule) in self.inner.iter_mut() {
+        for (_, schedule) in &mut self.inner {
             schedule.set_build_settings(schedule_build_settings.clone());
         }
     }
@@ -297,13 +297,13 @@ impl Schedule {
         }
 
         for conditions in &mut self.executable.system_conditions {
-            for system in conditions.iter_mut() {
+            for system in conditions {
                 system.check_change_tick(change_tick);
             }
         }
 
         for conditions in &mut self.executable.set_conditions {
-            for system in conditions.iter_mut() {
+            for system in conditions {
                 system.check_change_tick(change_tick);
             }
         }
@@ -973,7 +973,7 @@ impl ScheduleGraph {
         // have to do it like this to preserve transitivity
         let mut dependency_flattened = self.dependency.graph.clone();
         let mut temp = Vec::new();
-        for (&set, systems) in set_systems.iter() {
+        for (&set, systems) in set_systems {
             if systems.is_empty() {
                 for a in dependency_flattened.neighbors_directed(set, Direction::Incoming) {
                     for b in dependency_flattened.neighbors_directed(set, Direction::Outgoing) {
@@ -1456,7 +1456,7 @@ impl ScheduleGraph {
         dep_results: &CheckGraphResults<NodeId>,
         hier_results_connected: &HashSet<(NodeId, NodeId)>,
     ) -> Result<(), ScheduleBuildError> {
-        for &(a, b) in dep_results.connected.iter() {
+        for &(a, b) in &dep_results.connected {
             if hier_results_connected.contains(&(a, b)) || hier_results_connected.contains(&(b, a))
             {
                 let name_a = self.get_node_name(&a);
@@ -1474,7 +1474,7 @@ impl ScheduleGraph {
         set_system_bitsets: &HashMap<NodeId, FixedBitSet>,
     ) -> Result<(), ScheduleBuildError> {
         // check that there is no ordering between system sets that intersect
-        for (a, b) in dep_results_connected.iter() {
+        for (a, b) in dep_results_connected {
             if !(a.is_set() && b.is_set()) {
                 continue;
             }
@@ -1497,7 +1497,7 @@ impl ScheduleGraph {
         &self,
         set_systems: &HashMap<NodeId, Vec<NodeId>>,
     ) -> Result<(), ScheduleBuildError> {
-        for (&id, systems) in set_systems.iter() {
+        for (&id, systems) in set_systems {
             let set = &self.system_sets[id.index()];
             if set.is_system_type() {
                 let instances = systems.len();
@@ -422,7 +422,7 @@ pub fn update_directional_light_cascades(
         })
         .collect::<Vec<_>>();
 
-    for (transform, directional_light, cascades_config, mut cascades) in lights.iter_mut() {
+    for (transform, directional_light, cascades_config, mut cascades) in &mut lights {
         if !directional_light.shadows_enabled {
             continue;
         }
@@ -1882,7 +1882,7 @@ pub fn update_spot_light_frusta(
         Or<(Changed<GlobalTransform>, Changed<SpotLight>)>,
     >,
 ) {
-    for (entity, transform, spot_light, mut frustum) in views.iter_mut() {
+    for (entity, transform, spot_light, mut frustum) in &mut views {
         // The frusta are used for culling meshes to the light for shadow mapping
         // so if shadow mapping is disabled for this light, then the frusta are
         // not needed.
@@ -1969,7 +1969,7 @@ pub fn check_light_mesh_visibility(
     {
         // Re-use already allocated entries where possible.
        let mut views_to_remove = Vec::new();
-        for (view, cascade_view_entities) in visible_entities.entities.iter_mut() {
+        for (view, cascade_view_entities) in &mut visible_entities.entities {
             match frusta.frusta.get(view) {
                 Some(view_frusta) => {
                     cascade_view_entities.resize(view_frusta.len(), Default::default());
@@ -1980,7 +1980,7 @@ pub fn check_light_mesh_visibility(
                 None => views_to_remove.push(*view),
             };
         }
-        for (view, frusta) in frusta.frusta.iter() {
+        for (view, frusta) in &frusta.frusta {
             visible_entities
                 .entities
                 .entry(*view)
@@ -2017,7 +2017,7 @@ pub fn check_light_mesh_visibility(
 
             // If we have an aabb and transform, do frustum culling
             if let (Some(aabb), Some(transform)) = (maybe_aabb, maybe_transform) {
-                for (view, view_frusta) in frusta.frusta.iter() {
+                for (view, view_frusta) in &frusta.frusta {
                     let view_visible_entities = visible_entities
                         .entities
                         .get_mut(view)
@@ -2050,7 +2050,7 @@ pub fn check_light_mesh_visibility(
             }
         }
 
-        for (_, cascade_view_entities) in visible_entities.entities.iter_mut() {
+        for (_, cascade_view_entities) in &mut visible_entities.entities {
             cascade_view_entities.iter_mut().for_each(shrink_entities);
         }
     }
@@ -2153,7 +2153,7 @@ pub fn check_light_mesh_visibility(
             maybe_entity_mask,
             maybe_aabb,
             maybe_transform,
-        ) in visible_entity_query.iter_mut()
+        ) in &mut visible_entity_query
         {
             if !inherited_visibility.get() {
                 continue;
@@ -348,7 +348,7 @@ fn generate_impls(reflect_enum: &ReflectEnum, ref_index: &Ident, ref_name: &Iden
 ) -> Vec<proc_macro2::TokenStream> {
     let mut constructor_argument = Vec::new();
     let mut reflect_idx = 0;
-    for field in fields.iter() {
+    for field in fields {
         if field.attrs.ignore.is_ignored() {
             // Ignored field
             continue;
@@ -116,7 +116,7 @@ pub fn derive_as_bind_group(ast: syn::DeriveInput) -> Result<TokenStream> {
     };
 
     // Read field-level attributes
-    for field in fields.iter() {
+    for field in fields {
         // Search ahead for texture attributes so we can use them with any
         // corresponding sampler attribute.
         let mut tex_attrs = None;
@@ -150,7 +150,7 @@ impl RenderGraph {
         if let Some(node_state) = self.nodes.remove(&id) {
             // Remove all edges from other nodes to this one. Note that as we're removing this
             // node, we don't need to remove its input edges
-            for input_edge in node_state.edges.input_edges().iter() {
+            for input_edge in node_state.edges.input_edges() {
                 match input_edge {
                     Edge::SlotEdge { output_node, .. }
                     | Edge::NodeEdge {
@@ -165,7 +165,7 @@ impl RenderGraph {
             }
             // Remove all edges from this node to other nodes. Note that as we're removing this
             // node, we don't need to remove its output edges
-            for output_edge in node_state.edges.output_edges().iter() {
+            for output_edge in node_state.edges.output_edges() {
                 match output_edge {
                     Edge::SlotEdge {
                         output_node: _,
@@ -271,7 +271,7 @@ impl AssetLoader for ShaderLoader {
             };
 
             // collect file dependencies
-            for import in shader.imports.iter() {
+            for import in &shader.imports {
                 if let ShaderImport::AssetPath(asset_path) = import {
                     // TODO: should we just allow this handle to be dropped?
                     let _handle: Handle<Shader> = load_context.load(asset_path);
@@ -475,7 +475,7 @@ pub fn check_visibility(
             },
         );
 
-        for cell in thread_queues.iter_mut() {
+        for cell in &mut thread_queues {
            visible_entities.entities.append(cell.get_mut());
         }
     }
@@ -225,7 +225,7 @@ impl TextureAtlasBuilder {
 
         let mut texture_rects = Vec::with_capacity(rect_placements.packed_locations().len());
         let mut texture_ids = HashMap::default();
-        for (image_id, (_, packed_location)) in rect_placements.packed_locations().iter() {
+        for (image_id, (_, packed_location)) in rect_placements.packed_locations() {
             let texture = textures.get(*image_id).unwrap();
             let min = Vec2::new(packed_location.x() as f32, packed_location.y() as f32);
             let max = min
@@ -21,7 +21,7 @@ use bevy_transform::prelude::GlobalTransform;
 
 fn calc_name(texts: &Query<&Text>, children: &Children) -> Option<Box<str>> {
     let mut name = None;
-    for child in children.iter() {
+    for child in children {
         if let Ok(text) = texts.get(*child) {
             let values = text
                 .sections
@@ -155,7 +155,7 @@ pub fn ui_focus_system(
     let mouse_released =
         mouse_button_input.just_released(MouseButton::Left) || touches_input.any_just_released();
     if mouse_released {
-        for node in node_query.iter_mut() {
+        for node in &mut node_query {
             if let Some(mut interaction) = node.interaction {
                 if *interaction == Interaction::Pressed {
                     *interaction = Interaction::None;
@@ -12,7 +12,7 @@ pub fn print_ui_layout_tree(ui_surface: &UiSurface) {
         .iter()
         .map(|(entity, node)| (*node, *entity))
         .collect();
-    for (&entity, &node) in ui_surface.window_nodes.iter() {
+    for (&entity, &node) in &ui_surface.window_nodes {
         let mut out = String::new();
         print_node(
             ui_surface,
@@ -279,7 +279,7 @@ pub fn ui_layout_system(
         ui_surface.try_remove_measure(entity);
     }
 
-    for (entity, mut content_size) in measure_query.iter_mut() {
+    for (entity, mut content_size) in &mut measure_query {
         if let Some(measure_func) = content_size.measure_func.take() {
             ui_surface.update_measure(entity, measure_func);
         }
@@ -127,7 +127,7 @@ pub fn measure_text_system(
     #[allow(clippy::float_cmp)]
     if *last_scale_factor == scale_factor {
         // scale factor unchanged, only create new measure funcs for modified text
-        for (text, content_size, text_flags) in text_query.iter_mut() {
+        for (text, content_size, text_flags) in &mut text_query {
             if text.is_changed() || text_flags.needs_new_measure_func || content_size.is_added() {
                 create_text_measure(&fonts, scale_factor, text, content_size, text_flags);
             }
@@ -136,7 +136,7 @@ pub fn measure_text_system(
         // scale factor changed, create new measure funcs for all text
         *last_scale_factor = scale_factor;
 
-        for (text, content_size, text_flags) in text_query.iter_mut() {
+        for (text, content_size, text_flags) in &mut text_query {
             create_text_measure(&fonts, scale_factor, text, content_size, text_flags);
         }
     }
@@ -232,7 +232,7 @@ pub fn text_system(
     let inverse_scale_factor = scale_factor.recip();
     if *last_scale_factor == scale_factor {
         // Scale factor unchanged, only recompute text for modified text nodes
-        for (node, text, text_layout_info, text_flags) in text_query.iter_mut() {
+        for (node, text, text_layout_info, text_flags) in &mut text_query {
             if node.is_changed() || text_flags.needs_recompute {
                 queue_text(
                     &fonts,
@@ -255,7 +255,7 @@ pub fn text_system(
         // Scale factor changed, recompute text for all text nodes
        *last_scale_factor = scale_factor;
 
-        for (node, text, text_layout_info, text_flags) in text_query.iter_mut() {
+        for (node, text, text_layout_info, text_flags) in &mut text_query {
             queue_text(
                 &fonts,
                 &mut text_pipeline,
@@ -131,7 +131,7 @@ fn update_accessibility_nodes(
                 root_children.push(entity.to_node_id());
             }
             if let Some(children) = children {
-                for child in children.iter() {
+                for child in children {
                     if node_entities.get(*child).is_ok() {
                         node.push_child(child.to_node_id());
                     }
@@ -17,9 +17,8 @@ bitflags! {
     }
 }
 
-const CLIPPY_FLAGS: [&str; 8] = [
+const CLIPPY_FLAGS: [&str; 7] = [
     "-Aclippy::type_complexity",
-    "-Aclippy::explicit_iter_loop",
     "-Wclippy::doc_markdown",
     "-Wclippy::redundant_else",
     "-Wclippy::match_same_arms",
```