Improved UI camera mapping (#17244)

# Objective

Two more optimisations for UI extraction:
* We only need to query for the camera's render entity when the target
camera changes. If the target camera is the same as for the previous UI
node we can use the previous render entity.
* The cheap checks for visibility and zero size should be performed
before the camera queries.

## Solution
Add a new system param `UiCameraMap` that resolves the correct render
camera entity and only queries when necessary.

<img width="506" alt="tracee"
src="https://github.com/user-attachments/assets/f57d1e0d-f3a7-49ee-8287-4f01ffc8ba24"
/>

I don't like the `UiCameraMap` + `UiCameraMapper` implementation very
much, maybe someone else can suggest a better construction.

This is partly motivated by #16942 which adds further indirection and
these changes would ameliorate that performance regression.
This commit is contained in:
ickshonpe 2025-01-28 18:05:00 +00:00 committed by GitHub
parent a80263a5bf
commit 5bbcf646a7
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
6 changed files with 111 additions and 106 deletions

View File

@ -3,7 +3,7 @@
use core::{hash::Hash, ops::Range};
use crate::{
BoxShadow, BoxShadowSamples, CalculatedClip, ComputedNode, DefaultUiCamera, RenderUiSystem,
BoxShadow, BoxShadowSamples, CalculatedClip, ComputedNode, RenderUiSystem,
ResolvedBorderRadius, TransparentUi, UiTargetCamera, Val,
};
use bevy_app::prelude::*;
@ -27,14 +27,14 @@ use bevy_render::{
render_phase::*,
render_resource::{binding_types::uniform_buffer, *},
renderer::{RenderDevice, RenderQueue},
sync_world::{RenderEntity, TemporaryRenderEntity},
sync_world::TemporaryRenderEntity,
view::*,
Extract, ExtractSchedule, Render, RenderSet,
};
use bevy_transform::prelude::GlobalTransform;
use bytemuck::{Pod, Zeroable};
use super::{stack_z_offsets, UiCameraView, QUAD_INDICES, QUAD_VERTEX_POSITIONS};
use super::{stack_z_offsets, UiCameraMap, UiCameraView, QUAD_INDICES, QUAD_VERTEX_POSITIONS};
pub const BOX_SHADOW_SHADER_HANDLE: Handle<Shader> = Handle::weak_from_u128(17717747047134343426);
@ -236,7 +236,6 @@ pub struct ExtractedBoxShadows {
pub fn extract_shadows(
mut commands: Commands,
mut extracted_box_shadows: ResMut<ExtractedBoxShadows>,
default_ui_camera: Extract<DefaultUiCamera>,
camera_query: Extract<Query<(Entity, &Camera)>>,
box_shadow_query: Extract<
Query<(
@ -249,27 +248,22 @@ pub fn extract_shadows(
Option<&UiTargetCamera>,
)>,
>,
mapping: Extract<Query<RenderEntity>>,
camera_map: Extract<UiCameraMap>,
) {
let default_camera_entity = default_ui_camera.get();
let mut camera_mapper = camera_map.get_mapper();
for (entity, uinode, transform, visibility, box_shadow, clip, camera) in &box_shadow_query {
let Some(camera_entity) = camera.map(UiTargetCamera::entity).or(default_camera_entity)
else {
continue;
};
let Ok(extracted_camera_entity) = mapping.get(camera_entity) else {
continue;
};
// Skip if no visible shadows
if !visibility.get() || box_shadow.is_empty() || uinode.is_empty() {
continue;
}
let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
continue;
};
let ui_physical_viewport_size = camera_query
.get(extracted_camera_entity)
.get(camera_mapper.current_camera())
.ok()
.and_then(|(_, c)| {
c.physical_viewport_size()
@ -392,6 +386,10 @@ pub fn queue_shadows(
}
}
#[expect(
clippy::too_many_arguments,
reason = "Could be rewritten with less arguments using a QueryData-implementing struct, but doesn't need to be."
)]
pub fn prepare_shadows(
mut commands: Commands,
render_device: Res<RenderDevice>,

View File

@ -1,6 +1,5 @@
use crate::CalculatedClip;
use crate::ComputedNode;
use crate::DefaultUiCamera;
use crate::UiTargetCamera;
use bevy_asset::AssetId;
use bevy_color::Hsla;
@ -12,7 +11,6 @@ use bevy_ecs::system::Res;
use bevy_ecs::system::ResMut;
use bevy_math::Rect;
use bevy_math::Vec2;
use bevy_render::sync_world::RenderEntity;
use bevy_render::sync_world::TemporaryRenderEntity;
use bevy_render::view::InheritedVisibility;
use bevy_render::Extract;
@ -23,6 +21,7 @@ use super::ExtractedUiItem;
use super::ExtractedUiNode;
use super::ExtractedUiNodes;
use super::NodeType;
use super::UiCameraMap;
/// Configuration for the UI debug overlay
#[derive(Resource)]
@ -58,7 +57,6 @@ pub fn extract_debug_overlay(
mut commands: Commands,
debug_options: Extract<Res<UiDebugOptions>>,
mut extracted_uinodes: ResMut<ExtractedUiNodes>,
default_ui_camera: Extract<DefaultUiCamera>,
uinode_query: Extract<
Query<(
Entity,
@ -69,25 +67,20 @@ pub fn extract_debug_overlay(
Option<&UiTargetCamera>,
)>,
>,
mapping: Extract<Query<RenderEntity>>,
camera_map: Extract<UiCameraMap>,
) {
if !debug_options.enabled {
return;
}
let default_camera_entity = default_ui_camera.get();
let mut camera_mapper = camera_map.get_mapper();
for (entity, uinode, visibility, maybe_clip, transform, camera) in &uinode_query {
if !debug_options.show_hidden && !visibility.get() {
continue;
}
let Some(camera_entity) = camera.map(UiTargetCamera::entity).or(default_camera_entity)
else {
continue;
};
let Ok(extracted_camera_entity) = mapping.get(camera_entity) else {
let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
continue;
};

View File

@ -20,6 +20,7 @@ use bevy_core_pipeline::core_3d::graph::{Core3d, Node3d};
use bevy_core_pipeline::{core_2d::Camera2d, core_3d::Camera3d};
use bevy_ecs::entity::hash_map::EntityHashMap;
use bevy_ecs::prelude::*;
use bevy_ecs::system::SystemParam;
use bevy_image::prelude::*;
use bevy_math::{FloatOrd, Mat4, Rect, UVec4, Vec2, Vec3, Vec3Swizzles, Vec4Swizzles};
use bevy_render::render_graph::{NodeRunError, RenderGraphContext};
@ -251,6 +252,54 @@ impl ExtractedUiNodes {
}
}
#[derive(SystemParam)]
pub struct UiCameraMap<'w, 's> {
default: DefaultUiCamera<'w, 's>,
mapping: Query<'w, 's, RenderEntity>,
}
impl<'w, 's> UiCameraMap<'w, 's> {
/// Get the default camera and create the mapper
pub fn get_mapper(&'w self) -> UiCameraMapper<'w, 's> {
let default_camera_entity = self.default.get();
UiCameraMapper {
mapping: &self.mapping,
default_camera_entity,
camera_entity: Entity::PLACEHOLDER,
render_entity: Entity::PLACEHOLDER,
}
}
}
pub struct UiCameraMapper<'w, 's> {
mapping: &'w Query<'w, 's, RenderEntity>,
default_camera_entity: Option<Entity>,
camera_entity: Entity,
render_entity: Entity,
}
impl<'w, 's> UiCameraMapper<'w, 's> {
/// Returns the render entity corresponding to the given `UiTargetCamera` or the default camera if `None`.
pub fn map(&mut self, camera: Option<&UiTargetCamera>) -> Option<Entity> {
let camera_entity = camera
.map(UiTargetCamera::entity)
.or(self.default_camera_entity)?;
if self.camera_entity != camera_entity {
let Ok(new_render_camera_entity) = self.mapping.get(camera_entity) else {
return None;
};
self.render_entity = new_render_camera_entity;
self.camera_entity = camera_entity;
}
Some(self.render_entity)
}
pub fn current_camera(&self) -> Entity {
self.camera_entity
}
}
/// A [`RenderGraphNode`] that executes the UI rendering subgraph on the UI
/// view.
struct RunUiSubgraphOnUiViewNode;
@ -279,7 +328,6 @@ impl RenderGraphNode for RunUiSubgraphOnUiViewNode {
pub fn extract_uinode_background_colors(
mut commands: Commands,
mut extracted_uinodes: ResMut<ExtractedUiNodes>,
default_ui_camera: Extract<DefaultUiCamera>,
uinode_query: Extract<
Query<(
Entity,
@ -291,21 +339,13 @@ pub fn extract_uinode_background_colors(
&BackgroundColor,
)>,
>,
mapping: Extract<Query<RenderEntity>>,
camera_map: Extract<UiCameraMap>,
) {
let default_camera_entity = default_ui_camera.get();
let mut camera_mapper = camera_map.get_mapper();
for (entity, uinode, transform, inherited_visibility, clip, camera, background_color) in
&uinode_query
{
let Some(camera_entity) = camera.map(UiTargetCamera::entity).or(default_camera_entity)
else {
continue;
};
let Ok(extracted_camera_entity) = mapping.get(camera_entity) else {
continue;
};
// Skip invisible backgrounds
if !inherited_visibility.get()
|| background_color.0.is_fully_transparent()
@ -314,6 +354,10 @@ pub fn extract_uinode_background_colors(
continue;
}
let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
continue;
};
extracted_uinodes.uinodes.insert(
commands.spawn(TemporaryRenderEntity).id(),
ExtractedUiNode {
@ -345,7 +389,6 @@ pub fn extract_uinode_images(
mut commands: Commands,
mut extracted_uinodes: ResMut<ExtractedUiNodes>,
texture_atlases: Extract<Res<Assets<TextureAtlasLayout>>>,
default_ui_camera: Extract<DefaultUiCamera>,
uinode_query: Extract<
Query<(
Entity,
@ -357,19 +400,10 @@ pub fn extract_uinode_images(
&ImageNode,
)>,
>,
mapping: Extract<Query<RenderEntity>>,
camera_map: Extract<UiCameraMap>,
) {
let default_camera_entity = default_ui_camera.get();
let mut camera_mapper = camera_map.get_mapper();
for (entity, uinode, transform, inherited_visibility, clip, camera, image) in &uinode_query {
let Some(camera_entity) = camera.map(UiTargetCamera::entity).or(default_camera_entity)
else {
continue;
};
let Ok(extracted_camera_entity) = mapping.get(camera_entity) else {
continue;
};
// Skip invisible images
if !inherited_visibility.get()
|| image.color.is_fully_transparent()
@ -380,6 +414,10 @@ pub fn extract_uinode_images(
continue;
}
let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
continue;
};
let atlas_rect = image
.texture_atlas
.as_ref()
@ -436,7 +474,6 @@ pub fn extract_uinode_images(
pub fn extract_uinode_borders(
mut commands: Commands,
mut extracted_uinodes: ResMut<ExtractedUiNodes>,
default_ui_camera: Extract<DefaultUiCamera>,
uinode_query: Extract<
Query<(
Entity,
@ -449,10 +486,11 @@ pub fn extract_uinode_borders(
AnyOf<(&BorderColor, &Outline)>,
)>,
>,
mapping: Extract<Query<RenderEntity>>,
camera_map: Extract<UiCameraMap>,
) {
let image = AssetId::<Image>::default();
let default_camera_entity = default_ui_camera.get();
let mut camera_mapper = camera_map.get_mapper();
for (
entity,
node,
@ -464,22 +502,15 @@ pub fn extract_uinode_borders(
(maybe_border_color, maybe_outline),
) in &uinode_query
{
let Some(camera_entity) = maybe_camera
.map(UiTargetCamera::entity)
.or(default_camera_entity)
else {
continue;
};
let Ok(extracted_camera_entity) = mapping.get(camera_entity) else {
continue;
};
// Skip invisible borders and removed nodes
if !inherited_visibility.get() || node.display == Display::None {
continue;
}
let Some(extracted_camera_entity) = camera_mapper.map(maybe_camera) else {
continue;
};
// Don't extract borders with zero width along all edges
if computed_node.border() != BorderRect::ZERO {
if let Some(border_color) = maybe_border_color.filter(|bc| !bc.0.is_fully_transparent())
@ -675,7 +706,6 @@ pub fn extract_ui_camera_view(
pub fn extract_text_sections(
mut commands: Commands,
mut extracted_uinodes: ResMut<ExtractedUiNodes>,
default_ui_camera: Extract<DefaultUiCamera>,
texture_atlases: Extract<Res<Assets<TextureAtlasLayout>>>,
uinode_query: Extract<
Query<(
@ -690,12 +720,12 @@ pub fn extract_text_sections(
)>,
>,
text_styles: Extract<Query<&TextColor>>,
mapping: Extract<Query<RenderEntity>>,
camera_map: Extract<UiCameraMap>,
) {
let mut start = 0;
let mut end = 1;
let default_ui_camera = default_ui_camera.get();
let mut camera_mapper = camera_map.get_mapper();
for (
entity,
uinode,
@ -707,16 +737,12 @@ pub fn extract_text_sections(
text_layout_info,
) in &uinode_query
{
let Some(camera_entity) = camera.map(UiTargetCamera::entity).or(default_ui_camera) else {
continue;
};
// Skip if not visible or if size is set to zero (e.g. when a parent is set to `Display::None`)
if !inherited_visibility.get() || uinode.is_empty() {
continue;
}
let Ok(extracted_camera_entity) = mapping.get(camera_entity) else {
let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
continue;
};

View File

@ -21,7 +21,6 @@ use bevy_render::{
render_phase::*,
render_resource::{binding_types::uniform_buffer, *},
renderer::{RenderDevice, RenderQueue},
sync_world::RenderEntity,
view::*,
Extract, ExtractSchedule, Render, RenderSet,
};
@ -367,7 +366,6 @@ pub fn extract_ui_material_nodes<M: UiMaterial>(
mut commands: Commands,
mut extracted_uinodes: ResMut<ExtractedUiMaterialNodes<M>>,
materials: Extract<Res<Assets<M>>>,
default_ui_camera: Extract<DefaultUiCamera>,
uinode_query: Extract<
Query<(
Entity,
@ -379,23 +377,13 @@ pub fn extract_ui_material_nodes<M: UiMaterial>(
Option<&UiTargetCamera>,
)>,
>,
mapping: Extract<Query<RenderEntity>>,
camera_map: Extract<UiCameraMap>,
) {
// If there is only one camera, we use it as default
let default_single_camera = default_ui_camera.get();
let mut camera_mapper = camera_map.get_mapper();
for (entity, computed_node, transform, handle, inherited_visibility, clip, camera) in
uinode_query.iter()
{
let Some(camera_entity) = camera.map(UiTargetCamera::entity).or(default_single_camera)
else {
continue;
};
let Ok(extracted_camera_entity) = mapping.get(camera_entity) else {
continue;
};
// skip invisible nodes
if !inherited_visibility.get() || computed_node.is_empty() {
continue;
@ -406,6 +394,10 @@ pub fn extract_ui_material_nodes<M: UiMaterial>(
continue;
}
let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
continue;
};
extracted_uinodes.uinodes.insert(
commands.spawn(TemporaryRenderEntity).id(),
ExtractedUiMaterialNode {

View File

@ -20,7 +20,7 @@ use bevy_render::{
render_phase::*,
render_resource::{binding_types::uniform_buffer, *},
renderer::{RenderDevice, RenderQueue},
sync_world::{RenderEntity, TemporaryRenderEntity},
sync_world::TemporaryRenderEntity,
texture::{GpuImage, TRANSPARENT_IMAGE_HANDLE},
view::*,
Extract, ExtractSchedule, Render, RenderSet,
@ -247,7 +247,6 @@ pub struct ExtractedUiTextureSlices {
pub fn extract_ui_texture_slices(
mut commands: Commands,
mut extracted_ui_slicers: ResMut<ExtractedUiTextureSlices>,
default_ui_camera: Extract<DefaultUiCamera>,
texture_atlases: Extract<Res<Assets<TextureAtlasLayout>>>,
slicers_query: Extract<
Query<(
@ -260,19 +259,18 @@ pub fn extract_ui_texture_slices(
&ImageNode,
)>,
>,
mapping: Extract<Query<RenderEntity>>,
camera_map: Extract<UiCameraMap>,
) {
let default_camera_entity = default_ui_camera.get();
let mut camera_mapper = camera_map.get_mapper();
for (entity, uinode, transform, inherited_visibility, clip, camera, image) in &slicers_query {
let Some(camera_entity) = camera.map(UiTargetCamera::entity).or(default_camera_entity)
else {
// Skip invisible images
if !inherited_visibility.get()
|| image.color.is_fully_transparent()
|| image.image.id() == TRANSPARENT_IMAGE_HANDLE.id()
{
continue;
};
let Ok(extracted_camera_entity) = mapping.get(camera_entity) else {
continue;
};
}
let image_scale_mode = match image.image_mode.clone() {
widget::NodeImageMode::Sliced(texture_slicer) => {
@ -290,13 +288,9 @@ pub fn extract_ui_texture_slices(
_ => continue,
};
// Skip invisible images
if !inherited_visibility.get()
|| image.color.is_fully_transparent()
|| image.image.id() == TRANSPARENT_IMAGE_HANDLE.id()
{
let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
continue;
}
};
let atlas_rect = image
.texture_atlas

View File

@ -245,7 +245,7 @@ pub fn measure_text_system(
mut scale_factors_buffer: Local<EntityHashMap<f32>>,
mut last_scale_factors: Local<EntityHashMap<f32>>,
fonts: Res<Assets<Font>>,
camera_query: Query<(Entity, &Camera)>,
camera_query: Query<&Camera>,
default_ui_camera: DefaultUiCamera,
ui_scale: Res<UiScale>,
mut text_query: Query<
@ -274,17 +274,19 @@ pub fn measure_text_system(
else {
continue;
};
let scale_factor = match scale_factors_buffer.entry(camera_entity) {
Entry::Occupied(entry) => *entry.get(),
Entry::Vacant(entry) => *entry.insert(
camera_query
.get(camera_entity)
.ok()
.and_then(|(_, c)| c.target_scaling_factor())
.and_then(Camera::target_scaling_factor)
.unwrap_or(1.0)
* ui_scale.0,
),
};
// Note: the ComputedTextBlock::needs_rerender bool is cleared in create_text_measure().
if last_scale_factors.get(&camera_entity) != Some(&scale_factor)
|| computed.needs_rerender()