bevy_ui_render/
lib.rs

1#![expect(missing_docs, reason = "Not all docs are written yet, see #3492.")]
2#![cfg_attr(docsrs, feature(doc_cfg))]
3#![doc(
4    html_logo_url = "https://bevyengine.org/assets/icon.png",
5    html_favicon_url = "https://bevyengine.org/assets/icon.png"
6)]
7
8//! Provides rendering functionality for `bevy_ui`.
9
10pub mod box_shadow;
11mod color_space;
12mod gradient;
13mod pipeline;
14mod render_pass;
15pub mod ui_material;
16mod ui_material_pipeline;
17pub mod ui_texture_slice_pipeline;
18
19#[cfg(feature = "bevy_ui_debug")]
20mod debug_overlay;
21
22use bevy_camera::visibility::InheritedVisibility;
23use bevy_camera::{Camera, Camera2d, Camera3d, RenderTarget};
24use bevy_reflect::prelude::ReflectDefault;
25use bevy_reflect::Reflect;
26use bevy_shader::load_shader_library;
27use bevy_sprite_render::SpriteAssetEvents;
28use bevy_ui::widget::{ImageNode, TextShadow, ViewportNode};
29use bevy_ui::{
30    BackgroundColor, BorderColor, CalculatedClip, ComputedNode, ComputedUiTargetCamera, Display,
31    Node, Outline, ResolvedBorderRadius, UiGlobalTransform,
32};
33
34use bevy_app::prelude::*;
35use bevy_asset::{AssetEvent, AssetId, Assets};
36use bevy_color::{Alpha, ColorToComponents, LinearRgba};
37use bevy_core_pipeline::core_2d::graph::{Core2d, Node2d};
38use bevy_core_pipeline::core_3d::graph::{Core3d, Node3d};
39use bevy_ecs::prelude::*;
40use bevy_ecs::system::SystemParam;
41use bevy_image::{prelude::*, TRANSPARENT_IMAGE_HANDLE};
42use bevy_math::{Affine2, FloatOrd, Mat4, Rect, UVec4, Vec2};
43use bevy_render::{
44    render_asset::RenderAssets,
45    render_graph::{Node as RenderGraphNode, NodeRunError, RenderGraph, RenderGraphContext},
46    render_phase::{
47        sort_phase_system, AddRenderCommand, DrawFunctions, PhaseItem, PhaseItemExtraIndex,
48        ViewSortedRenderPhases,
49    },
50    render_resource::*,
51    renderer::{RenderContext, RenderDevice, RenderQueue},
52    sync_world::{MainEntity, RenderEntity, TemporaryRenderEntity},
53    texture::GpuImage,
54    view::{ExtractedView, Hdr, RetainedViewEntity, ViewUniforms},
55    Extract, ExtractSchedule, Render, RenderApp, RenderStartup, RenderSystems,
56};
57use bevy_sprite::BorderRect;
58#[cfg(feature = "bevy_ui_debug")]
59pub use debug_overlay::UiDebugOptions;
60
61use color_space::ColorSpacePlugin;
62use gradient::GradientPlugin;
63
64use bevy_platform::collections::{HashMap, HashSet};
65use bevy_text::{
66    ComputedTextBlock, PositionedGlyph, Strikethrough, StrikethroughColor, TextBackgroundColor,
67    TextColor, TextLayoutInfo, Underline, UnderlineColor,
68};
69use bevy_transform::components::GlobalTransform;
70use box_shadow::BoxShadowPlugin;
71use bytemuck::{Pod, Zeroable};
72use core::ops::Range;
73
74use graph::{NodeUi, SubGraphUi};
75pub use pipeline::*;
76pub use render_pass::*;
77pub use ui_material_pipeline::*;
78use ui_texture_slice_pipeline::UiTextureSlicerPlugin;
79
/// Render graph labels for UI rendering.
pub mod graph {
    use bevy_render::render_graph::{RenderLabel, RenderSubGraph};

    /// Label for the UI render subgraph, installed into both the 2D and 3D core graphs.
    #[derive(Debug, Hash, PartialEq, Eq, Clone, RenderSubGraph)]
    pub struct SubGraphUi;

    /// Labels for the nodes inside [`SubGraphUi`].
    #[derive(Debug, Hash, PartialEq, Eq, Clone, RenderLabel)]
    pub enum NodeUi {
        /// The pass that draws all extracted UI nodes.
        UiPass,
    }
}
91
/// The UI renderer prelude.
///
/// Re-exports the most commonly used types of this crate.
pub mod prelude {
    #[cfg(feature = "bevy_ui_debug")]
    pub use crate::debug_overlay::UiDebugOptions;

    pub use crate::{
        ui_material::*, ui_material_pipeline::UiMaterialPlugin, BoxShadowSamples, UiAntiAlias,
    };
}
100
/// Local Z offsets of "extracted nodes" for a given entity. These exist to allow rendering multiple "extracted nodes"
/// for a given source entity (ex: render both a background color _and_ a custom material for a given node).
///
/// When possible these offsets should be defined in _this_ module to ensure z-index coordination across contexts.
/// When this is _not_ possible, pick a suitably unique index unlikely to clash with other things (ex: `0.1826823` not `0.1`).
///
/// Offsets should be unique for a given node entity to avoid z fighting.
/// These should pretty much _always_ be larger than -0.5 and smaller than 0.5 to avoid clipping into nodes
/// above / below the current node in the stack.
///
/// A z-index of 0.0 is the baseline, which is used as the primary "background color" of the node.
///
/// Note that nodes "stack" on each other, so a negative offset on the node above could clip _into_
/// a positive offset on a node below.
pub mod stack_z_offsets {
    /// Box shadows render behind the node that casts them (negative offset).
    pub const BOX_SHADOW: f32 = -0.1;
    /// The baseline: the node's primary background color.
    pub const BACKGROUND_COLOR: f32 = 0.0;
    /// Borders and outlines, drawn over the background color.
    pub const BORDER: f32 = 0.01;
    /// Background gradients.
    pub const GRADIENT: f32 = 0.02;
    /// Border gradients, drawn over background gradients.
    pub const BORDER_GRADIENT: f32 = 0.03;
    /// Image content.
    pub const IMAGE: f32 = 0.04;
    /// Custom UI materials.
    pub const MATERIAL: f32 = 0.05;
    /// Text glyphs.
    pub const TEXT: f32 = 0.06;
    /// Strikethrough decorations, drawn over the text they cross out.
    pub const TEXT_STRIKETHROUGH: f32 = 0.07;
}
126
/// System sets for the UI extraction systems that run in the render world's
/// [`ExtractSchedule`]. Most of these are chained in a fixed order by
/// [`UiRenderPlugin`].
#[derive(Debug, Hash, PartialEq, Eq, Clone, SystemSet)]
pub enum RenderUiSystems {
    /// Extraction of UI camera views ([`extract_ui_camera_view`]).
    ExtractCameraViews,
    /// Extraction of box shadows.
    ExtractBoxShadows,
    /// Extraction of node background colors ([`extract_uinode_background_colors`]).
    ExtractBackgrounds,
    /// Extraction of image nodes ([`extract_uinode_images`]).
    ExtractImages,
    /// Extraction of texture-sliced nodes.
    ExtractTextureSlice,
    /// Extraction of borders and outlines ([`extract_uinode_borders`]).
    ExtractBorders,
    /// Extraction of viewport nodes ([`extract_viewport_nodes`]).
    ExtractViewportNodes,
    /// Extraction of text background colors and decorations.
    ExtractTextBackgrounds,
    /// Extraction of text shadows.
    ExtractTextShadows,
    /// Extraction of text glyphs.
    ExtractText,
    /// Extraction of the debug overlay (`bevy_ui_debug` feature only).
    ExtractDebug,
    /// Extraction of gradients.
    ExtractGradient,
}
142
/// Marker for controlling whether UI is rendered with or without anti-aliasing
/// in a camera. By default, UI is always anti-aliased.
///
/// During extraction this component is copied from the camera onto its
/// render-world entity.
///
/// **Note:** This does not affect text anti-aliasing. For that, use the `font_smoothing` property of the [`TextFont`](bevy_text::TextFont) component.
///
/// ```
/// use bevy_camera::prelude::*;
/// use bevy_ecs::prelude::*;
/// use bevy_ui::prelude::*;
/// use bevy_ui_render::prelude::*;
///
/// fn spawn_camera(mut commands: Commands) {
///     commands.spawn((
///         Camera2d,
///         // This will cause all UI in this camera to be rendered without
///         // anti-aliasing
///         UiAntiAlias::Off,
///     ));
/// }
/// ```
#[derive(Component, Clone, Copy, Default, Debug, Reflect, Eq, PartialEq)]
#[reflect(Component, Default, PartialEq, Clone)]
pub enum UiAntiAlias {
    /// UI will render with anti-aliasing
    #[default]
    On,
    /// UI will render without anti-aliasing
    Off,
}
172
/// Number of shadow samples.
/// A larger value will result in higher quality shadows.
/// Default is 4, values higher than ~10 offer diminishing returns.
///
/// During extraction this component is copied from the camera onto its
/// render-world entity.
///
/// ```
/// use bevy_camera::prelude::*;
/// use bevy_ecs::prelude::*;
/// use bevy_ui::prelude::*;
/// use bevy_ui_render::prelude::*;
///
/// fn spawn_camera(mut commands: Commands) {
///     commands.spawn((
///         Camera2d,
///         BoxShadowSamples(6),
///     ));
/// }
/// ```
#[derive(Component, Clone, Copy, Debug, Reflect, Eq, PartialEq)]
#[reflect(Component, Default, PartialEq, Clone)]
pub struct BoxShadowSamples(pub u32);
193
194impl Default for BoxShadowSamples {
195    fn default() -> Self {
196        Self(4)
197    }
198}
199
/// Plugin providing the rendering backend for `bevy_ui`: UI extraction,
/// queuing and preparation systems, render-world resources, and the UI
/// render subgraph for 2D and 3D cameras.
#[derive(Default)]
pub struct UiRenderPlugin;
202
impl Plugin for UiRenderPlugin {
    fn build(&self, app: &mut App) {
        // Register the shared WGSL source used by the UI pipelines.
        load_shader_library!(app, "ui.wgsl");

        #[cfg(feature = "bevy_ui_debug")]
        app.init_resource::<UiDebugOptions>();

        // UI rendering needs the render sub-app; bail out if it is absent.
        let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
            return;
        };

        render_app
            .init_resource::<SpecializedRenderPipelines<UiPipeline>>()
            .init_resource::<ImageNodeBindGroups>()
            .init_resource::<UiMeta>()
            .init_resource::<ExtractedUiNodes>()
            .allow_ambiguous_resource::<ExtractedUiNodes>()
            .init_resource::<DrawFunctions<TransparentUi>>()
            .init_resource::<ViewSortedRenderPhases<TransparentUi>>()
            .add_render_command::<TransparentUi, DrawUi>()
            // Chain the extraction sets so extracted nodes are pushed in a
            // deterministic order.
            // NOTE(review): `ExtractViewportNodes` and `ExtractGradient` are not
            // part of this chain, so their ordering relative to the other sets is
            // unconstrained — confirm this is intentional.
            .configure_sets(
                ExtractSchedule,
                (
                    RenderUiSystems::ExtractCameraViews,
                    RenderUiSystems::ExtractBoxShadows,
                    RenderUiSystems::ExtractBackgrounds,
                    RenderUiSystems::ExtractImages,
                    RenderUiSystems::ExtractTextureSlice,
                    RenderUiSystems::ExtractBorders,
                    RenderUiSystems::ExtractTextBackgrounds,
                    RenderUiSystems::ExtractTextShadows,
                    RenderUiSystems::ExtractText,
                    RenderUiSystems::ExtractDebug,
                )
                    .chain(),
            )
            .add_systems(RenderStartup, init_ui_pipeline)
            .add_systems(
                ExtractSchedule,
                (
                    extract_ui_camera_view.in_set(RenderUiSystems::ExtractCameraViews),
                    extract_uinode_background_colors.in_set(RenderUiSystems::ExtractBackgrounds),
                    extract_uinode_images.in_set(RenderUiSystems::ExtractImages),
                    extract_uinode_borders.in_set(RenderUiSystems::ExtractBorders),
                    extract_viewport_nodes.in_set(RenderUiSystems::ExtractViewportNodes),
                    extract_text_decorations.in_set(RenderUiSystems::ExtractTextBackgrounds),
                    extract_text_shadows.in_set(RenderUiSystems::ExtractTextShadows),
                    extract_text_sections.in_set(RenderUiSystems::ExtractText),
                    #[cfg(feature = "bevy_ui_debug")]
                    debug_overlay::extract_debug_overlay.in_set(RenderUiSystems::ExtractDebug),
                ),
            )
            .add_systems(
                Render,
                (
                    queue_uinodes.in_set(RenderSystems::Queue),
                    sort_phase_system::<TransparentUi>.in_set(RenderSystems::PhaseSort),
                    prepare_uinodes.in_set(RenderSystems::PrepareBindGroups),
                ),
            );

        // Render graph: install the UI subgraph into both the 2D and 3D core
        // graphs, running after the main passes and before upscaling.
        render_app
            .world_mut()
            .resource_scope(|world, mut graph: Mut<RenderGraph>| {
                if let Some(graph_2d) = graph.get_sub_graph_mut(Core2d) {
                    let ui_graph_2d = new_ui_graph(world);
                    graph_2d.add_sub_graph(SubGraphUi, ui_graph_2d);
                    graph_2d.add_node(NodeUi::UiPass, RunUiSubgraphOnUiViewNode);
                    graph_2d.add_node_edge(Node2d::EndMainPass, NodeUi::UiPass);
                    graph_2d.add_node_edge(Node2d::EndMainPassPostProcessing, NodeUi::UiPass);
                    graph_2d.add_node_edge(NodeUi::UiPass, Node2d::Upscaling);
                }

                if let Some(graph_3d) = graph.get_sub_graph_mut(Core3d) {
                    let ui_graph_3d = new_ui_graph(world);
                    graph_3d.add_sub_graph(SubGraphUi, ui_graph_3d);
                    graph_3d.add_node(NodeUi::UiPass, RunUiSubgraphOnUiViewNode);
                    graph_3d.add_node_edge(Node3d::EndMainPass, NodeUi::UiPass);
                    graph_3d.add_node_edge(Node3d::EndMainPassPostProcessing, NodeUi::UiPass);
                    graph_3d.add_node_edge(NodeUi::UiPass, Node3d::Upscaling);
                }
            });

        app.add_plugins(UiTextureSlicerPlugin);
        app.add_plugins(ColorSpacePlugin);
        app.add_plugins(GradientPlugin);
        app.add_plugins(BoxShadowPlugin);
    }
}
293
294fn new_ui_graph(world: &mut World) -> RenderGraph {
295    let ui_pass_node = UiPassNode::new(world);
296    let mut ui_graph = RenderGraph::default();
297    ui_graph.add_node(NodeUi::UiPass, ui_pass_node);
298    ui_graph
299}
300
/// A [`SystemParam`] used during extraction to resolve main-world camera
/// entities to their render-world counterparts.
#[derive(SystemParam)]
pub struct UiCameraMap<'w, 's> {
    /// Query used to look up the [`RenderEntity`] for a main-world camera.
    mapping: Query<'w, 's, RenderEntity>,
}
305
impl<'w, 's> UiCameraMap<'w, 's> {
    /// Creates a [`UiCameraMapper`] for performing repeated camera-to-render-entity lookups.
    ///
    /// The last successful mapping is cached to avoid redundant queries.
    /// The cache starts as [`Entity::PLACEHOLDER`], so the first `map` call
    /// always performs a real lookup.
    pub fn get_mapper(&'w self) -> UiCameraMapper<'w, 's> {
        UiCameraMapper {
            mapping: &self.mapping,
            camera_entity: Entity::PLACEHOLDER,
            render_entity: Entity::PLACEHOLDER,
        }
    }
}
318
/// Helper for mapping UI target camera entities to their corresponding render entities,
/// with caching to avoid repeated lookups for the same camera.
pub struct UiCameraMapper<'w, 's> {
    mapping: &'w Query<'w, 's, RenderEntity>,
    /// Cached camera entity from the last successful `map` call.
    camera_entity: Entity,
    /// Cached render entity corresponding to `camera_entity`.
    render_entity: Entity,
}
328
impl<'w, 's> UiCameraMapper<'w, 's> {
    /// Returns the render entity corresponding to the given [`ComputedUiTargetCamera`]'s camera, or none if no corresponding entity was found.
    pub fn map(&mut self, computed_target: &ComputedUiTargetCamera) -> Option<Entity> {
        let camera_entity = computed_target.get()?;
        // Only hit the query when the target camera differs from the cached one.
        if self.camera_entity != camera_entity {
            let new_render_camera_entity = self.mapping.get(camera_entity).ok()?;
            self.render_entity = new_render_camera_entity;
            self.camera_entity = camera_entity;
        }

        Some(self.render_entity)
    }

    /// Returns the cached camera entity from the last successful `map` call,
    /// or [`Entity::PLACEHOLDER`] if no `map` call has succeeded yet.
    pub fn current_camera(&self) -> Entity {
        self.camera_entity
    }
}
347
/// A UI element extracted from the main world for rendering.
pub struct ExtractedUiNode {
    /// Z position used to sort this node within its render phase: the node's
    /// stack index plus an offset from [`stack_z_offsets`].
    pub z_order: f32,
    /// The image drawn by this node; [`AssetId::default`] for untextured nodes.
    pub image: AssetId<Image>,
    /// Clip rectangle inherited from the node's [`CalculatedClip`], if any.
    pub clip: Option<Rect>,
    /// Render world entity of the extracted camera corresponding to this node's target camera.
    pub extracted_camera_entity: Entity,
    /// The payload: a single node quad or a run of text glyphs.
    pub item: ExtractedUiItem,
    /// The main world entity this node was extracted from.
    pub main_entity: MainEntity,
    /// The temporary render world entity spawned for this extracted node.
    pub render_entity: Entity,
    /// The node's global transform.
    pub transform: Affine2,
}
359
/// The type of UI node.
/// This is used to determine how to render the UI node.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum NodeType {
    /// A filled rectangle (background color, image, etc.).
    Rect,
    /// A border or outline; the payload is a bitmask of `shader_flags`
    /// values selecting which edges to draw.
    Border(u32), // shader flags
}
367
/// The renderable payload of an [`ExtractedUiNode`].
pub enum ExtractedUiItem {
    /// A single quad with optional border and rounded corners.
    Node {
        /// Fill color (for borders, the edge color).
        color: LinearRgba,
        /// The rect to draw; for images this selects the region of the texture.
        rect: Rect,
        /// Scaling applied when drawing from a texture atlas or image sub-rect.
        atlas_scaling: Option<Vec2>,
        /// Whether to mirror the image horizontally.
        flip_x: bool,
        /// Whether to mirror the image vertically.
        flip_y: bool,
        /// Border radius of the UI node.
        /// Ordering: top left, top right, bottom right, bottom left.
        border_radius: ResolvedBorderRadius,
        /// Border thickness of the UI node.
        /// Ordering: left, top, right, bottom.
        border: BorderRect,
        /// Whether this is drawn as a plain rect or as a border.
        node_type: NodeType,
    },
    /// A contiguous sequence of text glyphs from the same section
    Glyphs {
        /// Indices into [`ExtractedUiNodes::glyphs`]
        range: Range<usize>,
    },
}
389
/// A single extracted text glyph, referenced by an [`ExtractedUiItem::Glyphs`] range.
pub struct ExtractedGlyph {
    /// Color of the glyph.
    pub color: LinearRgba,
    /// Position of the glyph.
    pub translation: Vec2,
    /// Rect of the glyph.
    // NOTE(review): presumably the glyph's region in the font atlas texture —
    // confirm against the text extraction systems.
    pub rect: Rect,
}
395
/// Render-world resource collecting all UI nodes extracted in the current frame.
#[derive(Resource, Default)]
pub struct ExtractedUiNodes {
    /// The extracted UI nodes, in extraction order.
    pub uinodes: Vec<ExtractedUiNode>,
    /// The extracted glyphs, referenced by [`ExtractedUiItem::Glyphs`] ranges.
    pub glyphs: Vec<ExtractedGlyph>,
}
401
402impl ExtractedUiNodes {
403    pub fn clear(&mut self) {
404        self.uinodes.clear();
405        self.glyphs.clear();
406    }
407}
408
/// A [`RenderGraphNode`] that executes the UI rendering subgraph on the UI
/// view.
///
/// [`UiRenderPlugin`] adds this node to both the 2D and 3D core graphs.
struct RunUiSubgraphOnUiViewNode;
412
impl RenderGraphNode for RunUiSubgraphOnUiViewNode {
    /// Looks up the [`UiCameraView`] on the current view and, if present,
    /// runs [`SubGraphUi`] with that UI view as its view entity.
    fn run<'w>(
        &self,
        graph: &mut RenderGraphContext,
        _: &mut RenderContext<'w>,
        world: &'w World,
    ) -> Result<(), NodeRunError> {
        // Fetch the UI view.
        let Some(mut render_views) = world.try_query::<&UiCameraView>() else {
            return Ok(());
        };
        // No UiCameraView on this view means there is no UI to draw here.
        let Ok(ui_camera_view) = render_views.get(world, graph.view_entity()) else {
            return Ok(());
        };

        // Run the subgraph on the UI view.
        graph.run_sub_graph(SubGraphUi, vec![], Some(ui_camera_view.0), None)?;
        Ok(())
    }
}
433
434pub fn extract_uinode_background_colors(
435    mut commands: Commands,
436    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
437    uinode_query: Extract<
438        Query<(
439            Entity,
440            &ComputedNode,
441            &UiGlobalTransform,
442            &InheritedVisibility,
443            Option<&CalculatedClip>,
444            &ComputedUiTargetCamera,
445            &BackgroundColor,
446        )>,
447    >,
448    camera_map: Extract<UiCameraMap>,
449) {
450    let mut camera_mapper = camera_map.get_mapper();
451
452    for (entity, uinode, transform, inherited_visibility, clip, camera, background_color) in
453        &uinode_query
454    {
455        // Skip invisible backgrounds
456        if !inherited_visibility.get()
457            || background_color.0.is_fully_transparent()
458            || uinode.is_empty()
459        {
460            continue;
461        }
462
463        let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
464            continue;
465        };
466
467        extracted_uinodes.uinodes.push(ExtractedUiNode {
468            render_entity: commands.spawn(TemporaryRenderEntity).id(),
469            z_order: uinode.stack_index as f32 + stack_z_offsets::BACKGROUND_COLOR,
470            clip: clip.map(|clip| clip.clip),
471            image: AssetId::default(),
472            extracted_camera_entity,
473            transform: transform.into(),
474            item: ExtractedUiItem::Node {
475                color: background_color.0.into(),
476                rect: Rect {
477                    min: Vec2::ZERO,
478                    max: uinode.size,
479                },
480                atlas_scaling: None,
481                flip_x: false,
482                flip_y: false,
483                border: uinode.border(),
484                border_radius: uinode.border_radius(),
485                node_type: NodeType::Rect,
486            },
487            main_entity: entity.into(),
488        });
489    }
490}
491
/// Extracts every visible [`ImageNode`] into [`ExtractedUiNodes`] for rendering.
///
/// Skips hidden, transparent, zero-sized, and sliced image nodes (the latter
/// are handled by the texture-slice pipeline).
pub fn extract_uinode_images(
    mut commands: Commands,
    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
    texture_atlases: Extract<Res<Assets<TextureAtlasLayout>>>,
    uinode_query: Extract<
        Query<(
            Entity,
            &ComputedNode,
            &UiGlobalTransform,
            &InheritedVisibility,
            Option<&CalculatedClip>,
            &ComputedUiTargetCamera,
            &ImageNode,
        )>,
    >,
    camera_map: Extract<UiCameraMap>,
) {
    let mut camera_mapper = camera_map.get_mapper();
    for (entity, uinode, transform, inherited_visibility, clip, camera, image) in &uinode_query {
        // Skip invisible images
        if !inherited_visibility.get()
            || image.color.is_fully_transparent()
            || image.image.id() == TRANSPARENT_IMAGE_HANDLE.id()
            || image.image_mode.uses_slices()
            || uinode.is_empty()
        {
            continue;
        }

        let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
            continue;
        };

        // Resolve the atlas sub-rect for the node's texture atlas entry, if any.
        let atlas_rect = image
            .texture_atlas
            .as_ref()
            .and_then(|s| s.texture_rect(&texture_atlases))
            .map(|r| r.as_rect());

        // Combine the atlas rect and the node's own image rect:
        // neither -> whole node; one -> that rect; both -> the image rect
        // offset into the atlas rect.
        let mut rect = match (atlas_rect, image.rect) {
            (None, None) => Rect {
                min: Vec2::ZERO,
                max: uinode.size,
            },
            (None, Some(image_rect)) => image_rect,
            (Some(atlas_rect), None) => atlas_rect,
            (Some(atlas_rect), Some(mut image_rect)) => {
                image_rect.min += atlas_rect.min;
                image_rect.max += atlas_rect.min;
                image_rect
            }
        };

        // When drawing a sub-rect, scale it so it fills the node.
        let atlas_scaling = if atlas_rect.is_some() || image.rect.is_some() {
            let atlas_scaling = uinode.size() / rect.size();
            rect.min *= atlas_scaling;
            rect.max *= atlas_scaling;
            Some(atlas_scaling)
        } else {
            None
        };

        extracted_uinodes.uinodes.push(ExtractedUiNode {
            z_order: uinode.stack_index as f32 + stack_z_offsets::IMAGE,
            render_entity: commands.spawn(TemporaryRenderEntity).id(),
            clip: clip.map(|clip| clip.clip),
            image: image.image.id(),
            extracted_camera_entity,
            transform: transform.into(),
            item: ExtractedUiItem::Node {
                color: image.color.into(),
                rect,
                atlas_scaling,
                flip_x: image.flip_x,
                flip_y: image.flip_y,
                border: uinode.border,
                border_radius: uinode.border_radius,
                node_type: NodeType::Rect,
            },
            main_entity: entity.into(),
        });
    }
}
575
/// Extracts borders ([`BorderColor`]) and outlines ([`Outline`]) of visible UI
/// nodes into [`ExtractedUiNodes`].
///
/// Edges that share the same color are merged into a single extracted node via
/// shader edge flags; an outline is always emitted as one node with all edges.
pub fn extract_uinode_borders(
    mut commands: Commands,
    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
    uinode_query: Extract<
        Query<(
            Entity,
            &Node,
            &ComputedNode,
            &UiGlobalTransform,
            &InheritedVisibility,
            Option<&CalculatedClip>,
            &ComputedUiTargetCamera,
            AnyOf<(&BorderColor, &Outline)>,
        )>,
    >,
    camera_map: Extract<UiCameraMap>,
) {
    // Borders and outlines are untextured.
    let image = AssetId::<Image>::default();
    let mut camera_mapper = camera_map.get_mapper();

    for (
        entity,
        node,
        computed_node,
        transform,
        inherited_visibility,
        maybe_clip,
        camera,
        (maybe_border_color, maybe_outline),
    ) in &uinode_query
    {
        // Skip invisible borders and removed nodes
        if !inherited_visibility.get() || node.display == Display::None {
            continue;
        }

        let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
            continue;
        };

        // Don't extract borders with zero width along all edges
        if computed_node.border() != BorderRect::ZERO
            && let Some(border_color) = maybe_border_color
        {
            // Per-edge colors in the same order as BORDER_FLAGS below.
            let border_colors = [
                border_color.left.to_linear(),
                border_color.top.to_linear(),
                border_color.right.to_linear(),
                border_color.bottom.to_linear(),
            ];

            const BORDER_FLAGS: [u32; 4] = [
                shader_flags::BORDER_LEFT,
                shader_flags::BORDER_TOP,
                shader_flags::BORDER_RIGHT,
                shader_flags::BORDER_BOTTOM,
            ];
            // Bitmask of edges already emitted, so merged edges aren't drawn twice.
            let mut completed_flags = 0;

            for (i, &color) in border_colors.iter().enumerate() {
                if color.is_fully_transparent() {
                    continue;
                }

                let mut border_flags = BORDER_FLAGS[i];

                // Edge already covered by an earlier same-colored edge.
                if completed_flags & border_flags != 0 {
                    continue;
                }

                // Merge all later edges with the same color into this draw.
                for j in i + 1..4 {
                    if color == border_colors[j] {
                        border_flags |= BORDER_FLAGS[j];
                    }
                }
                completed_flags |= border_flags;

                extracted_uinodes.uinodes.push(ExtractedUiNode {
                    z_order: computed_node.stack_index as f32 + stack_z_offsets::BORDER,
                    image,
                    clip: maybe_clip.map(|clip| clip.clip),
                    extracted_camera_entity,
                    transform: transform.into(),
                    item: ExtractedUiItem::Node {
                        color,
                        rect: Rect {
                            max: computed_node.size(),
                            ..Default::default()
                        },
                        atlas_scaling: None,
                        flip_x: false,
                        flip_y: false,
                        border: computed_node.border(),
                        border_radius: computed_node.border_radius(),
                        node_type: NodeType::Border(border_flags),
                    },
                    main_entity: entity.into(),
                    render_entity: commands.spawn(TemporaryRenderEntity).id(),
                });
            }
        }

        // Outlines with zero width have nothing to draw.
        if computed_node.outline_width() <= 0. {
            continue;
        }

        if let Some(outline) = maybe_outline.filter(|outline| !outline.color.is_fully_transparent())
        {
            let outline_size = computed_node.outlined_node_size();
            extracted_uinodes.uinodes.push(ExtractedUiNode {
                z_order: computed_node.stack_index as f32 + stack_z_offsets::BORDER,
                render_entity: commands.spawn(TemporaryRenderEntity).id(),
                image,
                clip: maybe_clip.map(|clip| clip.clip),
                extracted_camera_entity,
                transform: transform.into(),
                item: ExtractedUiItem::Node {
                    color: outline.color.into(),
                    rect: Rect {
                        max: outline_size,
                        ..Default::default()
                    },
                    atlas_scaling: None,
                    flip_x: false,
                    flip_y: false,
                    border: BorderRect::all(computed_node.outline_width()),
                    border_radius: computed_node.outline_radius(),
                    node_type: NodeType::Border(shader_flags::BORDER_ALL),
                },
                main_entity: entity.into(),
            });
        }
    }
}
710
/// The UI camera is "moved back" by this many units (plus the [`UI_CAMERA_TRANSFORM_OFFSET`]) and also has a view
/// distance of this many units. This ensures that with a left-handed projection,
/// as ui elements are "stacked on top of each other", they are within the camera's view
/// and have room to grow.
// TODO: Consider computing this value at runtime based on the maximum z-value.
const UI_CAMERA_FAR: f32 = 1000.0;

// This value is subtracted from the far distance for the camera's z-position to ensure nodes at z == 0.0 are rendered.
// Used together with `UI_CAMERA_FAR` as the UI view's z translation.
// TODO: Evaluate if we still need this.
const UI_CAMERA_TRANSFORM_OFFSET: f32 = -0.1;

/// The ID of the subview associated with a camera on which UI is to be drawn.
///
/// When UI is present, cameras extract to two views: the main 2D/3D one and a
/// UI one. The main 2D or 3D camera gets subview 0, and the corresponding UI
/// camera gets this subview, 1.
const UI_CAMERA_SUBVIEW: u32 = 1;
728
/// A render-world component that lives on the main render target view and
/// specifies the corresponding UI view.
///
/// For example, if UI is being rendered to a 3D camera, this component lives on
/// the 3D camera and contains the entity corresponding to the UI view.
///
/// The contained entity is the temporary render entity holding the extracted
/// UI view.
#[derive(Component)]
pub struct UiCameraView(pub Entity);
737
/// A render-world component that lives on the UI view and specifies the
/// corresponding main render target view.
///
/// For example, if the UI is being rendered to a 3D camera, this component
/// lives on the UI view and contains the entity corresponding to the 3D camera.
///
/// This is the inverse of [`UiCameraView`]. Inserted by
/// [`extract_ui_camera_view`].
#[derive(Component)]
pub struct UiViewTarget(pub Entity);
747
/// Extracts all UI elements associated with a camera into the render world.
///
/// For each active 2D/3D camera this spawns a temporary UI view with a
/// top-left-origin orthographic projection, links the camera and UI view via
/// [`UiCameraView`]/[`UiViewTarget`], copies over [`UiAntiAlias`] and
/// [`BoxShadowSamples`], and registers a [`TransparentUi`] render phase.
/// Phases for views that are no longer live are dropped.
pub fn extract_ui_camera_view(
    mut commands: Commands,
    mut transparent_render_phases: ResMut<ViewSortedRenderPhases<TransparentUi>>,
    query: Extract<
        Query<
            (
                Entity,
                RenderEntity,
                &Camera,
                Has<Hdr>,
                Option<&UiAntiAlias>,
                Option<&BoxShadowSamples>,
            ),
            Or<(With<Camera2d>, With<Camera3d>)>,
        >,
    >,
    mut live_entities: Local<HashSet<RetainedViewEntity>>,
) {
    live_entities.clear();

    for (main_entity, render_entity, camera, hdr, ui_anti_alias, shadow_samples) in &query {
        // ignore inactive cameras
        if !camera.is_active {
            // Strip UI components so a deactivated camera stops rendering UI.
            commands
                .get_entity(render_entity)
                .expect("Camera entity wasn't synced.")
                .remove::<(UiCameraView, UiAntiAlias, BoxShadowSamples)>();
            continue;
        }

        if let Some(physical_viewport_rect) = camera.physical_viewport_rect() {
            // use a projection matrix with the origin in the top left instead of the bottom left that comes with OrthographicProjection
            let projection_matrix = Mat4::orthographic_rh(
                0.0,
                physical_viewport_rect.width() as f32,
                physical_viewport_rect.height() as f32,
                0.0,
                0.0,
                UI_CAMERA_FAR,
            );
            // We use `UI_CAMERA_SUBVIEW` here so as not to conflict with the
            // main 3D or 2D camera, which will have subview index 0.
            let retained_view_entity =
                RetainedViewEntity::new(main_entity.into(), None, UI_CAMERA_SUBVIEW);
            // Creates the UI view.
            let ui_camera_view = commands
                .spawn((
                    ExtractedView {
                        retained_view_entity,
                        clip_from_view: projection_matrix,
                        // Position the view far enough back that stacked UI
                        // nodes (including z == 0.0) fall inside the frustum.
                        world_from_view: GlobalTransform::from_xyz(
                            0.0,
                            0.0,
                            UI_CAMERA_FAR + UI_CAMERA_TRANSFORM_OFFSET,
                        ),
                        clip_from_world: None,
                        hdr,
                        viewport: UVec4::from((
                            physical_viewport_rect.min,
                            physical_viewport_rect.size(),
                        )),
                        color_grading: Default::default(),
                        invert_culling: false,
                    },
                    // Link to the main camera view.
                    UiViewTarget(render_entity),
                    TemporaryRenderEntity,
                ))
                .id();

            let mut entity_commands = commands
                .get_entity(render_entity)
                .expect("Camera entity wasn't synced.");
            // Link from the main 2D/3D camera view to the UI view.
            entity_commands.insert(UiCameraView(ui_camera_view));
            // Forward optional per-camera UI settings to the render world.
            if let Some(ui_anti_alias) = ui_anti_alias {
                entity_commands.insert(*ui_anti_alias);
            }
            if let Some(shadow_samples) = shadow_samples {
                entity_commands.insert(*shadow_samples);
            }
            transparent_render_phases.insert_or_clear(retained_view_entity);

            live_entities.insert(retained_view_entity);
        }
    }

    // Drop phases for views that no longer exist or are inactive.
    transparent_render_phases.retain(|entity, _| live_entities.contains(entity));
}
838
839pub fn extract_viewport_nodes(
840    mut commands: Commands,
841    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
842    camera_query: Extract<Query<(&Camera, &RenderTarget)>>,
843    uinode_query: Extract<
844        Query<(
845            Entity,
846            &ComputedNode,
847            &UiGlobalTransform,
848            &InheritedVisibility,
849            Option<&CalculatedClip>,
850            &ComputedUiTargetCamera,
851            &ViewportNode,
852        )>,
853    >,
854    camera_map: Extract<UiCameraMap>,
855) {
856    let mut camera_mapper = camera_map.get_mapper();
857    for (entity, uinode, transform, inherited_visibility, clip, camera, viewport_node) in
858        &uinode_query
859    {
860        // Skip invisible images
861        if !inherited_visibility.get() || uinode.is_empty() {
862            continue;
863        }
864
865        let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
866            continue;
867        };
868
869        let Some(image) = camera_query
870            .get(viewport_node.camera)
871            .ok()
872            .and_then(|(_, render_target)| render_target.as_image())
873        else {
874            continue;
875        };
876
877        extracted_uinodes.uinodes.push(ExtractedUiNode {
878            z_order: uinode.stack_index as f32 + stack_z_offsets::IMAGE,
879            render_entity: commands.spawn(TemporaryRenderEntity).id(),
880            clip: clip.map(|clip| clip.clip),
881            image: image.id(),
882            extracted_camera_entity,
883            transform: transform.into(),
884            item: ExtractedUiItem::Node {
885                color: LinearRgba::WHITE,
886                rect: Rect {
887                    min: Vec2::ZERO,
888                    max: uinode.size,
889                },
890                atlas_scaling: None,
891                flip_x: false,
892                flip_y: false,
893                border: uinode.border(),
894                border_radius: uinode.border_radius(),
895                node_type: NodeType::Rect,
896            },
897            main_entity: entity.into(),
898        });
899    }
900}
901
/// Extracts UI text glyphs for rendering.
///
/// Glyphs are appended to the shared `extracted_uinodes.glyphs` buffer, and one
/// `ExtractedUiNode` is emitted per run of consecutive glyphs that share an
/// atlas texture, referencing its slice of that buffer by index range.
pub fn extract_text_sections(
    mut commands: Commands,
    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
    texture_atlases: Extract<Res<Assets<TextureAtlasLayout>>>,
    uinode_query: Extract<
        Query<(
            Entity,
            &ComputedNode,
            &UiGlobalTransform,
            &InheritedVisibility,
            Option<&CalculatedClip>,
            &ComputedUiTargetCamera,
            &ComputedTextBlock,
            &TextColor,
            &TextLayoutInfo,
        )>,
    >,
    text_styles: Extract<Query<&TextColor>>,
    camera_map: Extract<UiCameraMap>,
) {
    // `start..end` tracks the glyph-buffer slice for the node currently being
    // accumulated: `start` is the first glyph of the current run, and after the
    // `end += 1` at the bottom of the inner loop, `end` is one past the glyph
    // pushed that iteration. The counters persist across entities because all
    // text shares the single `extracted_uinodes.glyphs` buffer.
    let mut start = extracted_uinodes.glyphs.len();
    let mut end = start + 1;

    let mut camera_mapper = camera_map.get_mapper();
    for (
        entity,
        uinode,
        transform,
        inherited_visibility,
        clip,
        camera,
        computed_block,
        text_color,
        text_layout_info,
    ) in &uinode_query
    {
        // Skip if not visible or if size is set to zero (e.g. when a parent is set to `Display::None`)
        if !inherited_visibility.get() || uinode.is_empty() {
            continue;
        }

        let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
            continue;
        };

        // Glyph positions are relative to the node's top-left corner, so shift
        // the node transform's origin from the center by half the node size.
        let transform = Affine2::from(*transform) * Affine2::from_translation(-0.5 * uinode.size());

        // Color for span 0; refreshed below whenever the span changes.
        let mut color = text_color.0.to_linear();

        let mut current_span_index = 0;

        for (
            i,
            PositionedGlyph {
                position,
                atlas_info,
                span_index,
                ..
            },
        ) in text_layout_info.glyphs.iter().enumerate()
        {
            // On entering a new span, look up that span's `TextColor`. Note
            // that if the span entity has no `TextColor`, this falls back to
            // `LinearRgba::default()` rather than the root color.
            if current_span_index != *span_index
                && let Some(span_entity) =
                    computed_block.entities().get(*span_index).map(|t| t.entity)
            {
                color = text_styles
                    .get(span_entity)
                    .map(|text_color| LinearRgba::from(text_color.0))
                    .unwrap_or_default();
                current_span_index = *span_index;
            }

            // Fetch the glyph's rect within its atlas. Panics if the atlas
            // layout asset is missing (treated as an invariant violation).
            let rect = texture_atlases
                .get(atlas_info.texture_atlas)
                .unwrap()
                .textures[atlas_info.location.glyph_index]
                .as_rect();
            extracted_uinodes.glyphs.push(ExtractedGlyph {
                color,
                translation: *position,
                rect,
            });

            // Close the current run when the next glyph uses a different atlas
            // texture, or when this was the last glyph of the node.
            if text_layout_info
                .glyphs
                .get(i + 1)
                .is_none_or(|info| info.atlas_info.texture != atlas_info.texture)
            {
                extracted_uinodes.uinodes.push(ExtractedUiNode {
                    z_order: uinode.stack_index as f32 + stack_z_offsets::TEXT,
                    render_entity: commands.spawn(TemporaryRenderEntity).id(),
                    image: atlas_info.texture,
                    clip: clip.map(|clip| clip.clip),
                    extracted_camera_entity,
                    item: ExtractedUiItem::Glyphs { range: start..end },
                    main_entity: entity.into(),
                    transform,
                });
                // The next run begins right after the glyph just pushed.
                start = end;
            }

            end += 1;
        }
    }
}
1007
/// Extracts drop shadows for UI text, including shadow copies of any
/// strikethrough and underline decorations, all tinted with the shadow color.
///
/// Like `extract_text_sections`, glyphs go into the shared
/// `extracted_uinodes.glyphs` buffer and each emitted node references a range
/// of that buffer.
pub fn extract_text_shadows(
    mut commands: Commands,
    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
    texture_atlases: Extract<Res<Assets<TextureAtlasLayout>>>,
    uinode_query: Extract<
        Query<(
            Entity,
            &ComputedNode,
            &UiGlobalTransform,
            &ComputedUiTargetCamera,
            &InheritedVisibility,
            Option<&CalculatedClip>,
            &TextLayoutInfo,
            &TextShadow,
            &ComputedTextBlock,
        )>,
    >,
    text_decoration_query: Extract<Query<(Has<Strikethrough>, Has<Underline>)>>,
    camera_map: Extract<UiCameraMap>,
) {
    // Glyph-range bookkeeping: `start` marks the first glyph of the run being
    // accumulated; after the `end += 1` at the bottom of the glyph loop, `end`
    // is one past the glyph pushed that iteration. Shared across entities.
    let mut start = extracted_uinodes.glyphs.len();
    let mut end = start + 1;

    let mut camera_mapper = camera_map.get_mapper();
    for (
        entity,
        uinode,
        transform,
        target,
        inherited_visibility,
        clip,
        text_layout_info,
        shadow,
        computed_block,
    ) in &uinode_query
    {
        // Skip if not visible or if size is set to zero (e.g. when a parent is set to `Display::None`)
        if !inherited_visibility.get() || uinode.is_empty() {
            continue;
        }

        let Some(extracted_camera_entity) = camera_mapper.map(target) else {
            continue;
        };

        // Shift the origin to the node's top-left corner, then displace by the
        // shadow offset. Dividing by `inverse_scale_factor` multiplies the
        // offset by the node's scale factor (presumably logical → physical
        // pixels — confirm against `ComputedNode` docs).
        let node_transform = Affine2::from(*transform)
            * Affine2::from_translation(
                -0.5 * uinode.size() + shadow.offset / uinode.inverse_scale_factor(),
            );

        for (
            i,
            PositionedGlyph {
                position,
                atlas_info,
                span_index,
                ..
            },
        ) in text_layout_info.glyphs.iter().enumerate()
        {
            // Fetch the glyph's rect within its atlas. Panics if the atlas
            // layout asset is missing.
            let rect = texture_atlases
                .get(atlas_info.texture_atlas)
                .unwrap()
                .textures[atlas_info.location.glyph_index]
                .as_rect();
            extracted_uinodes.glyphs.push(ExtractedGlyph {
                color: shadow.color.into(),
                translation: *position,
                rect,
            });

            // Close the run when the next glyph belongs to a different span or
            // a different atlas texture, or when this was the last glyph.
            if text_layout_info.glyphs.get(i + 1).is_none_or(|info| {
                info.span_index != *span_index || info.atlas_info.texture != atlas_info.texture
            }) {
                extracted_uinodes.uinodes.push(ExtractedUiNode {
                    transform: node_transform,
                    z_order: uinode.stack_index as f32 + stack_z_offsets::TEXT,
                    render_entity: commands.spawn(TemporaryRenderEntity).id(),
                    image: atlas_info.texture,
                    clip: clip.map(|clip| clip.clip),
                    extracted_camera_entity,
                    item: ExtractedUiItem::Glyphs { range: start..end },
                    main_entity: entity.into(),
                });
                start = end;
            }

            end += 1;
        }

        // Emit shadow rects for any strikethrough/underline decorations on
        // each text run.
        for run in text_layout_info.run_geometry.iter() {
            // NOTE(review): direct indexing — assumes `run.span_index` is
            // always valid for this block (elsewhere `.get()` is used).
            let section_entity = computed_block.entities()[run.span_index].entity;
            let Ok((has_strikethrough, has_underline)) = text_decoration_query.get(section_entity)
            else {
                continue;
            };

            if has_strikethrough {
                extracted_uinodes.uinodes.push(ExtractedUiNode {
                    z_order: uinode.stack_index as f32 + stack_z_offsets::TEXT,
                    render_entity: commands.spawn(TemporaryRenderEntity).id(),
                    clip: clip.map(|clip| clip.clip),
                    // Untextured: drawn as a plain colored rect.
                    image: AssetId::default(),
                    extracted_camera_entity,
                    transform: node_transform
                        * Affine2::from_translation(run.strikethrough_position()),
                    item: ExtractedUiItem::Node {
                        color: shadow.color.into(),
                        rect: Rect {
                            min: Vec2::ZERO,
                            max: run.strikethrough_size(),
                        },
                        atlas_scaling: None,
                        flip_x: false,
                        flip_y: false,
                        border: BorderRect::ZERO,
                        border_radius: ResolvedBorderRadius::ZERO,
                        node_type: NodeType::Rect,
                    },
                    main_entity: entity.into(),
                });
            }

            if has_underline {
                extracted_uinodes.uinodes.push(ExtractedUiNode {
                    z_order: uinode.stack_index as f32 + stack_z_offsets::TEXT,
                    render_entity: commands.spawn(TemporaryRenderEntity).id(),
                    clip: clip.map(|clip| clip.clip),
                    // Untextured: drawn as a plain colored rect.
                    image: AssetId::default(),
                    extracted_camera_entity,
                    transform: node_transform * Affine2::from_translation(run.underline_position()),
                    item: ExtractedUiItem::Node {
                        color: shadow.color.into(),
                        rect: Rect {
                            min: Vec2::ZERO,
                            max: run.underline_size(),
                        },
                        atlas_scaling: None,
                        flip_x: false,
                        flip_y: false,
                        border: BorderRect::ZERO,
                        border_radius: ResolvedBorderRadius::ZERO,
                        node_type: NodeType::Rect,
                    },
                    main_entity: entity.into(),
                });
            }
        }
    }
}
1158
1159pub fn extract_text_decorations(
1160    mut commands: Commands,
1161    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
1162    uinode_query: Extract<
1163        Query<(
1164            Entity,
1165            &ComputedNode,
1166            &ComputedTextBlock,
1167            &UiGlobalTransform,
1168            &InheritedVisibility,
1169            Option<&CalculatedClip>,
1170            &ComputedUiTargetCamera,
1171            &TextLayoutInfo,
1172        )>,
1173    >,
1174    text_background_colors_query: Extract<
1175        Query<(
1176            AnyOf<(&TextBackgroundColor, &Strikethrough, &Underline)>,
1177            &TextColor,
1178            Option<&StrikethroughColor>,
1179            Option<&UnderlineColor>,
1180        )>,
1181    >,
1182    camera_map: Extract<UiCameraMap>,
1183) {
1184    let mut camera_mapper = camera_map.get_mapper();
1185    for (
1186        entity,
1187        uinode,
1188        computed_block,
1189        global_transform,
1190        inherited_visibility,
1191        clip,
1192        camera,
1193        text_layout_info,
1194    ) in &uinode_query
1195    {
1196        // Skip if not visible or if size is set to zero (e.g. when a parent is set to `Display::None`)
1197        if !inherited_visibility.get() || uinode.is_empty() {
1198            continue;
1199        }
1200
1201        let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
1202            continue;
1203        };
1204
1205        let transform =
1206            Affine2::from(global_transform) * Affine2::from_translation(-0.5 * uinode.size());
1207
1208        for run in text_layout_info.run_geometry.iter() {
1209            let section_entity = computed_block.entities()[run.span_index].entity;
1210            let Ok((
1211                (text_background_color, maybe_strikethrough, maybe_underline),
1212                text_color,
1213                maybe_strikethrough_color,
1214                maybe_underline_color,
1215            )) = text_background_colors_query.get(section_entity)
1216            else {
1217                continue;
1218            };
1219
1220            if let Some(text_background_color) = text_background_color {
1221                extracted_uinodes.uinodes.push(ExtractedUiNode {
1222                    z_order: uinode.stack_index as f32 + stack_z_offsets::TEXT,
1223                    render_entity: commands.spawn(TemporaryRenderEntity).id(),
1224                    clip: clip.map(|clip| clip.clip),
1225                    image: AssetId::default(),
1226                    extracted_camera_entity,
1227                    transform: transform * Affine2::from_translation(run.bounds.center()),
1228                    item: ExtractedUiItem::Node {
1229                        color: text_background_color.0.to_linear(),
1230                        rect: Rect {
1231                            min: Vec2::ZERO,
1232                            max: run.bounds.size(),
1233                        },
1234                        atlas_scaling: None,
1235                        flip_x: false,
1236                        flip_y: false,
1237                        border: uinode.border(),
1238                        border_radius: uinode.border_radius(),
1239                        node_type: NodeType::Rect,
1240                    },
1241                    main_entity: entity.into(),
1242                });
1243            }
1244
1245            if maybe_strikethrough.is_some() {
1246                let color = maybe_strikethrough_color
1247                    .map(|sc| sc.0)
1248                    .unwrap_or(text_color.0)
1249                    .to_linear();
1250
1251                extracted_uinodes.uinodes.push(ExtractedUiNode {
1252                    z_order: uinode.stack_index as f32 + stack_z_offsets::TEXT_STRIKETHROUGH,
1253                    render_entity: commands.spawn(TemporaryRenderEntity).id(),
1254                    clip: clip.map(|clip| clip.clip),
1255                    image: AssetId::default(),
1256                    extracted_camera_entity,
1257                    transform: transform * Affine2::from_translation(run.strikethrough_position()),
1258                    item: ExtractedUiItem::Node {
1259                        color,
1260                        rect: Rect {
1261                            min: Vec2::ZERO,
1262                            max: run.strikethrough_size(),
1263                        },
1264                        atlas_scaling: None,
1265                        flip_x: false,
1266                        flip_y: false,
1267                        border: BorderRect::ZERO,
1268                        border_radius: ResolvedBorderRadius::ZERO,
1269                        node_type: NodeType::Rect,
1270                    },
1271                    main_entity: entity.into(),
1272                });
1273            }
1274
1275            if maybe_underline.is_some() {
1276                let color = maybe_underline_color
1277                    .map(|uc| uc.0)
1278                    .unwrap_or(text_color.0)
1279                    .to_linear();
1280
1281                extracted_uinodes.uinodes.push(ExtractedUiNode {
1282                    z_order: uinode.stack_index as f32 + stack_z_offsets::TEXT_STRIKETHROUGH,
1283                    render_entity: commands.spawn(TemporaryRenderEntity).id(),
1284                    clip: clip.map(|clip| clip.clip),
1285                    image: AssetId::default(),
1286                    extracted_camera_entity,
1287                    transform: transform * Affine2::from_translation(run.underline_position()),
1288                    item: ExtractedUiItem::Node {
1289                        color,
1290                        rect: Rect {
1291                            min: Vec2::ZERO,
1292                            max: run.underline_size(),
1293                        },
1294                        atlas_scaling: None,
1295                        flip_x: false,
1296                        flip_y: false,
1297                        border: BorderRect::ZERO,
1298                        border_radius: ResolvedBorderRadius::ZERO,
1299                        node_type: NodeType::Rect,
1300                    },
1301                    main_entity: entity.into(),
1302                });
1303            }
1304        }
1305    }
1306}
1307
/// A single vertex of a UI quad.
///
/// `#[repr(C)]` plus the bytemuck `Pod`/`Zeroable` derives allow the vertex
/// data to be written into a raw GPU buffer (see `UiMeta::vertices`). Field
/// order is therefore part of the GPU-side layout — do not reorder.
#[repr(C)]
#[derive(Copy, Clone, Pod, Zeroable)]
struct UiVertex {
    /// Position of the vertex.
    pub position: [f32; 3],
    /// Texture coordinates of the vertex.
    pub uv: [f32; 2],
    /// Color of the vertex.
    pub color: [f32; 4],
    /// Shader flags to determine how to render the UI node.
    /// See [`shader_flags`] for possible values.
    pub flags: u32,
    /// Border radius of the UI node.
    /// Ordering: top left, top right, bottom right, bottom left.
    pub radius: [f32; 4],
    /// Border thickness of the UI node.
    /// Ordering: left, top, right, bottom.
    pub border: [f32; 4],
    /// Size of the UI node.
    pub size: [f32; 2],
    /// Position relative to the center of the UI node.
    pub point: [f32; 2],
}
1328
/// Render-world resource holding the GPU buffers and view bind group used to
/// draw UI nodes; populated each frame by `prepare_uinodes`.
#[derive(Resource)]
pub struct UiMeta {
    /// Vertex buffer of [`UiVertex`] data, cleared and refilled every frame.
    vertices: RawBufferVec<UiVertex>,
    /// Index buffer matching `vertices`, cleared and refilled every frame.
    indices: RawBufferVec<u32>,
    /// Bind group for the view uniform; recreated in `prepare_uinodes`,
    /// `None` until the first prepare runs.
    view_bind_group: Option<BindGroup>,
}
1335
1336impl Default for UiMeta {
1337    fn default() -> Self {
1338        Self {
1339            vertices: RawBufferVec::new(BufferUsages::VERTEX),
1340            indices: RawBufferVec::new(BufferUsages::INDEX),
1341            view_bind_group: None,
1342        }
1343    }
1344}
1345
/// Corner offsets of a unit quad centered on the origin (±0.5 on each axis);
/// scaling these by a rect's size yields that rect's corner positions.
pub(crate) const QUAD_VERTEX_POSITIONS: [Vec2; 4] = [
    Vec2::new(-0.5, -0.5),
    Vec2::new(0.5, -0.5),
    Vec2::new(0.5, 0.5),
    Vec2::new(-0.5, 0.5),
];

/// Indices into [`QUAD_VERTEX_POSITIONS`] forming the two triangles
/// (0, 2, 3) and (0, 1, 2) that cover the quad.
pub(crate) const QUAD_INDICES: [usize; 6] = [0, 2, 3, 0, 1, 2];
1354
/// A batch of UI nodes drawn with a single image in one draw call.
#[derive(Component)]
pub struct UiBatch {
    /// Range of indices covered by this batch (filled in during prepare).
    pub range: Range<u32>,
    /// The image shared by every node in this batch.
    pub image: AssetId<Image>,
}
1360
/// Bit flags controlling how a UI vertex is rendered.
///
/// The values here should match the values for the constants in `ui.wgsl`.
pub mod shader_flags {
    /// Texture should be ignored
    pub const UNTEXTURED: u32 = 0;
    /// Textured
    pub const TEXTURED: u32 = 1;
    /// Per-corner flag values.
    /// Ordering: top left, top right, bottom right, bottom left.
    pub const CORNERS: [u32; 4] = [0, 2, 2 | 4, 4];
    pub const RADIAL: u32 = 16;
    pub const FILL_START: u32 = 32;
    pub const FILL_END: u32 = 64;
    pub const CONIC: u32 = 128;
    pub const BORDER_LEFT: u32 = 256;
    pub const BORDER_TOP: u32 = 512;
    pub const BORDER_RIGHT: u32 = 1024;
    pub const BORDER_BOTTOM: u32 = 2048;
    /// All four border-edge flags combined. Combined with bitwise OR, the
    /// idiomatic operation for disjoint bit flags (same value as the former
    /// arithmetic sum, since the bits don't overlap).
    pub const BORDER_ALL: u32 = BORDER_LEFT | BORDER_TOP | BORDER_RIGHT | BORDER_BOTTOM;
}
1379
/// Queues every extracted UI node into the transparent UI render phase of its
/// target camera's UI view, specializing the UI pipeline per view.
pub fn queue_uinodes(
    extracted_uinodes: Res<ExtractedUiNodes>,
    ui_pipeline: Res<UiPipeline>,
    mut pipelines: ResMut<SpecializedRenderPipelines<UiPipeline>>,
    mut transparent_render_phases: ResMut<ViewSortedRenderPhases<TransparentUi>>,
    render_views: Query<(&UiCameraView, Option<&UiAntiAlias>), With<ExtractedView>>,
    camera_views: Query<&ExtractedView>,
    pipeline_cache: Res<PipelineCache>,
    draw_functions: Res<DrawFunctions<TransparentUi>>,
) {
    let draw_function = draw_functions.read().id::<DrawUi>();
    // Cache the view/phase lookup between consecutive nodes that target the
    // same camera; `Entity::PLACEHOLDER` makes the first iteration miss the
    // cache and perform a lookup.
    let mut current_camera_entity = Entity::PLACEHOLDER;
    let mut current_phase = None;

    for (index, extracted_uinode) in extracted_uinodes.uinodes.iter().enumerate() {
        if current_camera_entity != extracted_uinode.extracted_camera_entity {
            // Follow the camera entity to its UI view (via `UiCameraView`),
            // then to that view's transparent phase. If any link is missing,
            // `current_phase` stays `None` and this camera's nodes are skipped.
            current_phase = render_views
                .get(extracted_uinode.extracted_camera_entity)
                .ok()
                .and_then(|(default_camera_view, ui_anti_alias)| {
                    camera_views
                        .get(default_camera_view.0)
                        .ok()
                        .and_then(|view| {
                            transparent_render_phases
                                .get_mut(&view.retained_view_entity)
                                .map(|transparent_phase| (view, ui_anti_alias, transparent_phase))
                        })
                });
            current_camera_entity = extracted_uinode.extracted_camera_entity;
        }

        let Some((view, ui_anti_alias, transparent_phase)) = current_phase.as_mut() else {
            continue;
        };

        // Anti-aliasing defaults to on when the camera has no `UiAntiAlias`
        // component.
        let pipeline = pipelines.specialize(
            &pipeline_cache,
            &ui_pipeline,
            UiPipelineKey {
                hdr: view.hdr,
                anti_alias: matches!(ui_anti_alias, None | Some(UiAntiAlias::On)),
            },
        );

        transparent_phase.add(TransparentUi {
            draw_function,
            pipeline,
            entity: (extracted_uinode.render_entity, extracted_uinode.main_entity),
            // Items are sorted by the z-order computed during extraction
            // (stack index plus a per-node-type offset).
            sort_key: FloatOrd(extracted_uinode.z_order),
            index,
            // batch_range will be calculated in prepare_uinodes
            batch_range: 0..0,
            extra_index: PhaseItemExtraIndex::None,
            indexed: true,
        });
    }
}
1438
/// Cache of per-image texture bind groups used when drawing UI nodes.
/// Entries are created lazily during prepare and removed when the underlying
/// image asset is modified or removed, forcing the bind group to be rebuilt.
#[derive(Resource, Default)]
pub struct ImageNodeBindGroups {
    pub values: HashMap<AssetId<Image>, BindGroup>,
}
1443
1444pub fn prepare_uinodes(
1445    mut commands: Commands,
1446    render_device: Res<RenderDevice>,
1447    render_queue: Res<RenderQueue>,
1448    pipeline_cache: Res<PipelineCache>,
1449    mut ui_meta: ResMut<UiMeta>,
1450    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
1451    view_uniforms: Res<ViewUniforms>,
1452    ui_pipeline: Res<UiPipeline>,
1453    mut image_bind_groups: ResMut<ImageNodeBindGroups>,
1454    gpu_images: Res<RenderAssets<GpuImage>>,
1455    mut phases: ResMut<ViewSortedRenderPhases<TransparentUi>>,
1456    events: Res<SpriteAssetEvents>,
1457    mut previous_len: Local<usize>,
1458) {
1459    // If an image has changed, the GpuImage has (probably) changed
1460    for event in &events.images {
1461        match event {
1462            AssetEvent::Added { .. } |
1463            AssetEvent::Unused { .. } |
1464            // Images don't have dependencies
1465            AssetEvent::LoadedWithDependencies { .. } => {}
1466            AssetEvent::Modified { id } | AssetEvent::Removed { id } => {
1467                image_bind_groups.values.remove(id);
1468            }
1469        };
1470    }
1471
1472    if let Some(view_binding) = view_uniforms.uniforms.binding() {
1473        let mut batches: Vec<(Entity, UiBatch)> = Vec::with_capacity(*previous_len);
1474
1475        ui_meta.vertices.clear();
1476        ui_meta.indices.clear();
1477        ui_meta.view_bind_group = Some(render_device.create_bind_group(
1478            "ui_view_bind_group",
1479            &pipeline_cache.get_bind_group_layout(&ui_pipeline.view_layout),
1480            &BindGroupEntries::single(view_binding),
1481        ));
1482
1483        // Buffer indexes
1484        let mut vertices_index = 0;
1485        let mut indices_index = 0;
1486
1487        for ui_phase in phases.values_mut() {
1488            let mut batch_item_index = 0;
1489            let mut batch_image_handle = AssetId::invalid();
1490
1491            for item_index in 0..ui_phase.items.len() {
1492                let item = &mut ui_phase.items[item_index];
1493                let Some(extracted_uinode) = extracted_uinodes
1494                    .uinodes
1495                    .get(item.index)
1496                    .filter(|n| item.entity() == n.render_entity)
1497                else {
1498                    batch_image_handle = AssetId::invalid();
1499                    continue;
1500                };
1501
1502                let mut existing_batch = batches.last_mut();
1503
1504                if batch_image_handle == AssetId::invalid()
1505                    || existing_batch.is_none()
1506                    || (batch_image_handle != AssetId::default()
1507                        && extracted_uinode.image != AssetId::default()
1508                        && batch_image_handle != extracted_uinode.image)
1509                {
1510                    if let Some(gpu_image) = gpu_images.get(extracted_uinode.image) {
1511                        batch_item_index = item_index;
1512                        batch_image_handle = extracted_uinode.image;
1513
1514                        let new_batch = UiBatch {
1515                            range: vertices_index..vertices_index,
1516                            image: extracted_uinode.image,
1517                        };
1518
1519                        batches.push((item.entity(), new_batch));
1520
1521                        image_bind_groups
1522                            .values
1523                            .entry(batch_image_handle)
1524                            .or_insert_with(|| {
1525                                render_device.create_bind_group(
1526                                    "ui_material_bind_group",
1527                                    &pipeline_cache
1528                                        .get_bind_group_layout(&ui_pipeline.image_layout),
1529                                    &BindGroupEntries::sequential((
1530                                        &gpu_image.texture_view,
1531                                        &gpu_image.sampler,
1532                                    )),
1533                                )
1534                            });
1535
1536                        existing_batch = batches.last_mut();
1537                    } else {
1538                        continue;
1539                    }
1540                } else if batch_image_handle == AssetId::default()
1541                    && extracted_uinode.image != AssetId::default()
1542                {
1543                    if let Some(ref mut existing_batch) = existing_batch
1544                        && let Some(gpu_image) = gpu_images.get(extracted_uinode.image)
1545                    {
1546                        batch_image_handle = extracted_uinode.image;
1547                        existing_batch.1.image = extracted_uinode.image;
1548
1549                        image_bind_groups
1550                            .values
1551                            .entry(batch_image_handle)
1552                            .or_insert_with(|| {
1553                                render_device.create_bind_group(
1554                                    "ui_material_bind_group",
1555                                    &pipeline_cache
1556                                        .get_bind_group_layout(&ui_pipeline.image_layout),
1557                                    &BindGroupEntries::sequential((
1558                                        &gpu_image.texture_view,
1559                                        &gpu_image.sampler,
1560                                    )),
1561                                )
1562                            });
1563                    } else {
1564                        continue;
1565                    }
1566                }
1567                match &extracted_uinode.item {
1568                    ExtractedUiItem::Node {
1569                        atlas_scaling,
1570                        flip_x,
1571                        flip_y,
1572                        border_radius,
1573                        border,
1574                        node_type,
1575                        rect,
1576                        color,
1577                    } => {
1578                        let mut flags = if extracted_uinode.image != AssetId::default() {
1579                            shader_flags::TEXTURED
1580                        } else {
1581                            shader_flags::UNTEXTURED
1582                        };
1583
1584                        let mut uinode_rect = *rect;
1585
1586                        let rect_size = uinode_rect.size();
1587
1588                        let transform = extracted_uinode.transform;
1589
1590                        // Specify the corners of the node
1591                        let positions = QUAD_VERTEX_POSITIONS
1592                            .map(|pos| transform.transform_point2(pos * rect_size).extend(0.));
1593                        let points = QUAD_VERTEX_POSITIONS.map(|pos| pos * rect_size);
1594
1595                        // Calculate the effect of clipping
1596                        // Note: this won't work with rotation/scaling, but that's much more complex (may need more that 2 quads)
1597                        let mut positions_diff = if let Some(clip) = extracted_uinode.clip {
1598                            [
1599                                Vec2::new(
1600                                    f32::max(clip.min.x - positions[0].x, 0.),
1601                                    f32::max(clip.min.y - positions[0].y, 0.),
1602                                ),
1603                                Vec2::new(
1604                                    f32::min(clip.max.x - positions[1].x, 0.),
1605                                    f32::max(clip.min.y - positions[1].y, 0.),
1606                                ),
1607                                Vec2::new(
1608                                    f32::min(clip.max.x - positions[2].x, 0.),
1609                                    f32::min(clip.max.y - positions[2].y, 0.),
1610                                ),
1611                                Vec2::new(
1612                                    f32::max(clip.min.x - positions[3].x, 0.),
1613                                    f32::min(clip.max.y - positions[3].y, 0.),
1614                                ),
1615                            ]
1616                        } else {
1617                            [Vec2::ZERO; 4]
1618                        };
1619
1620                        let positions_clipped = [
1621                            positions[0] + positions_diff[0].extend(0.),
1622                            positions[1] + positions_diff[1].extend(0.),
1623                            positions[2] + positions_diff[2].extend(0.),
1624                            positions[3] + positions_diff[3].extend(0.),
1625                        ];
1626
1627                        let points = [
1628                            points[0] + positions_diff[0],
1629                            points[1] + positions_diff[1],
1630                            points[2] + positions_diff[2],
1631                            points[3] + positions_diff[3],
1632                        ];
1633
1634                        let transformed_rect_size = transform.transform_vector2(rect_size);
1635
1636                        // Don't try to cull nodes that have a rotation
1637                        // In a rotation around the Z-axis, this value is 0.0 for an angle of 0.0 or π
1638                        // In those two cases, the culling check can proceed normally as corners will be on
1639                        // horizontal / vertical lines
1640                        // For all other angles, bypass the culling check
1641                        // This does not properly handle all rotations on all axes
1642                        if transform.x_axis[1] == 0.0 {
1643                            // Cull nodes that are completely clipped
1644                            if positions_diff[0].x - positions_diff[1].x >= transformed_rect_size.x
1645                                || positions_diff[1].y - positions_diff[2].y
1646                                    >= transformed_rect_size.y
1647                            {
1648                                continue;
1649                            }
1650                        }
1651                        let uvs = if flags == shader_flags::UNTEXTURED {
1652                            [Vec2::ZERO, Vec2::X, Vec2::ONE, Vec2::Y]
1653                        } else {
1654                            let image = gpu_images
1655                                .get(extracted_uinode.image)
1656                                .expect("Image was checked during batching and should still exist");
1657                            // Rescale atlases. This is done here because we need texture data that might not be available in Extract.
1658                            let atlas_extent = atlas_scaling
1659                                .map(|scaling| image.size_2d().as_vec2() * scaling)
1660                                .unwrap_or(uinode_rect.max);
1661                            if *flip_x {
1662                                core::mem::swap(&mut uinode_rect.max.x, &mut uinode_rect.min.x);
1663                                positions_diff[0].x *= -1.;
1664                                positions_diff[1].x *= -1.;
1665                                positions_diff[2].x *= -1.;
1666                                positions_diff[3].x *= -1.;
1667                            }
1668                            if *flip_y {
1669                                core::mem::swap(&mut uinode_rect.max.y, &mut uinode_rect.min.y);
1670                                positions_diff[0].y *= -1.;
1671                                positions_diff[1].y *= -1.;
1672                                positions_diff[2].y *= -1.;
1673                                positions_diff[3].y *= -1.;
1674                            }
1675                            [
1676                                Vec2::new(
1677                                    uinode_rect.min.x + positions_diff[0].x,
1678                                    uinode_rect.min.y + positions_diff[0].y,
1679                                ),
1680                                Vec2::new(
1681                                    uinode_rect.max.x + positions_diff[1].x,
1682                                    uinode_rect.min.y + positions_diff[1].y,
1683                                ),
1684                                Vec2::new(
1685                                    uinode_rect.max.x + positions_diff[2].x,
1686                                    uinode_rect.max.y + positions_diff[2].y,
1687                                ),
1688                                Vec2::new(
1689                                    uinode_rect.min.x + positions_diff[3].x,
1690                                    uinode_rect.max.y + positions_diff[3].y,
1691                                ),
1692                            ]
1693                            .map(|pos| pos / atlas_extent)
1694                        };
1695
1696                        let color = color.to_f32_array();
1697                        if let NodeType::Border(border_flags) = *node_type {
1698                            flags |= border_flags;
1699                        }
1700
1701                        for i in 0..4 {
1702                            ui_meta.vertices.push(UiVertex {
1703                                position: positions_clipped[i].into(),
1704                                uv: uvs[i].into(),
1705                                color,
1706                                flags: flags | shader_flags::CORNERS[i],
1707                                radius: (*border_radius).into(),
1708                                border: [
1709                                    border.min_inset.x,
1710                                    border.min_inset.y,
1711                                    border.max_inset.x,
1712                                    border.max_inset.y,
1713                                ],
1714                                size: rect_size.into(),
1715                                point: points[i].into(),
1716                            });
1717                        }
1718
1719                        for &i in &QUAD_INDICES {
1720                            ui_meta.indices.push(indices_index + i as u32);
1721                        }
1722
1723                        vertices_index += 6;
1724                        indices_index += 4;
1725                    }
1726                    ExtractedUiItem::Glyphs { range } => {
1727                        let image = gpu_images
1728                            .get(extracted_uinode.image)
1729                            .expect("Image was checked during batching and should still exist");
1730
1731                        let atlas_extent = image.size_2d().as_vec2();
1732
1733                        for glyph in &extracted_uinodes.glyphs[range.clone()] {
1734                            let color = glyph.color.to_f32_array();
1735                            let glyph_rect = glyph.rect;
1736                            let rect_size = glyph_rect.size();
1737
1738                            // Specify the corners of the glyph
1739                            let positions = QUAD_VERTEX_POSITIONS.map(|pos| {
1740                                extracted_uinode
1741                                    .transform
1742                                    .transform_point2(glyph.translation + pos * glyph_rect.size())
1743                                    .extend(0.)
1744                            });
1745
1746                            let positions_diff = if let Some(clip) = extracted_uinode.clip {
1747                                [
1748                                    Vec2::new(
1749                                        f32::max(clip.min.x - positions[0].x, 0.),
1750                                        f32::max(clip.min.y - positions[0].y, 0.),
1751                                    ),
1752                                    Vec2::new(
1753                                        f32::min(clip.max.x - positions[1].x, 0.),
1754                                        f32::max(clip.min.y - positions[1].y, 0.),
1755                                    ),
1756                                    Vec2::new(
1757                                        f32::min(clip.max.x - positions[2].x, 0.),
1758                                        f32::min(clip.max.y - positions[2].y, 0.),
1759                                    ),
1760                                    Vec2::new(
1761                                        f32::max(clip.min.x - positions[3].x, 0.),
1762                                        f32::min(clip.max.y - positions[3].y, 0.),
1763                                    ),
1764                                ]
1765                            } else {
1766                                [Vec2::ZERO; 4]
1767                            };
1768
1769                            let positions_clipped = [
1770                                positions[0] + positions_diff[0].extend(0.),
1771                                positions[1] + positions_diff[1].extend(0.),
1772                                positions[2] + positions_diff[2].extend(0.),
1773                                positions[3] + positions_diff[3].extend(0.),
1774                            ];
1775
1776                            // cull nodes that are completely clipped
1777                            let transformed_rect_size =
1778                                extracted_uinode.transform.transform_vector2(rect_size);
1779                            if positions_diff[0].x - positions_diff[1].x
1780                                >= transformed_rect_size.x.abs()
1781                                || positions_diff[1].y - positions_diff[2].y
1782                                    >= transformed_rect_size.y.abs()
1783                            {
1784                                continue;
1785                            }
1786
1787                            let uvs = [
1788                                Vec2::new(
1789                                    glyph.rect.min.x + positions_diff[0].x,
1790                                    glyph.rect.min.y + positions_diff[0].y,
1791                                ),
1792                                Vec2::new(
1793                                    glyph.rect.max.x + positions_diff[1].x,
1794                                    glyph.rect.min.y + positions_diff[1].y,
1795                                ),
1796                                Vec2::new(
1797                                    glyph.rect.max.x + positions_diff[2].x,
1798                                    glyph.rect.max.y + positions_diff[2].y,
1799                                ),
1800                                Vec2::new(
1801                                    glyph.rect.min.x + positions_diff[3].x,
1802                                    glyph.rect.max.y + positions_diff[3].y,
1803                                ),
1804                            ]
1805                            .map(|pos| pos / atlas_extent);
1806
1807                            for i in 0..4 {
1808                                ui_meta.vertices.push(UiVertex {
1809                                    position: positions_clipped[i].into(),
1810                                    uv: uvs[i].into(),
1811                                    color,
1812                                    flags: shader_flags::TEXTURED | shader_flags::CORNERS[i],
1813                                    radius: [0.0; 4],
1814                                    border: [0.0; 4],
1815                                    size: rect_size.into(),
1816                                    point: [0.0; 2],
1817                                });
1818                            }
1819
1820                            for &i in &QUAD_INDICES {
1821                                ui_meta.indices.push(indices_index + i as u32);
1822                            }
1823
1824                            vertices_index += 6;
1825                            indices_index += 4;
1826                        }
1827                    }
1828                }
1829                existing_batch.unwrap().1.range.end = vertices_index;
1830                ui_phase.items[batch_item_index].batch_range_mut().end += 1;
1831            }
1832        }
1833
1834        ui_meta.vertices.write_buffer(&render_device, &render_queue);
1835        ui_meta.indices.write_buffer(&render_device, &render_queue);
1836        *previous_len = batches.len();
1837        commands.try_insert_batch(batches);
1838    }
1839    extracted_uinodes.clear();
1840}