render_depth_to_texture/render_depth_to_texture.rs

//! Demonstrates how to use depth-only cameras.
//!
//! A *depth-only camera* is a camera that renders only to a depth buffer, not
//! to a color buffer. That depth buffer can then be used in shaders for various
//! special effects.
//!
//! To create a depth-only camera, we create a [`Camera3d`] and set its
//! [`RenderTarget`] to [`RenderTarget::None`] to disable creation of a color
//! buffer. Then we add a new node to the render graph that copies the
//! [`bevy::render::view::ViewDepthTexture`] that Bevy creates for that camera
//! to a texture. This texture can then be attached to a material and sampled in
//! the shader.
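//!
//! In short, the camera setup looks roughly like this (an abridged sketch of
//! `spawn_depth_only_camera` below):
//!
//! ```ignore
//! commands.spawn((
//!     Camera3d::default(),
//!     Camera {
//!         // No color buffer; only the depth buffer is produced.
//!         // `DEPTH_TEXTURE_SIZE` is defined below.
//!         target: RenderTarget::None { size: UVec2::splat(DEPTH_TEXTURE_SIZE) },
//!         order: -1,
//!         ..default()
//!     },
//!     // The depth prepass is what actually writes the depth values.
//!     DepthPrepass,
//!     Msaa::Off,
//! ));
//! ```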
//!
//! This demo consists of a rotating cube with a depth-only camera pointed at
//! it. The depth texture from the depth-only camera appears on a plane. You can
//! use the WASD keys to make the depth-only camera orbit around the cube.

use std::f32::consts::{FRAC_PI_2, PI};

use bevy::{
    asset::RenderAssetUsages,
    camera::RenderTarget,
    color::palettes::css::LIME,
    core_pipeline::{
        core_3d::graph::{Core3d, Node3d},
        prepass::DepthPrepass,
    },
    ecs::{query::QueryItem, system::lifetimeless::Read},
    image::{ImageCompareFunction, ImageSampler, ImageSamplerDescriptor},
    math::ops::{acos, atan2, sin_cos},
    prelude::*,
    render::{
        camera::ExtractedCamera,
        extract_resource::{ExtractResource, ExtractResourcePlugin},
        render_asset::RenderAssets,
        render_graph::{
            NodeRunError, RenderGraphContext, RenderGraphExt as _, RenderLabel, ViewNode,
            ViewNodeRunner,
        },
        render_resource::{
            AsBindGroup, CommandEncoderDescriptor, Extent3d, Origin3d, TexelCopyTextureInfo,
            TextureAspect, TextureDimension, TextureFormat,
        },
        renderer::RenderContext,
        texture::GpuImage,
        view::ViewDepthTexture,
        RenderApp,
    },
    shader::ShaderRef,
};

/// A marker component for a rotating cube.
#[derive(Component)]
struct RotatingCube;

/// The material that displays the contents of the depth buffer.
///
/// This material is placed on the plane.
#[derive(Clone, Debug, Asset, TypePath, AsBindGroup)]
struct ShowDepthTextureMaterial {
    /// A copy of the depth texture that the depth-only camera produced.
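    // `sample_type = "depth"` and `sampler_type = "comparison"` make the bind
    // group layout expect a depth texture and a comparison sampler, which is
    // what a `texture_depth_2d`/`sampler_comparison` pair in WGSL requires.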
    #[texture(0, sample_type = "depth")]
    #[sampler(1, sampler_type = "comparison")]
    depth_texture: Option<Handle<Image>>,
}

/// A label for the render node that copies the camera's depth buffer into the
/// [`DemoDepthTexture`].
#[derive(Clone, PartialEq, Eq, Hash, Debug, RenderLabel)]
struct CopyDepthTexturePass;

/// The render node that copies the camera's depth buffer into the
/// [`DemoDepthTexture`].
#[derive(Default)]
struct CopyDepthTextureNode;

/// Holds a copy of the depth buffer that the depth-only camera produces.
///
/// We need to make a copy for two reasons:
///
/// 1. The Bevy renderer automatically creates and maintains depth buffers on
///    its own. There's no mechanism to fetch the depth buffer for a camera
///    outside the render app, so it can't easily be attached to a material.
///
/// 2. `wgpu` doesn't allow applications to simultaneously render to and sample
///    from a standard depth texture, so a copy must be made regardless.
#[derive(Clone, Resource)]
struct DemoDepthTexture(Handle<Image>);

/// [Spherical coordinates], used to implement the camera orbiting
/// functionality.
///
/// Note that these follow the mathematics convention, not the physics
/// convention. In a real application one would probably use the physics
/// convention, but for familiarity's sake we stick with the mathematics
/// convention here.
///
/// [Spherical coordinates]: https://en.wikipedia.org/wiki/Spherical_coordinate_system
#[derive(Clone, Copy, Debug)]
struct SphericalCoordinates {
    /// The radius (distance from the origin), in world units.
    radius: f32,
    /// The inclination (polar angle), measured from the +Y axis.
    inclination: f32,
    /// The azimuth angle (longitude), measured in the XZ plane.
    azimuth: f32,
}

/// The path to the shader that displays the depth texture.
static SHADER_ASSET_PATH: &str = "shaders/show_depth_texture_material.wgsl";

/// The width and height of the depth texture, in texels.
const DEPTH_TEXTURE_SIZE: u32 = 256;

/// The rate at which the user can move the camera, in radians per second.
const CAMERA_MOVEMENT_SPEED: f32 = 2.0;

/// The entry point.
fn main() {
    let mut app = App::new();

    app.add_plugins(DefaultPlugins)
        .add_plugins(MaterialPlugin::<ShowDepthTextureMaterial>::default())
        .add_plugins(ExtractResourcePlugin::<DemoDepthTexture>::default())
        .init_resource::<DemoDepthTexture>()
        .add_systems(Startup, setup)
        .add_systems(Update, rotate_cube)
        .add_systems(Update, draw_camera_gizmo)
        .add_systems(Update, move_camera);

    // Add the `CopyDepthTextureNode` to the render app.
    let render_app = app
        .get_sub_app_mut(RenderApp)
        .expect("Render app should be present");
    render_app.add_render_graph_node::<ViewNodeRunner<CopyDepthTextureNode>>(
        Core3d,
        CopyDepthTexturePass,
    );
    // We have the texture copy operation run in between the prepasses and
    // the opaque pass. Since the depth rendering is part of the prepass, this
    // is a reasonable time to perform the operation.
    render_app.add_render_graph_edges(
        Core3d,
        (
            Node3d::EndPrepasses,
            CopyDepthTexturePass,
            Node3d::MainOpaquePass,
        ),
    );

    app.run();
}

/// Creates the scene.
fn setup(
    mut commands: Commands,
    mut meshes: ResMut<Assets<Mesh>>,
    mut standard_materials: ResMut<Assets<StandardMaterial>>,
    mut show_depth_texture_materials: ResMut<Assets<ShowDepthTextureMaterial>>,
    demo_depth_texture: Res<DemoDepthTexture>,
) {
    spawn_rotating_cube(&mut commands, &mut meshes, &mut standard_materials);
    spawn_plane(
        &mut commands,
        &mut meshes,
        &mut show_depth_texture_materials,
        &demo_depth_texture,
    );
    spawn_light(&mut commands);
    spawn_depth_only_camera(&mut commands);
    spawn_main_camera(&mut commands);
    spawn_instructions(&mut commands);
}

/// Spawns the main rotating cube.
fn spawn_rotating_cube(
    commands: &mut Commands,
    meshes: &mut Assets<Mesh>,
    standard_materials: &mut Assets<StandardMaterial>,
) {
    let cube_handle = meshes.add(Cuboid::new(3.0, 3.0, 3.0));
    let rotating_cube_material_handle = standard_materials.add(StandardMaterial {
        base_color: Color::WHITE,
        unlit: false,
        ..default()
    });
    commands.spawn((
        Mesh3d(cube_handle.clone()),
        MeshMaterial3d(rotating_cube_material_handle),
        Transform::IDENTITY,
        RotatingCube,
    ));
}

/// Spawns the plane that shows the depth texture.
fn spawn_plane(
    commands: &mut Commands,
    meshes: &mut Assets<Mesh>,
    show_depth_texture_materials: &mut Assets<ShowDepthTextureMaterial>,
    demo_depth_texture: &DemoDepthTexture,
) {
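    // The plane's normal points along +Z so that it faces the main camera,
    // which looks down the -Z axis.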
    let plane_handle = meshes.add(Plane3d::new(Vec3::Z, Vec2::splat(2.0)));
    let show_depth_texture_material = show_depth_texture_materials.add(ShowDepthTextureMaterial {
        depth_texture: Some(demo_depth_texture.0.clone()),
    });
    commands.spawn((
        Mesh3d(plane_handle),
        MeshMaterial3d(show_depth_texture_material),
        Transform::from_xyz(10.0, 4.0, 0.0).with_scale(Vec3::splat(2.5)),
    ));
}

/// Spawns a light.
fn spawn_light(commands: &mut Commands) {
    commands.spawn((PointLight::default(), Transform::from_xyz(5.0, 6.0, 7.0)));
}

/// Spawns the depth-only camera.
fn spawn_depth_only_camera(commands: &mut Commands) {
    commands.spawn((
        Camera3d::default(),
        Transform::from_xyz(-4.0, -5.0, 5.0).looking_at(Vec3::ZERO, Vec3::Y),
        Camera {
            // We specify no color render target, for maximum efficiency.
            target: RenderTarget::None {
                // When specifying no render target, we must manually specify
                // the viewport size. Otherwise, Bevy won't know how big to make
                // the depth buffer.
                size: UVec2::splat(DEPTH_TEXTURE_SIZE),
            },
            // Make sure that we render from this depth-only camera *before*
            // rendering from the main camera.
            order: -1,
            ..Camera::default()
        },
        // We need to disable multisampling or the depth texture will be
        // multisampled, which adds complexity we don't care about for this
        // demo.
        Msaa::Off,
        // Cameras with no render target render *nothing* by default. To get
        // them to render something, we must add a prepass that specifies what
        // we want to render: in this case, depth.
        DepthPrepass,
    ));
}

/// Spawns the main camera that renders to the window.
fn spawn_main_camera(commands: &mut Commands) {
    commands.spawn((
        Camera3d::default(),
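        // Position the main camera so that both the cube at the origin and the
        // depth-readout plane are in view.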
        Transform::from_xyz(5.0, 2.0, 30.0).looking_at(vec3(5.0, 2.0, 0.0), Vec3::Y),
        // Disable antialiasing just for simplicity's sake.
        Msaa::Off,
    ));
}

/// Spawns the instructional text at the top of the screen.
fn spawn_instructions(commands: &mut Commands) {
    commands.spawn((
        Text::new("Use WASD to move the secondary camera"),
        Node {
            position_type: PositionType::Absolute,
            top: Val::Px(12.0),
            left: Val::Px(12.0),
            ..Node::default()
        },
    ));
}

/// Spins the cube a bit every frame.
fn rotate_cube(mut cubes: Query<&mut Transform, With<RotatingCube>>, time: Res<Time>) {
    for mut transform in &mut cubes {
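        // Rotate at a different rate around each axis so the cube tumbles
        // rather than spinning around a single axis.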
        transform.rotate_x(1.5 * time.delta_secs());
        transform.rotate_y(1.1 * time.delta_secs());
        transform.rotate_z(-1.3 * time.delta_secs());
    }
}

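// The material only needs a custom fragment shader; Bevy's default mesh vertex
// shader is used unchanged.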
impl Material for ShowDepthTextureMaterial {
    fn fragment_shader() -> ShaderRef {
        SHADER_ASSET_PATH.into()
    }
}

impl ViewNode for CopyDepthTextureNode {
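    // The node reads the extracted camera (so it can identify the depth-only
    // camera by its `order`) and the view's depth texture (the source of the
    // copy).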
    type ViewQuery = (Read<ExtractedCamera>, Read<ViewDepthTexture>);

    fn run<'w>(
        &self,
        _: &mut RenderGraphContext,
        render_context: &mut RenderContext<'w>,
        (camera, depth_texture): QueryItem<'w, '_, Self::ViewQuery>,
        world: &'w World,
    ) -> Result<(), NodeRunError> {
        // Make sure we only run on the depth-only camera.
        // We could make a marker component for that camera and extract it to
        // the render world, but using `order` as a tag to tell the main camera
        // and the depth-only camera apart works in a pinch.
        if camera.order >= 0 {
            return Ok(());
        }

        // Grab the texture we're going to copy to.
        let demo_depth_texture = world.resource::<DemoDepthTexture>();
        let image_assets = world.resource::<RenderAssets<GpuImage>>();
        let Some(demo_depth_image) = image_assets.get(demo_depth_texture.0.id()) else {
            return Ok(());
        };

        // Perform the copy.
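        // Recording happens inside a command buffer generation task, which
        // gets its own command encoder and which the renderer may run in
        // parallel with other command encoding.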
        render_context.add_command_buffer_generation_task(move |render_device| {
            let mut command_encoder =
                render_device.create_command_encoder(&CommandEncoderDescriptor {
                    label: Some("copy depth to demo texture command encoder"),
                });
            command_encoder.push_debug_group("copy depth to demo texture");

            // Copy from the view's depth texture to the destination depth
            // texture.
            command_encoder.copy_texture_to_texture(
                TexelCopyTextureInfo {
                    texture: &depth_texture.texture,
                    mip_level: 0,
                    origin: Origin3d::default(),
                    aspect: TextureAspect::DepthOnly,
                },
                TexelCopyTextureInfo {
                    texture: &demo_depth_image.texture,
                    mip_level: 0,
                    origin: Origin3d::default(),
                    aspect: TextureAspect::DepthOnly,
                },
                Extent3d {
                    width: DEPTH_TEXTURE_SIZE,
                    height: DEPTH_TEXTURE_SIZE,
                    depth_or_array_layers: 1,
                },
            );

            command_encoder.pop_debug_group();
            command_encoder.finish()
        });

        Ok(())
    }
}

impl FromWorld for DemoDepthTexture {
    fn from_world(world: &mut World) -> Self {
        let mut images = world.resource_mut::<Assets<Image>>();

        // Create a new 32-bit floating point depth texture.
        let mut depth_image = Image::new_uninit(
            Extent3d {
                width: DEPTH_TEXTURE_SIZE,
                height: DEPTH_TEXTURE_SIZE,
                depth_or_array_layers: 1,
            },
            TextureDimension::D2,
            TextureFormat::Depth32Float,
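            // The default asset usage keeps the image available to both the
            // main world and the render world.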
            RenderAssetUsages::default(),
        );

        // Create a sampler. Note that this needs to specify a `compare`
        // function in order to be compatible with depth textures.
        depth_image.sampler = ImageSampler::Descriptor(ImageSamplerDescriptor {
            label: Some("custom depth image sampler".to_owned()),
            compare: Some(ImageCompareFunction::Always),
            ..ImageSamplerDescriptor::default()
        });

        let depth_image_handle = images.add(depth_image);
        DemoDepthTexture(depth_image_handle)
    }
}

impl ExtractResource for DemoDepthTexture {
    type Source = Self;

    fn extract_resource(source: &Self::Source) -> Self {
        // Share the `DemoDepthTexture` resource over to the render world so
        // that our `CopyDepthTextureNode` can access it.
        (*source).clone()
    }
}
/// Draws a gizmo representing the depth-only camera.
fn draw_camera_gizmo(cameras: Query<(&Camera, &GlobalTransform)>, mut gizmos: Gizmos) {
    for (camera, transform) in &cameras {
        // As above, we use the order as a cheap tag to tell the depth-only
        // camera apart from the main camera.
        if camera.order >= 0 {
            continue;
        }

        // Draw a cone representing the camera.
        gizmos.primitive_3d(
            &Cone {
                radius: 1.0,
                height: 3.0,
            },
            Isometry3d::new(
                transform.translation(),
                // We have to rotate here because `Cone` primitives are oriented
                // along +Y and cameras point along +Z.
                transform.rotation() * Quat::from_rotation_x(FRAC_PI_2),
            ),
            LIME,
        );
    }
}

/// Orbits the depth-only camera around the cube when WASD is pressed.
fn move_camera(
    mut cameras: Query<(&Camera, &mut Transform)>,
    keyboard: Res<ButtonInput<KeyCode>>,
    time: Res<Time>,
) {
    for (camera, mut transform) in &mut cameras {
        // Only affect the depth camera.
        if camera.order >= 0 {
            continue;
        }

        // Convert the camera's position from Cartesian to spherical coordinates.
        let mut spherical_coords = SphericalCoordinates::from_cartesian(transform.translation);

        // Modify those spherical coordinates as appropriate.
        let mut changed = false;
        if keyboard.pressed(KeyCode::KeyW) {
            spherical_coords.inclination -= time.delta_secs() * CAMERA_MOVEMENT_SPEED;
            changed = true;
        }
        if keyboard.pressed(KeyCode::KeyS) {
            spherical_coords.inclination += time.delta_secs() * CAMERA_MOVEMENT_SPEED;
            changed = true;
        }
        if keyboard.pressed(KeyCode::KeyA) {
            spherical_coords.azimuth += time.delta_secs() * CAMERA_MOVEMENT_SPEED;
            changed = true;
        }
        if keyboard.pressed(KeyCode::KeyD) {
            spherical_coords.azimuth -= time.delta_secs() * CAMERA_MOVEMENT_SPEED;
            changed = true;
        }

        // If they were changed, convert from spherical coordinates back to
        // Cartesian ones, and update the camera's transform.
        if changed {
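            // Clamp the inclination away from the poles: at exactly 0 or PI
            // the camera would look straight along the Y axis, making the
            // azimuth (and the `look_at` up vector) degenerate.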
            spherical_coords.inclination = spherical_coords.inclination.clamp(0.01, PI - 0.01);
            transform.translation = spherical_coords.to_cartesian();
            transform.look_at(Vec3::ZERO, Vec3::Y);
        }
    }
}

impl SphericalCoordinates {
    /// [Converts] from Cartesian coordinates to spherical coordinates.
    ///
    /// [Converts]: https://en.wikipedia.org/wiki/Spherical_coordinate_system#Cartesian_coordinates
    fn from_cartesian(p: Vec3) -> SphericalCoordinates {
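        // Bevy is Y-up, so the inclination is measured from the +Y axis and
        // the azimuth lies in the XZ plane (unlike the Z-up formulas on the
        // linked Wikipedia page).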
        let radius = p.length();
        SphericalCoordinates {
            radius,
            inclination: acos(p.y / radius),
            azimuth: atan2(p.z, p.x),
        }
    }

    /// [Converts] from spherical coordinates to Cartesian coordinates.
    ///
    /// [Converts]: https://en.wikipedia.org/wiki/Spherical_coordinate_system#Cartesian_coordinates
    fn to_cartesian(self) -> Vec3 {
        let (sin_inclination, cos_inclination) = sin_cos(self.inclination);
        let (sin_azimuth, cos_azimuth) = sin_cos(self.azimuth);
        self.radius
            * vec3(
                sin_inclination * cos_azimuth,
                cos_inclination,
                sin_inclination * sin_azimuth,
            )
    }
}