custom_post_processing.rs

//! This example shows how to create a custom render pass that runs after the main pass
//! and reads the texture generated by the main pass.
//!
//! The example shader is a very simple implementation of chromatic aberration.
//! To adapt this example for 2D, replace all of the 3D structures with their corresponding 2D counterparts.
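//! For instance, `Core3d` becomes `Core2d`, `Node3d` becomes `Node2d`, and the
//! `Camera3d` component spawned in `setup` becomes `Camera2d`.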
//!
//! This is a fairly low level example and assumes some familiarity with rendering concepts and wgpu.

use bevy::{
    core_pipeline::{
        core_3d::graph::{Core3d, Node3d},
        FullscreenShader,
    },
    ecs::query::QueryItem,
    prelude::*,
    render::{
        extract_component::{
            ComponentUniforms, DynamicUniformIndex, ExtractComponent, ExtractComponentPlugin,
            UniformComponentPlugin,
        },
        render_graph::{
            NodeRunError, RenderGraphContext, RenderGraphExt, RenderLabel, ViewNode, ViewNodeRunner,
        },
        render_resource::{
            binding_types::{sampler, texture_2d, uniform_buffer},
            *,
        },
        renderer::{RenderContext, RenderDevice},
        view::ViewTarget,
        RenderApp, RenderStartup,
    },
};

/// This example uses a shader source file from the assets subdirectory
const SHADER_ASSET_PATH: &str = "shaders/post_processing.wgsl";
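
// For reference, here is a minimal sketch of what `shaders/post_processing.wgsl`
// might look like. This is an illustration, not the verbatim asset: the `@group(0)`
// binding indices must line up with the `BindGroupLayout` created in
// `init_post_process_pipeline` below, and the uniform struct must match
// `PostProcessSettings`.
//
// #import bevy_core_pipeline::fullscreen_vertex_shader::FullscreenVertexOutput
//
// @group(0) @binding(0) var screen_texture: texture_2d<f32>;
// @group(0) @binding(1) var texture_sampler: sampler;
// struct PostProcessSettings {
//     intensity: f32,
// }
// @group(0) @binding(2) var<uniform> settings: PostProcessSettings;
//
// @fragment
// fn fragment(in: FullscreenVertexOutput) -> @location(0) vec4<f32> {
//     // Sample each color channel at a slightly different UV offset to get a
//     // simple chromatic aberration effect.
//     let offset = settings.intensity;
//     return vec4<f32>(
//         textureSample(screen_texture, texture_sampler, in.uv + vec2<f32>(offset, -offset)).r,
//         textureSample(screen_texture, texture_sampler, in.uv + vec2<f32>(-offset, 0.0)).g,
//         textureSample(screen_texture, texture_sampler, in.uv + vec2<f32>(0.0, offset)).b,
//         1.0,
//     );
// }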

fn main() {
    App::new()
        .add_plugins((DefaultPlugins, PostProcessPlugin))
        .add_systems(Startup, setup)
        .add_systems(Update, (rotate, update_settings))
        .run();
}

/// It is generally encouraged to set up post processing effects as a plugin
struct PostProcessPlugin;

impl Plugin for PostProcessPlugin {
    fn build(&self, app: &mut App) {
        app.add_plugins((
            // The settings will be a component that lives in the main world but will
            // be extracted to the render world every frame.
            // This makes it possible to control the effect from the main world.
            // This plugin will take care of extracting it automatically.
            // It's important to derive [`ExtractComponent`] on [`PostProcessSettings`]
            // for this plugin to work correctly.
            ExtractComponentPlugin::<PostProcessSettings>::default(),
            // The settings will also be the data used in the shader.
            // This plugin will prepare the component for the GPU by creating a uniform buffer
            // and writing the data to that buffer every frame.
            UniformComponentPlugin::<PostProcessSettings>::default(),
        ));

        // We need to get the render app from the main app
        let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
            return;
        };

        // RenderStartup runs once on startup, after all plugins are built.
        // It is useful for initializing data that will only live in the RenderApp.
        render_app.add_systems(RenderStartup, init_post_process_pipeline);

        render_app
            // Bevy's renderer uses a render graph, a directed acyclic graph of nodes.
            // It currently runs on each view/camera and executes each node in the specified order.
            // It will make sure that any node that needs a dependency from another node
            // only runs when that dependency is done.
            //
            // Each node can execute arbitrary work, but it generally runs at least one render pass.
            // A node only has access to the render world, so if you need data from the main world
            // you need to extract it manually, or with a plugin like above.
            //
            // Here we add a [`Node`] to the [`RenderGraph`].
            // The node needs to impl `FromWorld` (deriving `Default` is enough, since
            // every `Default` type gets a `FromWorld` impl for free).
            //
            // The [`ViewNodeRunner`] is a special [`Node`] that will automatically run the node
            // for each view matching the [`ViewQuery`].
            .add_render_graph_node::<ViewNodeRunner<PostProcessNode>>(
                // Specify the label of the graph; in this case we want the graph for 3d
                Core3d,
                // It also needs the label of the node
                PostProcessLabel,
            )
            .add_render_graph_edges(
                Core3d,
                // Specify the node ordering.
                // This will automatically create all required node edges to enforce the given ordering.
                (
                    Node3d::Tonemapping,
                    PostProcessLabel,
                    Node3d::EndMainPassPostProcessing,
                ),
            );
    }
}

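/// Identifies our post-processing node in the render graph. The label is used both
/// when adding the node and when defining the edges that order it relative to other nodes.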
#[derive(Debug, Hash, PartialEq, Eq, Clone, RenderLabel)]
struct PostProcessLabel;

// The post process node used for the render graph
#[derive(Default)]
struct PostProcessNode;

// The ViewNode trait is required by the ViewNodeRunner
impl ViewNode for PostProcessNode {
    // The node needs a query to gather data from the ECS in order to do its rendering,
    // but it's not a normal system so we need to define it manually.
    //
    // This query will only run on the view entity
    type ViewQuery = (
        &'static ViewTarget,
        // This makes sure the node only runs on cameras with the PostProcessSettings component
        &'static PostProcessSettings,
        // As there could be multiple post processing components sent to the GPU (one per camera),
        // we need to get the index of the one that is associated with the current view.
        &'static DynamicUniformIndex<PostProcessSettings>,
    );

    // Runs the node logic
    // This is where you encode draw commands.
    //
    // This will run on every view on which the graph is running.
    // If you don't want your effect to run on every camera,
    // you'll need to make sure you have a marker component as part of [`ViewQuery`]
    // to identify which camera(s) should run the effect.
    fn run(
        &self,
        _graph: &mut RenderGraphContext,
        render_context: &mut RenderContext,
        (view_target, _post_process_settings, settings_index): QueryItem<Self::ViewQuery>,
        world: &World,
    ) -> Result<(), NodeRunError> {
        // Get the pipeline resource that contains the global data we need
        // to create the render pipeline
        let post_process_pipeline = world.resource::<PostProcessPipeline>();

        // The pipeline cache is a cache of all previously created pipelines.
        // It is required to avoid creating a new pipeline each frame,
        // which is expensive due to shader compilation.
        let pipeline_cache = world.resource::<PipelineCache>();

        // Get the pipeline from the cache
        let Some(pipeline) = pipeline_cache.get_render_pipeline(post_process_pipeline.pipeline_id)
        else {
            return Ok(());
        };

        // Get the settings uniform binding
        let settings_uniforms = world.resource::<ComponentUniforms<PostProcessSettings>>();
        let Some(settings_binding) = settings_uniforms.uniforms().binding() else {
            return Ok(());
        };

        // This will start a new "post process write", obtaining two texture
        // views from the view target - a `source` and a `destination`.
        // `source` is the "current" main texture and you _must_ write into
        // `destination` because calling `post_process_write()` on the
        // [`ViewTarget`] will internally flip the [`ViewTarget`]'s main
        // texture to the `destination` texture. Failing to do so will cause
        // the current main texture information to be lost.
        let post_process = view_target.post_process_write();

        // The bind_group gets created each frame.
        //
        // Normally, you would create a bind_group in `RenderSet::Queue`,
        // but that doesn't work with `post_process_write()`.
        // It doesn't work because each `post_process_write()` call alternates the source/destination.
        // The only way to have the correct source/destination for the bind_group
        // is to make sure you get it during the node execution.
        let bind_group = render_context.render_device().create_bind_group(
            "post_process_bind_group",
            &post_process_pipeline.layout,
            // It's important for this to match the BindGroupLayout defined in the PostProcessPipeline
            &BindGroupEntries::sequential((
                // Make sure to use the source view
                post_process.source,
                // Use the sampler created for the pipeline
                &post_process_pipeline.sampler,
                // Set the settings binding
                settings_binding.clone(),
            )),
        );

        // Begin the render pass
        let mut render_pass = render_context.begin_tracked_render_pass(RenderPassDescriptor {
            label: Some("post_process_pass"),
            color_attachments: &[Some(RenderPassColorAttachment {
                // We need to specify the post process destination view here
                // to make sure we write to the appropriate texture.
                view: post_process.destination,
                depth_slice: None,
                resolve_target: None,
                ops: Operations::default(),
            })],
            depth_stencil_attachment: None,
            timestamp_writes: None,
            occlusion_query_set: None,
        });

        // This is mostly just wgpu boilerplate for drawing a fullscreen triangle,
        // using the pipeline/bind_group created above
        render_pass.set_render_pipeline(pipeline);
        // By passing in the index of the post process settings on this view, we ensure
        // that in the event that multiple settings were sent to the GPU (as would be the
        // case with multiple cameras), we use the correct one.
        render_pass.set_bind_group(0, &bind_group, &[settings_index.index()]);
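        // Draw the fullscreen triangle: just 3 vertices and no vertex buffer; the
        // fullscreen vertex shader generates the positions from the vertex index.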
        render_pass.draw(0..3, 0..1);

        Ok(())
    }
}

// This contains global data used by the render pipeline. This will be created once on startup.
#[derive(Resource)]
struct PostProcessPipeline {
    layout: BindGroupLayout,
    sampler: Sampler,
    pipeline_id: CachedRenderPipelineId,
}

fn init_post_process_pipeline(
    mut commands: Commands,
    render_device: Res<RenderDevice>,
    asset_server: Res<AssetServer>,
    fullscreen_shader: Res<FullscreenShader>,
    pipeline_cache: Res<PipelineCache>,
) {
    // We need to define the bind group layout used for our pipeline
    let layout = render_device.create_bind_group_layout(
        "post_process_bind_group_layout",
        &BindGroupLayoutEntries::sequential(
            // The layout entries will only be visible in the fragment stage
            ShaderStages::FRAGMENT,
            (
                // The screen texture
                texture_2d(TextureSampleType::Float { filterable: true }),
                // The sampler that will be used to sample the screen texture
                sampler(SamplerBindingType::Filtering),
                // The settings uniform that will control the effect
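                // Passing `true` declares this uniform binding as dynamically offset,
                // which lets one buffer hold a `PostProcessSettings` per view; the
                // offset is supplied in `set_bind_group` during the node's `run`.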
                uniform_buffer::<PostProcessSettings>(true),
            ),
        ),
    );
    // We can create the sampler here since it won't change at runtime and doesn't depend on the view
    let sampler = render_device.create_sampler(&SamplerDescriptor::default());

    // Get the shader handle
    let shader = asset_server.load(SHADER_ASSET_PATH);
    // This will set up a fullscreen triangle for the vertex state.
    let vertex_state = fullscreen_shader.to_vertex_state();
    let pipeline_id = pipeline_cache
        // This will add the pipeline to the cache and queue its creation
        .queue_render_pipeline(RenderPipelineDescriptor {
            label: Some("post_process_pipeline".into()),
            layout: vec![layout.clone()],
            vertex: vertex_state,
            fragment: Some(FragmentState {
                shader,
                // No explicit entry point is set here, so `..default()` leaves the
                // shader's default fragment entry point in use. If your shader names
                // it differently, make sure the entry point matches here and in the shader.
                targets: vec![Some(ColorTargetState {
                    format: TextureFormat::bevy_default(),
                    blend: None,
                    write_mask: ColorWrites::ALL,
                })],
                ..default()
            }),
            ..default()
        });
    commands.insert_resource(PostProcessPipeline {
        layout,
        sampler,
        pipeline_id,
    });
}

// This is the component that will get passed to the shader
#[derive(Component, Default, Clone, Copy, ExtractComponent, ShaderType)]
struct PostProcessSettings {
    intensity: f32,
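    // Note: any fields added here must also be added to the `PostProcessSettings`
    // struct in the WGSL shader, with matching order and layout.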
    // WebGL2 structs must be 16 byte aligned.
    #[cfg(feature = "webgl2")]
    _webgl2_padding: Vec3,
}

/// Set up a simple 3D scene
fn setup(
    mut commands: Commands,
    mut meshes: ResMut<Assets<Mesh>>,
    mut materials: ResMut<Assets<StandardMaterial>>,
) {
    // camera
    commands.spawn((
        Camera3d::default(),
        Transform::from_translation(Vec3::new(0.0, 0.0, 5.0)).looking_at(Vec3::default(), Vec3::Y),
        Camera {
            clear_color: Color::WHITE.into(),
            ..default()
        },
        // Add the setting to the camera.
        // This component is also used to determine on which camera to run the post processing effect.
        PostProcessSettings {
            intensity: 0.02,
            ..default()
        },
    ));

    // cube
    commands.spawn((
        Mesh3d(meshes.add(Cuboid::default())),
        MeshMaterial3d(materials.add(Color::srgb(0.8, 0.7, 0.6))),
        Transform::from_xyz(0.0, 0.5, 0.0),
        Rotates,
    ));
    // light
    commands.spawn(DirectionalLight {
        illuminance: 1_000.,
        ..default()
    });
}

#[derive(Component)]
struct Rotates;

/// Rotates any entity around the x and z axes
fn rotate(time: Res<Time>, mut query: Query<&mut Transform, With<Rotates>>) {
    for mut transform in &mut query {
        transform.rotate_x(0.55 * time.delta_secs());
        transform.rotate_z(0.15 * time.delta_secs());
    }
}

// Change the intensity over time to show that the effect is controlled from the main world
fn update_settings(mut settings: Query<&mut PostProcessSettings>, time: Res<Time>) {
    for mut setting in &mut settings {
        // `sin` makes the intensity oscillate periodically over time
        let mut intensity = ops::sin(time.elapsed_secs());
        // Applying `sin` again keeps the value in -1..1 and slightly flattens the peaks
        intensity = ops::sin(intensity);
        // Remap it to 0..1 because the intensity can't be negative
        intensity = intensity * 0.5 + 0.5;
        // Scale it to a more reasonable level
        intensity *= 0.015;

        // Set the intensity.
        // This will then be extracted to the render world and uploaded to the GPU
        // automatically by the [`UniformComponentPlugin`]
        setting.intensity = intensity;
    }
}