// custom_post_processing/custom_post_processing.rs
1//! This example shows how to create a custom render pass that runs after the main pass
2//! and reads the texture generated by the main pass.
3//!
4//! The example shader is a very simple implementation of chromatic aberration.
//! To adapt this example for 2D, replace all instances of 3D structures (such as `Core3d`, etc.) with their corresponding 2D counterparts.
6//!
7//! This is a fairly low level example and assumes some familiarity with rendering concepts and wgpu.
8
9use bevy::{
10 core_pipeline::{
11 core_3d::graph::{Core3d, Node3d},
12 fullscreen_vertex_shader::fullscreen_shader_vertex_state,
13 },
14 ecs::query::QueryItem,
15 prelude::*,
16 render::{
17 extract_component::{
18 ComponentUniforms, DynamicUniformIndex, ExtractComponent, ExtractComponentPlugin,
19 UniformComponentPlugin,
20 },
21 render_graph::{
22 NodeRunError, RenderGraphApp, RenderGraphContext, RenderLabel, ViewNode, ViewNodeRunner,
23 },
24 render_resource::{
25 binding_types::{sampler, texture_2d, uniform_buffer},
26 *,
27 },
28 renderer::{RenderContext, RenderDevice},
29 view::ViewTarget,
30 RenderApp,
31 },
32};
33
/// This example uses a shader source file from the assets subdirectory.
/// The path is resolved relative to the application's `assets/` directory
/// when loaded via `world.load_asset` in `PostProcessPipeline::from_world`.
const SHADER_ASSET_PATH: &str = "shaders/post_processing.wgsl";
36
37fn main() {
38 App::new()
39 .add_plugins((DefaultPlugins, PostProcessPlugin))
40 .add_systems(Startup, setup)
41 .add_systems(Update, (rotate, update_settings))
42 .run();
43}
44
/// It is generally encouraged to set up post processing effects as a plugin.
// Zero-sized marker type; all of the actual wiring happens in its `Plugin` impl.
struct PostProcessPlugin;
47
48impl Plugin for PostProcessPlugin {
49 fn build(&self, app: &mut App) {
50 app.add_plugins((
51 // The settings will be a component that lives in the main world but will
52 // be extracted to the render world every frame.
53 // This makes it possible to control the effect from the main world.
54 // This plugin will take care of extracting it automatically.
55 // It's important to derive [`ExtractComponent`] on [`PostProcessingSettings`]
56 // for this plugin to work correctly.
57 ExtractComponentPlugin::<PostProcessSettings>::default(),
58 // The settings will also be the data used in the shader.
59 // This plugin will prepare the component for the GPU by creating a uniform buffer
60 // and writing the data to that buffer every frame.
61 UniformComponentPlugin::<PostProcessSettings>::default(),
62 ));
63
64 // We need to get the render app from the main app
65 let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
66 return;
67 };
68
69 render_app
70 // Bevy's renderer uses a render graph which is a collection of nodes in a directed acyclic graph.
71 // It currently runs on each view/camera and executes each node in the specified order.
72 // It will make sure that any node that needs a dependency from another node
73 // only runs when that dependency is done.
74 //
75 // Each node can execute arbitrary work, but it generally runs at least one render pass.
76 // A node only has access to the render world, so if you need data from the main world
77 // you need to extract it manually or with the plugin like above.
78 // Add a [`Node`] to the [`RenderGraph`]
79 // The Node needs to impl FromWorld
80 //
81 // The [`ViewNodeRunner`] is a special [`Node`] that will automatically run the node for each view
82 // matching the [`ViewQuery`]
83 .add_render_graph_node::<ViewNodeRunner<PostProcessNode>>(
84 // Specify the label of the graph, in this case we want the graph for 3d
85 Core3d,
86 // It also needs the label of the node
87 PostProcessLabel,
88 )
89 .add_render_graph_edges(
90 Core3d,
91 // Specify the node ordering.
92 // This will automatically create all required node edges to enforce the given ordering.
93 (
94 Node3d::Tonemapping,
95 PostProcessLabel,
96 Node3d::EndMainPassPostProcessing,
97 ),
98 );
99 }
100
101 fn finish(&self, app: &mut App) {
102 // We need to get the render app from the main app
103 let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
104 return;
105 };
106
107 render_app
108 // Initialize the pipeline
109 .init_resource::<PostProcessPipeline>();
110 }
111}
112
// Label used to identify our node when registering it in the render graph
// and when declaring its ordering edges.
#[derive(Debug, Hash, PartialEq, Eq, Clone, RenderLabel)]
struct PostProcessLabel;
115
// The post process node used for the render graph.
// `Default` satisfies the `FromWorld` bound that `ViewNodeRunner` needs to
// construct the node.
#[derive(Default)]
struct PostProcessNode;
119
120// The ViewNode trait is required by the ViewNodeRunner
121impl ViewNode for PostProcessNode {
122 // The node needs a query to gather data from the ECS in order to do its rendering,
123 // but it's not a normal system so we need to define it manually.
124 //
125 // This query will only run on the view entity
126 type ViewQuery = (
127 &'static ViewTarget,
128 // This makes sure the node only runs on cameras with the PostProcessSettings component
129 &'static PostProcessSettings,
130 // As there could be multiple post processing components sent to the GPU (one per camera),
131 // we need to get the index of the one that is associated with the current view.
132 &'static DynamicUniformIndex<PostProcessSettings>,
133 );
134
135 // Runs the node logic
136 // This is where you encode draw commands.
137 //
138 // This will run on every view on which the graph is running.
139 // If you don't want your effect to run on every camera,
140 // you'll need to make sure you have a marker component as part of [`ViewQuery`]
141 // to identify which camera(s) should run the effect.
142 fn run(
143 &self,
144 _graph: &mut RenderGraphContext,
145 render_context: &mut RenderContext,
146 (view_target, _post_process_settings, settings_index): QueryItem<Self::ViewQuery>,
147 world: &World,
148 ) -> Result<(), NodeRunError> {
149 // Get the pipeline resource that contains the global data we need
150 // to create the render pipeline
151 let post_process_pipeline = world.resource::<PostProcessPipeline>();
152
153 // The pipeline cache is a cache of all previously created pipelines.
154 // It is required to avoid creating a new pipeline each frame,
155 // which is expensive due to shader compilation.
156 let pipeline_cache = world.resource::<PipelineCache>();
157
158 // Get the pipeline from the cache
159 let Some(pipeline) = pipeline_cache.get_render_pipeline(post_process_pipeline.pipeline_id)
160 else {
161 return Ok(());
162 };
163
164 // Get the settings uniform binding
165 let settings_uniforms = world.resource::<ComponentUniforms<PostProcessSettings>>();
166 let Some(settings_binding) = settings_uniforms.uniforms().binding() else {
167 return Ok(());
168 };
169
170 // This will start a new "post process write", obtaining two texture
171 // views from the view target - a `source` and a `destination`.
172 // `source` is the "current" main texture and you _must_ write into
173 // `destination` because calling `post_process_write()` on the
174 // [`ViewTarget`] will internally flip the [`ViewTarget`]'s main
175 // texture to the `destination` texture. Failing to do so will cause
176 // the current main texture information to be lost.
177 let post_process = view_target.post_process_write();
178
179 // The bind_group gets created each frame.
180 //
181 // Normally, you would create a bind_group in the Queue set,
182 // but this doesn't work with the post_process_write().
183 // The reason it doesn't work is because each post_process_write will alternate the source/destination.
184 // The only way to have the correct source/destination for the bind_group
185 // is to make sure you get it during the node execution.
186 let bind_group = render_context.render_device().create_bind_group(
187 "post_process_bind_group",
188 &post_process_pipeline.layout,
189 // It's important for this to match the BindGroupLayout defined in the PostProcessPipeline
190 &BindGroupEntries::sequential((
191 // Make sure to use the source view
192 post_process.source,
193 // Use the sampler created for the pipeline
194 &post_process_pipeline.sampler,
195 // Set the settings binding
196 settings_binding.clone(),
197 )),
198 );
199
200 // Begin the render pass
201 let mut render_pass = render_context.begin_tracked_render_pass(RenderPassDescriptor {
202 label: Some("post_process_pass"),
203 color_attachments: &[Some(RenderPassColorAttachment {
204 // We need to specify the post process destination view here
205 // to make sure we write to the appropriate texture.
206 view: post_process.destination,
207 resolve_target: None,
208 ops: Operations::default(),
209 })],
210 depth_stencil_attachment: None,
211 timestamp_writes: None,
212 occlusion_query_set: None,
213 });
214
215 // This is mostly just wgpu boilerplate for drawing a fullscreen triangle,
216 // using the pipeline/bind_group created above
217 render_pass.set_render_pipeline(pipeline);
218 // By passing in the index of the post process settings on this view, we ensure
219 // that in the event that multiple settings were sent to the GPU (as would be the
220 // case with multiple cameras), we use the correct one.
221 render_pass.set_bind_group(0, &bind_group, &[settings_index.index()]);
222 render_pass.draw(0..3, 0..1);
223
224 Ok(())
225 }
226}
227
// This contains global data used by the render pipeline. This will be created
// once on startup (in `Plugin::finish`, via `FromWorld`).
#[derive(Resource)]
struct PostProcessPipeline {
    // Layout the per-frame bind group must match (texture, sampler, settings).
    layout: BindGroupLayout,
    // Sampler for the screen texture; created once since it never changes.
    sampler: Sampler,
    // Id of the (possibly still compiling) pipeline in the `PipelineCache`.
    pipeline_id: CachedRenderPipelineId,
}
235
236impl FromWorld for PostProcessPipeline {
237 fn from_world(world: &mut World) -> Self {
238 let render_device = world.resource::<RenderDevice>();
239
240 // We need to define the bind group layout used for our pipeline
241 let layout = render_device.create_bind_group_layout(
242 "post_process_bind_group_layout",
243 &BindGroupLayoutEntries::sequential(
244 // The layout entries will only be visible in the fragment stage
245 ShaderStages::FRAGMENT,
246 (
247 // The screen texture
248 texture_2d(TextureSampleType::Float { filterable: true }),
249 // The sampler that will be used to sample the screen texture
250 sampler(SamplerBindingType::Filtering),
251 // The settings uniform that will control the effect
252 uniform_buffer::<PostProcessSettings>(true),
253 ),
254 ),
255 );
256
257 // We can create the sampler here since it won't change at runtime and doesn't depend on the view
258 let sampler = render_device.create_sampler(&SamplerDescriptor::default());
259
260 // Get the shader handle
261 let shader = world.load_asset(SHADER_ASSET_PATH);
262
263 let pipeline_id = world
264 .resource_mut::<PipelineCache>()
265 // This will add the pipeline to the cache and queue its creation
266 .queue_render_pipeline(RenderPipelineDescriptor {
267 label: Some("post_process_pipeline".into()),
268 layout: vec![layout.clone()],
269 // This will setup a fullscreen triangle for the vertex state
270 vertex: fullscreen_shader_vertex_state(),
271 fragment: Some(FragmentState {
272 shader,
273 shader_defs: vec![],
274 // Make sure this matches the entry point of your shader.
275 // It can be anything as long as it matches here and in the shader.
276 entry_point: "fragment".into(),
277 targets: vec![Some(ColorTargetState {
278 format: TextureFormat::bevy_default(),
279 blend: None,
280 write_mask: ColorWrites::ALL,
281 })],
282 }),
283 // All of the following properties are not important for this effect so just use the default values.
284 // This struct doesn't have the Default trait implemented because not all fields can have a default value.
285 primitive: PrimitiveState::default(),
286 depth_stencil: None,
287 multisample: MultisampleState::default(),
288 push_constant_ranges: vec![],
289 zero_initialize_workgroup_memory: false,
290 });
291
292 Self {
293 layout,
294 sampler,
295 pipeline_id,
296 }
297 }
298}
299
// This is the component that will get passed to the shader. It lives on the
// camera entity in the main world and is extracted + uploaded as a uniform
// every frame by the plugins registered in `PostProcessPlugin::build`.
#[derive(Component, Default, Clone, Copy, ExtractComponent, ShaderType)]
struct PostProcessSettings {
    // Strength of the chromatic aberration effect.
    intensity: f32,
    // WebGL2 structs must be 16 byte aligned.
    #[cfg(feature = "webgl2")]
    _webgl2_padding: Vec3,
}
308
309/// Set up a simple 3D scene
310fn setup(
311 mut commands: Commands,
312 mut meshes: ResMut<Assets<Mesh>>,
313 mut materials: ResMut<Assets<StandardMaterial>>,
314) {
315 // camera
316 commands.spawn((
317 Camera3d::default(),
318 Transform::from_translation(Vec3::new(0.0, 0.0, 5.0)).looking_at(Vec3::default(), Vec3::Y),
319 Camera {
320 clear_color: Color::WHITE.into(),
321 ..default()
322 },
323 // Add the setting to the camera.
324 // This component is also used to determine on which camera to run the post processing effect.
325 PostProcessSettings {
326 intensity: 0.02,
327 ..default()
328 },
329 ));
330
331 // cube
332 commands.spawn((
333 Mesh3d(meshes.add(Cuboid::default())),
334 MeshMaterial3d(materials.add(Color::srgb(0.8, 0.7, 0.6))),
335 Transform::from_xyz(0.0, 0.5, 0.0),
336 Rotates,
337 ));
338 // light
339 commands.spawn(DirectionalLight {
340 illuminance: 1_000.,
341 ..default()
342 });
343}
344
// Marker component: entities carrying it are spun by the `rotate` system.
#[derive(Component)]
struct Rotates;
347
348/// Rotates any entity around the x and y axis
349fn rotate(time: Res<Time>, mut query: Query<&mut Transform, With<Rotates>>) {
350 for mut transform in &mut query {
351 transform.rotate_x(0.55 * time.delta_secs());
352 transform.rotate_z(0.15 * time.delta_secs());
353 }
354}
355
356// Change the intensity over time to show that the effect is controlled from the main world
357fn update_settings(mut settings: Query<&mut PostProcessSettings>, time: Res<Time>) {
358 for mut setting in &mut settings {
359 let mut intensity = ops::sin(time.elapsed_secs());
360 // Make it loop periodically
361 intensity = ops::sin(intensity);
362 // Remap it to 0..1 because the intensity can't be negative
363 intensity = intensity * 0.5 + 0.5;
364 // Scale it to a more reasonable level
365 intensity *= 0.015;
366
367 // Set the intensity.
368 // This will then be extracted to the render world and uploaded to the GPU automatically by the [`UniformComponentPlugin`]
369 setting.intensity = intensity;
370 }
371}