1use std::{collections::HashMap, hash::{DefaultHasher, Hash, Hasher}, ops::Range, sync::{atomic::{AtomicBool, Ordering}, Arc}};
2
3use crate::{
4 math::{Color, Point2, RectF},
5 utils::ArcRef,
6};
7
8use super::{
9 utils::BindGroupType,
10 drawing::DrawingContext,
11 super::{
12 GPUInner,
13 texture::{
14 Texture,
15 BlendState,
16 TextureSampler,
17 TextureUsage,
18 TextureFormat,
19 SampleCount
20 },
21 buffer::{Buffer, BufferUsage},
22 pipeline::{
23 render::RenderPipeline,
24 manager::{VertexAttributeLayout, GraphicsPipelineDesc},
25 },
26 shader::{
27 graphics::{GraphicsShader, GraphicsShaderType},
28 bind_group_manager::BindGroupCreateInfo,
29 types::ShaderReflect,
30 BindGroupLayout,
31 ShaderTopology,
32 ShaderCullMode,
33 ShaderFrontFace,
34 ShaderPollygonMode,
35 IndexBufferSize,
36 ShaderBindingType,
37 },
38 command::{BindGroupAttachment, SurfaceTexture},
39 }
40};
41
42
/// Handle to a recording render pass.
///
/// Cheap to clone: both fields are shared references, so all clones alias the
/// same underlying pass state (targets, bindings, queued draw calls).
#[derive(Debug, Clone)]
pub struct RenderPass {
    // Shared GPU context used for buffer / bind-group / pipeline creation.
    pub(crate) graphics: ArcRef<GPUInner>,
    // Mutable per-pass state; draws are queued here and replayed at pass end.
    pub(crate) inner: ArcRef<RenderPassInner>,
}
81
82impl RenderPass {
83 pub(crate) fn new(
84 graphics: ArcRef<GPUInner>,
85 cmd: ArcRef<wgpu::CommandEncoder>,
86 atomic_pass: Arc<AtomicBool>,
87 ) -> Self {
88 let inner = RenderPassInner {
89 cmd,
90 atomic_pass,
91
92 render_targets: Vec::new(),
93 depth_target: None,
94 depth_target_format: None,
95 surface_size: Point2::new(0.0, 0.0),
96
97 multi_sample_count: None,
98 multi_sample_target: Vec::new(),
99
100 clear_color: None,
101 viewport: None,
102 scissor: None,
103
104 vertex: None,
105 index: None,
106
107 shader: None,
108 #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
109 shader_reflection: None,
110
111 attachments: Vec::new(),
112 push_constant: None,
113
114 queues: Vec::new(),
115 };
116
117 Self {
118 graphics,
119 inner: ArcRef::new(inner),
120 }
121 }
122
123 #[inline]
124 pub fn surface_size(&self) -> Point2 {
125 let inner = self.inner.borrow();
126
127 inner.surface_size
128 }
129
130 #[inline]
131 pub fn set_clear_color(&mut self, _color: Color) {
132 let mut inner = self.inner.borrow_mut();
133 inner.clear_color = Some(_color);
134 }
135
136 #[inline]
137 pub fn get_clear_color(&self) -> Option<Color> {
138 let inner = self.inner.borrow();
139 inner.clear_color.clone()
140 }
141
142 #[inline]
143 pub fn set_blend(&mut self, index: usize, blend: Option<&BlendState>) {
144 let mut inner = self.inner.borrow_mut();
145
146 match inner.render_targets.get_mut(index) {
147 Some(target) => {
148 if let Some(blend) = blend {
149 target.blend = Some(blend.create_wgpu_blend_state());
150 target.write_mask = Some(blend.create_wgpu_color_write_mask());
151 } else {
152 target.blend = None;
153 target.write_mask = Some(wgpu::ColorWrites::COLOR);
154 }
155 }
156 None => {
157 panic!("Render target at index {} does not exist", index);
158 }
159 }
160 }
161
162 #[inline]
163 pub fn get_blend(&self, index: usize) -> Option<BlendState> {
164 let inner = self.inner.borrow();
165
166 match inner.render_targets.get(index) {
167 Some(target) => {
168 let state = target.blend.clone();
169 let color_write_mask = target.write_mask.clone();
170
171 Some(BlendState::from_wgpu(state, color_write_mask))
172 }
173 None => None,
174 }
175 }
176
177 #[inline]
178 pub fn set_gpu_buffer(&mut self, vertex: Option<&Buffer>, index: Option<&Buffer>) {
179 self.set_gpu_buffer_wgpu(
180 vertex.map(|v| v.inner.borrow().buffer.clone()),
181 index.map(|i| i.inner.borrow().buffer.clone()),
182 );
183 }
184
185 #[inline]
186 pub fn set_gpu_buffer_raw<T, T2>(&mut self, vertex: Option<&[T]>, index: Option<&[T2]>)
187 where
188 T: bytemuck::Pod + bytemuck::Zeroable,
189 T2: bytemuck::Pod + bytemuck::Zeroable,
190 {
191 let (vertex_buffer, index_buffer) = {
192 let mut gpu_inner = self.graphics.borrow_mut();
193
194 let vertex_buffer = match vertex {
195 Some(data) => {
196 let buffer = gpu_inner.create_buffer_with(data, wgpu::BufferUsages::VERTEX);
197 Some(buffer)
198 }
199 None => None,
200 };
201
202 let index_buffer = match index {
203 Some(data) => {
204 let buffer = gpu_inner.create_buffer_with(data, wgpu::BufferUsages::INDEX);
205 Some(buffer)
206 }
207 None => None,
208 };
209
210 (vertex_buffer, index_buffer)
211 };
212
213 self.set_gpu_buffer_wgpu(vertex_buffer, index_buffer);
214 }
215
216 pub(crate) fn set_gpu_buffer_wgpu(
217 &mut self,
218 vertex: Option<wgpu::Buffer>,
219 index: Option<wgpu::Buffer>,
220 ) {
221 #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
222 {
223 let inner = self.inner.borrow();
224 if inner.shader.is_none() {
225 panic!("Shader is not set");
226 }
227
228 let shader = inner.shader.as_ref().unwrap();
229
230 let index_format = match shader {
231 RenderShaderBinding::Intermediate(IntermediateRenderPipeline {
232 index_format,
233 ..
234 }) => index_format,
235 RenderShaderBinding::Pipeline(RenderPipeline { index_format, .. }) => index_format,
236 };
237
238 if index_format.is_none() && index.is_some() {
239 panic!("Index buffer is set, but shader not configured to use index buffer");
240 }
241 }
242
243 let mut inner = self.inner.borrow_mut();
244
245 inner.vertex = vertex;
246 inner.index = index;
247 }
248
249 #[inline]
250 pub fn get_gpu_buffer(&self) -> (Option<wgpu::Buffer>, Option<wgpu::Buffer>) {
251 let inner = self.inner.borrow();
252 (inner.vertex.clone(), inner.index.clone())
253 }
254
    /// Binds `shader` with every pipeline option (topology, culling, winding,
    /// polygon mode, index format) taken from the shader's own attribute
    /// defaults. Pass `None` to unbind.
    #[inline]
    pub fn set_shader(&mut self, shader: Option<&GraphicsShader>) {
        self.set_shader_ex(shader, None, None, None, None, None);
    }
259
    /// Binds a graphics shader, optionally overriding per-pipeline options.
    ///
    /// Each `None` option falls back to the default stored in the shader's
    /// attribute descriptor (`shader.attrib`) — except `cull_mode`, see the
    /// review note below. `shader = None` unbinds the current shader (and its
    /// reflection data in validation builds).
    ///
    /// # Panics
    /// With validation enabled: if the shader reflection is missing a vertex
    /// or fragment entry point. Without validation the same condition hits
    /// the unconditional `unwrap`s below.
    #[inline]
    pub fn set_shader_ex(
        &mut self,
        shader: Option<&GraphicsShader>,
        topology: Option<ShaderTopology>,
        cull_mode: Option<ShaderCullMode>,
        front_face: Option<ShaderFrontFace>,
        polygon_mode: Option<ShaderPollygonMode>,
        index_format: Option<IndexBufferSize>,
    ) {
        let mut inner = self.inner.borrow_mut();

        match shader {
            Some(shader) => {
                let shader_inner = shader.inner.borrow();
                // Split shaders carry separate vertex/fragment modules; a
                // single-module shader reuses one module for both stages.
                let (vertex_shader, fragment_shader) = match &shader_inner.ty {
                    GraphicsShaderType::GraphicsSplit {
                        vertex_module,
                        fragment_module,
                    } => (vertex_module.clone(), fragment_module.clone()),
                    GraphicsShaderType::GraphicsSingle { module } => (module.clone(), module.clone()),
                };

                let layout = shader_inner.bind_group_layouts.clone();

                // A combined VertexFragment reflection entry satisfies both
                // per-stage lookups below.
                let vertex_reflect = shader_inner.reflection.iter().find(|r| {
                    matches!(r, ShaderReflect::Vertex { .. })
                        || matches!(r, ShaderReflect::VertexFragment { .. })
                });

                let fragment_reflect = shader_inner.reflection.iter().find(|r| {
                    matches!(r, ShaderReflect::Fragment { .. })
                        || matches!(r, ShaderReflect::VertexFragment { .. })
                });

                let vertex_entry_point = match vertex_reflect {
                    Some(ShaderReflect::Vertex { entry_point, .. }) => Some(entry_point),
                    Some(ShaderReflect::VertexFragment {
                        vertex_entry_point, ..
                    }) => Some(vertex_entry_point),
                    _ => None,
                };

                let fragment_entry_point = match fragment_reflect {
                    Some(ShaderReflect::Fragment { entry_point, .. }) => Some(entry_point),
                    Some(ShaderReflect::VertexFragment {
                        fragment_entry_point,
                        ..
                    }) => Some(fragment_entry_point),
                    _ => None,
                };

                // Validation builds produce a readable panic before the
                // unconditional unwraps below would fire.
                #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
                {
                    if vertex_entry_point.is_none() {
                        panic!("Vertex shader entry point is not found in shader reflection");
                    }

                    if fragment_entry_point.is_none() {
                        panic!("Fragment shader entry point is not found in shader reflection");
                    }
                }

                let vertex_entry_point = vertex_entry_point.unwrap();
                let fragment_entry_point = fragment_entry_point.unwrap();

                let attrib_inner = shader.attrib.borrow();
                let shader_binding = IntermediateRenderPipeline {
                    shader: (vertex_shader, fragment_shader),
                    vertex_attribute: (attrib_inner.stride, attrib_inner.attributes.clone()),
                    shader_entry: (vertex_entry_point.clone(), fragment_entry_point.clone()),
                    layout: layout,
                    topology: topology.unwrap_or(attrib_inner.topology),
                    // NOTE(review): unlike topology/front_face/polygon_mode,
                    // `cull_mode` does NOT fall back to an `attrib_inner`
                    // default — passing `None` always yields no culling.
                    // Confirm this asymmetry is intentional.
                    cull_mode: cull_mode.into(),
                    front_face: front_face.unwrap_or(attrib_inner.front_face),
                    polygon_mode: polygon_mode.unwrap_or(attrib_inner.polygon_mode),
                    index_format: index_format.or_else(|| attrib_inner.index.clone()),
                };

                inner.shader = Some(RenderShaderBinding::Intermediate(shader_binding));

                #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
                {
                    inner.shader_reflection = Some(shader_inner.reflection.clone());
                }
            }
            None => {
                inner.shader = None;

                #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
                {
                    inner.shader_reflection = None;
                }
            }
        }
    }
356
357 pub fn set_pipeline(&mut self, pipeline: Option<&RenderPipeline>) {
358 let mut inner = self.inner.borrow_mut();
359
360 match pipeline {
361 Some(pipeline) => {
362 inner.shader = Some(RenderShaderBinding::Pipeline(pipeline.clone()));
363 }
364 None => {
365 inner.shader = None;
366 }
367 }
368 }
369
370 #[inline]
371 pub(crate) fn remove_attachment(&mut self, group: u32, binding: u32) {
372 let mut inner = self.inner.borrow_mut();
373
374 #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
375 {
376 match &inner.shader {
377 Some(RenderShaderBinding::Pipeline(_)) => {
378 panic!("Cannot insert or replace attachment when using a pipeline shader");
379 }
380 _ => {}
381 }
382 }
383
384 inner
385 .attachments
386 .retain(|a| a.group != group || a.binding != binding);
387 }
388
    /// Upserts a bind-group attachment at (`group`, `binding`): replaces an
    /// existing entry at that slot, otherwise appends.
    ///
    /// # Panics
    /// With validation enabled: if no shader is bound, if a prebuilt pipeline
    /// is bound (its bind groups are immutable), if the shader reflection has
    /// no such (group, binding), or if the attachment kind does not match the
    /// reflected binding type.
    pub(crate) fn insert_or_replace_attachment(
        &mut self,
        group: u32,
        binding: u32,
        attachment: BindGroupAttachment,
    ) {
        let mut inner = self.inner.borrow_mut();

        #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
        {
            if inner.shader.is_none() {
                panic!("Shader is not set");
            }

            match &inner.shader {
                Some(RenderShaderBinding::Pipeline(_)) => {
                    panic!("Cannot insert or replace attachment when using a pipeline shader");
                }
                _ => {}
            }

            // Locate the reflected binding this attachment targets.
            let r#type = inner
                .shader_reflection
                .as_ref()
                .unwrap()
                .iter()
                .find_map(|b| {
                    let bindings = match b {
                        ShaderReflect::Vertex { bindings, .. }
                        | ShaderReflect::Fragment { bindings, .. }
                        | ShaderReflect::VertexFragment { bindings, .. } => bindings,
                        _ => return None,
                    };

                    bindings.iter().find_map(|shaderbinding| {
                        if shaderbinding.group == group && shaderbinding.binding == binding {
                            Some(shaderbinding)
                        } else {
                            None
                        }
                    })
                })
                .unwrap_or_else(|| {
                    panic!(
                        "Shader does not have binding group: {} binding: {}",
                        group, binding
                    );
                });

            // The supplied attachment kind must agree with the reflected
            // binding type. NOTE(review): PushConstant accepting a Uniform
            // attachment suggests push constants are emulated with a uniform
            // buffer somewhere — confirm against the backend.
            if !match r#type.ty {
                ShaderBindingType::UniformBuffer(_) => {
                    matches!(attachment.attachment, BindGroupType::Uniform(_))
                }
                ShaderBindingType::StorageBuffer(_, _) => {
                    matches!(attachment.attachment, BindGroupType::Storage(_))
                }
                ShaderBindingType::StorageTexture(_) => {
                    matches!(attachment.attachment, BindGroupType::TextureStorage(_))
                }
                ShaderBindingType::Sampler(_) => {
                    matches!(attachment.attachment, BindGroupType::Sampler(_))
                }
                ShaderBindingType::Texture(_) => {
                    matches!(attachment.attachment, BindGroupType::Texture(_))
                }
                ShaderBindingType::PushConstant(_) => {
                    matches!(attachment.attachment, BindGroupType::Uniform(_))
                }
            } {
                panic!(
                    "Attachment group: {} binding: {} type: {} not match with shader type: {}",
                    group, binding, attachment.attachment, r#type.ty
                );
            }
        }

        // Replace in place when the slot already exists, otherwise append.
        let index = inner
            .attachments
            .iter()
            .position(|a| a.group == group && a.binding == binding);

        if let Some(index) = index {
            inner.attachments[index] = attachment;
        } else {
            inner.attachments.push(attachment);
        }
    }
476
477 #[inline]
478 pub fn set_viewport(&mut self, _viewport: Option<RectF>, min_depth: f32, max_depth: f32) {
479 let mut inner = self.inner.borrow_mut();
480
481 match _viewport {
482 Some(viewport) => {
483 inner.viewport = Some((viewport, min_depth, max_depth));
484 }
485 None => {
486 inner.viewport = None;
487 }
488 }
489 }
490
491 #[inline]
492 pub fn get_viewport(&self) -> Option<(RectF, f32, f32)> {
493 let inner = self.inner.borrow();
494 inner.viewport.clone()
495 }
496
497 #[inline]
498 pub fn set_scissor(&mut self, _scissor: Option<RectF>) {
499 let mut inner = self.inner.borrow_mut();
500
501 match _scissor {
502 Some(scissor) => {
503 inner.scissor = Some(scissor);
504 }
505 None => {
506 inner.scissor = None;
507 }
508 }
509 }
510
511 #[inline]
512 pub fn get_scissor(&self) -> Option<RectF> {
513 let inner = self.inner.borrow();
514 inner.scissor.clone()
515 }
516
517 #[inline]
518 pub fn push_msaa_texture(&mut self, texture: &Texture) {
519 let mut inner = self.inner.borrow_mut();
520
521 if inner.multi_sample_count.is_none() {
522 inner.multi_sample_count = Some(texture.inner.borrow().sample_count.into());
523 }
524
525 #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
527 {
528 let msaa_count = texture.inner.borrow().sample_count.into();
529 if inner.multi_sample_count.unwrap() != msaa_count {
530 panic!("Multi sample texture count must match render target count");
531 }
532 }
533
534 inner
535 .multi_sample_target
536 .push(texture.inner.borrow().wgpu_view.clone());
537 }
538
    /// Binds (or clears, with `None`) the depth attachment for this pass.
    ///
    /// # Panics
    /// With validation enabled: if the texture lacks RenderAttachment usage,
    /// is not a recognized depth format, is multisampled, has a zero
    /// dimension, or does not exactly match the surface size.
    #[inline]
    pub fn set_depth_texture(&mut self, texture: Option<&Texture>) {
        let mut inner = self.inner.borrow_mut();

        match texture {
            Some(texture) => {
                let texture_inner = texture.inner.borrow();
                let format = texture_inner.format.into();

                #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
                {
                    if !texture_inner
                        .usages
                        .contains(TextureUsage::RenderAttachment)
                    {
                        panic!("Texture must have render attachment usage");
                    }

                    // The only depth formats this pass accepts.
                    let expected_depth_format = [
                        wgpu::TextureFormat::Depth32Float,
                        wgpu::TextureFormat::Depth24Plus,
                        wgpu::TextureFormat::Depth24PlusStencil8,
                    ];

                    if !expected_depth_format.contains(&format) {
                        panic!("Texture must have depth format");
                    }

                    if texture_inner.sample_count != SampleCount::SampleCount1 {
                        panic!("Depth texture must be single sampled");
                    }

                    let depth_size = texture_inner.size;
                    if depth_size.x == 0 || depth_size.y == 0 {
                        panic!("Depth texture size must be greater than 0");
                    }

                    // Depth must cover the color targets exactly.
                    if depth_size.x != inner.surface_size.x || depth_size.y != inner.surface_size.y
                    {
                        panic!("Depth texture size must match render target size");
                    }
                }

                inner.depth_target = Some(texture_inner.wgpu_view.clone());
                inner.depth_target_format = Some(format);
            }
            None => {
                inner.depth_target = None;
                inner.depth_target_format = None;
            }
        }
    }
591
592 #[inline]
593 pub fn set_push_constants(&mut self, _data: Option<&[u8]>) {
594 let mut inner = self.inner.borrow_mut();
595
596 match _data {
597 Some(data) => {
598 #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
599 {
600 if inner.shader.is_none() {
601 panic!("Shader is not set");
602 }
603
604 let size = inner
605 .shader_reflection
606 .as_ref()
607 .unwrap()
608 .iter()
609 .find_map(|b| {
610 let bindings = match b {
611 ShaderReflect::Vertex { bindings, .. }
612 | ShaderReflect::Fragment { bindings, .. }
613 | ShaderReflect::VertexFragment { bindings, .. } => bindings,
614 _ => return None,
615 };
616
617 bindings.iter().find_map(|binding| {
618 if let ShaderBindingType::PushConstant(size) = binding.ty {
619 Some(size)
620 } else {
621 None
622 }
623 })
624 })
625 .unwrap_or(0);
626
627 if data.len() > size as usize {
628 panic!("Data size must be less or equal to the push constant size");
629 }
630 }
631
632 let mut data = data.to_vec();
633 if data.len() % 4 != 0 {
634 let padding = 4 - (data.len() % 4);
635 data.extend(vec![0; padding]);
636 }
637
638 inner.push_constant = Some(data);
639 }
640 None => {
641 inner.push_constant = None;
642 return;
643 }
644 }
645 }
646
647 #[inline]
648 pub fn set_push_constants_raw<T: bytemuck::Pod + bytemuck::Zeroable>(
649 &mut self,
650 data: Option<&[T]>,
651 ) {
652 #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
653 {
654 let inner = self.inner.borrow();
655
656 if inner.shader.is_none() {
657 panic!("Shader is not set");
658 }
659 }
660
661 match data {
662 Some(data) => {
663 let mut bytemuck_data: Vec<u8> = bytemuck::cast_slice(data).to_vec();
664
665 if bytemuck_data.len() % 4 != 0 {
666 let padding = 4 - (bytemuck_data.len() % 4);
667 bytemuck_data.extend(vec![0; padding]);
668 }
669
670 self.set_push_constants(Some(&bytemuck_data));
671 }
672 None => {
673 self.set_push_constants(None);
674 }
675 }
676 }
677
678 #[inline]
679 pub fn set_push_constants_struct_raw<T: bytemuck::Pod + bytemuck::Zeroable>(
680 &mut self,
681 data: Option<&[T]>,
682 ) {
683 #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
684 {
685 let inner = self.inner.borrow();
686
687 if inner.shader.is_none() {
688 panic!("Shader is not set");
689 }
690 }
691
692 match data {
693 Some(data) => {
694 let mut bytemuck_data: Vec<u8> = bytemuck::cast_slice(data).to_vec();
695
696 if bytemuck_data.len() % 4 != 0 {
697 let padding = 4 - (bytemuck_data.len() % 4);
698 bytemuck_data.extend(vec![0; padding]);
699 }
700
701 self.set_push_constants(Some(&bytemuck_data));
702 }
703 None => {
704 self.set_push_constants(None);
705 }
706 }
707 }
708
709 #[inline]
710 pub fn set_attachment_sampler(
711 &mut self,
712 group: u32,
713 binding: u32,
714 sampler: Option<&TextureSampler>,
715 ) {
716 match sampler {
717 Some(sampler) => {
718 let inner = self.graphics.borrow();
719 let attachment = BindGroupAttachment {
720 group,
721 binding,
722 attachment: BindGroupType::Sampler(sampler.make_wgpu(inner.device())),
723 };
724
725 drop(inner);
726
727 self.insert_or_replace_attachment(group, binding, attachment);
728 }
729 None => {
730 self.remove_attachment(group, binding);
731 }
732 }
733 }
734
735 #[inline]
736 pub fn set_attachment_texture(&mut self, group: u32, binding: u32, texture: Option<&Texture>) {
737 match texture {
738 Some(texture) => {
739 let inner = texture.inner.borrow();
740 let attachment = BindGroupAttachment {
741 group,
742 binding,
743 attachment: BindGroupType::Texture(inner.wgpu_view.clone()),
744 };
745
746 drop(inner);
747
748 self.insert_or_replace_attachment(group, binding, attachment);
749 }
750 None => {
751 self.remove_attachment(group, binding);
752 }
753 }
754 }
755
756 #[inline]
757 pub fn set_attachment_texture_storage(
758 &mut self,
759 group: u32,
760 binding: u32,
761 texture: Option<&Texture>,
762 ) {
763 match texture {
764 Some(texture) => {
765 let inner = texture.inner.borrow();
766 let attachment = BindGroupAttachment {
767 group,
768 binding,
769 attachment: BindGroupType::TextureStorage(inner.wgpu_view.clone()),
770 };
771
772 self.insert_or_replace_attachment(group, binding, attachment);
773 }
774 None => {
775 self.remove_attachment(group, binding);
776 }
777 }
778 }
779
780 #[inline]
781 pub fn set_attachment_uniform(&mut self, group: u32, binding: u32, buffer: Option<&Buffer>) {
782 match buffer {
783 Some(buffer) => {
784 let inner = buffer.inner.borrow();
785 let attachment = BindGroupAttachment {
786 group,
787 binding,
788 attachment: BindGroupType::Uniform(inner.buffer.clone()),
789 };
790
791 self.insert_or_replace_attachment(group, binding, attachment);
792 }
793 None => {
794 self.remove_attachment(group, binding);
795 }
796 }
797 }
798
799 #[inline]
800 pub fn set_attachment_uniform_vec<T>(&mut self, group: u32, binding: u32, buffer: Option<Vec<T>>)
801 where
802 T: bytemuck::Pod + bytemuck::Zeroable,
803 {
804 match buffer {
805 Some(buffer) => {
806 let mut inner = self.graphics.borrow_mut();
807
808 let buffer = inner.create_buffer_with(&buffer, wgpu::BufferUsages::COPY_DST);
809 let attachment = BindGroupAttachment {
810 group,
811 binding,
812 attachment: BindGroupType::Uniform(buffer),
813 };
814
815 drop(inner);
816
817 self.insert_or_replace_attachment(group, binding, attachment);
818 }
819 None => {
820 self.remove_attachment(group, binding);
821 }
822 }
823 }
824
825 #[inline]
826 pub fn set_attachment_uniform_raw<T>(&mut self, group: u32, binding: u32, buffer: Option<&[T]>)
827 where
828 T: bytemuck::Pod + bytemuck::Zeroable,
829 {
830 match buffer {
831 Some(buffer) => {
832 let mut inner = self.graphics.borrow_mut();
833
834 let buffer = inner.create_buffer_with(&buffer, wgpu::BufferUsages::COPY_DST);
835 let attachment = BindGroupAttachment {
836 group,
837 binding,
838 attachment: BindGroupType::Uniform(buffer),
839 };
840
841 drop(inner);
842
843 self.insert_or_replace_attachment(group, binding, attachment);
844 }
845 None => {
846 self.remove_attachment(group, binding);
847 }
848 }
849 }
850
851 #[inline]
852 pub fn set_attachment_storage(&mut self, group: u32, binding: u32, buffer: Option<&Buffer>) {
853 match buffer {
854 Some(buffer) => {
855 let inner = buffer.inner.borrow();
856
857 let attachment = BindGroupAttachment {
858 group,
859 binding,
860 attachment: BindGroupType::Storage(inner.buffer.clone()),
861 };
862
863 self.insert_or_replace_attachment(group, binding, attachment);
864 }
865 None => {
866 self.remove_attachment(group, binding);
867 }
868 }
869 }
870
871 #[inline]
872 pub fn set_attachment_storage_raw<T>(&mut self, group: u32, binding: u32, buffer: Option<&[T]>)
873 where
874 T: bytemuck::Pod + bytemuck::Zeroable,
875 {
876 match buffer {
877 Some(buffer) => {
878 let mut inner = self.graphics.borrow_mut();
879
880 let buffer = inner.create_buffer_with(&buffer, wgpu::BufferUsages::COPY_DST);
881 let attachment = BindGroupAttachment {
882 group,
883 binding,
884 attachment: BindGroupType::Storage(buffer),
885 };
886
887 drop(inner);
888
889 self.insert_or_replace_attachment(group, binding, attachment);
890 }
891 None => {
892 self.remove_attachment(group, binding);
893 }
894 }
895 }
896
897 #[inline]
898 pub fn set_attachment_storage_vec<T>(&mut self, group: u32, binding: u32, buffer: Option<Vec<T>>)
899 where
900 T: bytemuck::Pod + bytemuck::Zeroable,
901 {
902 match buffer {
903 Some(buffer) => {
904 let mut inner = self.graphics.borrow_mut();
905
906 let buffer = inner.create_buffer_with(&buffer, wgpu::BufferUsages::COPY_DST);
907 let attachment = BindGroupAttachment {
908 group,
909 binding,
910 attachment: BindGroupType::Storage(buffer),
911 };
912
913 drop(inner);
914
915 self.insert_or_replace_attachment(group, binding, attachment);
916 }
917 None => {
918 self.remove_attachment(group, binding);
919 }
920 }
921 }
922
    /// Queues a non-indexed draw of `vertex_ranges` for `num_of_instances`
    /// instances. No-op if the viewport or scissor is degenerate.
    #[inline]
    pub fn draw(&mut self, vertex_ranges: Range<u32>, num_of_instances: u32) {
        self.prepare_draw(false, vertex_ranges, 0, num_of_instances);
    }
927
    /// Queues an indexed draw of `index_ranges` (with `vertex_offset` added
    /// to each index) for `num_of_instances` instances.
    #[inline]
    pub fn draw_indexed(
        &mut self,
        index_ranges: Range<u32>,
        vertex_offset: i32,
        num_of_instances: u32,
    ) {
        self.prepare_draw(true, index_ranges, vertex_offset, num_of_instances);
    }
937
    /// Shared tail of `draw`/`draw_indexed`: validates pass state, resolves
    /// the cached pipeline + bind groups, and appends a direct draw call to
    /// the queue (executed when the pass ends).
    ///
    /// # Panics
    /// With validation enabled: if the vertex buffer is unset, if an indexed
    /// draw has no index buffer, or if no index format is configured.
    #[inline]
    fn prepare_draw(
        &mut self,
        use_index_buffer: bool,
        ranges: Range<u32>,
        vertex_offset: i32,
        num_of_instances: u32,
    ) {
        {
            let inner = self.inner.borrow();

            // Degenerate viewport/scissor: silently drop the draw.
            if let Some((viewport, _, _)) = &inner.viewport {
                if viewport.w <= 0.0 || viewport.h <= 0.0 {
                    return;
                }
            }

            if let Some(scissor) = &inner.scissor {
                if scissor.w <= 0.0 || scissor.h <= 0.0 {
                    return;
                }
            }
        }

        #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
        {
            let inner = self.inner.borrow();
            if inner.vertex.is_none() {
                panic!("Vertex buffer is not set");
            }

            if use_index_buffer && inner.index.is_none() {
                panic!("Index buffer is not set");
            }
        }

        // Resolves (and caches) the wgpu pipeline and bind groups for the
        // current shader/target/attachment state.
        let (pipeline, bind_group, index_format) = self.prepare_pipeline();

        #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
        {
            if index_format.is_none() && use_index_buffer {
                panic!(
                    "Index format is not set, setup with shader.set_index_format() or render_pass.set_shader_ex()"
                );
            }
        }

        let mut inner = self.inner.borrow_mut();

        // Snapshot the current state into a queued draw call; actual encoding
        // happens later, so clones are required here.
        let queue = RenderPassQueue {
            pipeline,
            bind_group,
            vbo: inner.vertex.clone(),
            ibo: if use_index_buffer {
                inner.index.clone()
            } else {
                None
            },
            itype: if use_index_buffer {
                Some(index_format.unwrap().into())
            } else {
                None
            },
            viewport: inner.viewport.clone(),
            scissor: inner.scissor.clone(),
            ty: DrawCallType::Direct {
                ranges,
                vertex_offset,
                num_of_instances,
            },
            push_constant: inner.push_constant.clone(),
        };

        inner.queues.push(queue);
    }
1020
1021 #[inline]
1022 pub fn draw_indirect(&mut self, buffer: &Buffer, offset: u64) {
1023 #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
1024 if buffer.inner.borrow().usage.contains(BufferUsage::INDIRECT) {
1025 panic!("Buffer must have INDIRECT usage");
1026 }
1027
1028 self.prepare_draw_indirect(buffer, offset, false);
1029 }
1030
1031 #[inline]
1032 pub fn draw_indexed_indirect(&mut self, buffer: &Buffer, offset: u64) {
1033 #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
1034 if buffer.inner.borrow().usage.contains(BufferUsage::INDIRECT) {
1035 panic!("Buffer must have INDIRECT usage");
1036 }
1037
1038 self.prepare_draw_indirect(buffer, offset, true);
1039 }
1040
    /// Shared tail of the indirect draw entry points: validates state,
    /// resolves the cached pipeline + bind groups, and appends an indirect
    /// draw call (buffer + offset) to the queue.
    ///
    /// # Panics
    /// With validation enabled: if the vertex buffer is unset, if an indexed
    /// indirect draw has no index buffer, or if no index format is set.
    #[inline]
    fn prepare_draw_indirect(&mut self, buffer: &Buffer, offset: u64, use_index_buffer: bool) {
        #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
        {
            let inner = self.inner.borrow();
            if inner.vertex.is_none() {
                panic!("Vertex buffer is not set");
            }

            if use_index_buffer && inner.index.is_none() {
                panic!("Index buffer is not set");
            }
        }

        // Resolves (and caches) the wgpu pipeline and bind groups.
        let (pipeline, bind_group, index_format) = self.prepare_pipeline();

        #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
        {
            if index_format.is_none() && use_index_buffer {
                panic!(
                    "Index format is not set, setup with shader.set_index_format() or render_pass.set_shader_ex()"
                );
            }
        }

        let mut inner = self.inner.borrow_mut();
        // Snapshot current state; the draw arguments themselves live in the
        // GPU-side indirect buffer.
        let queue = RenderPassQueue {
            pipeline,
            bind_group,
            vbo: inner.vertex.clone(),
            ibo: if use_index_buffer {
                inner.index.clone()
            } else {
                None
            },
            itype: if use_index_buffer {
                Some(index_format.unwrap().into())
            } else {
                None
            },
            viewport: inner.viewport.clone(),
            scissor: inner.scissor.clone(),
            ty: DrawCallType::InDirect {
                buffer: buffer.inner.borrow().buffer.clone(),
                offset,
            },
            push_constant: inner.push_constant.clone(),
        };

        inner.queues.push(queue);
    }
1092
    /// Resolves the wgpu pipeline, the per-group bind groups, and the index
    /// format for the currently bound shader.
    ///
    /// Both the bind groups and the pipeline are looked up in `GPUInner`'s
    /// caches by a hash of the relevant state and created on a miss, so
    /// repeated draws with unchanged state reuse GPU objects.
    ///
    /// # Panics
    /// If no shader is bound; on an Intermediate shader, also if an
    /// attachment's group has no matching bind-group layout.
    fn prepare_pipeline(
        &self,
    ) -> (
        wgpu::RenderPipeline,
        Vec<(u32, wgpu::BindGroup)>,
        Option<IndexBufferSize>,
    ) {
        let inner = self.inner.borrow();

        match &inner.shader {
            Some(RenderShaderBinding::Intermediate(shader_binding)) => {
                // Cache key over all attachments (group, binding, resource).
                let bind_group_hash_key = {
                    let mut hasher = DefaultHasher::new();
                    // Leading constant acts as a namespace tag for this key
                    // scheme.
                    hasher.write_u64(0u64);
                    for attachment in &inner.attachments {
                        attachment.group.hash(&mut hasher);
                        attachment.binding.hash(&mut hasher);
                        match &attachment.attachment {
                            BindGroupType::Uniform(uniform) => {
                                uniform.hash(&mut hasher);
                            }
                            BindGroupType::Texture(texture) => {
                                texture.hash(&mut hasher);
                            }
                            BindGroupType::TextureStorage(texture) => texture.hash(&mut hasher),
                            BindGroupType::Sampler(sampler) => sampler.hash(&mut hasher),
                            BindGroupType::Storage(storage) => storage.hash(&mut hasher),
                        }
                    }

                    hasher.finish()
                };

                let bind_group_attachments = {
                    let mut gpu_inner = self.graphics.borrow_mut();

                    match gpu_inner.get_bind_group(bind_group_hash_key) {
                        Some(bind_group) => bind_group,
                        None => {
                            // Cache miss: group the attachments by bind-group
                            // index and build wgpu entries for each resource.
                            let mut bind_group_attachments: HashMap<
                                u32,
                                Vec<wgpu::BindGroupEntry>,
                            > = inner.attachments.iter().fold(HashMap::new(), |mut map, e| {
                                let (group, binding, attachment) =
                                    (e.group, e.binding, &e.attachment);

                                let entry = match attachment {
                                    BindGroupType::Uniform(buffer) => wgpu::BindGroupEntry {
                                        binding,
                                        resource: wgpu::BindingResource::Buffer(
                                            wgpu::BufferBinding {
                                                buffer,
                                                offset: 0,
                                                size: None,
                                            },
                                        ),
                                    },
                                    BindGroupType::Texture(texture) => wgpu::BindGroupEntry {
                                        binding,
                                        resource: wgpu::BindingResource::TextureView(texture),
                                    },
                                    BindGroupType::Sampler(sampler) => wgpu::BindGroupEntry {
                                        binding,
                                        resource: wgpu::BindingResource::Sampler(sampler),
                                    },
                                    BindGroupType::Storage(buffer) => wgpu::BindGroupEntry {
                                        binding,
                                        resource: wgpu::BindingResource::Buffer(
                                            wgpu::BufferBinding {
                                                buffer,
                                                offset: 0,
                                                size: None,
                                            },
                                        ),
                                    },
                                    BindGroupType::TextureStorage(texture) => {
                                        wgpu::BindGroupEntry {
                                            binding,
                                            resource: wgpu::BindingResource::TextureView(texture),
                                        }
                                    }
                                };

                                map.entry(group).or_insert_with(Vec::new).push(entry);
                                map
                            });

                            // Deterministic entry order within each group.
                            for entries in bind_group_attachments.values_mut() {
                                entries.sort_by_key(|e| e.binding);
                            }

                            // Pair each group's entries with the matching
                            // layout from the shader.
                            let bind_group = bind_group_attachments
                                .iter()
                                .map(|(group, entries)| {
                                    let layout = shader_binding
                                        .layout
                                        .iter()
                                        .find(|l| l.group == *group)
                                        .unwrap();

                                    (layout, entries.as_slice())
                                })
                                .collect::<Vec<_>>();

                            let create_info = BindGroupCreateInfo {
                                entries: bind_group,
                            };

                            gpu_inner.create_bind_group(bind_group_hash_key, create_info)
                        }
                    }
                };

                // Pipeline cache key: shader binding plus everything that
                // affects pipeline compatibility (targets, depth, MSAA).
                let pipeline_hash_key = {
                    let mut hasher = DefaultHasher::new();
                    shader_binding.hash(&mut hasher);

                    for target in &inner.render_targets {
                        target.format.hash(&mut hasher);
                        target.blend.hash(&mut hasher);
                        target.write_mask.hash(&mut hasher);
                    }

                    inner.depth_target_format.hash(&mut hasher);
                    inner.multi_sample_count.hash(&mut hasher);

                    hasher.finish()
                };

                let pipeline = {
                    let mut graphics_inner = self.graphics.borrow_mut();
                    match graphics_inner.get_graphics_pipeline(pipeline_hash_key) {
                        Some(pipeline) => pipeline,
                        None => {
                            // Cache miss: assemble a full pipeline descriptor
                            // from the intermediate shader binding.
                            let attribute = &shader_binding.vertex_attribute;
                            let vertex_desc = VertexAttributeLayout {
                                stride: attribute.0 as wgpu::BufferAddress,
                                step_mode: wgpu::VertexStepMode::Vertex,
                                attributes: attribute.1.clone(),
                            };

                            let primitive_state = wgpu::PrimitiveState {
                                topology: shader_binding.topology.into(),
                                strip_index_format: None,
                                front_face: shader_binding.front_face.into(),
                                cull_mode: shader_binding.cull_mode.map(|c| c.into()),
                                polygon_mode: shader_binding.polygon_mode.into(),
                                unclipped_depth: false,
                                conservative: false,
                            };

                            let layout = shader_binding
                                .layout
                                .iter()
                                .map(|l| l.layout.clone())
                                .collect::<Vec<_>>();

                            let mut pipeline_desc = GraphicsPipelineDesc {
                                shaders: shader_binding.shader.clone(),
                                entry_point: shader_binding.shader_entry.clone(),
                                render_target: Vec::with_capacity(inner.render_targets.len()),
                                depth_stencil: inner.depth_target_format,
                                vertex_desc,
                                primitive_state,
                                bind_group_layout: layout,
                                msaa_count: inner.multi_sample_count.unwrap_or(1),
                            };

                            for target in &inner.render_targets {
                                pipeline_desc.render_target.push((
                                    target.format,
                                    target.blend,
                                    target.write_mask,
                                ));
                            }

                            graphics_inner
                                .create_graphics_pipeline(pipeline_hash_key, pipeline_desc)
                        }
                    }
                };

                (
                    pipeline,
                    bind_group_attachments,
                    shader_binding.index_format,
                )
            }
            Some(RenderShaderBinding::Pipeline(pipeline)) => {
                // Prebuilt pipeline: specialize its descriptor to the current
                // targets/depth/MSAA, then cache-or-create the wgpu pipeline.
                let mut pipeline_desc = pipeline.pipeline_desc.clone();

                // NOTE(review): targets are appended to the cloned
                // descriptor's existing `render_target` list — if the
                // prebuilt descriptor already carries targets this duplicates
                // them; confirm the descriptor is always built target-less.
                for target in &inner.render_targets {
                    pipeline_desc.render_target.push((
                        target.format,
                        target.blend,
                        target.write_mask,
                    ));
                }

                pipeline_desc.depth_stencil = inner.depth_target_format;
                pipeline_desc.msaa_count = inner.multi_sample_count.unwrap_or(1);

                let pipeline_hash_key = {
                    let mut hasher = DefaultHasher::new();
                    pipeline_desc.hash(&mut hasher);

                    for target in &inner.render_targets {
                        target.format.hash(&mut hasher);
                        target.blend.hash(&mut hasher);
                        target.write_mask.hash(&mut hasher);
                    }

                    inner.depth_target_format.hash(&mut hasher);
                    inner.multi_sample_count.hash(&mut hasher);

                    hasher.finish()
                };

                let wgpu_pipeline = {
                    let mut graphics_inner = self.graphics.borrow_mut();
                    match graphics_inner.get_graphics_pipeline(pipeline_hash_key) {
                        Some(pipeline) => pipeline,
                        None => graphics_inner
                            .create_graphics_pipeline(pipeline_hash_key, pipeline_desc),
                    }
                };

                // The prebuilt pipeline carries its own bind groups and
                // index format.
                let bind_group_attachments = pipeline.bind_group.clone();
                let index_format = pipeline.index_format.clone();

                (wgpu_pipeline, bind_group_attachments, index_format)
            }
            None => {
                panic!("Shader is not set");
            }
        }
    }
1334
1335 #[inline]
1336 pub fn begin_drawing(&mut self) -> Option<DrawingContext> {
1337 DrawingContext::new(self.clone())
1338 }
1339
    /// Flushes this pass: records one `wgpu` render pass that clears/loads the
    /// attachments and replays every queued draw call, then clears the
    /// `atomic_pass` flag so a new pass may begin.
    ///
    /// Called from the [`Drop`] impl. In debug / validation builds, panics if
    /// MSAA is enabled but the MSAA target count does not match the color
    /// target count.
    pub(crate) fn end(&mut self) {
        let inner = self.inner.borrow_mut();
        let mut cmd = inner.cmd.borrow_mut();

        let clear_color = inner.clear_color.unwrap_or(Color::BLACK);

        // A fully transparent clear color (alpha <= 0) means "keep existing
        // contents"; anything else clears every color target to that color.
        let load_op = if clear_color.a <= 0.0 {
            wgpu::LoadOp::Load
        } else {
            wgpu::LoadOp::Clear(wgpu::Color {
                r: clear_color.r as f64,
                g: clear_color.g as f64,
                b: clear_color.b as f64,
                a: clear_color.a as f64,
            })
        };

        #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
        {
            if inner.multi_sample_count.is_some()
                && inner.multi_sample_target.len() != inner.render_targets.len()
            {
                panic!("Multi sample target must match the number of render targets");
            }
        }

        let mut color_attachments = Vec::with_capacity(inner.render_targets.len());
        let has_msaa = inner.multi_sample_count.is_some();

        for i in 0..inner.render_targets.len() {
            // With MSAA, rendering targets the multi-sampled view and is
            // resolved into the single-sampled render target view.
            let target_view = if has_msaa {
                &inner.multi_sample_target[i]
            } else {
                &inner.render_targets[i].view
            };

            color_attachments.push(Some(wgpu::RenderPassColorAttachment {
                view: target_view,
                resolve_target: if has_msaa {
                    Some(&inner.render_targets[i].view)
                } else {
                    None
                },
                ops: wgpu::Operations {
                    load: load_op,
                    store: wgpu::StoreOp::Store,
                },
            }));
        }

        // Depth is always cleared to the far plane (1.0); stencil is untouched.
        let mut depth_stencil_attachment = None;
        if let Some(depth_target) = inner.depth_target.as_ref() {
            depth_stencil_attachment = Some(wgpu::RenderPassDepthStencilAttachment {
                view: depth_target,
                depth_ops: Some(wgpu::Operations {
                    load: wgpu::LoadOp::Clear(1.0),
                    store: wgpu::StoreOp::Store,
                }),
                stencil_ops: None,
            });
        }

        let mut render_pass = cmd.begin_render_pass(&wgpu::RenderPassDescriptor {
            label: Some("Render Pass"),
            color_attachments: color_attachments.as_slice(),
            depth_stencil_attachment,
            ..Default::default()
        });

        // Replay every queued draw call in submission order.
        for queue in &inner.queues {
            render_pass.set_pipeline(&queue.pipeline);

            for (group, bind) in &queue.bind_group {
                render_pass.set_bind_group(*group, bind, &[]);
            }

            if let Some(vbo) = &queue.vbo {
                render_pass.set_vertex_buffer(0, vbo.slice(..));
            }

            // Push constants are not available on the wasm/WebGPU target.
            #[cfg(not(target_arch = "wasm32"))]
            if let Some(pc) = &queue.push_constant {
                use wgpu::ShaderStages;

                render_pass.set_push_constants(ShaderStages::all(), 0, pc);
            }

            // A degenerate scissor rect skips this draw call entirely.
            if let Some(scissor) = queue.scissor.as_ref() {
                if scissor.w <= 0.0 || scissor.h <= 0.0 {
                    continue;
                }

                render_pass.set_scissor_rect(
                    scissor.x as u32,
                    scissor.y as u32,
                    scissor.w as u32,
                    scissor.h as u32,
                );
            }

            // Viewport is (rect, min_depth, max_depth); a degenerate rect
            // likewise skips the draw.
            if let Some(viewport) = queue.viewport.as_ref() {
                let size = viewport.0;
                let min_depth = viewport.1;
                let max_depth = viewport.2;

                if size.w <= 0.0 || size.h <= 0.0 {
                    continue;
                }

                render_pass.set_viewport(size.x, size.y, size.w, size.h, min_depth, max_depth);
            }

            match &queue.ty {
                DrawCallType::Direct {
                    ranges,
                    vertex_offset,
                    num_of_instances,
                } => {
                    if let Some(ibo) = &queue.ibo {
                        // Indexed draw: `itype` is recorded alongside `ibo`,
                        // so the unwrap is an internal invariant.
                        render_pass.set_index_buffer(ibo.slice(..), queue.itype.unwrap());
                        render_pass.draw_indexed(
                            ranges.clone(),
                            *vertex_offset,
                            0..*num_of_instances,
                        );
                    } else {
                        render_pass.draw(ranges.clone(), 0..*num_of_instances);
                    }
                }
                DrawCallType::InDirect { buffer, offset } => {
                    if let Some(ibo) = &queue.ibo {
                        render_pass.set_index_buffer(ibo.slice(..), queue.itype.unwrap());
                        render_pass.draw_indexed_indirect(buffer, *offset);
                    } else {
                        render_pass.draw_indirect(buffer, *offset);
                    }
                }
            }
        }

        // Pass is finished; allow a new render pass to be started.
        inner.atomic_pass.store(false, Ordering::Relaxed);
    }
1482}
1483
1484impl Drop for RenderPass {
1485 fn drop(&mut self) {
1486 if std::thread::panicking() {
1487 return;
1488 }
1489
1490 self.end();
1491 }
1492}
1493
/// One resolved color attachment of a render pass: the view rendered into plus
/// the per-target color state that is hashed into pipeline cache keys.
#[derive(Debug, Clone)]
pub(crate) struct RenderpassRenderTarget {
    /// View receiving the output (used as the resolve target when MSAA is on).
    pub view: wgpu::TextureView,
    /// Texture format of `view`; part of the pipeline hash.
    pub format: wgpu::TextureFormat,
    /// Optional blend state for this target; `None` leaves blending unset.
    pub blend: Option<wgpu::BlendState>,
    /// Optional color write mask paired with `blend`.
    pub write_mask: Option<wgpu::ColorWrites>,
}
1501
/// Shared mutable state behind [`RenderPass`]: validated attachments, staged
/// render state, and the recorded draw queue replayed by `end()`.
#[derive(Debug, Clone)]
pub(crate) struct RenderPassInner {
    /// Command encoder the finished pass is recorded into.
    pub cmd: ArcRef<wgpu::CommandEncoder>,
    /// "Pass in flight" flag; cleared with `Ordering::Relaxed` in `end()`.
    pub atomic_pass: Arc<AtomicBool>,

    /// Color targets (plus blend/write-mask state) validated by the builder.
    pub render_targets: Vec<RenderpassRenderTarget>,
    /// Depth attachment view, when one was set.
    pub depth_target: Option<wgpu::TextureView>,
    /// Format of `depth_target`; hashed into pipeline cache keys.
    pub depth_target_format: Option<wgpu::TextureFormat>,

    /// Common size shared by all attachments.
    pub surface_size: Point2,

    /// Multi-sampled views rendered into when MSAA is enabled (each resolved
    /// into the matching `render_targets` entry); must match it in length.
    pub multi_sample_target: Vec<wgpu::TextureView>,
    /// Shared MSAA sample count; `None` disables multi-sampling.
    pub multi_sample_count: Option<u32>,

    /// Clear color for color targets; `None` falls back to `Color::BLACK`,
    /// and a fully transparent color loads existing contents instead.
    pub clear_color: Option<Color>,
    /// Current viewport as (rect, min_depth, max_depth).
    pub viewport: Option<(RectF, f32, f32)>,
    /// Current scissor rectangle.
    pub scissor: Option<RectF>,

    /// Staged vertex buffer — presumably consumed when a draw is queued;
    /// TODO(review): confirm against DrawingContext.
    pub vertex: Option<wgpu::Buffer>,
    /// Staged index buffer — see note on `vertex`.
    pub index: Option<wgpu::Buffer>,

    /// Shader/pipeline binding used to resolve the concrete pipeline for draws.
    pub shader: Option<RenderShaderBinding>,
    /// Shader reflection data, kept only in debug / validation builds.
    #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
    pub shader_reflection: Option<Vec<ShaderReflect>>,

    /// Staged bind-group attachments for upcoming draws.
    pub attachments: Vec<BindGroupAttachment>,
    /// Staged push-constant bytes for upcoming draws.
    pub push_constant: Option<Vec<u8>>,

    /// Draw calls recorded so far; replayed in order by `end()`.
    pub queues: Vec<RenderPassQueue>,
}
1532
/// Borrowed source of a color attachment, held until validation in
/// [`RenderpassBuilder::build`].
#[derive(Clone, Debug)]
pub(crate) enum RenderpassAttachment<'a> {
    /// The swapchain's surface texture.
    SurfaceTexture(&'a SurfaceTexture),
    /// A user-created texture (must have `RenderAttachment` usage).
    Texture(&'a Texture),
}
1538
/// Borrow-based builder that collects color / MSAA / depth attachments and
/// validates them into a [`RenderPass`] via `build()`.
#[derive(Clone, Debug)]
pub struct RenderpassBuilder<'a> {
    /// GPU state handed to the resulting pass.
    gpu: ArcRef<GPUInner>,
    /// Command encoder the pass will record into.
    cmd: ArcRef<wgpu::CommandEncoder>,
    /// "Pass in flight" flag forwarded to the [`RenderPass`].
    atomic_pass: Arc<AtomicBool>,

    /// Color attachments with their optional blend state, in bind order.
    color_attachments: Vec<(RenderpassAttachment<'a>, Option<BlendState>)>,
    /// Multi-sampled sources, expected to pair 1:1 with color attachments.
    msaa_attachments: Vec<&'a Texture>,
    /// Optional depth attachment.
    depth_attachment: Option<&'a Texture>,
}
1549
1550impl<'a> RenderpassBuilder<'a> {
1551 pub(crate) fn new(
1552 gpu: ArcRef<GPUInner>,
1553 cmd: ArcRef<wgpu::CommandEncoder>,
1554 atomic_pass: Arc<AtomicBool>,
1555 ) -> Self {
1556 Self {
1557 gpu,
1558 cmd,
1559 atomic_pass,
1560
1561 color_attachments: Vec::new(),
1562 msaa_attachments: Vec::new(),
1563 depth_attachment: None,
1564 }
1565 }
1566
1567 pub fn add_surface_color_attachment(
1569 mut self,
1570 surface: &'a SurfaceTexture,
1571 blend: Option<&BlendState>,
1572 ) -> Self {
1573 self.color_attachments.push((
1574 RenderpassAttachment::SurfaceTexture(surface),
1575 blend.cloned(),
1576 ));
1577
1578 self
1579 }
1580
1581 pub fn add_color_attachment(
1582 mut self,
1583 texture: &'a Texture,
1584 blend: Option<&BlendState>,
1585 ) -> Self {
1586 self.color_attachments
1587 .push((RenderpassAttachment::Texture(texture), blend.cloned()));
1588
1589 self
1590 }
1591
1592 pub fn add_msaa_attachment(mut self, texture: &'a Texture) -> Self {
1593 self.msaa_attachments.push(texture);
1594
1595 self
1596 }
1597
1598 pub fn set_depth_attachment(mut self, texture: &'a Texture) -> Self {
1599 self.depth_attachment = Some(texture);
1600
1601 self
1602 }
1603
1604 pub fn build(self) -> Result<RenderPass, RenderPassBuildError> {
1605 let mut surface_size = None;
1606
1607 let mut color_attachments = Vec::with_capacity(self.color_attachments.len());
1608 for (attachment, blend) in self.color_attachments {
1609 let (view, format, size) = match attachment {
1610 RenderpassAttachment::SurfaceTexture(surface_texture) => {
1611 let view = surface_texture.get_view();
1612 let format = surface_texture.get_format();
1613 let size = surface_texture.get_size();
1614
1615 (view, format, Point2::new(size.width, size.height))
1616 }
1617 RenderpassAttachment::Texture(texture) => {
1618 let texture_inner = texture.inner.borrow();
1619
1620 if !texture_inner
1621 .usages
1622 .contains(TextureUsage::RenderAttachment)
1623 {
1624 return Err(RenderPassBuildError::ColorAttachmentNotRenderTarget);
1625 }
1626
1627 if texture_inner.size.x == 0 || texture_inner.size.y == 0 {
1628 return Err(RenderPassBuildError::MismatchedAttachmentSize(
1629 Point2::new(0.0, 0.0),
1630 texture_inner.size,
1631 ));
1632 }
1633
1634 if texture_inner.sample_count != SampleCount::SampleCount1 {
1635 return Err(RenderPassBuildError::ColorAttachmentMultiSampled);
1636 }
1637
1638 (
1639 texture_inner.wgpu_view.clone(),
1640 texture_inner.format.into(),
1641 texture_inner.size,
1642 )
1643 }
1644 };
1645
1646 if surface_size.is_some() {
1647 let surface_size = surface_size.unwrap();
1648 if surface_size != size {
1649 return Err(RenderPassBuildError::MismatchedAttachmentSize(
1650 surface_size,
1651 size,
1652 ));
1653 }
1654 }
1655
1656 if surface_size.is_none() {
1657 surface_size = Some(size);
1658 }
1659
1660 color_attachments.push(RenderpassRenderTarget {
1661 view,
1662 format,
1663 blend: blend.map(|b| b.create_wgpu_blend_state()),
1664 write_mask: blend.map(|b| b.create_wgpu_color_write_mask()),
1665 });
1666 }
1667
1668 let mut multi_sample_target = Vec::with_capacity(self.msaa_attachments.len());
1669 let mut multi_sample_count = None;
1670
1671 for msaa_texture in self.msaa_attachments {
1672 let texture_inner = msaa_texture.inner.borrow();
1673
1674 if !texture_inner
1675 .usages
1676 .contains(TextureUsage::RenderAttachment)
1677 {
1678 return Err(RenderPassBuildError::MsaaTextureNotRenderAttachment);
1679 }
1680
1681 if texture_inner.sample_count == SampleCount::SampleCount1 {
1682 return Err(RenderPassBuildError::MsaaTextureNotMultiSampled);
1683 }
1684
1685 if texture_inner.size.x == 0 || texture_inner.size.y == 0 {
1686 return Err(RenderPassBuildError::MsaaTextureInvalidSize(Point2::new(
1687 0.0, 0.0,
1688 )));
1689 }
1690
1691 if surface_size.is_some() {
1692 let surface_size = surface_size.unwrap();
1693 if surface_size != texture_inner.size {
1694 return Err(RenderPassBuildError::MismatchedAttachmentSize(
1695 surface_size,
1696 texture_inner.size,
1697 ));
1698 }
1699 }
1700
1701 let sample_count: u32 = texture_inner.sample_count.into();
1702
1703 if multi_sample_count.is_some() && multi_sample_count.unwrap() != sample_count {
1704 return Err(RenderPassBuildError::MismatchedAttachmentSampleCount(
1705 multi_sample_count.unwrap(),
1706 sample_count,
1707 ));
1708 }
1709
1710 if multi_sample_count.is_none() {
1711 multi_sample_count = Some(sample_count);
1712 }
1713
1714 multi_sample_target.push(texture_inner.wgpu_view.clone());
1715 }
1716
1717 let mut depth_view = None;
1718 let mut depth_format = None;
1719
1720 if let Some(depth_texture) = self.depth_attachment {
1721 let texture_inner = depth_texture.inner.borrow();
1722
1723 if !texture_inner
1724 .usages
1725 .contains(TextureUsage::RenderAttachment)
1726 {
1727 return Err(RenderPassBuildError::DepthTextureNotRenderAttachment);
1728 }
1729
1730 if texture_inner.size.x == 0 || texture_inner.size.y == 0 {
1731 return Err(RenderPassBuildError::DepthTextureInvalidSize(Point2::new(
1732 0.0, 0.0,
1733 )));
1734 }
1735
1736 if texture_inner.format != TextureFormat::Depth32Float
1737 && texture_inner.format != TextureFormat::Depth24PlusStencil8
1738 {
1739 return Err(RenderPassBuildError::DepthTextureFormatNotSupported(
1740 texture_inner.format,
1741 ));
1742 }
1743
1744 if surface_size.is_some() {
1745 let surface_size = surface_size.unwrap();
1746 if surface_size != texture_inner.size {
1747 return Err(RenderPassBuildError::MismatchedAttachmentSize(
1748 surface_size,
1749 texture_inner.size,
1750 ));
1751 }
1752 }
1753
1754 if surface_size.is_none() {
1755 surface_size = Some(texture_inner.size);
1756 }
1757
1758 depth_view = Some(texture_inner.wgpu_view.clone());
1759 depth_format = Some(texture_inner.format.into());
1760 }
1761
1762 if surface_size.is_none() {
1763 return Err(RenderPassBuildError::NoColorOrDepthAttachment);
1764 }
1765
1766 let renderpass = RenderPass::new(self.gpu, self.cmd, self.atomic_pass);
1767 {
1768 let mut inner = renderpass.inner.borrow_mut();
1769
1770 inner.render_targets = color_attachments;
1771 inner.multi_sample_target = multi_sample_target;
1772 inner.multi_sample_count = multi_sample_count;
1773 inner.depth_target = depth_view;
1774 inner.depth_target_format = depth_format;
1775 inner.surface_size = surface_size.unwrap();
1776 }
1777
1778 Ok(renderpass)
1779 }
1780}
1781
1782pub enum RenderPassBuildError {
1783 NoColorOrDepthAttachment,
1784 ColorAttachmentNotRenderTarget,
1785 ColorAttachmentMultiSampled,
1786 MismatchedAttachmentCount(usize, usize),
1787 MismatchedAttachmentSize(Point2, Point2),
1788 MismatchedAttachmentSampleCount(u32, u32),
1789 MismatchedAttachmentFormat(TextureFormat, TextureFormat),
1790 MsaaTextureNotMultiSampled,
1791 MsaaTextureNotRenderAttachment,
1792 MsaaTextureInvalidSize(Point2),
1793 DepthTextureNotRenderAttachment,
1794 DepthTextureInvalidSize(Point2),
1795 DepthTextureFormatNotSupported(TextureFormat),
1796 SwapchainError(String),
1797}
1798
1799impl std::fmt::Display for RenderPassBuildError {
1800 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1801 match self {
1802 RenderPassBuildError::NoColorOrDepthAttachment => write!(f, "No color attachment provided"),
1803 RenderPassBuildError::ColorAttachmentNotRenderTarget => {
1804 write!(f, "Color attachment is not a render target")
1805 }
1806 RenderPassBuildError::ColorAttachmentMultiSampled => {
1807 write!(f, "Color attachment is multi-sampled")
1808 }
1809 RenderPassBuildError::MismatchedAttachmentCount(expected, actual) => {
1810 write!(f, "Expected {} attachments, but got {}", expected, actual)
1811 }
1812 RenderPassBuildError::MismatchedAttachmentSize(expected, actual) => write!(
1813 f,
1814 "Expected attachment size {:?}, but got {:?}",
1815 expected, actual
1816 ),
1817 RenderPassBuildError::MismatchedAttachmentSampleCount(expected, actual) => {
1818 write!(f, "Expected sample count {}, but got {}", expected, actual)
1819 }
1820 RenderPassBuildError::MismatchedAttachmentFormat(expected, actual) => {
1821 write!(f, "Expected format {:?}, but got {:?}", expected, actual)
1822 }
1823 RenderPassBuildError::MsaaTextureNotMultiSampled => {
1824 write!(f, "MSAA texture is not multi-sampled")
1825 }
1826 RenderPassBuildError::MsaaTextureNotRenderAttachment => {
1827 write!(f, "MSAA texture is not a render attachment")
1828 }
1829 RenderPassBuildError::MsaaTextureInvalidSize(size) => {
1830 write!(f, "MSAA texture has invalid size {:?}", size)
1831 }
1832 RenderPassBuildError::DepthTextureNotRenderAttachment => {
1833 write!(f, "Depth texture is not a render attachment")
1834 }
1835 RenderPassBuildError::DepthTextureInvalidSize(size) => {
1836 write!(f, "Depth texture has invalid size {:?}", size)
1837 }
1838 RenderPassBuildError::DepthTextureFormatNotSupported(format) => {
1839 write!(f, "Depth texture format {:?} is not supported", format)
1840 }
1841 RenderPassBuildError::SwapchainError(err) => write!(f, "Swapchain error: {}", err),
1842 }
1843 }
1844}
1845
/// Pipeline description captured from a shader before the pass's render
/// targets are known; combined with the target formats / MSAA state at draw
/// time to build (and cache) a concrete `wgpu` pipeline.
#[derive(Clone, Debug, Hash)]
pub(crate) struct IntermediateRenderPipeline {
    /// Shader module pair — presumably (vertex, fragment); TODO confirm order.
    pub shader: (wgpu::ShaderModule, wgpu::ShaderModule),
    /// Vertex layout as (stride in bytes, attribute list).
    pub vertex_attribute: (u64, Vec<wgpu::VertexAttribute>),
    /// Entry-point names paired with `shader`.
    pub shader_entry: (String, String),
    /// Bind-group layouts, in set order.
    pub layout: Vec<BindGroupLayout>,
    /// Primitive topology.
    pub topology: ShaderTopology,
    /// Face culling mode; `None` disables culling.
    pub cull_mode: Option<ShaderCullMode>,
    /// Winding order considered front-facing.
    pub front_face: ShaderFrontFace,
    /// Polygon rasterization mode.
    pub polygon_mode: ShaderPollygonMode,
    /// Index element size, when an index buffer is used.
    pub index_format: Option<IndexBufferSize>,
}
1858
/// One fully-resolved draw call, recorded during drawing and replayed by
/// `RenderPass::end()` inside the actual `wgpu` render pass.
#[derive(Debug, Clone)]
pub(crate) struct RenderPassQueue {
    /// Concrete pipeline bound for this draw.
    pub pipeline: wgpu::RenderPipeline,
    /// Bind groups as (group index, bind group).
    pub bind_group: Vec<(u32, wgpu::BindGroup)>,

    /// Vertex buffer bound to slot 0, when present.
    pub vbo: Option<wgpu::Buffer>,
    /// Index buffer; when set, `itype` must also be set (`end()` unwraps it).
    pub ibo: Option<wgpu::Buffer>,
    /// Index element format paired with `ibo`.
    pub itype: Option<wgpu::IndexFormat>,

    /// Per-draw viewport as (rect, min_depth, max_depth); a degenerate rect
    /// causes the draw to be skipped.
    pub viewport: Option<(RectF, f32, f32)>,
    /// Per-draw scissor rect; a degenerate rect causes the draw to be skipped.
    pub scissor: Option<RectF>,

    /// Direct or indirect draw parameters.
    pub ty: DrawCallType,
    /// Push-constant bytes, applied to all shader stages (non-wasm only).
    pub push_constant: Option<Vec<u8>>,
}
1874
/// Shader binding attached to a pass: either raw shader pieces that still need
/// a pipeline built for the current render targets, or an existing pipeline
/// whose descriptor gets re-specialized for them.
#[derive(Clone, Debug)]
pub(crate) enum RenderShaderBinding {
    /// Shader modules plus fixed-function state; the concrete pipeline is
    /// built (and cached by hash) at draw time.
    Intermediate(IntermediateRenderPipeline),
    /// A pre-built pipeline; its descriptor is extended with this pass's
    /// targets and looked up in the same cache.
    Pipeline(RenderPipeline),
}
1880
/// How a queued draw call is issued in `RenderPass::end()`.
#[derive(Debug, Clone)]
pub enum DrawCallType {
    /// Draw with CPU-supplied ranges.
    Direct {
        /// Index range for indexed draws, vertex range otherwise.
        ranges: Range<u32>,
        /// Signed base-vertex offset for indexed draws (unused otherwise).
        vertex_offset: i32,
        /// Instances are drawn as `0..num_of_instances`.
        num_of_instances: u32,
    },

    /// Draw with parameters read from a GPU buffer.
    InDirect {
        /// Buffer holding the indirect draw arguments.
        buffer: wgpu::Buffer,
        /// Byte offset of the arguments within `buffer`.
        offset: u64,
    },
}