1use std::{collections::HashMap, hash::{DefaultHasher, Hash, Hasher}, ops::Range, sync::{atomic::{AtomicBool, Ordering}, Arc}};
2
3use crate::{
4 math::{Color, Point2, RectF},
5 utils::ArcRef,
6};
7
8use super::{
9 utils::BindGroupType,
10 drawing::DrawingContext,
11 super::{
12 GPUInner,
13 texture::{
14 Texture,
15 BlendState,
16 TextureSampler,
17 TextureUsage,
18 TextureFormat,
19 SampleCount
20 },
21 buffer::{Buffer, BufferUsage},
22 pipeline::{
23 render::RenderPipeline,
24 manager::{VertexAttributeLayout, GraphicsPipelineDesc},
25 },
26 shader::{
27 graphics::{GraphicsShader, GraphicsShaderType},
28 bind_group_manager::BindGroupCreateInfo,
29 types::ShaderReflect,
30 BindGroupLayout,
31 ShaderTopology,
32 ShaderCullMode,
33 ShaderFrontFace,
34 ShaderPollygonMode,
35 IndexBufferSize,
36 ShaderBindingType,
37 },
38 command::{BindGroupAttachment, SurfaceTexture},
39 }
40};
41
42
/// Handle to a recording render pass.
///
/// `Clone` is shallow: all clones share the same pass state through
/// `ArcRef`, so mutations made through one handle are visible to all
/// other handles.
#[derive(Debug, Clone)]
pub struct RenderPass {
    // Shared GPU context used to create buffers, pipelines and bind groups.
    pub(crate) graphics: ArcRef<GPUInner>,
    // Shared, mutable pass state (targets, shader, queued draw calls, ...).
    pub(crate) inner: ArcRef<RenderPassInner>,
}
81
82impl RenderPass {
    /// Creates a fresh render pass that records into `cmd`.
    ///
    /// `atomic_pass` is shared with the owning command encoder — presumably
    /// so it can tell whether a pass is currently in flight (TODO confirm
    /// against the encoder side).
    pub(crate) fn new(
        graphics: ArcRef<GPUInner>,
        cmd: ArcRef<wgpu::CommandEncoder>,
        atomic_pass: Arc<AtomicBool>,
    ) -> Self {
        // All pass state starts empty; targets, shader and draw queues are
        // filled in through the setter methods before the pass is ended.
        let inner = RenderPassInner {
            cmd,
            atomic_pass,

            render_targets: Vec::new(),
            depth_target: None,
            depth_target_format: None,
            surface_size: Point2::new(0.0, 0.0),

            multi_sample_count: None,
            multi_sample_target: Vec::new(),

            clear_color: None,
            viewport: None,
            scissor: None,

            vertex: None,
            index: None,

            shader: None,
            // Reflection data is only kept around for validation builds.
            #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
            shader_reflection: None,

            attachments: Vec::new(),
            push_constant: None,

            queues: Vec::new(),
        };

        Self {
            graphics,
            inner: ArcRef::new(inner),
        }
    }
122
123 #[inline]
124 pub fn set_clear_color(&mut self, _color: Color) {
125 let mut inner = self.inner.borrow_mut();
126 inner.clear_color = Some(_color);
127 }
128
129 #[inline]
130 pub fn get_clear_color(&self) -> Option<Color> {
131 let inner = self.inner.borrow();
132 inner.clear_color.clone()
133 }
134
135 #[inline]
136 pub fn set_blend(&mut self, index: usize, blend: Option<&BlendState>) {
137 let mut inner = self.inner.borrow_mut();
138
139 match inner.render_targets.get_mut(index) {
140 Some(target) => {
141 if let Some(blend) = blend {
142 target.blend = Some(blend.create_wgpu_blend_state());
143 target.write_mask = Some(blend.create_wgpu_color_write_mask());
144 } else {
145 target.blend = None;
146 target.write_mask = Some(wgpu::ColorWrites::COLOR);
147 }
148 }
149 None => {
150 panic!("Render target at index {} does not exist", index);
151 }
152 }
153 }
154
155 #[inline]
156 pub fn get_blend(&self, index: usize) -> Option<BlendState> {
157 let inner = self.inner.borrow();
158
159 match inner.render_targets.get(index) {
160 Some(target) => {
161 let state = target.blend.clone();
162 let color_write_mask = target.write_mask.clone();
163
164 Some(BlendState::from_wgpu(state, color_write_mask))
165 }
166 None => None,
167 }
168 }
169
170 #[inline]
171 pub fn set_gpu_buffer(&mut self, vertex: Option<&Buffer>, index: Option<&Buffer>) {
172 self.set_gpu_buffer_wgpu(
173 vertex.map(|v| v.inner.borrow().buffer.clone()),
174 index.map(|i| i.inner.borrow().buffer.clone()),
175 );
176 }
177
178 #[inline]
179 pub fn set_gpu_buffer_raw<T, T2>(&mut self, vertex: Option<&[T]>, index: Option<&[T2]>)
180 where
181 T: bytemuck::Pod + bytemuck::Zeroable,
182 T2: bytemuck::Pod + bytemuck::Zeroable,
183 {
184 let (vertex_buffer, index_buffer) = {
185 let mut gpu_inner = self.graphics.borrow_mut();
186
187 let vertex_buffer = match vertex {
188 Some(data) => {
189 let buffer = gpu_inner.create_buffer_with(data, wgpu::BufferUsages::VERTEX);
190 Some(buffer)
191 }
192 None => None,
193 };
194
195 let index_buffer = match index {
196 Some(data) => {
197 let buffer = gpu_inner.create_buffer_with(data, wgpu::BufferUsages::INDEX);
198 Some(buffer)
199 }
200 None => None,
201 };
202
203 (vertex_buffer, index_buffer)
204 };
205
206 self.set_gpu_buffer_wgpu(vertex_buffer, index_buffer);
207 }
208
    /// Binds raw wgpu vertex/index buffers.
    ///
    /// In validation builds this panics when no shader is bound, or when an
    /// index buffer is supplied but the bound shader has no index format
    /// configured.
    pub(crate) fn set_gpu_buffer_wgpu(
        &mut self,
        vertex: Option<wgpu::Buffer>,
        index: Option<wgpu::Buffer>,
    ) {
        #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
        {
            let inner = self.inner.borrow();
            if inner.shader.is_none() {
                panic!("Shader is not set");
            }

            let shader = inner.shader.as_ref().unwrap();

            // Both shader binding flavors carry an optional index format.
            let index_format = match shader {
                RenderShaderBinding::Intermediate(IntermediateRenderPipeline {
                    index_format,
                    ..
                }) => index_format,
                RenderShaderBinding::Pipeline(RenderPipeline { index_format, .. }) => index_format,
            };

            if index_format.is_none() && index.is_some() {
                panic!("Index buffer is set, but shader not configured to use index buffer");
            }
        }

        let mut inner = self.inner.borrow_mut();

        inner.vertex = vertex;
        inner.index = index;
    }
241
242 #[inline]
243 pub fn get_gpu_buffer(&self) -> (Option<wgpu::Buffer>, Option<wgpu::Buffer>) {
244 let inner = self.inner.borrow();
245 (inner.vertex.clone(), inner.index.clone())
246 }
247
    /// Binds `shader` with every pipeline parameter taken from the shader's
    /// own attribute defaults. Pass `None` to unbind the current shader.
    #[inline]
    pub fn set_shader(&mut self, shader: Option<&GraphicsShader>) {
        self.set_shader_ex(shader, None, None, None, None, None);
    }
252
    /// Binds `shader` for subsequent draws, optionally overriding pipeline
    /// parameters baked into the shader's vertex-attribute configuration.
    ///
    /// Overrides left as `None` fall back to the shader defaults (see the
    /// per-field notes below). Passing `shader = None` unbinds the current
    /// shader and, in validation builds, drops the cached reflection data.
    #[inline]
    pub fn set_shader_ex(
        &mut self,
        shader: Option<&GraphicsShader>,
        topology: Option<ShaderTopology>,
        cull_mode: Option<ShaderCullMode>,
        front_face: Option<ShaderFrontFace>,
        polygon_mode: Option<ShaderPollygonMode>,
        index_format: Option<IndexBufferSize>,
    ) {
        let mut inner = self.inner.borrow_mut();

        match shader {
            Some(shader) => {
                let shader_inner = shader.inner.borrow();
                // A "split" shader has separate vertex/fragment modules; a
                // "single" shader reuses one module for both stages.
                let (vertex_shader, fragment_shader) = match &shader_inner.ty {
                    GraphicsShaderType::GraphicsSplit {
                        vertex_module,
                        fragment_module,
                    } => (vertex_module.clone(), fragment_module.clone()),
                    GraphicsShaderType::GraphicsSingle { module } => (module.clone(), module.clone()),
                };

                let layout = shader_inner.bind_group_layouts.clone();

                // Entry points are recovered from the reflection data; a
                // combined VertexFragment entry satisfies both lookups.
                let vertex_reflect = shader_inner.reflection.iter().find(|r| {
                    matches!(r, ShaderReflect::Vertex { .. })
                        || matches!(r, ShaderReflect::VertexFragment { .. })
                });

                let fragment_reflect = shader_inner.reflection.iter().find(|r| {
                    matches!(r, ShaderReflect::Fragment { .. })
                        || matches!(r, ShaderReflect::VertexFragment { .. })
                });

                let vertex_entry_point = match vertex_reflect {
                    Some(ShaderReflect::Vertex { entry_point, .. }) => Some(entry_point),
                    Some(ShaderReflect::VertexFragment {
                        vertex_entry_point, ..
                    }) => Some(vertex_entry_point),
                    _ => None,
                };

                let fragment_entry_point = match fragment_reflect {
                    Some(ShaderReflect::Fragment { entry_point, .. }) => Some(entry_point),
                    Some(ShaderReflect::VertexFragment {
                        fragment_entry_point,
                        ..
                    }) => Some(fragment_entry_point),
                    _ => None,
                };

                #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
                {
                    if vertex_entry_point.is_none() {
                        panic!("Vertex shader entry point is not found in shader reflection");
                    }

                    if fragment_entry_point.is_none() {
                        panic!("Fragment shader entry point is not found in shader reflection");
                    }
                }

                // NOTE(review): in release builds without validation these
                // unwraps will still panic (without the descriptive message)
                // if reflection is missing an entry point.
                let vertex_entry_point = vertex_entry_point.unwrap();
                let fragment_entry_point = fragment_entry_point.unwrap();

                let attrib_inner = shader.attrib.borrow();
                let shader_binding = IntermediateRenderPipeline {
                    shader: (vertex_shader, fragment_shader),
                    vertex_attribute: (attrib_inner.stride, attrib_inner.attributes.clone()),
                    shader_entry: (vertex_entry_point.clone(), fragment_entry_point.clone()),
                    layout: layout,
                    topology: topology.unwrap_or(attrib_inner.topology),
                    // NOTE(review): unlike the other overrides, `cull_mode`
                    // does not fall back to an `attrib_inner` default —
                    // passing `None` here simply converts to `None`.
                    // Confirm this asymmetry is intentional.
                    cull_mode: cull_mode.into(),
                    front_face: front_face.unwrap_or(attrib_inner.front_face),
                    polygon_mode: polygon_mode.unwrap_or(attrib_inner.polygon_mode),
                    index_format: index_format.or_else(|| attrib_inner.index.clone()),
                };

                inner.shader = Some(RenderShaderBinding::Intermediate(shader_binding));

                #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
                {
                    inner.shader_reflection = Some(shader_inner.reflection.clone());
                }
            }
            None => {
                inner.shader = None;

                #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
                {
                    inner.shader_reflection = None;
                }
            }
        }
    }
349
350 pub fn set_pipeline(&mut self, pipeline: Option<&RenderPipeline>) {
351 let mut inner = self.inner.borrow_mut();
352
353 match pipeline {
354 Some(pipeline) => {
355 inner.shader = Some(RenderShaderBinding::Pipeline(pipeline.clone()));
356 }
357 None => {
358 inner.shader = None;
359 }
360 }
361 }
362
    /// Removes the attachment bound at (`group`, `binding`), if any.
    ///
    /// Validation builds panic when a prebuilt pipeline is bound, since its
    /// bind groups cannot be edited through the pass.
    #[inline]
    pub(crate) fn remove_attachment(&mut self, group: u32, binding: u32) {
        let mut inner = self.inner.borrow_mut();

        #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
        {
            match &inner.shader {
                Some(RenderShaderBinding::Pipeline(_)) => {
                    panic!("Cannot insert or replace attachment when using a pipeline shader");
                }
                _ => {}
            }
        }

        // Keep every attachment that is NOT at the requested slot.
        inner
            .attachments
            .retain(|a| a.group != group || a.binding != binding);
    }
381
    /// Inserts `attachment` at (`group`, `binding`), replacing any
    /// attachment already bound there.
    ///
    /// Validation builds panic when no shader is set, when a prebuilt
    /// pipeline is bound (its bind groups are immutable), when the shader
    /// declares no binding at that location, or when the attachment kind
    /// does not match the shader's declared binding type.
    pub(crate) fn insert_or_replace_attachment(
        &mut self,
        group: u32,
        binding: u32,
        attachment: BindGroupAttachment,
    ) {
        let mut inner = self.inner.borrow_mut();

        #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
        {
            if inner.shader.is_none() {
                panic!("Shader is not set");
            }

            match &inner.shader {
                Some(RenderShaderBinding::Pipeline(_)) => {
                    panic!("Cannot insert or replace attachment when using a pipeline shader");
                }
                _ => {}
            }

            // Look the (group, binding) pair up across the reflection data
            // of every shader stage.
            let r#type = inner
                .shader_reflection
                .as_ref()
                .unwrap()
                .iter()
                .find_map(|b| {
                    let bindings = match b {
                        ShaderReflect::Vertex { bindings, .. }
                        | ShaderReflect::Fragment { bindings, .. }
                        | ShaderReflect::VertexFragment { bindings, .. } => bindings,
                        _ => return None,
                    };

                    bindings.iter().find_map(|shaderbinding| {
                        if shaderbinding.group == group && shaderbinding.binding == binding {
                            Some(shaderbinding)
                        } else {
                            None
                        }
                    })
                })
                .unwrap_or_else(|| {
                    panic!(
                        "Shader does not have binding group: {} binding: {}",
                        group, binding
                    );
                });

            // The supplied attachment kind must agree with what the shader
            // declared for this slot (push constants bind as uniforms here).
            if !match r#type.ty {
                ShaderBindingType::UniformBuffer(_) => {
                    matches!(attachment.attachment, BindGroupType::Uniform(_))
                }
                ShaderBindingType::StorageBuffer(_, _) => {
                    matches!(attachment.attachment, BindGroupType::Storage(_))
                }
                ShaderBindingType::StorageTexture(_) => {
                    matches!(attachment.attachment, BindGroupType::TextureStorage(_))
                }
                ShaderBindingType::Sampler(_) => {
                    matches!(attachment.attachment, BindGroupType::Sampler(_))
                }
                ShaderBindingType::Texture(_) => {
                    matches!(attachment.attachment, BindGroupType::Texture(_))
                }
                ShaderBindingType::PushConstant(_) => {
                    matches!(attachment.attachment, BindGroupType::Uniform(_))
                }
            } {
                panic!(
                    "Attachment group: {} binding: {} type: {} not match with shader type: {}",
                    group, binding, attachment.attachment, r#type.ty
                );
            }
        }

        // Replace in place when the slot is already occupied, else append.
        let index = inner
            .attachments
            .iter()
            .position(|a| a.group == group && a.binding == binding);

        if let Some(index) = index {
            inner.attachments[index] = attachment;
        } else {
            inner.attachments.push(attachment);
        }
    }
469
470 #[inline]
471 pub fn set_viewport(&mut self, _viewport: Option<RectF>, min_depth: f32, max_depth: f32) {
472 let mut inner = self.inner.borrow_mut();
473
474 match _viewport {
475 Some(viewport) => {
476 inner.viewport = Some((viewport, min_depth, max_depth));
477 }
478 None => {
479 inner.viewport = None;
480 }
481 }
482 }
483
484 #[inline]
485 pub fn get_viewport(&self) -> Option<(RectF, f32, f32)> {
486 let inner = self.inner.borrow();
487 inner.viewport.clone()
488 }
489
490 #[inline]
491 pub fn set_scissor(&mut self, _scissor: Option<RectF>) {
492 let mut inner = self.inner.borrow_mut();
493
494 match _scissor {
495 Some(scissor) => {
496 inner.scissor = Some(scissor);
497 }
498 None => {
499 inner.scissor = None;
500 }
501 }
502 }
503
504 #[inline]
505 pub fn get_scissor(&self) -> Option<RectF> {
506 let inner = self.inner.borrow();
507 inner.scissor.clone()
508 }
509
510 #[inline]
511 pub fn push_msaa_texture(&mut self, texture: &Texture) {
512 let mut inner = self.inner.borrow_mut();
513
514 if inner.multi_sample_count.is_none() {
515 inner.multi_sample_count = Some(texture.inner.borrow().sample_count.into());
516 }
517
518 #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
520 {
521 let msaa_count = texture.inner.borrow().sample_count.into();
522 if inner.multi_sample_count.unwrap() != msaa_count {
523 panic!("Multi sample texture count must match render target count");
524 }
525 }
526
527 inner
528 .multi_sample_target
529 .push(texture.inner.borrow().wgpu_view.clone());
530 }
531
    /// Binds `texture` as the pass's depth(-stencil) target, or clears the
    /// depth target with `None`.
    ///
    /// Validation builds panic unless the texture has render-attachment
    /// usage, a depth format (Depth32Float / Depth24Plus /
    /// Depth24PlusStencil8), a single sample, a non-zero size, and the same
    /// size as the pass's surface.
    #[inline]
    pub fn set_depth_texture(&mut self, texture: Option<&Texture>) {
        let mut inner = self.inner.borrow_mut();

        match texture {
            Some(texture) => {
                let texture_inner = texture.inner.borrow();
                let format = texture_inner.format.into();

                #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
                {
                    if !texture_inner
                        .usages
                        .contains(TextureUsage::RenderAttachment)
                    {
                        panic!("Texture must have render attachment usage");
                    }

                    let expected_depth_format = [
                        wgpu::TextureFormat::Depth32Float,
                        wgpu::TextureFormat::Depth24Plus,
                        wgpu::TextureFormat::Depth24PlusStencil8,
                    ];

                    if !expected_depth_format.contains(&format) {
                        panic!("Texture must have depth format");
                    }

                    if texture_inner.sample_count != SampleCount::SampleCount1 {
                        panic!("Depth texture must be single sampled");
                    }

                    let depth_size = texture_inner.size;
                    if depth_size.x == 0 || depth_size.y == 0 {
                        panic!("Depth texture size must be greater than 0");
                    }

                    // The depth buffer must cover the render targets exactly.
                    if depth_size.x != inner.surface_size.x || depth_size.y != inner.surface_size.y
                    {
                        panic!("Depth texture size must match render target size");
                    }
                }

                inner.depth_target = Some(texture_inner.wgpu_view.clone());
                inner.depth_target_format = Some(format);
            }
            None => {
                inner.depth_target = None;
                inner.depth_target_format = None;
            }
        }
    }
584
    /// Stores `_data` as the push-constant payload for subsequent draws,
    /// padded with zero bytes to a 4-byte multiple. `None` clears it.
    ///
    /// Validation builds panic when no shader is set or when the data is
    /// larger than the push-constant range declared by the shader's
    /// reflection.
    #[inline]
    pub fn set_push_constants(&mut self, _data: Option<&[u8]>) {
        let mut inner = self.inner.borrow_mut();

        match _data {
            Some(data) => {
                #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
                {
                    if inner.shader.is_none() {
                        panic!("Shader is not set");
                    }

                    // Size of the first push-constant range found in any
                    // stage's reflection; 0 when the shader declares none.
                    let size = inner
                        .shader_reflection
                        .as_ref()
                        .unwrap()
                        .iter()
                        .find_map(|b| {
                            let bindings = match b {
                                ShaderReflect::Vertex { bindings, .. }
                                | ShaderReflect::Fragment { bindings, .. }
                                | ShaderReflect::VertexFragment { bindings, .. } => bindings,
                                _ => return None,
                            };

                            bindings.iter().find_map(|binding| {
                                if let ShaderBindingType::PushConstant(size) = binding.ty {
                                    Some(size)
                                } else {
                                    None
                                }
                            })
                        })
                        .unwrap_or(0);

                    if data.len() > size as usize {
                        panic!("Data size must be less or equal to the push constant size");
                    }
                }

                // Push-constant writes must be 4-byte aligned; pad with zeros.
                let mut data = data.to_vec();
                if data.len() % 4 != 0 {
                    let padding = 4 - (data.len() % 4);
                    data.extend(vec![0; padding]);
                }

                inner.push_constant = Some(data);
            }
            None => {
                inner.push_constant = None;
                // Early return is redundant (end of function) but harmless.
                return;
            }
        }
    }
639
640 #[inline]
641 pub fn set_push_constants_raw<T: bytemuck::Pod + bytemuck::Zeroable>(
642 &mut self,
643 data: Option<&[T]>,
644 ) {
645 #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
646 {
647 let inner = self.inner.borrow();
648
649 if inner.shader.is_none() {
650 panic!("Shader is not set");
651 }
652 }
653
654 match data {
655 Some(data) => {
656 let mut bytemuck_data: Vec<u8> = bytemuck::cast_slice(data).to_vec();
657
658 if bytemuck_data.len() % 4 != 0 {
659 let padding = 4 - (bytemuck_data.len() % 4);
660 bytemuck_data.extend(vec![0; padding]);
661 }
662
663 self.set_push_constants(Some(&bytemuck_data));
664 }
665 None => {
666 self.set_push_constants(None);
667 }
668 }
669 }
670
671 #[inline]
672 pub fn set_push_constants_struct_raw<T: bytemuck::Pod + bytemuck::Zeroable>(
673 &mut self,
674 data: Option<&[T]>,
675 ) {
676 #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
677 {
678 let inner = self.inner.borrow();
679
680 if inner.shader.is_none() {
681 panic!("Shader is not set");
682 }
683 }
684
685 match data {
686 Some(data) => {
687 let mut bytemuck_data: Vec<u8> = bytemuck::cast_slice(data).to_vec();
688
689 if bytemuck_data.len() % 4 != 0 {
690 let padding = 4 - (bytemuck_data.len() % 4);
691 bytemuck_data.extend(vec![0; padding]);
692 }
693
694 self.set_push_constants(Some(&bytemuck_data));
695 }
696 None => {
697 self.set_push_constants(None);
698 }
699 }
700 }
701
702 #[inline]
703 pub fn set_attachment_sampler(
704 &mut self,
705 group: u32,
706 binding: u32,
707 sampler: Option<&TextureSampler>,
708 ) {
709 match sampler {
710 Some(sampler) => {
711 let inner = self.graphics.borrow();
712 let attachment = BindGroupAttachment {
713 group,
714 binding,
715 attachment: BindGroupType::Sampler(sampler.make_wgpu(inner.device())),
716 };
717
718 drop(inner);
719
720 self.insert_or_replace_attachment(group, binding, attachment);
721 }
722 None => {
723 self.remove_attachment(group, binding);
724 }
725 }
726 }
727
728 #[inline]
729 pub fn set_attachment_texture(&mut self, group: u32, binding: u32, texture: Option<&Texture>) {
730 match texture {
731 Some(texture) => {
732 let inner = texture.inner.borrow();
733 let attachment = BindGroupAttachment {
734 group,
735 binding,
736 attachment: BindGroupType::Texture(inner.wgpu_view.clone()),
737 };
738
739 drop(inner);
740
741 self.insert_or_replace_attachment(group, binding, attachment);
742 }
743 None => {
744 self.remove_attachment(group, binding);
745 }
746 }
747 }
748
749 #[inline]
750 pub fn set_attachment_texture_storage(
751 &mut self,
752 group: u32,
753 binding: u32,
754 texture: Option<&Texture>,
755 ) {
756 match texture {
757 Some(texture) => {
758 let inner = texture.inner.borrow();
759 let attachment = BindGroupAttachment {
760 group,
761 binding,
762 attachment: BindGroupType::TextureStorage(inner.wgpu_view.clone()),
763 };
764
765 self.insert_or_replace_attachment(group, binding, attachment);
766 }
767 None => {
768 self.remove_attachment(group, binding);
769 }
770 }
771 }
772
773 #[inline]
774 pub fn set_attachment_uniform(&mut self, group: u32, binding: u32, buffer: Option<&Buffer>) {
775 match buffer {
776 Some(buffer) => {
777 let inner = buffer.inner.borrow();
778 let attachment = BindGroupAttachment {
779 group,
780 binding,
781 attachment: BindGroupType::Uniform(inner.buffer.clone()),
782 };
783
784 self.insert_or_replace_attachment(group, binding, attachment);
785 }
786 None => {
787 self.remove_attachment(group, binding);
788 }
789 }
790 }
791
792 #[inline]
793 pub fn set_attachment_uniform_vec<T>(&mut self, group: u32, binding: u32, buffer: Option<Vec<T>>)
794 where
795 T: bytemuck::Pod + bytemuck::Zeroable,
796 {
797 match buffer {
798 Some(buffer) => {
799 let mut inner = self.graphics.borrow_mut();
800
801 let buffer = inner.create_buffer_with(&buffer, wgpu::BufferUsages::COPY_DST);
802 let attachment = BindGroupAttachment {
803 group,
804 binding,
805 attachment: BindGroupType::Uniform(buffer),
806 };
807
808 drop(inner);
809
810 self.insert_or_replace_attachment(group, binding, attachment);
811 }
812 None => {
813 self.remove_attachment(group, binding);
814 }
815 }
816 }
817
818 #[inline]
819 pub fn set_attachment_uniform_raw<T>(&mut self, group: u32, binding: u32, buffer: Option<&[T]>)
820 where
821 T: bytemuck::Pod + bytemuck::Zeroable,
822 {
823 match buffer {
824 Some(buffer) => {
825 let mut inner = self.graphics.borrow_mut();
826
827 let buffer = inner.create_buffer_with(&buffer, wgpu::BufferUsages::COPY_DST);
828 let attachment = BindGroupAttachment {
829 group,
830 binding,
831 attachment: BindGroupType::Uniform(buffer),
832 };
833
834 drop(inner);
835
836 self.insert_or_replace_attachment(group, binding, attachment);
837 }
838 None => {
839 self.remove_attachment(group, binding);
840 }
841 }
842 }
843
844 #[inline]
845 pub fn set_attachment_storage(&mut self, group: u32, binding: u32, buffer: Option<&Buffer>) {
846 match buffer {
847 Some(buffer) => {
848 let inner = buffer.inner.borrow();
849
850 let attachment = BindGroupAttachment {
851 group,
852 binding,
853 attachment: BindGroupType::Storage(inner.buffer.clone()),
854 };
855
856 self.insert_or_replace_attachment(group, binding, attachment);
857 }
858 None => {
859 self.remove_attachment(group, binding);
860 }
861 }
862 }
863
864 #[inline]
865 pub fn set_attachment_storage_raw<T>(&mut self, group: u32, binding: u32, buffer: Option<&[T]>)
866 where
867 T: bytemuck::Pod + bytemuck::Zeroable,
868 {
869 match buffer {
870 Some(buffer) => {
871 let mut inner = self.graphics.borrow_mut();
872
873 let buffer = inner.create_buffer_with(&buffer, wgpu::BufferUsages::COPY_DST);
874 let attachment = BindGroupAttachment {
875 group,
876 binding,
877 attachment: BindGroupType::Storage(buffer),
878 };
879
880 drop(inner);
881
882 self.insert_or_replace_attachment(group, binding, attachment);
883 }
884 None => {
885 self.remove_attachment(group, binding);
886 }
887 }
888 }
889
890 #[inline]
891 pub fn set_attachment_storage_vec<T>(&mut self, group: u32, binding: u32, buffer: Option<Vec<T>>)
892 where
893 T: bytemuck::Pod + bytemuck::Zeroable,
894 {
895 match buffer {
896 Some(buffer) => {
897 let mut inner = self.graphics.borrow_mut();
898
899 let buffer = inner.create_buffer_with(&buffer, wgpu::BufferUsages::COPY_DST);
900 let attachment = BindGroupAttachment {
901 group,
902 binding,
903 attachment: BindGroupType::Storage(buffer),
904 };
905
906 drop(inner);
907
908 self.insert_or_replace_attachment(group, binding, attachment);
909 }
910 None => {
911 self.remove_attachment(group, binding);
912 }
913 }
914 }
915
    /// Records a non-indexed draw over `vertex_ranges` with
    /// `num_of_instances` instances.
    #[inline]
    pub fn draw(&mut self, vertex_ranges: Range<u32>, num_of_instances: u32) {
        self.prepare_draw(false, vertex_ranges, 0, num_of_instances);
    }
920
    /// Records an indexed draw over `index_ranges`, adding `vertex_offset`
    /// to each index, with `num_of_instances` instances.
    #[inline]
    pub fn draw_indexed(
        &mut self,
        index_ranges: Range<u32>,
        vertex_offset: i32,
        num_of_instances: u32,
    ) {
        self.prepare_draw(true, index_ranges, vertex_offset, num_of_instances);
    }
930
    /// Shared tail of [`Self::draw`] / [`Self::draw_indexed`]: validates
    /// pass state, resolves the pipeline, and appends a direct draw call to
    /// the pass's queue.
    ///
    /// Draws with a zero-area viewport or scissor are silently dropped.
    #[inline]
    fn prepare_draw(
        &mut self,
        use_index_buffer: bool,
        ranges: Range<u32>,
        vertex_offset: i32,
        num_of_instances: u32,
    ) {
        {
            let inner = self.inner.borrow();

            if let Some((viewport, _, _)) = &inner.viewport {
                if viewport.w <= 0.0 || viewport.h <= 0.0 {
                    return;
                }
            }

            if let Some(scissor) = &inner.scissor {
                if scissor.w <= 0.0 || scissor.h <= 0.0 {
                    return;
                }
            }
        }

        #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
        {
            let inner = self.inner.borrow();
            if inner.vertex.is_none() {
                panic!("Vertex buffer is not set");
            }

            if use_index_buffer && inner.index.is_none() {
                panic!("Index buffer is not set");
            }
        }

        // Must happen before borrowing `inner` mutably: prepare_pipeline
        // takes its own borrows of both `inner` and `graphics`.
        let (pipeline, bind_group, index_format) = self.prepare_pipeline();

        #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
        {
            if index_format.is_none() && use_index_buffer {
                panic!(
                    "Index format is not set, setup with shader.set_index_format() or render_pass.set_shader_ex()"
                );
            }
        }

        let mut inner = self.inner.borrow_mut();

        // Snapshot the current pass state into a queued draw call; the
        // queue is replayed when the pass is ended.
        let queue = RenderPassQueue {
            pipeline,
            bind_group,
            vbo: inner.vertex.clone(),
            ibo: if use_index_buffer {
                inner.index.clone()
            } else {
                None
            },
            itype: if use_index_buffer {
                Some(index_format.unwrap().into())
            } else {
                None
            },
            viewport: inner.viewport.clone(),
            scissor: inner.scissor.clone(),
            ty: DrawCallType::Direct {
                ranges,
                vertex_offset,
                num_of_instances,
            },
            push_constant: inner.push_constant.clone(),
        };

        inner.queues.push(queue);
    }
1013
1014 #[inline]
1015 pub fn draw_indirect(&mut self, buffer: &Buffer, offset: u64) {
1016 #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
1017 if buffer.inner.borrow().usage.contains(BufferUsage::INDIRECT) {
1018 panic!("Buffer must have INDIRECT usage");
1019 }
1020
1021 self.prepare_draw_indirect(buffer, offset, false);
1022 }
1023
1024 #[inline]
1025 pub fn draw_indexed_indirect(&mut self, buffer: &Buffer, offset: u64) {
1026 #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
1027 if buffer.inner.borrow().usage.contains(BufferUsage::INDIRECT) {
1028 panic!("Buffer must have INDIRECT usage");
1029 }
1030
1031 self.prepare_draw_indirect(buffer, offset, true);
1032 }
1033
    /// Shared tail of the indirect draw entry points: validates pass state,
    /// resolves the pipeline, and appends an indirect draw call that reads
    /// its arguments from `buffer` at byte `offset`.
    #[inline]
    fn prepare_draw_indirect(&mut self, buffer: &Buffer, offset: u64, use_index_buffer: bool) {
        #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
        {
            let inner = self.inner.borrow();
            if inner.vertex.is_none() {
                panic!("Vertex buffer is not set");
            }

            if use_index_buffer && inner.index.is_none() {
                panic!("Index buffer is not set");
            }
        }

        // Must happen before borrowing `inner` mutably: prepare_pipeline
        // takes its own borrows of both `inner` and `graphics`.
        let (pipeline, bind_group, index_format) = self.prepare_pipeline();

        #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
        {
            if index_format.is_none() && use_index_buffer {
                panic!(
                    "Index format is not set, setup with shader.set_index_format() or render_pass.set_shader_ex()"
                );
            }
        }

        let mut inner = self.inner.borrow_mut();
        // Snapshot the current pass state into a queued indirect draw.
        let queue = RenderPassQueue {
            pipeline,
            bind_group,
            vbo: inner.vertex.clone(),
            ibo: if use_index_buffer {
                inner.index.clone()
            } else {
                None
            },
            itype: if use_index_buffer {
                Some(index_format.unwrap().into())
            } else {
                None
            },
            viewport: inner.viewport.clone(),
            scissor: inner.scissor.clone(),
            ty: DrawCallType::InDirect {
                buffer: buffer.inner.borrow().buffer.clone(),
                offset,
            },
            push_constant: inner.push_constant.clone(),
        };

        inner.queues.push(queue);
    }
1085
    /// Resolves the currently bound shader into a concrete wgpu pipeline,
    /// its bind groups, and the configured index format.
    ///
    /// Pipelines and bind groups are cached on the GPU context, keyed by a
    /// hash of everything that affects their identity; a cache miss builds
    /// the object and inserts it. Panics when no shader is bound.
    fn prepare_pipeline(
        &self,
    ) -> (
        wgpu::RenderPipeline,
        Vec<(u32, wgpu::BindGroup)>,
        Option<IndexBufferSize>,
    ) {
        let inner = self.inner.borrow();

        match &inner.shader {
            Some(RenderShaderBinding::Intermediate(shader_binding)) => {
                // Hash every attachment (group, binding, resource) to key
                // the bind-group cache.
                let bind_group_hash_key = {
                    let mut hasher = DefaultHasher::new();
                    hasher.write_u64(0u64);
                    for attachment in &inner.attachments {
                        attachment.group.hash(&mut hasher);
                        attachment.binding.hash(&mut hasher);
                        match &attachment.attachment {
                            BindGroupType::Uniform(uniform) => {
                                uniform.hash(&mut hasher);
                            }
                            BindGroupType::Texture(texture) => {
                                texture.hash(&mut hasher);
                            }
                            BindGroupType::TextureStorage(texture) => texture.hash(&mut hasher),
                            BindGroupType::Sampler(sampler) => sampler.hash(&mut hasher),
                            BindGroupType::Storage(storage) => storage.hash(&mut hasher),
                        }
                    }

                    hasher.finish()
                };

                let bind_group_attachments = {
                    let mut gpu_inner = self.graphics.borrow_mut();

                    match gpu_inner.get_bind_group(bind_group_hash_key) {
                        Some(bind_group) => bind_group,
                        None => {
                            // Cache miss: group the flat attachment list
                            // into per-group wgpu entry lists.
                            let mut bind_group_attachments: HashMap<
                                u32,
                                Vec<wgpu::BindGroupEntry>,
                            > = inner.attachments.iter().fold(HashMap::new(), |mut map, e| {
                                let (group, binding, attachment) =
                                    (e.group, e.binding, &e.attachment);

                                let entry = match attachment {
                                    BindGroupType::Uniform(buffer) => wgpu::BindGroupEntry {
                                        binding,
                                        resource: wgpu::BindingResource::Buffer(
                                            wgpu::BufferBinding {
                                                buffer,
                                                offset: 0,
                                                size: None,
                                            },
                                        ),
                                    },
                                    BindGroupType::Texture(texture) => wgpu::BindGroupEntry {
                                        binding,
                                        resource: wgpu::BindingResource::TextureView(texture),
                                    },
                                    BindGroupType::Sampler(sampler) => wgpu::BindGroupEntry {
                                        binding,
                                        resource: wgpu::BindingResource::Sampler(sampler),
                                    },
                                    BindGroupType::Storage(buffer) => wgpu::BindGroupEntry {
                                        binding,
                                        resource: wgpu::BindingResource::Buffer(
                                            wgpu::BufferBinding {
                                                buffer,
                                                offset: 0,
                                                size: None,
                                            },
                                        ),
                                    },
                                    BindGroupType::TextureStorage(texture) => {
                                        wgpu::BindGroupEntry {
                                            binding,
                                            resource: wgpu::BindingResource::TextureView(texture),
                                        }
                                    }
                                };

                                map.entry(group).or_insert_with(Vec::new).push(entry);
                                map
                            });

                            // Entries are sorted by binding index within
                            // each group for a deterministic layout.
                            for entries in bind_group_attachments.values_mut() {
                                entries.sort_by_key(|e| e.binding);
                            }

                            // Pair each group's entries with its matching
                            // layout; panics if the shader layout lacks the
                            // group.
                            let bind_group = bind_group_attachments
                                .iter()
                                .map(|(group, entries)| {
                                    let layout = shader_binding
                                        .layout
                                        .iter()
                                        .find(|l| l.group == *group)
                                        .unwrap();

                                    (layout, entries.as_slice())
                                })
                                .collect::<Vec<_>>();

                            let create_info = BindGroupCreateInfo {
                                entries: bind_group,
                            };

                            gpu_inner.create_bind_group(bind_group_hash_key, create_info)
                        }
                    }
                };

                // The pipeline key covers the shader plus every render-state
                // input that affects pipeline compatibility.
                let pipeline_hash_key = {
                    let mut hasher = DefaultHasher::new();
                    shader_binding.hash(&mut hasher);

                    for target in &inner.render_targets {
                        target.format.hash(&mut hasher);
                        target.blend.hash(&mut hasher);
                        target.write_mask.hash(&mut hasher);
                    }

                    inner.depth_target_format.hash(&mut hasher);
                    inner.multi_sample_count.hash(&mut hasher);

                    hasher.finish()
                };

                let pipeline = {
                    let mut graphics_inner = self.graphics.borrow_mut();
                    match graphics_inner.get_graphics_pipeline(pipeline_hash_key) {
                        Some(pipeline) => pipeline,
                        None => {
                            // Cache miss: build a full pipeline descriptor
                            // from the intermediate shader binding.
                            let attribute = &shader_binding.vertex_attribute;
                            let vertex_desc = VertexAttributeLayout {
                                stride: attribute.0 as wgpu::BufferAddress,
                                step_mode: wgpu::VertexStepMode::Vertex,
                                attributes: attribute.1.clone(),
                            };

                            let primitive_state = wgpu::PrimitiveState {
                                topology: shader_binding.topology.into(),
                                strip_index_format: None,
                                front_face: shader_binding.front_face.into(),
                                cull_mode: shader_binding.cull_mode.map(|c| c.into()),
                                polygon_mode: shader_binding.polygon_mode.into(),
                                unclipped_depth: false,
                                conservative: false,
                            };

                            let layout = shader_binding
                                .layout
                                .iter()
                                .map(|l| l.layout.clone())
                                .collect::<Vec<_>>();

                            let mut pipeline_desc = GraphicsPipelineDesc {
                                shaders: shader_binding.shader.clone(),
                                entry_point: shader_binding.shader_entry.clone(),
                                render_target: Vec::with_capacity(inner.render_targets.len()),
                                depth_stencil: inner.depth_target_format,
                                vertex_desc,
                                primitive_state,
                                bind_group_layout: layout,
                                msaa_count: inner.multi_sample_count.unwrap_or(1),
                            };

                            for target in &inner.render_targets {
                                pipeline_desc.render_target.push((
                                    target.format,
                                    target.blend,
                                    target.write_mask,
                                ));
                            }

                            graphics_inner
                                .create_graphics_pipeline(pipeline_hash_key, pipeline_desc)
                        }
                    }
                };

                (
                    pipeline,
                    bind_group_attachments,
                    shader_binding.index_format,
                )
            }
            Some(RenderShaderBinding::Pipeline(pipeline)) => {
                // Prebuilt pipeline: clone its descriptor and patch in the
                // pass's current render targets / depth / MSAA state.
                let mut pipeline_desc = pipeline.pipeline_desc.clone();

                for target in &inner.render_targets {
                    pipeline_desc.render_target.push((
                        target.format,
                        target.blend,
                        target.write_mask,
                    ));
                }

                pipeline_desc.depth_stencil = inner.depth_target_format;
                pipeline_desc.msaa_count = inner.multi_sample_count.unwrap_or(1);

                let pipeline_hash_key = {
                    let mut hasher = DefaultHasher::new();
                    pipeline_desc.hash(&mut hasher);

                    for target in &inner.render_targets {
                        target.format.hash(&mut hasher);
                        target.blend.hash(&mut hasher);
                        target.write_mask.hash(&mut hasher);
                    }

                    inner.depth_target_format.hash(&mut hasher);
                    inner.multi_sample_count.hash(&mut hasher);

                    hasher.finish()
                };

                let wgpu_pipeline = {
                    let mut graphics_inner = self.graphics.borrow_mut();
                    match graphics_inner.get_graphics_pipeline(pipeline_hash_key) {
                        Some(pipeline) => pipeline,
                        None => graphics_inner
                            .create_graphics_pipeline(pipeline_hash_key, pipeline_desc),
                    }
                };

                // A prebuilt pipeline carries its bind groups and index
                // format; no per-pass attachment resolution is done here.
                let bind_group_attachments = pipeline.bind_group.clone();
                let index_format = pipeline.index_format.clone();

                (wgpu_pipeline, bind_group_attachments, index_format)
            }
            None => {
                panic!("Shader is not set");
            }
        }
    }
1327
1328 #[inline]
1329 pub fn begin_drawing(&mut self) -> Option<DrawingContext> {
1330 DrawingContext::new(self.clone())
1331 }
1332
    /// Flushes the recorded draw queue into a single wgpu render pass on the
    /// command encoder, then clears the shared "pass in flight" flag.
    ///
    /// Invoked by `Drop`, so every queued `RenderPassQueue` is replayed here:
    /// pipeline, bind groups, vertex/index buffers, push constants, scissor,
    /// viewport and finally the draw call itself.
    pub(crate) fn end(&mut self) {
        let inner = self.inner.borrow_mut();
        let mut cmd = inner.cmd.borrow_mut();

        // No explicit clear color defaults to opaque black (which clears).
        let clear_color = inner.clear_color.unwrap_or(Color::BLACK);

        // A fully transparent clear color means "keep previous contents";
        // anything else clears every color target to that color.
        let load_op = if clear_color.a <= 0.0 {
            wgpu::LoadOp::Load
        } else {
            wgpu::LoadOp::Clear(wgpu::Color {
                r: clear_color.r as f64,
                g: clear_color.g as f64,
                b: clear_color.b as f64,
                a: clear_color.a as f64,
            })
        };

        // Debug-only invariant: with MSAA enabled there must be exactly one
        // multisample target per color target (resolve pairs line up by index).
        #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
        {
            if inner.multi_sample_count.is_some()
                && inner.multi_sample_target.len() != inner.render_targets.len()
            {
                panic!("Multi sample target must match the number of render targets");
            }
        }

        let mut color_attachments = Vec::with_capacity(inner.render_targets.len());
        let has_msaa = inner.multi_sample_count.is_some();

        for i in 0..inner.render_targets.len() {
            // With MSAA: render into the multisampled view and resolve into
            // the real target. Without: render into the target directly.
            let target_view = if has_msaa {
                &inner.multi_sample_target[i]
            } else {
                &inner.render_targets[i].view
            };

            color_attachments.push(Some(wgpu::RenderPassColorAttachment {
                view: target_view,
                resolve_target: if has_msaa {
                    Some(&inner.render_targets[i].view)
                } else {
                    None
                },
                ops: wgpu::Operations {
                    load: load_op,
                    store: wgpu::StoreOp::Store,
                },
            }));
        }

        // Depth is always cleared to the far plane (1.0); stencil untouched.
        let mut depth_stencil_attachment = None;
        if let Some(depth_target) = inner.depth_target.as_ref() {
            depth_stencil_attachment = Some(wgpu::RenderPassDepthStencilAttachment {
                view: depth_target,
                depth_ops: Some(wgpu::Operations {
                    load: wgpu::LoadOp::Clear(1.0),
                    store: wgpu::StoreOp::Store,
                }),
                stencil_ops: None,
            });
        }

        let mut render_pass = cmd.begin_render_pass(&wgpu::RenderPassDescriptor {
            label: Some("Render Pass"),
            color_attachments: color_attachments.as_slice(),
            depth_stencil_attachment,
            ..Default::default()
        });

        // Replay every recorded draw call in submission order.
        for queue in &inner.queues {
            render_pass.set_pipeline(&queue.pipeline);

            for (group, bind) in &queue.bind_group {
                render_pass.set_bind_group(*group, bind, &[]);
            }

            if let Some(vbo) = &queue.vbo {
                render_pass.set_vertex_buffer(0, vbo.slice(..));
            }

            // Push constants are unavailable on the web backend.
            #[cfg(not(target_arch = "wasm32"))]
            if let Some(pc) = &queue.push_constant {
                use wgpu::ShaderStages;

                render_pass.set_push_constants(ShaderStages::all(), 0, pc);
            }

            // A degenerate scissor (non-positive extent) skips the whole
            // draw call, not just the scissor state.
            if let Some(scissor) = queue.scissor.as_ref() {
                if scissor.w <= 0.0 || scissor.h <= 0.0 {
                    continue; }

                render_pass.set_scissor_rect(
                    scissor.x as u32,
                    scissor.y as u32,
                    scissor.w as u32,
                    scissor.h as u32,
                );
            }

            // Same skip-the-draw behavior for a degenerate viewport.
            if let Some(viewport) = queue.viewport.as_ref() {
                let size = viewport.0;
                let min_depth = viewport.1;
                let max_depth = viewport.2;

                if size.w <= 0.0 || size.h <= 0.0 {
                    continue; }

                render_pass.set_viewport(size.x, size.y, size.w, size.h, min_depth, max_depth);
            }

            match &queue.ty {
                DrawCallType::Direct {
                    ranges,
                    vertex_offset,
                    num_of_instances,
                } => {
                    // NOTE(review): `itype.unwrap()` assumes an index format
                    // is always recorded alongside `ibo` — panics otherwise.
                    if let Some(ibo) = &queue.ibo {
                        render_pass.set_index_buffer(ibo.slice(..), queue.itype.unwrap());
                        render_pass.draw_indexed(
                            ranges.clone(),
                            *vertex_offset,
                            0..*num_of_instances,
                        );
                    } else {
                        render_pass.draw(ranges.clone(), 0..*num_of_instances);
                    }
                }
                DrawCallType::InDirect { buffer, offset } => {
                    if let Some(ibo) = &queue.ibo {
                        render_pass.set_index_buffer(ibo.slice(..), queue.itype.unwrap());
                        render_pass.draw_indexed_indirect(buffer, *offset);
                    } else {
                        render_pass.draw_indirect(buffer, *offset);
                    }
                }
            }
        }

        // Release the flag so a new pass may begin on this encoder.
        inner.atomic_pass.store(false, Ordering::Relaxed);
    }
1475}
1476
1477impl Drop for RenderPass {
1478 fn drop(&mut self) {
1479 if std::thread::panicking() {
1480 return;
1481 }
1482
1483 self.end();
1484 }
1485}
1486
/// A resolved color target of a render pass: the view rendered into plus the
/// per-target pipeline state (format, blend, write mask) that is hashed into
/// the graphics-pipeline cache key.
#[derive(Debug, Clone)]
pub(crate) struct RenderpassRenderTarget {
    /// View rendered into (or resolved into when MSAA is active).
    pub view: wgpu::TextureView,
    /// Color format of the target.
    pub format: wgpu::TextureFormat,
    /// Blend state derived from the builder's `BlendState`, when supplied.
    pub blend: Option<wgpu::BlendState>,
    /// Color write mask derived alongside `blend`.
    pub write_mask: Option<wgpu::ColorWrites>,
}
1494
/// Shared mutable state behind a `RenderPass` handle: attachment views, the
/// staged (not yet flushed) bind state, and the recorded draw queue that
/// `RenderPass::end` replays.
#[derive(Debug, Clone)]
pub(crate) struct RenderPassInner {
    /// Command encoder the pass is recorded into when it ends.
    pub cmd: ArcRef<wgpu::CommandEncoder>,
    /// Flag shared with the owning command context; cleared in `end`.
    pub atomic_pass: Arc<AtomicBool>,

    /// Color targets, one per attachment.
    pub render_targets: Vec<RenderpassRenderTarget>,
    /// Optional depth attachment view.
    pub depth_target: Option<wgpu::TextureView>,
    /// Format of `depth_target`; feeds the pipeline descriptor and cache key.
    pub depth_target_format: Option<wgpu::TextureFormat>,

    /// Common size shared by all attachments, established by the builder.
    pub surface_size: Point2,

    /// MSAA views rendered into when multisampling is on (one per color target).
    pub multi_sample_target: Vec<wgpu::TextureView>,
    /// Sample count shared by the MSAA targets; `None` disables MSAA.
    pub multi_sample_count: Option<u32>,

    /// Clear color used by `end`; `None` falls back to `Color::BLACK`, and a
    /// fully transparent color loads existing contents instead of clearing.
    pub clear_color: Option<Color>,
    /// Staged viewport as (rect, min_depth, max_depth).
    pub viewport: Option<(RectF, f32, f32)>,
    /// Staged scissor rectangle.
    pub scissor: Option<RectF>,

    /// Currently bound vertex buffer.
    pub vertex: Option<wgpu::Buffer>,
    /// Currently bound index buffer.
    pub index: Option<wgpu::Buffer>,

    /// Currently bound shader/pipeline source for upcoming draws.
    pub shader: Option<RenderShaderBinding>,
    /// Shader reflection data kept for validation in debug/validation builds.
    #[cfg(any(debug_assertions, feature = "enable-release-validation"))]
    pub shader_reflection: Option<Vec<ShaderReflect>>,

    /// Bind-group attachments staged for the next draw.
    pub attachments: Vec<BindGroupAttachment>,
    /// Push-constant bytes staged for the next draw.
    pub push_constant: Option<Vec<u8>>,

    /// Recorded draw calls, flushed in order by `end`.
    pub queues: Vec<RenderPassQueue>,
}
1525
/// Borrowed source of a color attachment accepted by `RenderpassBuilder`:
/// either the swapchain's surface texture or a user-created texture.
#[derive(Clone, Debug)]
pub(crate) enum RenderpassAttachment<'a> {
    /// The swapchain surface for this frame.
    SurfaceTexture(&'a SurfaceTexture),
    /// A render-attachment-capable texture.
    Texture(&'a Texture),
}
1531
/// Builder that collects color, MSAA and depth attachments for a
/// [`RenderPass`]; all attachment validation happens in `build`.
#[derive(Clone, Debug)]
pub struct RenderpassBuilder<'a> {
    // GPU handle forwarded into the resulting pass.
    gpu: ArcRef<GPUInner>,
    // Encoder the pass will record into.
    cmd: ArcRef<wgpu::CommandEncoder>,
    // "Pass in flight" flag shared with the command context.
    atomic_pass: Arc<AtomicBool>,

    // Color attachments paired with their optional blend state.
    color_attachments: Vec<(RenderpassAttachment<'a>, Option<BlendState>)>,
    // Multisampled intermediate targets (one per color attachment when used).
    msaa_attachments: Vec<&'a Texture>,
    // Optional depth attachment.
    depth_attachment: Option<&'a Texture>,
}
1542
1543impl<'a> RenderpassBuilder<'a> {
1544 pub(crate) fn new(
1545 gpu: ArcRef<GPUInner>,
1546 cmd: ArcRef<wgpu::CommandEncoder>,
1547 atomic_pass: Arc<AtomicBool>,
1548 ) -> Self {
1549 Self {
1550 gpu,
1551 cmd,
1552 atomic_pass,
1553
1554 color_attachments: Vec::new(),
1555 msaa_attachments: Vec::new(),
1556 depth_attachment: None,
1557 }
1558 }
1559
1560 pub fn add_surface_color_attachment(
1562 mut self,
1563 surface: &'a SurfaceTexture,
1564 blend: Option<&BlendState>,
1565 ) -> Self {
1566 self.color_attachments.push((
1567 RenderpassAttachment::SurfaceTexture(surface),
1568 blend.cloned(),
1569 ));
1570
1571 self
1572 }
1573
1574 pub fn add_color_attachment(
1575 mut self,
1576 texture: &'a Texture,
1577 blend: Option<&BlendState>,
1578 ) -> Self {
1579 self.color_attachments
1580 .push((RenderpassAttachment::Texture(texture), blend.cloned()));
1581
1582 self
1583 }
1584
1585 pub fn add_msaa_attachment(mut self, texture: &'a Texture) -> Self {
1586 self.msaa_attachments.push(texture);
1587
1588 self
1589 }
1590
1591 pub fn set_depth_attachment(mut self, texture: &'a Texture) -> Self {
1592 self.depth_attachment = Some(texture);
1593
1594 self
1595 }
1596
1597 pub fn build(self) -> Result<RenderPass, RenderPassBuildError> {
1598 let mut surface_size = None;
1599
1600 let mut color_attachments = Vec::with_capacity(self.color_attachments.len());
1601 for (attachment, blend) in self.color_attachments {
1602 let (view, format, size) = match attachment {
1603 RenderpassAttachment::SurfaceTexture(surface_texture) => {
1604 let view = surface_texture.get_view();
1605 let format = surface_texture.get_format();
1606 let size = surface_texture.get_size();
1607
1608 (view, format, Point2::new(size.width, size.height))
1609 }
1610 RenderpassAttachment::Texture(texture) => {
1611 let texture_inner = texture.inner.borrow();
1612
1613 if !texture_inner
1614 .usages
1615 .contains(TextureUsage::RenderAttachment)
1616 {
1617 return Err(RenderPassBuildError::ColorAttachmentNotRenderTarget);
1618 }
1619
1620 if texture_inner.size.x == 0 || texture_inner.size.y == 0 {
1621 return Err(RenderPassBuildError::MismatchedAttachmentSize(
1622 Point2::new(0.0, 0.0),
1623 texture_inner.size,
1624 ));
1625 }
1626
1627 if texture_inner.sample_count != SampleCount::SampleCount1 {
1628 return Err(RenderPassBuildError::ColorAttachmentMultiSampled);
1629 }
1630
1631 (
1632 texture_inner.wgpu_view.clone(),
1633 texture_inner.format.into(),
1634 texture_inner.size,
1635 )
1636 }
1637 };
1638
1639 if surface_size.is_some() {
1640 let surface_size = surface_size.unwrap();
1641 if surface_size != size {
1642 return Err(RenderPassBuildError::MismatchedAttachmentSize(
1643 surface_size,
1644 size,
1645 ));
1646 }
1647 }
1648
1649 if surface_size.is_none() {
1650 surface_size = Some(size);
1651 }
1652
1653 color_attachments.push(RenderpassRenderTarget {
1654 view,
1655 format,
1656 blend: blend.map(|b| b.create_wgpu_blend_state()),
1657 write_mask: blend.map(|b| b.create_wgpu_color_write_mask()),
1658 });
1659 }
1660
1661 let mut multi_sample_target = Vec::with_capacity(self.msaa_attachments.len());
1662 let mut multi_sample_count = None;
1663
1664 for msaa_texture in self.msaa_attachments {
1665 let texture_inner = msaa_texture.inner.borrow();
1666
1667 if !texture_inner
1668 .usages
1669 .contains(TextureUsage::RenderAttachment)
1670 {
1671 return Err(RenderPassBuildError::MsaaTextureNotRenderAttachment);
1672 }
1673
1674 if texture_inner.sample_count == SampleCount::SampleCount1 {
1675 return Err(RenderPassBuildError::MsaaTextureNotMultiSampled);
1676 }
1677
1678 if texture_inner.size.x == 0 || texture_inner.size.y == 0 {
1679 return Err(RenderPassBuildError::MsaaTextureInvalidSize(Point2::new(
1680 0.0, 0.0,
1681 )));
1682 }
1683
1684 if surface_size.is_some() {
1685 let surface_size = surface_size.unwrap();
1686 if surface_size != texture_inner.size {
1687 return Err(RenderPassBuildError::MismatchedAttachmentSize(
1688 surface_size,
1689 texture_inner.size,
1690 ));
1691 }
1692 }
1693
1694 let sample_count: u32 = texture_inner.sample_count.into();
1695
1696 if multi_sample_count.is_some() && multi_sample_count.unwrap() != sample_count {
1697 return Err(RenderPassBuildError::MismatchedAttachmentSampleCount(
1698 multi_sample_count.unwrap(),
1699 sample_count,
1700 ));
1701 }
1702
1703 if multi_sample_count.is_none() {
1704 multi_sample_count = Some(sample_count);
1705 }
1706
1707 multi_sample_target.push(texture_inner.wgpu_view.clone());
1708 }
1709
1710 let mut depth_view = None;
1711 let mut depth_format = None;
1712
1713 if let Some(depth_texture) = self.depth_attachment {
1714 let texture_inner = depth_texture.inner.borrow();
1715
1716 if !texture_inner
1717 .usages
1718 .contains(TextureUsage::RenderAttachment)
1719 {
1720 return Err(RenderPassBuildError::DepthTextureNotRenderAttachment);
1721 }
1722
1723 if texture_inner.size.x == 0 || texture_inner.size.y == 0 {
1724 return Err(RenderPassBuildError::DepthTextureInvalidSize(Point2::new(
1725 0.0, 0.0,
1726 )));
1727 }
1728
1729 if texture_inner.format != TextureFormat::Depth32Float
1730 && texture_inner.format != TextureFormat::Depth24PlusStencil8
1731 {
1732 return Err(RenderPassBuildError::DepthTextureFormatNotSupported(
1733 texture_inner.format,
1734 ));
1735 }
1736
1737 if surface_size.is_some() {
1738 let surface_size = surface_size.unwrap();
1739 if surface_size != texture_inner.size {
1740 return Err(RenderPassBuildError::MismatchedAttachmentSize(
1741 surface_size,
1742 texture_inner.size,
1743 ));
1744 }
1745 }
1746
1747 if surface_size.is_none() {
1748 surface_size = Some(texture_inner.size);
1749 }
1750
1751 depth_view = Some(texture_inner.wgpu_view.clone());
1752 depth_format = Some(texture_inner.format.into());
1753 }
1754
1755 if surface_size.is_none() {
1756 return Err(RenderPassBuildError::NoColorOrDepthAttachment);
1757 }
1758
1759 let renderpass = RenderPass::new(self.gpu, self.cmd, self.atomic_pass);
1760 {
1761 let mut inner = renderpass.inner.borrow_mut();
1762
1763 inner.render_targets = color_attachments;
1764 inner.multi_sample_target = multi_sample_target;
1765 inner.multi_sample_count = multi_sample_count;
1766 inner.depth_target = depth_view;
1767 inner.depth_target_format = depth_format;
1768 inner.surface_size = surface_size.unwrap();
1769 }
1770
1771 Ok(renderpass)
1772 }
1773}
1774
1775pub enum RenderPassBuildError {
1776 NoColorOrDepthAttachment,
1777 ColorAttachmentNotRenderTarget,
1778 ColorAttachmentMultiSampled,
1779 MismatchedAttachmentCount(usize, usize),
1780 MismatchedAttachmentSize(Point2, Point2),
1781 MismatchedAttachmentSampleCount(u32, u32),
1782 MismatchedAttachmentFormat(TextureFormat, TextureFormat),
1783 MsaaTextureNotMultiSampled,
1784 MsaaTextureNotRenderAttachment,
1785 MsaaTextureInvalidSize(Point2),
1786 DepthTextureNotRenderAttachment,
1787 DepthTextureInvalidSize(Point2),
1788 DepthTextureFormatNotSupported(TextureFormat),
1789 SwapchainError(String),
1790}
1791
1792impl std::fmt::Display for RenderPassBuildError {
1793 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1794 match self {
1795 RenderPassBuildError::NoColorOrDepthAttachment => write!(f, "No color attachment provided"),
1796 RenderPassBuildError::ColorAttachmentNotRenderTarget => {
1797 write!(f, "Color attachment is not a render target")
1798 }
1799 RenderPassBuildError::ColorAttachmentMultiSampled => {
1800 write!(f, "Color attachment is multi-sampled")
1801 }
1802 RenderPassBuildError::MismatchedAttachmentCount(expected, actual) => {
1803 write!(f, "Expected {} attachments, but got {}", expected, actual)
1804 }
1805 RenderPassBuildError::MismatchedAttachmentSize(expected, actual) => write!(
1806 f,
1807 "Expected attachment size {:?}, but got {:?}",
1808 expected, actual
1809 ),
1810 RenderPassBuildError::MismatchedAttachmentSampleCount(expected, actual) => {
1811 write!(f, "Expected sample count {}, but got {}", expected, actual)
1812 }
1813 RenderPassBuildError::MismatchedAttachmentFormat(expected, actual) => {
1814 write!(f, "Expected format {:?}, but got {:?}", expected, actual)
1815 }
1816 RenderPassBuildError::MsaaTextureNotMultiSampled => {
1817 write!(f, "MSAA texture is not multi-sampled")
1818 }
1819 RenderPassBuildError::MsaaTextureNotRenderAttachment => {
1820 write!(f, "MSAA texture is not a render attachment")
1821 }
1822 RenderPassBuildError::MsaaTextureInvalidSize(size) => {
1823 write!(f, "MSAA texture has invalid size {:?}", size)
1824 }
1825 RenderPassBuildError::DepthTextureNotRenderAttachment => {
1826 write!(f, "Depth texture is not a render attachment")
1827 }
1828 RenderPassBuildError::DepthTextureInvalidSize(size) => {
1829 write!(f, "Depth texture has invalid size {:?}", size)
1830 }
1831 RenderPassBuildError::DepthTextureFormatNotSupported(format) => {
1832 write!(f, "Depth texture format {:?} is not supported", format)
1833 }
1834 RenderPassBuildError::SwapchainError(err) => write!(f, "Swapchain error: {}", err),
1835 }
1836 }
1837}
1838
/// Raw ingredients for lazily building a `wgpu::RenderPipeline`: shader
/// modules, vertex layout, entry points, bind-group layouts and
/// fixed-function state. `Hash` lets this key the pipeline cache together
/// with the pass's render-target state.
#[derive(Clone, Debug, Hash)]
pub(crate) struct IntermediateRenderPipeline {
    /// (vertex, fragment) shader modules.
    pub shader: (wgpu::ShaderModule, wgpu::ShaderModule),
    /// Vertex stride in bytes plus the attribute list for buffer slot 0.
    pub vertex_attribute: (u64, Vec<wgpu::VertexAttribute>),
    /// (vertex, fragment) entry-point names.
    pub shader_entry: (String, String),
    /// Bind-group layouts, in group order.
    pub layout: Vec<BindGroupLayout>,
    /// Primitive topology.
    pub topology: ShaderTopology,
    /// Face culling mode; `None` disables culling.
    pub cull_mode: Option<ShaderCullMode>,
    /// Winding order considered front-facing.
    pub front_face: ShaderFrontFace,
    /// Polygon fill mode. (Type name spelling comes from the shader module.)
    pub polygon_mode: ShaderPollygonMode,
    /// Index-buffer element size, when indexed drawing is used.
    pub index_format: Option<IndexBufferSize>,
}
1851
/// One recorded draw call: the fully-resolved pipeline, bindings, buffers and
/// per-draw state replayed by `RenderPass::end`.
#[derive(Debug, Clone)]
pub(crate) struct RenderPassQueue {
    /// Compiled pipeline bound for this draw.
    pub pipeline: wgpu::RenderPipeline,
    /// Bind groups as (group index, bind group) pairs.
    pub bind_group: Vec<(u32, wgpu::BindGroup)>,

    /// Vertex buffer bound to slot 0, if any.
    pub vbo: Option<wgpu::Buffer>,
    /// Index buffer; when set, `itype` must also be set (unwrapped in `end`).
    pub ibo: Option<wgpu::Buffer>,
    /// Index format accompanying `ibo`.
    pub itype: Option<wgpu::IndexFormat>,

    /// Per-draw viewport as (rect, min_depth, max_depth); a non-positive
    /// extent makes `end` skip the draw entirely.
    pub viewport: Option<(RectF, f32, f32)>,
    /// Per-draw scissor rectangle; a non-positive extent makes `end` skip
    /// the draw entirely.
    pub scissor: Option<RectF>,

    /// Direct or indirect draw parameters.
    pub ty: DrawCallType,
    /// Push-constant bytes set for all shader stages (non-wasm targets only).
    pub push_constant: Option<Vec<u8>>,
}
1867
/// Source of the pipeline for upcoming draws: either an intermediate
/// description compiled (and cached) on demand, or an existing
/// `RenderPipeline` whose descriptor is re-hashed against the current
/// attachments.
#[derive(Clone, Debug)]
pub(crate) enum RenderShaderBinding {
    /// Pipeline ingredients gathered from a graphics shader; built lazily.
    Intermediate(IntermediateRenderPipeline),
    /// Pre-built pipeline; its descriptor is cloned and adapted to the pass.
    Pipeline(RenderPipeline),
}
1873
/// How a queued draw call is issued when the pass is flushed in
/// `RenderPass::end`.
#[derive(Debug, Clone)]
pub enum DrawCallType {
    /// Draw with CPU-supplied parameters. `ranges` selects indices when an
    /// index buffer is bound, vertices otherwise; `vertex_offset` applies
    /// only to indexed draws; instances are drawn as `0..num_of_instances`.
    Direct {
        ranges: Range<u32>,
        vertex_offset: i32,
        num_of_instances: u32,
    },

    /// Draw whose parameters the GPU reads from `buffer` at byte `offset`
    /// (`draw_indirect` / `draw_indexed_indirect`).
    InDirect {
        buffer: wgpu::Buffer,
        offset: u64,
    },
}