pub mod node;
pub mod pass_ref;

mod binding;
mod edge;
mod info;
mod resolver;
mod swapchain;

pub use self::{
    binding::{Bind, Unbind},
    resolver::Resolver,
};

use {
    self::{
        binding::Binding,
        edge::Edge,
        info::Information,
        node::{
            AccelerationStructureLeaseNode, AccelerationStructureNode,
            AnyAccelerationStructureNode, AnyBufferNode, AnyImageNode, BufferLeaseNode,
            BufferNode, ImageLeaseNode, ImageNode, Node, SwapchainImageNode,
        },
        pass_ref::{AttachmentIndex, Bindings, Descriptor, PassRef, SubresourceAccess, ViewType},
    },
    crate::driver::{
        DescriptorBindingMap,
        buffer::Buffer,
        compute::ComputePipeline,
        device::Device,
        format_aspect_mask, format_texel_block_size,
        graphic::{DepthStencilMode, GraphicPipeline},
        image::{ImageType, ImageViewInfo, SampleCount},
        image_subresource_range_from_layers,
        ray_trace::RayTracePipeline,
        render_pass::ResolveMode,
        shader::PipelineDescriptorInfo,
    },
    ash::vk,
    std::{
        cmp::Ord,
        collections::{BTreeMap, HashMap},
        fmt::{Debug, Formatter},
        ops::Range,
        sync::Arc,
    },
    vk_sync::AccessType,
};

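// Boxed callback invoked while a pass is recorded into a Vulkan command buffer; receives
// the device, the active command buffer, and the resolved bindings of the executing pass.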
type ExecFn = Box<dyn FnOnce(&Device, vk::CommandBuffer, Bindings<'_>) + Send>;
type NodeIndex = usize;

#[derive(Clone, Copy, Debug)]
struct Area {
    height: u32,
    width: u32,
    x: i32,
    y: i32,
}

#[derive(Clone, Copy, Debug)]
struct Attachment {
    array_layer_count: u32,
    aspect_mask: vk::ImageAspectFlags,
    base_array_layer: u32,
    base_mip_level: u32,
    format: vk::Format,
    mip_level_count: u32,
    sample_count: SampleCount,
    target: NodeIndex,
}

impl Attachment {
    fn new(image_view_info: ImageViewInfo, sample_count: SampleCount, target: NodeIndex) -> Self {
        Self {
            array_layer_count: image_view_info.array_layer_count,
            aspect_mask: image_view_info.aspect_mask,
            base_array_layer: image_view_info.base_array_layer,
            base_mip_level: image_view_info.base_mip_level,
            format: image_view_info.fmt,
            mip_level_count: image_view_info.mip_level_count,
            sample_count,
            target,
        }
    }

    fn are_compatible(lhs: Option<Self>, rhs: Option<Self>) -> bool {
        // Attachments are compatible when either reference is unused (`None`); otherwise
        // they must be identical.
        match (lhs, rhs) {
            (Some(lhs), Some(rhs)) => Self::are_identical(lhs, rhs),
            _ => true,
        }
    }

    pub fn are_identical(lhs: Self, rhs: Self) -> bool {
        lhs.array_layer_count == rhs.array_layer_count
            && lhs.base_array_layer == rhs.base_array_layer
            && lhs.base_mip_level == rhs.base_mip_level
            && lhs.format == rhs.format
            && lhs.mip_level_count == rhs.mip_level_count
            && lhs.sample_count == rhs.sample_count
            && lhs.target == rhs.target
    }
}

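/// A clear value for color images, stored as four `f32` components.
///
/// Conversions exist from three- and four-component `f32` and `u8` arrays; three-component
/// values become fully opaque, and `u8` components are normalized to `0.0..=1.0`.
///
/// ```ignore
/// // Illustrative sketch of the provided conversions.
/// let opaque_green: ClearColorValue = [0u8, 255, 0].into();
/// let translucent_red: ClearColorValue = [1.0f32, 0.0, 0.0, 0.5].into();
/// ```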
#[derive(Clone, Copy, Debug)]
pub struct ClearColorValue(pub [f32; 4]);

impl From<[f32; 3]> for ClearColorValue {
    fn from(color: [f32; 3]) -> Self {
        [color[0], color[1], color[2], 1.0].into()
    }
}

impl From<[f32; 4]> for ClearColorValue {
    fn from(color: [f32; 4]) -> Self {
        Self(color)
    }
}

impl From<[u8; 3]> for ClearColorValue {
    fn from(color: [u8; 3]) -> Self {
        [color[0], color[1], color[2], u8::MAX].into()
    }
}

impl From<[u8; 4]> for ClearColorValue {
    fn from(color: [u8; 4]) -> Self {
        [
            color[0] as f32 / u8::MAX as f32,
            color[1] as f32 / u8::MAX as f32,
            color[2] as f32 / u8::MAX as f32,
            color[3] as f32 / u8::MAX as f32,
        ]
        .into()
    }
}

#[derive(Default)]
struct Execution {
    accesses: HashMap<NodeIndex, Vec<SubresourceAccess>>,
    bindings: BTreeMap<Descriptor, (NodeIndex, Option<ViewType>)>,

    correlated_view_mask: u32,
    depth_stencil: Option<DepthStencilMode>,
    render_area: Option<Area>,
    view_mask: u32,

    color_attachments: HashMap<AttachmentIndex, Attachment>,
    color_clears: HashMap<AttachmentIndex, (Attachment, ClearColorValue)>,
    color_loads: HashMap<AttachmentIndex, Attachment>,
    color_resolves: HashMap<AttachmentIndex, (Attachment, AttachmentIndex)>,
    color_stores: HashMap<AttachmentIndex, Attachment>,
    depth_stencil_attachment: Option<Attachment>,
    depth_stencil_clear: Option<(Attachment, vk::ClearDepthStencilValue)>,
    depth_stencil_load: Option<Attachment>,
    depth_stencil_resolve: Option<(
        Attachment,
        AttachmentIndex,
        Option<ResolveMode>,
        Option<ResolveMode>,
    )>,
    depth_stencil_store: Option<Attachment>,

    func: Option<ExecutionFunction>,
    pipeline: Option<ExecutionPipeline>,
}

impl Debug for Execution {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Execution")
            .field("accesses", &self.accesses)
            .field("bindings", &self.bindings)
            .field("depth_stencil", &self.depth_stencil)
            .field("color_attachments", &self.color_attachments)
            .field("color_clears", &self.color_clears)
            .field("color_loads", &self.color_loads)
            .field("color_resolves", &self.color_resolves)
            .field("color_stores", &self.color_stores)
            .field("depth_stencil_attachment", &self.depth_stencil_attachment)
            .field("depth_stencil_clear", &self.depth_stencil_clear)
            .field("depth_stencil_load", &self.depth_stencil_load)
            .field("depth_stencil_resolve", &self.depth_stencil_resolve)
            .field("depth_stencil_store", &self.depth_stencil_store)
            .field("pipeline", &self.pipeline)
            .finish()
    }
}

struct ExecutionFunction(ExecFn);

#[derive(Debug)]
enum ExecutionPipeline {
    Compute(Arc<ComputePipeline>),
    Graphic(Arc<GraphicPipeline>),
    RayTrace(Arc<RayTracePipeline>),
}

impl ExecutionPipeline {
    fn as_graphic(&self) -> Option<&GraphicPipeline> {
        if let Self::Graphic(pipeline) = self {
            Some(pipeline)
        } else {
            None
        }
    }

    fn bind_point(&self) -> vk::PipelineBindPoint {
        match self {
            ExecutionPipeline::Compute(_) => vk::PipelineBindPoint::COMPUTE,
            ExecutionPipeline::Graphic(_) => vk::PipelineBindPoint::GRAPHICS,
            ExecutionPipeline::RayTrace(_) => vk::PipelineBindPoint::RAY_TRACING_KHR,
        }
    }

    fn descriptor_bindings(&self) -> &DescriptorBindingMap {
        match self {
            ExecutionPipeline::Compute(pipeline) => &pipeline.descriptor_bindings,
            ExecutionPipeline::Graphic(pipeline) => &pipeline.descriptor_bindings,
            ExecutionPipeline::RayTrace(pipeline) => &pipeline.descriptor_bindings,
        }
    }

    fn descriptor_info(&self) -> &PipelineDescriptorInfo {
        match self {
            ExecutionPipeline::Compute(pipeline) => &pipeline.descriptor_info,
            ExecutionPipeline::Graphic(pipeline) => &pipeline.descriptor_info,
            ExecutionPipeline::RayTrace(pipeline) => &pipeline.descriptor_info,
        }
    }

    fn layout(&self) -> vk::PipelineLayout {
        match self {
            ExecutionPipeline::Compute(pipeline) => pipeline.layout,
            ExecutionPipeline::Graphic(pipeline) => pipeline.layout,
            ExecutionPipeline::RayTrace(pipeline) => pipeline.layout,
        }
    }

    fn stage(&self) -> vk::PipelineStageFlags {
        match self {
            ExecutionPipeline::Compute(_) => vk::PipelineStageFlags::COMPUTE_SHADER,
            ExecutionPipeline::Graphic(_) => vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT,
            ExecutionPipeline::RayTrace(_) => vk::PipelineStageFlags::RAY_TRACING_SHADER_KHR,
        }
    }
}

impl Clone for ExecutionPipeline {
    fn clone(&self) -> Self {
        match self {
            Self::Compute(pipeline) => Self::Compute(Arc::clone(pipeline)),
            Self::Graphic(pipeline) => Self::Graphic(Arc::clone(pipeline)),
            Self::RayTrace(pipeline) => Self::RayTrace(Arc::clone(pipeline)),
        }
    }
}

#[derive(Debug)]
struct Pass {
    execs: Vec<Execution>,
    name: String,
}

impl Pass {
    fn descriptor_pools_sizes(
        &self,
    ) -> impl Iterator<Item = &HashMap<u32, HashMap<vk::DescriptorType, u32>>> {
        self.execs
            .iter()
            .flat_map(|exec| exec.pipeline.as_ref())
            .map(|pipeline| &pipeline.descriptor_info().pool_sizes)
    }
}

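/// A composable graph of render passes and the resources they access.
///
/// Resources are bound with [`RenderGraph::bind_node`], work is recorded with
/// [`RenderGraph::begin_pass`] or the built-in transfer helpers, and the whole graph is
/// turned into submittable work by [`RenderGraph::resolve`].
///
/// ```ignore
/// // Illustrative sketch; assumes `image` is an existing bindable image resource.
/// let mut graph = RenderGraph::new();
/// let image = graph.bind_node(image);
/// graph.clear_color_image(image);
/// let resolver = graph.resolve();
/// ```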
#[derive(Debug)]
pub struct RenderGraph {
    bindings: Vec<Binding>,
    passes: Vec<Pass>,

    #[cfg(debug_assertions)]
    pub debug: bool,
}

impl RenderGraph {
    #[allow(clippy::new_without_default)]
    pub fn new() -> Self {
        let bindings = vec![];
        let passes = vec![];

        #[cfg(debug_assertions)]
        let debug = false;

        Self {
            bindings,
            passes,
            #[cfg(debug_assertions)]
            debug,
        }
    }

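    /// Begins recording a new pass with the given debug name, returning a [`PassRef`]
    /// through which resource accesses, pipelines, and commands are declared.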
    pub fn begin_pass(&mut self, name: impl AsRef<str>) -> PassRef<'_> {
        PassRef::new(self, name.as_ref().to_string())
    }

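    /// Binds a resource (an image, buffer, or acceleration structure, or a lease of one)
    /// to this graph, returning a node handle used to refer to it in passes.
    ///
    /// ```ignore
    /// // Illustrative sketch; assumes `buffer` is a bindable buffer resource.
    /// let buffer_node = graph.bind_node(buffer);
    /// graph.fill_buffer(buffer_node, 0);
    /// ```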
    pub fn bind_node<'a, B>(&'a mut self, binding: B) -> <B as Edge<Self>>::Result
    where
        B: Edge<Self>,
        B: Bind<&'a mut Self, <B as Edge<Self>>::Result>,
    {
        binding.bind(self)
    }

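    /// Blits the full extent of `src_node` onto the full extent of `dst_node` using the
    /// given filter, scaling as needed.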
    pub fn blit_image(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyImageNode>,
        filter: vk::Filter,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();

        let src_info = self.node_info(src_node);
        let dst_info = self.node_info(dst_node);

        self.blit_image_region(
            src_node,
            dst_node,
            filter,
            vk::ImageBlit {
                src_subresource: vk::ImageSubresourceLayers {
                    aspect_mask: format_aspect_mask(src_info.fmt),
                    mip_level: 0,
                    base_array_layer: 0,
                    layer_count: 1,
                },
                src_offsets: [
                    vk::Offset3D { x: 0, y: 0, z: 0 },
                    vk::Offset3D {
                        x: src_info.width as _,
                        y: src_info.height as _,
                        z: src_info.depth as _,
                    },
                ],
                dst_subresource: vk::ImageSubresourceLayers {
                    aspect_mask: format_aspect_mask(dst_info.fmt),
                    mip_level: 0,
                    base_array_layer: 0,
                    layer_count: 1,
                },
                dst_offsets: [
                    vk::Offset3D { x: 0, y: 0, z: 0 },
                    vk::Offset3D {
                        x: dst_info.width as _,
                        y: dst_info.height as _,
                        z: dst_info.depth as _,
                    },
                ],
            },
        )
    }

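    /// Blits a single region from `src_node` to `dst_node` using the given filter.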
    pub fn blit_image_region(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyImageNode>,
        filter: vk::Filter,
        region: vk::ImageBlit,
    ) -> &mut Self {
        self.blit_image_regions(src_node, dst_node, filter, [region])
    }

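    /// Blits the given regions from `src_node` to `dst_node` via `vkCmdBlitImage`.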
    #[profiling::function]
    pub fn blit_image_regions(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyImageNode>,
        filter: vk::Filter,
        regions: impl AsRef<[vk::ImageBlit]> + 'static + Send,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();

        let mut pass = self.begin_pass("blit image");

        for region in regions.as_ref() {
            pass = pass
                .access_node_subrange(
                    src_node,
                    AccessType::TransferRead,
                    image_subresource_range_from_layers(region.src_subresource),
                )
                .access_node_subrange(
                    dst_node,
                    AccessType::TransferWrite,
                    image_subresource_range_from_layers(region.dst_subresource),
                );
        }

        pass.record_cmd_buf(move |device, cmd_buf, bindings| {
            let src_image = *bindings[src_node];
            let dst_image = *bindings[dst_node];

            unsafe {
                device.cmd_blit_image(
                    cmd_buf,
                    src_image,
                    vk::ImageLayout::TRANSFER_SRC_OPTIMAL,
                    dst_image,
                    vk::ImageLayout::TRANSFER_DST_OPTIMAL,
                    regions.as_ref(),
                    filter,
                );
            }
        })
        .submit_pass()
    }

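    /// Clears `image_node` to transparent black (`[0, 0, 0, 0]`).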
    pub fn clear_color_image(&mut self, image_node: impl Into<AnyImageNode>) -> &mut Self {
        self.clear_color_image_value(image_node, [0, 0, 0, 0])
    }

    #[profiling::function]
    pub fn clear_color_image_value(
        &mut self,
        image_node: impl Into<AnyImageNode>,
        color_value: impl Into<ClearColorValue>,
    ) -> &mut Self {
        let color_value = color_value.into();
        let image_node = image_node.into();
        let image_info = self.node_info(image_node);
        let image_view_info = image_info.default_view_info();

        self.begin_pass("clear color")
            .access_node_subrange(image_node, AccessType::TransferWrite, image_view_info)
            .record_cmd_buf(move |device, cmd_buf, bindings| unsafe {
                device.cmd_clear_color_image(
                    cmd_buf,
                    *bindings[image_node],
                    vk::ImageLayout::TRANSFER_DST_OPTIMAL,
                    &vk::ClearColorValue {
                        float32: color_value.0,
                    },
                    &[image_view_info.into()],
                );
            })
            .submit_pass()
    }

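    /// Clears `image_node` to depth `1.0` and stencil `0`.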
    pub fn clear_depth_stencil_image(&mut self, image_node: impl Into<AnyImageNode>) -> &mut Self {
        self.clear_depth_stencil_image_value(image_node, 1.0, 0)
    }

    #[profiling::function]
    pub fn clear_depth_stencil_image_value(
        &mut self,
        image_node: impl Into<AnyImageNode>,
        depth: f32,
        stencil: u32,
    ) -> &mut Self {
        let image_node = image_node.into();
        let image_info = self.node_info(image_node);
        let image_view_info = image_info.default_view_info();

        self.begin_pass("clear depth/stencil")
            .access_node_subrange(image_node, AccessType::TransferWrite, image_view_info)
            .record_cmd_buf(move |device, cmd_buf, bindings| unsafe {
                device.cmd_clear_depth_stencil_image(
                    cmd_buf,
                    *bindings[image_node],
                    vk::ImageLayout::TRANSFER_DST_OPTIMAL,
                    &vk::ClearDepthStencilValue { depth, stencil },
                    &[image_view_info.into()],
                );
            })
            .submit_pass()
    }

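    /// Copies bytes from the start of `src_node` to the start of `dst_node`, using the
    /// smaller of the two buffer sizes as the copy length.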
    pub fn copy_buffer(
        &mut self,
        src_node: impl Into<AnyBufferNode>,
        dst_node: impl Into<AnyBufferNode>,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();
        let src_info = self.node_info(src_node);
        let dst_info = self.node_info(dst_node);

        self.copy_buffer_region(
            src_node,
            dst_node,
            vk::BufferCopy {
                src_offset: 0,
                dst_offset: 0,
                size: src_info.size.min(dst_info.size),
            },
        )
    }

    pub fn copy_buffer_region(
        &mut self,
        src_node: impl Into<AnyBufferNode>,
        dst_node: impl Into<AnyBufferNode>,
        region: vk::BufferCopy,
    ) -> &mut Self {
        self.copy_buffer_regions(src_node, dst_node, [region])
    }

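    /// Copies the given regions from `src_node` to `dst_node` via `vkCmdCopyBuffer`; in
    /// debug builds, each region is asserted to lie within both buffers.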
    #[profiling::function]
    pub fn copy_buffer_regions(
        &mut self,
        src_node: impl Into<AnyBufferNode>,
        dst_node: impl Into<AnyBufferNode>,
        regions: impl AsRef<[vk::BufferCopy]> + 'static + Send,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();

        #[cfg(debug_assertions)]
        let (src_size, dst_size) = (self.node_info(src_node).size, self.node_info(dst_node).size);

        let mut pass = self.begin_pass("copy buffer");

        for region in regions.as_ref() {
            #[cfg(debug_assertions)]
            {
                assert!(
                    region.src_offset + region.size <= src_size,
                    "source range end ({}) exceeds source size ({src_size})",
                    region.src_offset + region.size
                );
                assert!(
                    region.dst_offset + region.size <= dst_size,
                    "destination range end ({}) exceeds destination size ({dst_size})",
                    region.dst_offset + region.size
                );
            }

            pass = pass
                .access_node_subrange(
                    src_node,
                    AccessType::TransferRead,
                    region.src_offset..region.src_offset + region.size,
                )
                .access_node_subrange(
                    dst_node,
                    AccessType::TransferWrite,
                    region.dst_offset..region.dst_offset + region.size,
                );
        }

        pass.record_cmd_buf(move |device, cmd_buf, bindings| {
            let src_buf = *bindings[src_node];
            let dst_buf = *bindings[dst_node];

            unsafe {
                device.cmd_copy_buffer(cmd_buf, src_buf, dst_buf, regions.as_ref());
            }
        })
        .submit_pass()
    }

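    /// Copies tightly packed texel data from `src_node` into the first mip level and
    /// array layer of `dst_node`.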
    pub fn copy_buffer_to_image(
        &mut self,
        src_node: impl Into<AnyBufferNode>,
        dst_node: impl Into<AnyImageNode>,
    ) -> &mut Self {
        let dst_node = dst_node.into();
        let dst_info = self.node_info(dst_node);

        self.copy_buffer_to_image_region(
            src_node,
            dst_node,
            vk::BufferImageCopy {
                buffer_offset: 0,
                buffer_row_length: dst_info.width,
                buffer_image_height: dst_info.height,
                image_subresource: vk::ImageSubresourceLayers {
                    aspect_mask: format_aspect_mask(dst_info.fmt),
                    mip_level: 0,
                    base_array_layer: 0,
                    layer_count: 1,
                },
                image_offset: Default::default(),
                image_extent: vk::Extent3D {
                    depth: dst_info.depth,
                    height: dst_info.height,
                    width: dst_info.width,
                },
            },
        )
    }

    pub fn copy_buffer_to_image_region(
        &mut self,
        src_node: impl Into<AnyBufferNode>,
        dst_node: impl Into<AnyImageNode>,
        region: vk::BufferImageCopy,
    ) -> &mut Self {
        self.copy_buffer_to_image_regions(src_node, dst_node, [region])
    }

    #[profiling::function]
    pub fn copy_buffer_to_image_regions(
        &mut self,
        src_node: impl Into<AnyBufferNode>,
        dst_node: impl Into<AnyImageNode>,
        regions: impl AsRef<[vk::BufferImageCopy]> + 'static + Send,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();
        let dst_info = self.node_info(dst_node);

        let mut pass = self.begin_pass("copy buffer to image");

        for region in regions.as_ref() {
            pass = pass
                .access_node_subrange(
                    src_node,
                    AccessType::TransferRead,
                    region.buffer_offset
                        ..region.buffer_offset
                            + (region.buffer_row_length
                                * format_texel_block_size(dst_info.fmt)
                                * region.buffer_image_height)
                                as vk::DeviceSize,
                )
                .access_node_subrange(
                    dst_node,
                    AccessType::TransferWrite,
                    image_subresource_range_from_layers(region.image_subresource),
                );
        }

        pass.record_cmd_buf(move |device, cmd_buf, bindings| {
            let src_buf = *bindings[src_node];
            let dst_image = *bindings[dst_node];

            unsafe {
                device.cmd_copy_buffer_to_image(
                    cmd_buf,
                    src_buf,
                    dst_image,
                    vk::ImageLayout::TRANSFER_DST_OPTIMAL,
                    regions.as_ref(),
                );
            }
        })
        .submit_pass()
    }

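    /// Copies the overlapping extent of `src_node` into `dst_node` at mip level zero,
    /// copying all six faces when an image is a cube or cube array.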
    pub fn copy_image(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyImageNode>,
    ) -> &mut Self {
        let src_node = src_node.into();
        let src_info = self.node_info(src_node);

        let dst_node = dst_node.into();
        let dst_info = self.node_info(dst_node);

        self.copy_image_region(
            src_node,
            dst_node,
            vk::ImageCopy {
                src_subresource: vk::ImageSubresourceLayers {
                    aspect_mask: format_aspect_mask(src_info.fmt),
                    mip_level: 0,
                    base_array_layer: 0,
                    layer_count: if matches!(src_info.ty, ImageType::Cube | ImageType::CubeArray) {
                        6
                    } else {
                        1
                    },
                },
                src_offset: vk::Offset3D { x: 0, y: 0, z: 0 },
                dst_subresource: vk::ImageSubresourceLayers {
                    aspect_mask: format_aspect_mask(dst_info.fmt),
                    mip_level: 0,
                    base_array_layer: 0,
                    layer_count: if matches!(dst_info.ty, ImageType::Cube | ImageType::CubeArray) {
                        6
                    } else {
                        1
                    },
                },
                dst_offset: vk::Offset3D { x: 0, y: 0, z: 0 },
                extent: vk::Extent3D {
                    depth: src_info.depth.clamp(1, dst_info.depth),
                    height: src_info.height.clamp(1, dst_info.height),
                    width: src_info.width.min(dst_info.width),
                },
            },
        )
    }

    pub fn copy_image_region(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyImageNode>,
        region: vk::ImageCopy,
    ) -> &mut Self {
        self.copy_image_regions(src_node, dst_node, [region])
    }

    #[profiling::function]
    pub fn copy_image_regions(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyImageNode>,
        regions: impl AsRef<[vk::ImageCopy]> + 'static + Send,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();

        let mut pass = self.begin_pass("copy image");

        for region in regions.as_ref() {
            pass = pass
                .access_node_subrange(
                    src_node,
                    AccessType::TransferRead,
                    image_subresource_range_from_layers(region.src_subresource),
                )
                .access_node_subrange(
                    dst_node,
                    AccessType::TransferWrite,
                    image_subresource_range_from_layers(region.dst_subresource),
                );
        }

        pass.record_cmd_buf(move |device, cmd_buf, bindings| {
            let src_image = *bindings[src_node];
            let dst_image = *bindings[dst_node];

            unsafe {
                device.cmd_copy_image(
                    cmd_buf,
                    src_image,
                    vk::ImageLayout::TRANSFER_SRC_OPTIMAL,
                    dst_image,
                    vk::ImageLayout::TRANSFER_DST_OPTIMAL,
                    regions.as_ref(),
                );
            }
        })
        .submit_pass()
    }

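    /// Copies the first mip level and array layer of `src_node` into `dst_node` as
    /// tightly packed texel data.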
    pub fn copy_image_to_buffer(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyBufferNode>,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();

        let src_info = self.node_info(src_node);

        self.copy_image_to_buffer_region(
            src_node,
            dst_node,
            vk::BufferImageCopy {
                buffer_offset: 0,
                buffer_row_length: src_info.width,
                buffer_image_height: src_info.height,
                image_subresource: vk::ImageSubresourceLayers {
                    aspect_mask: format_aspect_mask(src_info.fmt),
                    mip_level: 0,
                    base_array_layer: 0,
                    layer_count: 1,
                },
                image_offset: Default::default(),
                image_extent: vk::Extent3D {
                    depth: src_info.depth,
                    height: src_info.height,
                    width: src_info.width,
                },
            },
        )
    }

    pub fn copy_image_to_buffer_region(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyBufferNode>,
        region: vk::BufferImageCopy,
    ) -> &mut Self {
        self.copy_image_to_buffer_regions(src_node, dst_node, [region])
    }

    #[profiling::function]
    pub fn copy_image_to_buffer_regions(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyBufferNode>,
        regions: impl AsRef<[vk::BufferImageCopy]> + 'static + Send,
    ) -> &mut Self {
        let src_node = src_node.into();
        let src_info = self.node_info(src_node);
        let dst_node = dst_node.into();

        let mut pass = self.begin_pass("copy image to buffer");

        for region in regions.as_ref() {
            pass = pass
                .access_node_subrange(
                    src_node,
                    AccessType::TransferRead,
                    image_subresource_range_from_layers(region.image_subresource),
                )
                .access_node_subrange(
                    dst_node,
                    AccessType::TransferWrite,
                    region.buffer_offset
                        ..region.buffer_offset
                            + (region.buffer_row_length
                                * format_texel_block_size(src_info.fmt)
                                * region.buffer_image_height)
                                as vk::DeviceSize,
                );
        }

        pass.record_cmd_buf(move |device, cmd_buf, bindings| {
            let src_image = *bindings[src_node];
            let dst_buf = *bindings[dst_node];

            unsafe {
                device.cmd_copy_image_to_buffer(
                    cmd_buf,
                    src_image,
                    vk::ImageLayout::TRANSFER_SRC_OPTIMAL,
                    dst_buf,
                    regions.as_ref(),
                );
            }
        })
        .submit_pass()
    }

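    /// Fills the entire buffer with copies of the `u32` value `data`.
    ///
    /// ```ignore
    /// // Illustrative sketch; zeroes a previously bound buffer node.
    /// graph.fill_buffer(buffer_node, 0);
    /// ```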
    pub fn fill_buffer(&mut self, buffer_node: impl Into<AnyBufferNode>, data: u32) -> &mut Self {
        let buffer_node = buffer_node.into();

        let buffer_info = self.node_info(buffer_node);

        self.fill_buffer_region(buffer_node, data, 0..buffer_info.size)
    }

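    /// Fills `region` of the buffer with copies of the `u32` value `data` via
    /// `vkCmdFillBuffer`.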
    #[profiling::function]
    pub fn fill_buffer_region(
        &mut self,
        buffer_node: impl Into<AnyBufferNode>,
        data: u32,
        region: Range<vk::DeviceSize>,
    ) -> &mut Self {
        let buffer_node = buffer_node.into();

        self.begin_pass("fill buffer")
            .access_node_subrange(buffer_node, AccessType::TransferWrite, region.clone())
            .record_cmd_buf(move |device, cmd_buf, bindings| {
                let buffer = *bindings[buffer_node];

                unsafe {
                    device.cmd_fill_buffer(
                        cmd_buf,
                        buffer,
                        region.start,
                        region.end - region.start,
                        data,
                    );
                }
            })
            .submit_pass()
    }

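    /// Returns the index of the first pass which accesses `node`, if any pass does.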
    #[profiling::function]
    fn first_node_access_pass_index(&self, node: impl Node) -> Option<usize> {
        let node_idx = node.index();

        for (pass_idx, pass) in self.passes.iter().enumerate() {
            for exec in pass.execs.iter() {
                if exec.accesses.contains_key(&node_idx) {
                    return Some(pass_idx);
                }
            }
        }

        None
    }

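    /// Returns the GPU device address of a bound buffer node.
    ///
    /// Panics if `node` does not refer to a buffer currently bound to this graph.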
    pub fn node_device_address(&self, node: impl Into<AnyBufferNode>) -> vk::DeviceAddress {
        let node: AnyBufferNode = node.into();
        let buffer = self.bindings[node.index()].as_driver_buffer().unwrap();

        Buffer::device_address(buffer)
    }

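    /// Returns a copy of the information any bound node was created with.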
    pub fn node_info<N>(&self, node: N) -> <N as Information>::Info
    where
        N: Information,
    {
        node.get(self)
    }

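    /// Finalizes the graph and returns a [`Resolver`] which schedules the recorded
    /// passes for submission.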
    #[profiling::function]
    pub fn resolve(mut self) -> Resolver {
        // Remove the final execution of each pass; it is a leftover placeholder from
        // `PassRef` and contains no recorded work.
        for pass in &mut self.passes {
            pass.execs.pop();
        }

        Resolver::new(self)
    }

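    /// Removes a previously bound node from this graph, returning the underlying
    /// resource.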
    pub fn unbind_node<N>(&mut self, node: N) -> <N as Edge<Self>>::Result
    where
        N: Edge<Self>,
        N: Unbind<Self, <N as Edge<Self>>::Result>,
    {
        node.unbind(self)
    }

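    /// Writes `data` to the start of `buffer_node` via `vkCmdUpdateBuffer`.
    ///
    /// ```ignore
    /// // Illustrative sketch; writes sixteen zero bytes at offset zero.
    /// graph.update_buffer(buffer_node, [0u8; 16]);
    /// ```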
    pub fn update_buffer(
        &mut self,
        buffer_node: impl Into<AnyBufferNode>,
        data: impl AsRef<[u8]> + 'static + Send,
    ) -> &mut Self {
        self.update_buffer_offset(buffer_node, 0, data)
    }

    #[profiling::function]
    pub fn update_buffer_offset(
        &mut self,
        buffer_node: impl Into<AnyBufferNode>,
        offset: vk::DeviceSize,
        data: impl AsRef<[u8]> + 'static + Send,
    ) -> &mut Self {
        let buffer_node = buffer_node.into();
        let data_end = offset + data.as_ref().len() as vk::DeviceSize;

        #[cfg(debug_assertions)]
        {
            let buffer_info = self.node_info(buffer_node);

            assert!(
                data_end <= buffer_info.size,
                "data range end ({data_end}) exceeds buffer size ({})",
                buffer_info.size
            );
        }

        self.begin_pass("update buffer")
            .access_node_subrange(buffer_node, AccessType::TransferWrite, offset..data_end)
            .record_cmd_buf(move |device, cmd_buf, bindings| {
                let buffer = *bindings[buffer_node];

                unsafe {
                    device.cmd_update_buffer(cmd_buf, buffer, offset, data.as_ref());
                }
            })
            .submit_pass()
    }
}