screen_13/graph/
mod.rs

1//! Rendering operations and command submission.
2//!
3//!
4
5pub mod node;
6pub mod pass_ref;
7
8mod binding;
9mod edge;
10mod info;
11mod resolver;
12mod swapchain;
13
14pub use self::{
15    binding::{Bind, Unbind},
16    resolver::Resolver,
17};
18
19use {
20    self::{
21        binding::Binding,
22        edge::Edge,
23        info::Information,
24        node::Node,
25        node::{
26            AccelerationStructureLeaseNode, AccelerationStructureNode,
27            AnyAccelerationStructureNode, AnyBufferNode, AnyImageNode, BufferLeaseNode, BufferNode,
28            ImageLeaseNode, ImageNode, SwapchainImageNode,
29        },
30        pass_ref::{AttachmentIndex, Bindings, Descriptor, PassRef, SubresourceAccess, ViewType},
31    },
32    crate::driver::{
33        DescriptorBindingMap,
34        buffer::Buffer,
35        compute::ComputePipeline,
36        device::Device,
37        format_aspect_mask, format_texel_block_extent, format_texel_block_size,
38        graphic::{DepthStencilMode, GraphicPipeline},
39        image::{ImageInfo, ImageViewInfo, SampleCount},
40        image_subresource_range_from_layers,
41        ray_trace::RayTracePipeline,
42        render_pass::ResolveMode,
43        shader::PipelineDescriptorInfo,
44    },
45    ash::vk,
46    std::{
47        cmp::Ord,
48        collections::{BTreeMap, HashMap},
49        fmt::{Debug, Formatter},
50        ops::Range,
51        sync::Arc,
52    },
53    vk_sync::AccessType,
54};
55
/// Boxed callback invoked while recording a command buffer for a single execution.
type ExecFn = Box<dyn FnOnce(&Device, vk::CommandBuffer, Bindings<'_>) + Send>;
/// Index of a bound node within the graph's binding list.
type NodeIndex = usize;
58
/// A rectangular pixel region (signed offset + unsigned extent); used as an
/// execution's render area.
#[derive(Clone, Copy, Debug)]
struct Area {
    height: u32,
    width: u32,
    x: i32,
    y: i32,
}
66
/// Describes one framebuffer attachment: the format, sample count and subresource
/// range of the image node used as a color or depth/stencil target.
#[derive(Clone, Copy, Debug)]
struct Attachment {
    array_layer_count: u32,
    aspect_mask: vk::ImageAspectFlags,
    base_array_layer: u32,
    base_mip_level: u32,
    format: vk::Format,
    mip_level_count: u32,
    sample_count: SampleCount,
    // Index of the image node this attachment refers to.
    target: NodeIndex,
}
78
79impl Attachment {
80    fn new(image_view_info: ImageViewInfo, sample_count: SampleCount, target: NodeIndex) -> Self {
81        Self {
82            array_layer_count: image_view_info.array_layer_count,
83            aspect_mask: image_view_info.aspect_mask,
84            base_array_layer: image_view_info.base_array_layer,
85            base_mip_level: image_view_info.base_mip_level,
86            format: image_view_info.fmt,
87            mip_level_count: image_view_info.mip_level_count,
88            sample_count,
89            target,
90        }
91    }
92
93    fn are_compatible(lhs: Option<Self>, rhs: Option<Self>) -> bool {
94        // Two attachment references are compatible if they have matching format and sample
95        // count, or are both VK_ATTACHMENT_UNUSED or the pointer that would contain the
96        // reference is NULL.
97        if lhs.is_none() || rhs.is_none() {
98            return true;
99        }
100
101        Self::are_identical(lhs.unwrap(), rhs.unwrap())
102    }
103
104    fn are_identical(lhs: Self, rhs: Self) -> bool {
105        lhs.array_layer_count == rhs.array_layer_count
106            && lhs.base_array_layer == rhs.base_array_layer
107            && lhs.base_mip_level == rhs.base_mip_level
108            && lhs.format == rhs.format
109            && lhs.mip_level_count == rhs.mip_level_count
110            && lhs.sample_count == rhs.sample_count
111            && lhs.target == rhs.target
112    }
113
114    fn image_view_info(self, image_info: ImageInfo) -> ImageViewInfo {
115        image_info
116            .to_builder()
117            .array_layer_count(self.array_layer_count)
118            .mip_level_count(self.mip_level_count)
119            .fmt(self.format)
120            .build()
121            .default_view_info()
122            .to_builder()
123            .aspect_mask(self.aspect_mask)
124            .base_array_layer(self.base_array_layer)
125            .base_mip_level(self.base_mip_level)
126            .build()
127    }
128}
129
/// Specifies a color attachment clear value which can be used to initialize an image.
#[derive(Clone, Copy, Debug)]
pub struct ClearColorValue(pub [f32; 4]);
133
134impl From<[f32; 3]> for ClearColorValue {
135    fn from(color: [f32; 3]) -> Self {
136        [color[0], color[1], color[2], 1.0].into()
137    }
138}
139
impl From<[f32; 4]> for ClearColorValue {
    // Direct wrap: the components are already normalized floating-point RGBA.
    fn from(color: [f32; 4]) -> Self {
        Self(color)
    }
}
145
146impl From<[u8; 3]> for ClearColorValue {
147    fn from(color: [u8; 3]) -> Self {
148        [color[0], color[1], color[2], u8::MAX].into()
149    }
150}
151
152impl From<[u8; 4]> for ClearColorValue {
153    fn from(color: [u8; 4]) -> Self {
154        [
155            color[0] as f32 / u8::MAX as f32,
156            color[1] as f32 / u8::MAX as f32,
157            color[2] as f32 / u8::MAX as f32,
158            color[3] as f32 / u8::MAX as f32,
159        ]
160        .into()
161    }
162}
163
/// A single unit of recorded work within a [`Pass`]: declared resource accesses,
/// descriptor bindings, attachment state, and the callback which records commands.
#[derive(Default)]
struct Execution {
    // Subresource accesses declared per node, and shader descriptor bindings.
    accesses: HashMap<NodeIndex, Vec<SubresourceAccess>>,
    bindings: BTreeMap<Descriptor, (NodeIndex, Option<ViewType>)>,

    // Rendering state: multiview masks, optional depth/stencil mode and render area.
    correlated_view_mask: u32,
    depth_stencil: Option<DepthStencilMode>,
    render_area: Option<Area>,
    view_mask: u32,

    // Attachment state keyed by attachment index (clears/loads/stores/resolves).
    color_attachments: HashMap<AttachmentIndex, Attachment>,
    color_clears: HashMap<AttachmentIndex, (Attachment, ClearColorValue)>,
    color_loads: HashMap<AttachmentIndex, Attachment>,
    color_resolves: HashMap<AttachmentIndex, (Attachment, AttachmentIndex)>,
    color_stores: HashMap<AttachmentIndex, Attachment>,
    depth_stencil_attachment: Option<Attachment>,
    depth_stencil_clear: Option<(Attachment, vk::ClearDepthStencilValue)>,
    depth_stencil_load: Option<Attachment>,
    // (attachment, destination index, depth resolve mode, stencil resolve mode)
    // — TODO confirm the two ResolveMode fields map to depth and stencil respectively.
    depth_stencil_resolve: Option<(
        Attachment,
        AttachmentIndex,
        Option<ResolveMode>,
        Option<ResolveMode>,
    )>,
    depth_stencil_store: Option<Attachment>,

    // The command-recording callback and the pipeline it executes with, if any.
    func: Option<ExecutionFunction>,
    pipeline: Option<ExecutionPipeline>,
}
193
194impl Debug for Execution {
195    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
196        // The only field missing is func which cannot easily be implemented because it is a
197        // FnOnce.
198        f.debug_struct("Execution")
199            .field("accesses", &self.accesses)
200            .field("bindings", &self.bindings)
201            .field("depth_stencil", &self.depth_stencil)
202            .field("color_attachments", &self.color_attachments)
203            .field("color_clears", &self.color_clears)
204            .field("color_loads", &self.color_loads)
205            .field("color_resolves", &self.color_resolves)
206            .field("color_stores", &self.color_stores)
207            .field("depth_stencil_attachment", &self.depth_stencil_attachment)
208            .field("depth_stencil_clear", &self.depth_stencil_clear)
209            .field("depth_stencil_load", &self.depth_stencil_load)
210            .field("depth_stencil_resolve", &self.depth_stencil_resolve)
211            .field("depth_stencil_store", &self.depth_stencil_store)
212            .field("pipeline", &self.pipeline)
213            .finish()
214    }
215}
216
/// Newtype wrapper over [`ExecFn`], the boxed command-recording callback.
struct ExecutionFunction(ExecFn);
218
/// The pipeline bound for an [`Execution`]; one variant per Vulkan pipeline bind point.
#[derive(Debug)]
enum ExecutionPipeline {
    Compute(Arc<ComputePipeline>),
    Graphic(Arc<GraphicPipeline>),
    RayTrace(Arc<RayTracePipeline>),
}
225
226impl ExecutionPipeline {
227    fn as_graphic(&self) -> Option<&GraphicPipeline> {
228        if let Self::Graphic(pipeline) = self {
229            Some(pipeline)
230        } else {
231            None
232        }
233    }
234
235    fn bind_point(&self) -> vk::PipelineBindPoint {
236        match self {
237            ExecutionPipeline::Compute(_) => vk::PipelineBindPoint::COMPUTE,
238            ExecutionPipeline::Graphic(_) => vk::PipelineBindPoint::GRAPHICS,
239            ExecutionPipeline::RayTrace(_) => vk::PipelineBindPoint::RAY_TRACING_KHR,
240        }
241    }
242
243    fn descriptor_bindings(&self) -> &DescriptorBindingMap {
244        match self {
245            ExecutionPipeline::Compute(pipeline) => &pipeline.descriptor_bindings,
246            ExecutionPipeline::Graphic(pipeline) => &pipeline.descriptor_bindings,
247            ExecutionPipeline::RayTrace(pipeline) => &pipeline.descriptor_bindings,
248        }
249    }
250
251    fn descriptor_info(&self) -> &PipelineDescriptorInfo {
252        match self {
253            ExecutionPipeline::Compute(pipeline) => &pipeline.descriptor_info,
254            ExecutionPipeline::Graphic(pipeline) => &pipeline.descriptor_info,
255            ExecutionPipeline::RayTrace(pipeline) => &pipeline.descriptor_info,
256        }
257    }
258
259    fn layout(&self) -> vk::PipelineLayout {
260        match self {
261            ExecutionPipeline::Compute(pipeline) => pipeline.layout,
262            ExecutionPipeline::Graphic(pipeline) => pipeline.layout,
263            ExecutionPipeline::RayTrace(pipeline) => pipeline.layout,
264        }
265    }
266
267    fn stage(&self) -> vk::PipelineStageFlags {
268        match self {
269            ExecutionPipeline::Compute(_) => vk::PipelineStageFlags::COMPUTE_SHADER,
270            ExecutionPipeline::Graphic(_) => vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT,
271            ExecutionPipeline::RayTrace(_) => vk::PipelineStageFlags::RAY_TRACING_SHADER_KHR,
272        }
273    }
274}
275
impl Clone for ExecutionPipeline {
    // NOTE(review): `#[derive(Clone)]` on the enum would generate an equivalent impl
    // (`Arc<T>: Clone` holds for any `T`); this manual impl keeps the cheap
    // refcount-bump `Arc::clone` intent explicit.
    fn clone(&self) -> Self {
        match self {
            Self::Compute(pipeline) => Self::Compute(Arc::clone(pipeline)),
            Self::Graphic(pipeline) => Self::Graphic(Arc::clone(pipeline)),
            Self::RayTrace(pipeline) => Self::RayTrace(Arc::clone(pipeline)),
        }
    }
}
285
/// A named group of [`Execution`]s, created by `begin_pass` and appended to the graph
/// by `submit_pass`.
#[derive(Debug)]
struct Pass {
    execs: Vec<Execution>,
    name: String,
}
291
292impl Pass {
293    fn descriptor_pools_sizes(
294        &self,
295    ) -> impl Iterator<Item = &HashMap<u32, HashMap<vk::DescriptorType, u32>>> {
296        self.execs
297            .iter()
298            .flat_map(|exec| exec.pipeline.as_ref())
299            .map(|pipeline| &pipeline.descriptor_info().pool_sizes)
300    }
301}
302
/// A composable graph of render pass operations.
///
/// `RenderGraph` instances are intended for one-time use.
///
/// The design of this code originated with a combination of
/// [`PassBuilder`](https://github.com/EmbarkStudios/kajiya/blob/main/crates/lib/kajiya-rg/src/pass_builder.rs)
/// and
/// [`render_graph.cpp`](https://github.com/Themaister/Granite/blob/master/renderer/render_graph.cpp).
#[derive(Debug)]
pub struct RenderGraph {
    // Resources attached via `bind_node` — presumably indexed by `NodeIndex`; verify
    // against the node implementations.
    bindings: Vec<Binding>,
    // Passes in submission order.
    passes: Vec<Pass>,

    /// Set to true (when in debug mode) in order to get a breakpoint hit where you want.
    #[cfg(debug_assertions)]
    pub debug: bool,
}
320
321impl RenderGraph {
322    /// Constructs a new `RenderGraph`.
323    #[allow(clippy::new_without_default)]
324    pub fn new() -> Self {
325        let bindings = vec![];
326        let passes = vec![];
327
328        #[cfg(debug_assertions)]
329        let debug = false;
330
331        Self {
332            bindings,
333            passes,
334            #[cfg(debug_assertions)]
335            debug,
336        }
337    }
338
    /// Begins a new pass.
    ///
    /// Returns a [`PassRef`] builder; declare accesses and record commands on it, then
    /// call `submit_pass` to append the pass to this graph.
    pub fn begin_pass(&mut self, name: impl AsRef<str>) -> PassRef<'_> {
        PassRef::new(self, name.as_ref().to_string())
    }
343
    /// Binds a Vulkan acceleration structure, buffer, or image to this graph.
    ///
    /// Bound nodes may be used in passes for pipeline and shader operations.
    ///
    /// The returned node type is determined by the binding type via the [`Edge`] trait.
    pub fn bind_node<'a, B>(&'a mut self, binding: B) -> <B as Edge<Self>>::Result
    where
        B: Edge<Self>,
        B: Bind<&'a mut Self, <B as Edge<Self>>::Result>,
    {
        binding.bind(self)
    }
354
355    /// Copy an image, potentially performing format conversion.
356    pub fn blit_image(
357        &mut self,
358        src_node: impl Into<AnyImageNode>,
359        dst_node: impl Into<AnyImageNode>,
360        filter: vk::Filter,
361    ) -> &mut Self {
362        let src_node = src_node.into();
363        let dst_node = dst_node.into();
364
365        let src_info = self.node_info(src_node);
366        let dst_info = self.node_info(dst_node);
367
368        self.blit_image_region(
369            src_node,
370            dst_node,
371            filter,
372            vk::ImageBlit {
373                src_subresource: vk::ImageSubresourceLayers {
374                    aspect_mask: format_aspect_mask(src_info.fmt),
375                    mip_level: 0,
376                    base_array_layer: 0,
377                    layer_count: 1,
378                },
379                src_offsets: [
380                    vk::Offset3D { x: 0, y: 0, z: 0 },
381                    vk::Offset3D {
382                        x: src_info.width as _,
383                        y: src_info.height as _,
384                        z: src_info.depth as _,
385                    },
386                ],
387                dst_subresource: vk::ImageSubresourceLayers {
388                    aspect_mask: format_aspect_mask(dst_info.fmt),
389                    mip_level: 0,
390                    base_array_layer: 0,
391                    layer_count: 1,
392                },
393                dst_offsets: [
394                    vk::Offset3D { x: 0, y: 0, z: 0 },
395                    vk::Offset3D {
396                        x: dst_info.width as _,
397                        y: dst_info.height as _,
398                        z: dst_info.depth as _,
399                    },
400                ],
401            },
402        )
403    }
404
    /// Copy a region of an image, potentially performing format conversion.
    ///
    /// Convenience wrapper around [`Self::blit_image_regions`] for a single region.
    pub fn blit_image_region(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyImageNode>,
        filter: vk::Filter,
        region: vk::ImageBlit,
    ) -> &mut Self {
        self.blit_image_regions(src_node, dst_node, filter, [region])
    }
415
    /// Copy regions of an image, potentially performing format conversion.
    ///
    /// The source is read as `TRANSFER_SRC_OPTIMAL` and the destination written as
    /// `TRANSFER_DST_OPTIMAL`; `filter` selects the sampling used for any scaling.
    #[profiling::function]
    pub fn blit_image_regions(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyImageNode>,
        filter: vk::Filter,
        regions: impl AsRef<[vk::ImageBlit]> + 'static + Send,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();

        let mut pass = self.begin_pass("blit image");

        // Declare per-region transfer accesses so the graph can schedule barriers.
        for region in regions.as_ref() {
            pass = pass
                .access_node_subrange(
                    src_node,
                    AccessType::TransferRead,
                    image_subresource_range_from_layers(region.src_subresource),
                )
                .access_node_subrange(
                    dst_node,
                    AccessType::TransferWrite,
                    image_subresource_range_from_layers(region.dst_subresource),
                );
        }

        // `regions` is moved into the closure so the slice outlives graph resolution.
        pass.record_cmd_buf(move |device, cmd_buf, bindings| {
            let src_image = *bindings[src_node];
            let dst_image = *bindings[dst_node];

            unsafe {
                device.cmd_blit_image(
                    cmd_buf,
                    src_image,
                    vk::ImageLayout::TRANSFER_SRC_OPTIMAL,
                    dst_image,
                    vk::ImageLayout::TRANSFER_DST_OPTIMAL,
                    regions.as_ref(),
                    filter,
                );
            }
        })
        .submit_pass()
    }
462
    /// Clear a color image.
    ///
    /// Clears to transparent black (`[0, 0, 0, 0]`).
    pub fn clear_color_image(&mut self, image_node: impl Into<AnyImageNode>) -> &mut Self {
        self.clear_color_image_value(image_node, [0, 0, 0, 0])
    }
467
    /// Clear a color image.
    ///
    /// Clears the image's default view subresource range to `color_value`.
    #[profiling::function]
    pub fn clear_color_image_value(
        &mut self,
        image_node: impl Into<AnyImageNode>,
        color_value: impl Into<ClearColorValue>,
    ) -> &mut Self {
        let color_value = color_value.into();
        let image_node = image_node.into();
        let image_info = self.node_info(image_node);
        let image_view_info = image_info.default_view_info();

        self.begin_pass("clear color")
            .access_node_subrange(image_node, AccessType::TransferWrite, image_view_info)
            .record_cmd_buf(move |device, cmd_buf, bindings| unsafe {
                device.cmd_clear_color_image(
                    cmd_buf,
                    *bindings[image_node],
                    vk::ImageLayout::TRANSFER_DST_OPTIMAL,
                    // Clear values are always supplied as normalized floats here.
                    &vk::ClearColorValue {
                        float32: color_value.0,
                    },
                    &[image_view_info.into()],
                );
            })
            .submit_pass()
    }
495
    /// Clears a depth/stencil image.
    ///
    /// Uses depth `1.0` (far plane) and stencil `0`.
    pub fn clear_depth_stencil_image(&mut self, image_node: impl Into<AnyImageNode>) -> &mut Self {
        self.clear_depth_stencil_image_value(image_node, 1.0, 0)
    }
500
    /// Clears a depth/stencil image.
    ///
    /// Clears the image's default view subresource range to the given `depth` and
    /// `stencil` values.
    #[profiling::function]
    pub fn clear_depth_stencil_image_value(
        &mut self,
        image_node: impl Into<AnyImageNode>,
        depth: f32,
        stencil: u32,
    ) -> &mut Self {
        let image_node = image_node.into();
        let image_info = self.node_info(image_node);
        let image_view_info = image_info.default_view_info();

        self.begin_pass("clear depth/stencil")
            .access_node_subrange(image_node, AccessType::TransferWrite, image_view_info)
            .record_cmd_buf(move |device, cmd_buf, bindings| unsafe {
                device.cmd_clear_depth_stencil_image(
                    cmd_buf,
                    *bindings[image_node],
                    vk::ImageLayout::TRANSFER_DST_OPTIMAL,
                    &vk::ClearDepthStencilValue { depth, stencil },
                    &[image_view_info.into()],
                );
            })
            .submit_pass()
    }
526
    /// Copy data between buffers
    ///
    /// Copies from offset zero of `src_node` to offset zero of `dst_node`; the copied
    /// size is the smaller of the two buffer sizes.
    pub fn copy_buffer(
        &mut self,
        src_node: impl Into<AnyBufferNode>,
        dst_node: impl Into<AnyBufferNode>,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();
        let src_info = self.node_info(src_node);
        let dst_info = self.node_info(dst_node);

        self.copy_buffer_region(
            src_node,
            dst_node,
            vk::BufferCopy {
                src_offset: 0,
                dst_offset: 0,
                // Clamp to the smaller buffer so the copy never runs out of bounds.
                size: src_info.size.min(dst_info.size),
            },
        )
    }
548
    /// Copy data between buffer regions.
    ///
    /// Convenience wrapper around [`Self::copy_buffer_regions`] for a single region.
    pub fn copy_buffer_region(
        &mut self,
        src_node: impl Into<AnyBufferNode>,
        dst_node: impl Into<AnyBufferNode>,
        region: vk::BufferCopy,
    ) -> &mut Self {
        self.copy_buffer_regions(src_node, dst_node, [region])
    }
558
    /// Copy data between buffer regions.
    ///
    /// In debug builds each region is asserted to lie within both buffers; release
    /// builds perform no bounds validation here.
    #[profiling::function]
    pub fn copy_buffer_regions(
        &mut self,
        src_node: impl Into<AnyBufferNode>,
        dst_node: impl Into<AnyBufferNode>,
        regions: impl AsRef<[vk::BufferCopy]> + 'static + Send,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();

        #[cfg(debug_assertions)]
        let (src_size, dst_size) = (self.node_info(src_node).size, self.node_info(dst_node).size);

        let mut pass = self.begin_pass("copy buffer");

        for region in regions.as_ref() {
            #[cfg(debug_assertions)]
            {
                assert!(
                    region.src_offset + region.size <= src_size,
                    "source range end ({}) exceeds source size ({src_size})",
                    region.src_offset + region.size
                );
                assert!(
                    region.dst_offset + region.size <= dst_size,
                    "destination range end ({}) exceeds destination size ({dst_size})",
                    region.dst_offset + region.size
                );
            };

            // Declare byte-range accesses so the graph can schedule barriers.
            pass = pass
                .access_node_subrange(
                    src_node,
                    AccessType::TransferRead,
                    region.src_offset..region.src_offset + region.size,
                )
                .access_node_subrange(
                    dst_node,
                    AccessType::TransferWrite,
                    region.dst_offset..region.dst_offset + region.size,
                );
        }

        pass.record_cmd_buf(move |device, cmd_buf, bindings| {
            let src_buf = *bindings[src_node];
            let dst_buf = *bindings[dst_node];

            unsafe {
                device.cmd_copy_buffer(cmd_buf, src_buf, dst_buf, regions.as_ref());
            }
        })
        .submit_pass()
    }
613
    /// Copy data from a buffer into an image.
    ///
    /// Copies from buffer offset zero into mip level zero, array layer zero, over the
    /// image's full extent; the buffer data is assumed tightly packed at the image's
    /// width and height.
    pub fn copy_buffer_to_image(
        &mut self,
        src_node: impl Into<AnyBufferNode>,
        dst_node: impl Into<AnyImageNode>,
    ) -> &mut Self {
        let dst_node = dst_node.into();
        let dst_info = self.node_info(dst_node);

        self.copy_buffer_to_image_region(
            src_node,
            dst_node,
            vk::BufferImageCopy {
                buffer_offset: 0,
                buffer_row_length: dst_info.width,
                buffer_image_height: dst_info.height,
                image_subresource: vk::ImageSubresourceLayers {
                    aspect_mask: format_aspect_mask(dst_info.fmt),
                    mip_level: 0,
                    base_array_layer: 0,
                    layer_count: 1,
                },
                image_offset: Default::default(),
                image_extent: vk::Extent3D {
                    depth: dst_info.depth,
                    height: dst_info.height,
                    width: dst_info.width,
                },
            },
        )
    }
645
    /// Copy data from a buffer into an image.
    ///
    /// Convenience wrapper around [`Self::copy_buffer_to_image_regions`] for a single
    /// region.
    pub fn copy_buffer_to_image_region(
        &mut self,
        src_node: impl Into<AnyBufferNode>,
        dst_node: impl Into<AnyImageNode>,
        region: vk::BufferImageCopy,
    ) -> &mut Self {
        self.copy_buffer_to_image_regions(src_node, dst_node, [region])
    }
655
656    /// Copy data from a buffer into an image.
657    #[profiling::function]
658    pub fn copy_buffer_to_image_regions(
659        &mut self,
660        src_node: impl Into<AnyBufferNode>,
661        dst_node: impl Into<AnyImageNode>,
662        regions: impl AsRef<[vk::BufferImageCopy]> + 'static + Send,
663    ) -> &mut Self {
664        let src_node = src_node.into();
665        let dst_node = dst_node.into();
666        let dst_info = self.node_info(dst_node);
667
668        let mut pass = self.begin_pass("copy buffer to image");
669
670        for region in regions.as_ref() {
671            let block_bytes_size = format_texel_block_size(dst_info.fmt);
672            let (block_height, block_width) = format_texel_block_extent(dst_info.fmt);
673            let data_size = block_bytes_size
674                * (region.buffer_row_length / block_width)
675                * (region.buffer_image_height / block_height);
676
677            pass = pass
678                .access_node_subrange(
679                    src_node,
680                    AccessType::TransferRead,
681                    region.buffer_offset..region.buffer_offset + data_size as vk::DeviceSize,
682                )
683                .access_node_subrange(
684                    dst_node,
685                    AccessType::TransferWrite,
686                    image_subresource_range_from_layers(region.image_subresource),
687                );
688        }
689
690        pass.record_cmd_buf(move |device, cmd_buf, bindings| {
691            let src_buf = *bindings[src_node];
692            let dst_image = *bindings[dst_node];
693
694            unsafe {
695                device.cmd_copy_buffer_to_image(
696                    cmd_buf,
697                    src_buf,
698                    dst_image,
699                    vk::ImageLayout::TRANSFER_DST_OPTIMAL,
700                    regions.as_ref(),
701                );
702            }
703        })
704        .submit_pass()
705    }
706
    /// Copy all layers of a source image to a destination image.
    ///
    /// Copies mip level zero of every source array layer, over the overlapping extent
    /// of the two images.
    pub fn copy_image(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyImageNode>,
    ) -> &mut Self {
        let src_node = src_node.into();
        let src_info = self.node_info(src_node);

        let dst_node = dst_node.into();
        let dst_info = self.node_info(dst_node);

        self.copy_image_region(
            src_node,
            dst_node,
            vk::ImageCopy {
                src_subresource: vk::ImageSubresourceLayers {
                    aspect_mask: format_aspect_mask(src_info.fmt),
                    mip_level: 0,
                    base_array_layer: 0,
                    layer_count: src_info.array_layer_count,
                },
                src_offset: vk::Offset3D { x: 0, y: 0, z: 0 },
                dst_subresource: vk::ImageSubresourceLayers {
                    aspect_mask: format_aspect_mask(dst_info.fmt),
                    mip_level: 0,
                    base_array_layer: 0,
                    // NOTE(review): the *source* layer count is used for the
                    // destination as well — assumes dst has at least that many layers;
                    // confirm.
                    layer_count: src_info.array_layer_count,
                },
                dst_offset: vk::Offset3D { x: 0, y: 0, z: 0 },
                extent: vk::Extent3D {
                    // NOTE(review): depth/height use `clamp(1, ..)` while width uses a
                    // plain `min` — confirm this asymmetry is intentional.
                    depth: src_info.depth.clamp(1, dst_info.depth),
                    height: src_info.height.clamp(1, dst_info.height),
                    width: src_info.width.min(dst_info.width),
                },
            },
        )
    }
745
    /// Copy data between images.
    ///
    /// Convenience wrapper around [`Self::copy_image_regions`] for a single region.
    pub fn copy_image_region(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyImageNode>,
        region: vk::ImageCopy,
    ) -> &mut Self {
        self.copy_image_regions(src_node, dst_node, [region])
    }
755
    /// Copy data between images.
    ///
    /// The source is read as `TRANSFER_SRC_OPTIMAL` and the destination written as
    /// `TRANSFER_DST_OPTIMAL`; no format conversion (use the blit functions for that).
    #[profiling::function]
    pub fn copy_image_regions(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyImageNode>,
        regions: impl AsRef<[vk::ImageCopy]> + 'static + Send,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();

        let mut pass = self.begin_pass("copy image");

        // Declare per-region subresource accesses so the graph can schedule barriers.
        for region in regions.as_ref() {
            pass = pass
                .access_node_subrange(
                    src_node,
                    AccessType::TransferRead,
                    image_subresource_range_from_layers(region.src_subresource),
                )
                .access_node_subrange(
                    dst_node,
                    AccessType::TransferWrite,
                    image_subresource_range_from_layers(region.dst_subresource),
                );
        }

        pass.record_cmd_buf(move |device, cmd_buf, bindings| {
            let src_image = *bindings[src_node];
            let dst_image = *bindings[dst_node];

            unsafe {
                device.cmd_copy_image(
                    cmd_buf,
                    src_image,
                    vk::ImageLayout::TRANSFER_SRC_OPTIMAL,
                    dst_image,
                    vk::ImageLayout::TRANSFER_DST_OPTIMAL,
                    regions.as_ref(),
                );
            }
        })
        .submit_pass()
    }
800
    /// Copy image data into a buffer.
    ///
    /// Copies mip level zero, array layer zero, over the image's full extent, into the
    /// buffer starting at offset zero, tightly packed at the image's width and height.
    pub fn copy_image_to_buffer(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyBufferNode>,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();

        let src_info = self.node_info(src_node);

        self.copy_image_to_buffer_region(
            src_node,
            dst_node,
            vk::BufferImageCopy {
                buffer_offset: 0,
                buffer_row_length: src_info.width,
                buffer_image_height: src_info.height,
                image_subresource: vk::ImageSubresourceLayers {
                    aspect_mask: format_aspect_mask(src_info.fmt),
                    mip_level: 0,
                    base_array_layer: 0,
                    layer_count: 1,
                },
                image_offset: Default::default(),
                image_extent: vk::Extent3D {
                    depth: src_info.depth,
                    height: src_info.height,
                    width: src_info.width,
                },
            },
        )
    }
834
    /// Copy image data into a buffer.
    ///
    /// Convenience wrapper around [`Self::copy_image_to_buffer_regions`] for a single
    /// region.
    pub fn copy_image_to_buffer_region(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyBufferNode>,
        region: vk::BufferImageCopy,
    ) -> &mut Self {
        self.copy_image_to_buffer_regions(src_node, dst_node, [region])
    }
844
845    /// Copy image data into a buffer.
846    #[profiling::function]
847    pub fn copy_image_to_buffer_regions(
848        &mut self,
849        src_node: impl Into<AnyImageNode>,
850        dst_node: impl Into<AnyBufferNode>,
851        regions: impl AsRef<[vk::BufferImageCopy]> + 'static + Send,
852    ) -> &mut Self {
853        let src_node = src_node.into();
854        let src_info = self.node_info(src_node);
855        let dst_node = dst_node.into();
856
857        let mut pass = self.begin_pass("copy image to buffer");
858
859        for region in regions.as_ref() {
860            let block_bytes_size = format_texel_block_size(src_info.fmt);
861            let (block_height, block_width) = format_texel_block_extent(src_info.fmt);
862            let data_size = block_bytes_size
863                * (region.buffer_row_length / block_width)
864                * (region.buffer_image_height / block_height);
865
866            pass = pass
867                .access_node_subrange(
868                    src_node,
869                    AccessType::TransferRead,
870                    image_subresource_range_from_layers(region.image_subresource),
871                )
872                .access_node_subrange(
873                    dst_node,
874                    AccessType::TransferWrite,
875                    region.buffer_offset..region.buffer_offset + data_size as vk::DeviceSize,
876                );
877        }
878
879        pass.record_cmd_buf(move |device, cmd_buf, bindings| {
880            let src_image = *bindings[src_node];
881            let dst_buf = *bindings[dst_node];
882
883            unsafe {
884                device.cmd_copy_image_to_buffer(
885                    cmd_buf,
886                    src_image,
887                    vk::ImageLayout::TRANSFER_SRC_OPTIMAL,
888                    dst_buf,
889                    regions.as_ref(),
890                );
891            }
892        })
893        .submit_pass()
894    }
895
896    /// Fill a region of a buffer with a fixed value.
897    pub fn fill_buffer(&mut self, buffer_node: impl Into<AnyBufferNode>, data: u32) -> &mut Self {
898        let buffer_node = buffer_node.into();
899
900        let buffer_info = self.node_info(buffer_node);
901
902        self.fill_buffer_region(buffer_node, data, 0..buffer_info.size)
903    }
904
905    /// Fill a region of a buffer with a fixed value.
906    #[profiling::function]
907    pub fn fill_buffer_region(
908        &mut self,
909        buffer_node: impl Into<AnyBufferNode>,
910        data: u32,
911        region: Range<vk::DeviceSize>,
912    ) -> &mut Self {
913        let buffer_node = buffer_node.into();
914
915        self.begin_pass("fill buffer")
916            .access_node_subrange(buffer_node, AccessType::TransferWrite, region.clone())
917            .record_cmd_buf(move |device, cmd_buf, bindings| {
918                let buffer = *bindings[buffer_node];
919
920                unsafe {
921                    device.cmd_fill_buffer(
922                        cmd_buf,
923                        buffer,
924                        region.start,
925                        region.end - region.start,
926                        data,
927                    );
928                }
929            })
930            .submit_pass()
931    }
932
933    /// Returns the index of the first pass which accesses a given node
934    #[profiling::function]
935    fn first_node_access_pass_index(&self, node: impl Node) -> Option<usize> {
936        let node_idx = node.index();
937
938        for (pass_idx, pass) in self.passes.iter().enumerate() {
939            for exec in pass.execs.iter() {
940                if exec.accesses.contains_key(&node_idx) {
941                    return Some(pass_idx);
942                }
943            }
944        }
945
946        None
947    }
948
949    /// Returns the device address of a buffer node.
950    ///
951    /// # Panics
952    ///
953    /// Panics if the buffer is not currently bound or was not created with the
954    /// `SHADER_DEVICE_ADDRESS` usage flag.
955    pub fn node_device_address(&self, node: impl Into<AnyBufferNode>) -> vk::DeviceAddress {
956        let node: AnyBufferNode = node.into();
957        let buffer = self.bindings[node.index()].as_driver_buffer().unwrap();
958
959        Buffer::device_address(buffer)
960    }
961
    /// Returns the information used to create a node.
    pub fn node_info<N>(&self, node: N) -> <N as Information>::Info
    where
        N: Information,
    {
        node.get(self)
    }
969
970    /// Finalizes the graph and provides an object with functions for submitting the resulting
971    /// commands.
972    #[profiling::function]
973    pub fn resolve(mut self) -> Resolver {
974        // The final execution of each pass has no function
975        for pass in &mut self.passes {
976            pass.execs.pop();
977        }
978
979        Resolver::new(self)
980    }
981
    /// Removes a node from this graph.
    ///
    /// Returns the unbound value as defined by the node's [`Unbind`] implementation.
    ///
    /// Future access to `node` on this graph will return invalid results.
    pub fn unbind_node<N>(&mut self, node: N) -> <N as Edge<Self>>::Result
    where
        N: Edge<Self>,
        N: Unbind<Self, <N as Edge<Self>>::Result>,
    {
        node.unbind(self)
    }
992
993    /// Note: `data` must not exceed 65536 bytes.
994    pub fn update_buffer(
995        &mut self,
996        buffer_node: impl Into<AnyBufferNode>,
997        data: impl AsRef<[u8]> + 'static + Send,
998    ) -> &mut Self {
999        self.update_buffer_offset(buffer_node, 0, data)
1000    }
1001
1002    /// Note: `data` must not exceed 65536 bytes.
1003    #[profiling::function]
1004    pub fn update_buffer_offset(
1005        &mut self,
1006        buffer_node: impl Into<AnyBufferNode>,
1007        offset: vk::DeviceSize,
1008        data: impl AsRef<[u8]> + 'static + Send,
1009    ) -> &mut Self {
1010        let buffer_node = buffer_node.into();
1011        let data_end = offset + data.as_ref().len() as vk::DeviceSize;
1012
1013        #[cfg(debug_assertions)]
1014        {
1015            let buffer_info = self.node_info(buffer_node);
1016
1017            assert!(
1018                data_end <= buffer_info.size,
1019                "data range end ({data_end}) exceeds buffer size ({})",
1020                buffer_info.size
1021            );
1022        }
1023
1024        self.begin_pass("update buffer")
1025            .access_node_subrange(buffer_node, AccessType::TransferWrite, offset..data_end)
1026            .record_cmd_buf(move |device, cmd_buf, bindings| {
1027                let buffer = *bindings[buffer_node];
1028
1029                unsafe {
1030                    device.cmd_update_buffer(cmd_buf, buffer, offset, data.as_ref());
1031                }
1032            })
1033            .submit_pass()
1034    }
1035}