mod binding;
mod edge;
mod info;
mod node;
mod pass_ref;
mod resolver;
mod swapchain;
pub use {
self::{
binding::{AnyBufferBinding, AnyImageBinding, Bind},
node::{
AccelerationStructureLeaseNode, AccelerationStructureNode,
AnyAccelerationStructureNode, AnyBufferNode, AnyImageNode, BufferLeaseNode, BufferNode,
ImageLeaseNode, ImageNode, SwapchainImageNode, Unbind, View, ViewType,
},
pass_ref::{Bindings, Compute, Draw, PassRef, PipelinePassRef, RayTrace},
resolver::Resolver,
},
vk_sync::AccessType,
};
use {
self::{binding::Binding, edge::Edge, info::Information, node::Node},
crate::driver::{
buffer_copy_subresources, buffer_image_copy_subresource, format_aspect_mask,
is_write_access, BufferSubresource, ComputePipeline, DepthStencilMode,
DescriptorBindingMap, Device, GraphicPipeline, ImageSubresource, ImageType,
PipelineDescriptorInfo, RayTracePipeline, SampleCount,
},
ash::vk,
std::{
cmp::Ord,
collections::{BTreeMap, BTreeSet},
fmt::{Debug, Formatter},
ops::Range,
sync::Arc,
},
};
/// Index of a framebuffer attachment slot within a render pass.
pub type AttachmentIndex = u32;
/// Index of a descriptor binding within a descriptor set.
pub type BindingIndex = u32;
/// Element offset within an arrayed descriptor binding.
pub type BindingOffset = u32;
/// Index of a descriptor set within a pipeline layout.
pub type DescriptorSetIndex = u32;
/// Boxed one-shot callback that records commands into a command buffer while
/// the graph is being resolved.
type ExecFn = Box<dyn FnOnce(&Device, vk::CommandBuffer, Bindings<'_>) + Send>;
/// Index identifying a node bound to the graph (see `Node::index`).
type NodeIndex = usize;
/// A rectangular region in pixels, used as a pass's render area.
#[derive(Clone, Copy, Debug)]
struct Area {
    height: u32,
    width: u32,
    x: i32,
    y: i32,
}
/// Describes one framebuffer attachment: its aspect, format, sample count and
/// the image node it refers to.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
struct Attachment {
    aspect_mask: vk::ImageAspectFlags,
    fmt: vk::Format,
    sample_count: SampleCount,
    // Image node backing this attachment.
    target: NodeIndex,
}
impl Attachment {
    /// Two optional attachments are compatible when either side is absent, or
    /// when both are present and identical.
    fn are_compatible(lhs: Option<Self>, rhs: Option<Self>) -> bool {
        match (lhs, rhs) {
            (Some(lhs), Some(rhs)) => Self::are_identical(lhs, rhs),
            _ => true,
        }
    }

    /// Attachments are identical when their formats and sample counts match
    /// (aspect mask and target node are intentionally not compared here).
    fn are_identical(lhs: Self, rhs: Self) -> bool {
        let same_format = lhs.fmt == rhs.fmt;
        let same_samples = lhs.sample_count == rhs.sample_count;

        same_format && same_samples
    }
}
/// Sparse map from attachment slot index to attachment, with an optional
/// depth/stencil slot tracked separately.
#[derive(Clone, Debug, Default)]
struct AttachmentMap {
    // Slot-indexed storage; `None` entries are padding created by
    // `extend_attached`.
    attached: Vec<Option<Attachment>>,
    // Number of occupied (`Some`) slots.
    attached_count: usize,
    // Slot index of the depth/stencil attachment, if one has been set.
    depth_stencil: Option<AttachmentIndex>,
}
impl AttachmentMap {
    /// Returns the indices of the slots that actually contain attachments.
    fn attached(&self) -> impl Iterator<Item = AttachmentIndex> + '_ {
        self.attached
            .iter()
            .enumerate()
            .filter_map(|(idx, opt)| opt.map(|_| idx as AttachmentIndex))
    }

    /// Checks pair-wise compatibility of the slots shared by both maps; slots
    /// present in only one map are skipped by `zip`.
    fn are_compatible(&self, other: &Self) -> bool {
        self.attached
            .iter()
            .zip(other.attached.iter())
            .all(|(lhs, rhs)| Attachment::are_compatible(*lhs, *rhs))
    }

    /// Returns `true` if the given slot holds an attachment.
    fn contains_attachment(&self, attachment: AttachmentIndex) -> bool {
        // Fixed: `.get(..).is_some()` only tested that the index was in
        // bounds, so padded-but-empty (`None`) slots were reported as present.
        self.attached
            .get(attachment as usize)
            .map_or(false, Option::is_some)
    }

    /// Returns `true` if any attachment targets the given image node.
    fn contains_image(&self, node_idx: NodeIndex) -> bool {
        self.attached
            .iter()
            .any(|attachment| matches!(attachment, Some(Attachment { target, .. }) if *target == node_idx))
    }

    /// Returns the depth/stencil attachment and its slot index, if one is set.
    fn depth_stencil(&self) -> Option<(AttachmentIndex, Attachment)> {
        self.depth_stencil.map(|attachment_idx| {
            (
                attachment_idx,
                // Slot is guaranteed occupied by `set_depth_stencil`.
                self.attached[attachment_idx as usize].unwrap(),
            )
        })
    }

    /// Returns the attachment in the given slot, if any.
    fn get(&self, attachment: AttachmentIndex) -> Option<Attachment> {
        self.attached.get(attachment as usize).copied().flatten()
    }

    /// Records a color attachment in the given slot.
    ///
    /// Returns `false` only when the slot already held a non-identical
    /// attachment (see `set_attachment`).
    fn insert_color(
        &mut self,
        attachment: AttachmentIndex,
        aspect_mask: vk::ImageAspectFlags,
        fmt: vk::Format,
        sample_count: SampleCount,
        target: NodeIndex,
    ) -> bool {
        self.extend_attached(attachment);

        if self.attached[attachment as usize].is_none() {
            self.attached_count += 1;
        }

        Self::set_attachment(
            &mut self.attached[attachment as usize],
            Attachment {
                aspect_mask,
                fmt,
                sample_count,
                target,
            },
        )
    }

    /// Grows the slot vector so that `attachment_idx` is a valid index,
    /// padding with `None`.
    fn extend_attached(&mut self, attachment_idx: u32) {
        let attachment_count = attachment_idx as usize + 1;
        if attachment_count > self.attached.len() {
            // `resize` reserves and fills the new tail in one call.
            self.attached.resize(attachment_count, None);
        }
    }

    /// Returns the distinct image nodes referenced by the attachments, in slot
    /// order.
    fn images(&self) -> impl Iterator<Item = NodeIndex> + '_ {
        let mut already_seen = BTreeSet::new();
        self.attached
            .iter()
            .filter_map(|attachment| attachment.as_ref().map(|attachment| attachment.target))
            .filter(move |target| already_seen.insert(*target))
    }

    /// Stores `next` in the slot, returning `false` only when it replaced a
    /// non-identical attachment.
    fn set_attachment(curr: &mut Option<Attachment>, next: Attachment) -> bool {
        curr.replace(next)
            .map(|curr| Attachment::are_identical(curr, next))
            .unwrap_or(true)
    }

    /// Records the depth/stencil attachment in the given slot.
    ///
    /// # Panics
    ///
    /// Panics if a depth/stencil attachment was already set.
    fn set_depth_stencil(
        &mut self,
        attachment: AttachmentIndex,
        aspect_mask: vk::ImageAspectFlags,
        fmt: vk::Format,
        sample_count: SampleCount,
        target: NodeIndex,
    ) -> bool {
        self.extend_attached(attachment);

        assert!(self.depth_stencil.is_none());

        // Fixed: only count the slot when it was previously empty, matching
        // `insert_color`; the unconditional increment could double-count a
        // slot that already held an attachment.
        if self.attached[attachment as usize].is_none() {
            self.attached_count += 1;
        }

        self.depth_stencil = Some(attachment);

        Self::set_attachment(
            &mut self.attached[attachment as usize],
            Attachment {
                aspect_mask,
                fmt,
                sample_count,
                target,
            },
        )
    }
}
/// An RGBA color with normalized `f32` components.
#[derive(Clone, Copy, Debug)]
pub struct Color(pub [f32; 4]);
impl From<[f32; 4]> for Color {
    /// Wraps an already-normalized RGBA array.
    fn from(color: [f32; 4]) -> Self {
        Self(color)
    }
}
impl From<[u8; 4]> for Color {
    /// Converts 8-bit RGBA channels to normalized floating-point channels by
    /// dividing each by `u8::MAX`.
    fn from(color: [u8; 4]) -> Self {
        Self(color.map(|channel| channel as f32 / u8::MAX as f32))
    }
}
/// Identifies a shader descriptor: a (set, binding) pair, optionally with an
/// element offset for arrayed bindings.
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub enum Descriptor {
    // (set, binding, array element offset)
    ArrayBinding(DescriptorSetIndex, BindingIndex, BindingOffset),
    // (set, binding)
    Binding(DescriptorSetIndex, BindingIndex),
}
impl Descriptor {
    /// Decomposes this descriptor into `(set, binding, array offset)`;
    /// non-arrayed bindings get an offset of zero.
    fn into_tuple(self) -> (DescriptorSetIndex, BindingIndex, BindingOffset) {
        match self {
            Self::ArrayBinding(set_idx, binding_idx, offset) => (set_idx, binding_idx, offset),
            Self::Binding(set_idx, binding_idx) => (set_idx, binding_idx, 0),
        }
    }

    /// The descriptor set index this descriptor belongs to.
    fn set(self) -> DescriptorSetIndex {
        self.into_tuple().0
    }
}
impl From<BindingIndex> for Descriptor {
    /// A bare binding index refers to set `0`.
    fn from(binding_idx: BindingIndex) -> Self {
        Self::Binding(0, binding_idx)
    }
}
impl From<(DescriptorSetIndex, BindingIndex)> for Descriptor {
    fn from((set_idx, binding_idx): (DescriptorSetIndex, BindingIndex)) -> Self {
        Self::Binding(set_idx, binding_idx)
    }
}
impl From<(BindingIndex, [BindingOffset; 1])> for Descriptor {
    /// A `(binding, [offset])` pair refers to an arrayed binding in set `0`.
    fn from((binding_idx, [offset]): (BindingIndex, [BindingOffset; 1])) -> Self {
        Self::ArrayBinding(0, binding_idx, offset)
    }
}
impl From<(DescriptorSetIndex, BindingIndex, [BindingOffset; 1])> for Descriptor {
    fn from(
        (set_idx, binding_idx, [offset]): (DescriptorSetIndex, BindingIndex, [BindingOffset; 1]),
    ) -> Self {
        Self::ArrayBinding(set_idx, binding_idx, offset)
    }
}
/// A typed clear value for either a color or a depth/stencil attachment.
#[derive(Copy, Clone, Debug)]
enum ClearValue {
    Color(Color),
    DepthStencil(vk::ClearDepthStencilValue),
}
impl From<ClearValue> for vk::ClearValue {
fn from(src: ClearValue) -> Self {
match src {
ClearValue::Color(color) => vk::ClearValue {
color: vk::ClearColorValue { float32: color.0 },
},
ClearValue::DepthStencil(depth_stencil) => vk::ClearValue { depth_stencil },
}
}
}
/// One execution within a pass: the resource state it declares plus an
/// optional recorded callback and pipeline.
#[derive(Default)]
struct Execution {
    // Early/late access pair declared per accessed node.
    accesses: BTreeMap<NodeIndex, [SubresourceAccess; 2]>,
    // Node (and optional view) bound to each descriptor.
    bindings: BTreeMap<Descriptor, (NodeIndex, Option<ViewType>)>,
    // Clear values keyed by attachment slot.
    clears: BTreeMap<AttachmentIndex, ClearValue>,
    // Attachments loaded, resolved and stored by this execution.
    loads: AttachmentMap,
    resolves: AttachmentMap,
    stores: AttachmentMap,
    // Command-recording callback, if one was recorded.
    func: Option<ExecutionFunction>,
    // Pipeline this execution runs with, if any.
    pipeline: Option<ExecutionPipeline>,
}
impl Execution {
    /// Looks up an attachment slot, consulting loads, then resolves, then
    /// stores.
    fn attachment(&self, attachment_idx: AttachmentIndex) -> Option<Attachment> {
        self.loads
            .get(attachment_idx)
            .or_else(|| self.resolves.get(attachment_idx))
            .or_else(|| self.stores.get(attachment_idx))
    }

    /// The number of attachment slots, taking the widest of the three maps.
    fn attachment_count(&self) -> usize {
        let loads = self.loads.attached.len();
        let resolves = self.resolves.attached.len();
        let stores = self.stores.attached.len();

        loads.max(resolves).max(stores)
    }

    /// All attachment slots written by this execution: cleared, resolved, or
    /// stored (debug builds only).
    #[cfg(debug_assertions)]
    fn attached_written(&self) -> impl Iterator<Item = AttachmentIndex> + '_ {
        let cleared = self.clears.keys().copied();

        cleared
            .chain(self.resolves.attached())
            .chain(self.stores.attached())
    }
}
impl Debug for Execution {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // `func` is deliberately omitted: `ExecFn` is an opaque boxed closure
        // with no `Debug` impl.
        f.debug_struct("Execution")
            .field("accesses", &self.accesses)
            .field("bindings", &self.bindings)
            .field("clears", &self.clears)
            .field("loads", &self.loads)
            .field("resolves", &self.resolves)
            .field("stores", &self.stores)
            .field("pipeline", &self.pipeline)
            .finish()
    }
}
/// Newtype wrapper around the boxed command-recording closure.
struct ExecutionFunction(ExecFn);
/// A shared handle to whichever kind of pipeline an execution uses.
#[derive(Debug)]
enum ExecutionPipeline {
    Compute(Arc<ComputePipeline>),
    Graphic(Arc<GraphicPipeline>),
    RayTrace(Arc<RayTracePipeline>),
}
impl ExecutionPipeline {
    /// The Vulkan bind point corresponding to this pipeline flavor.
    fn bind_point(&self) -> vk::PipelineBindPoint {
        match self {
            Self::Compute(_) => vk::PipelineBindPoint::COMPUTE,
            Self::Graphic(_) => vk::PipelineBindPoint::GRAPHICS,
            Self::RayTrace(_) => vk::PipelineBindPoint::RAY_TRACING_KHR,
        }
    }

    /// The descriptor binding map of the wrapped pipeline.
    fn descriptor_bindings(&self) -> &DescriptorBindingMap {
        match self {
            Self::Compute(pipeline) => &pipeline.descriptor_bindings,
            Self::Graphic(pipeline) => &pipeline.descriptor_bindings,
            Self::RayTrace(pipeline) => &pipeline.descriptor_bindings,
        }
    }

    /// The descriptor layout information of the wrapped pipeline.
    fn descriptor_info(&self) -> &PipelineDescriptorInfo {
        match self {
            Self::Compute(pipeline) => &pipeline.descriptor_info,
            Self::Graphic(pipeline) => &pipeline.descriptor_info,
            Self::RayTrace(pipeline) => &pipeline.descriptor_info,
        }
    }

    /// The pipeline layout handle of the wrapped pipeline.
    fn layout(&self) -> vk::PipelineLayout {
        match self {
            Self::Compute(pipeline) => pipeline.layout,
            Self::Graphic(pipeline) => pipeline.layout,
            Self::RayTrace(pipeline) => pipeline.layout,
        }
    }

    /// The pipeline stage used when synchronizing against this pipeline.
    fn stage(&self) -> vk::PipelineStageFlags {
        match self {
            Self::Compute(_) => vk::PipelineStageFlags::COMPUTE_SHADER,
            Self::Graphic(_) => vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT,
            Self::RayTrace(_) => vk::PipelineStageFlags::RAY_TRACING_SHADER_KHR,
        }
    }
}
impl Clone for ExecutionPipeline {
    // Hand-written (rather than derived) presumably to make the cheap `Arc`
    // refcount bump explicit; cloning never copies pipeline data.
    fn clone(&self) -> Self {
        match self {
            Self::Compute(pipeline) => Self::Compute(Arc::clone(pipeline)),
            Self::Graphic(pipeline) => Self::Graphic(Arc::clone(pipeline)),
            Self::RayTrace(pipeline) => Self::RayTrace(Arc::clone(pipeline)),
        }
    }
}
/// A recorded pass: a named sequence of executions plus optional depth/stencil
/// mode and render area.
#[derive(Debug)]
struct Pass {
    depth_stencil: Option<DepthStencilMode>,
    execs: Vec<Execution>,
    name: String,
    render_area: Option<Area>,
}
impl Pass {
    /// Descriptor pool sizes for every pipeline used by this pass's
    /// executions (executions without a pipeline are skipped).
    fn descriptor_pools_sizes(
        &self,
    ) -> impl Iterator<Item = &BTreeMap<u32, BTreeMap<vk::DescriptorType, u32>>> {
        self.execs
            .iter()
            .filter_map(|exec| exec.pipeline.as_ref())
            .map(|pipeline| &pipeline.descriptor_info().pool_sizes)
    }
}
/// A render graph: resources bound as nodes plus the passes that access them.
#[derive(Debug)]
pub struct RenderGraph {
    // Resources bound to this graph, indexed by `NodeIndex`.
    bindings: Vec<Binding>,
    // Recorded passes, in submission order.
    passes: Vec<Pass>,
    // NOTE(review): consumers of this flag live outside this chunk —
    // presumably enables extra debug output during resolution; confirm.
    #[cfg(debug_assertions)]
    pub debug: bool,
}
impl RenderGraph {
    /// Creates a new, empty render graph.
    #[allow(clippy::new_without_default)]
    pub fn new() -> Self {
        let bindings = vec![];
        let passes = vec![];

        #[cfg(debug_assertions)]
        let debug = false;

        Self {
            bindings,
            passes,
            #[cfg(debug_assertions)]
            debug,
        }
    }

    /// Begins recording a new pass with the given debug name.
    pub fn begin_pass(&mut self, name: impl AsRef<str>) -> PassRef<'_> {
        PassRef::new(self, name.as_ref().to_string())
    }

    /// Binds a resource to this graph, returning a node that refers to it.
    pub fn bind_node<'a, B>(&'a mut self, binding: B) -> <B as Edge<Self>>::Result
    where
        B: Edge<Self>,
        B: Bind<&'a mut Self, <B as Edge<Self>>::Result>,
    {
        binding.bind(self)
    }

    /// Blits the full extent of `src_node` onto the full extent of `dst_node`.
    pub fn blit_image(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyImageNode>,
        filter: vk::Filter,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();
        let src_info = self.node_info(src_node);
        let dst_info = self.node_info(dst_node);

        self.blit_image_region(
            src_node,
            dst_node,
            &vk::ImageBlit {
                src_subresource: vk::ImageSubresourceLayers {
                    aspect_mask: format_aspect_mask(src_info.fmt),
                    mip_level: 0,
                    base_array_layer: 0,
                    layer_count: 1,
                },
                // Offsets run zero -> extent. (The previous code reversed both
                // boxes, which maps to the same blit but reads confusingly.)
                src_offsets: [
                    vk::Offset3D { x: 0, y: 0, z: 0 },
                    vk::Offset3D {
                        x: src_info.width as _,
                        y: src_info.height as _,
                        z: src_info.depth as _,
                    },
                ],
                dst_subresource: vk::ImageSubresourceLayers {
                    aspect_mask: format_aspect_mask(dst_info.fmt),
                    mip_level: 0,
                    base_array_layer: 0,
                    layer_count: 1,
                },
                dst_offsets: [
                    vk::Offset3D { x: 0, y: 0, z: 0 },
                    vk::Offset3D {
                        x: dst_info.width as _,
                        y: dst_info.height as _,
                        z: dst_info.depth as _,
                    },
                ],
            },
            filter,
        )
    }

    /// Blits a single region between two images.
    pub fn blit_image_region(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyImageNode>,
        region: &vk::ImageBlit,
        filter: vk::Filter,
    ) -> &mut Self {
        use std::slice::from_ref;

        self.blit_image_regions(src_node, dst_node, from_ref(region), filter)
    }

    /// Blits multiple regions between two images in a single pass.
    pub fn blit_image_regions(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyImageNode>,
        regions: impl Into<Box<[vk::ImageBlit]>>,
        filter: vk::Filter,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();
        let regions = regions.into();
        let src_access_range = self.node_info(src_node).default_view_info();
        let dst_access_range = self.node_info(dst_node).default_view_info();

        self.begin_pass("blit image")
            .access_node_subrange(src_node, AccessType::TransferRead, src_access_range)
            .access_node_subrange(dst_node, AccessType::TransferWrite, dst_access_range)
            .record_cmd_buf(move |device, cmd_buf, bindings| unsafe {
                device.cmd_blit_image(
                    cmd_buf,
                    *bindings[src_node],
                    vk::ImageLayout::TRANSFER_SRC_OPTIMAL,
                    *bindings[dst_node],
                    // Fixed: the destination must be in TRANSFER_DST_OPTIMAL;
                    // it was incorrectly passed as TRANSFER_SRC_OPTIMAL.
                    vk::ImageLayout::TRANSFER_DST_OPTIMAL,
                    &regions,
                    filter,
                );
            })
            .submit_pass()
    }

    /// Clears a color image to transparent black.
    pub fn clear_color_image(&mut self, image_node: impl Into<AnyImageNode>) -> &mut Self {
        self.clear_color_image_value(image_node, [0, 0, 0, 0])
    }

    /// Clears a color image to the given color value.
    pub fn clear_color_image_value(
        &mut self,
        image_node: impl Into<AnyImageNode>,
        color_value: impl Into<Color>,
    ) -> &mut Self {
        let color_value = color_value.into();
        let image_node = image_node.into();
        let image_info = self.node_info(image_node);
        let image_access_range = image_info.default_view_info();

        self.begin_pass("clear color")
            .access_node_subrange(image_node, AccessType::TransferWrite, image_access_range)
            .record_cmd_buf(move |device, cmd_buf, bindings| unsafe {
                device.cmd_clear_color_image(
                    cmd_buf,
                    *bindings[image_node],
                    vk::ImageLayout::TRANSFER_DST_OPTIMAL,
                    &vk::ClearColorValue {
                        float32: color_value.0,
                    },
                    // Clear every mip level and array layer.
                    &[vk::ImageSubresourceRange {
                        aspect_mask: vk::ImageAspectFlags::COLOR,
                        level_count: image_info.mip_level_count,
                        layer_count: image_info.array_elements,
                        ..Default::default()
                    }],
                );
            })
            .submit_pass()
    }

    /// Clears a depth/stencil image to depth `0.0`, stencil `0`.
    pub fn clear_depth_stencil_image(&mut self, image_node: impl Into<AnyImageNode>) -> &mut Self {
        self.clear_depth_stencil_image_value(image_node, 0.0, 0)
    }

    /// Clears a depth/stencil image to the given depth and stencil values.
    pub fn clear_depth_stencil_image_value(
        &mut self,
        image_node: impl Into<AnyImageNode>,
        depth: f32,
        stencil: u32,
    ) -> &mut Self {
        let image_node = image_node.into();
        let image_info = self.node_info(image_node);
        let image_access_range = image_info.default_view_info();

        self.begin_pass("clear depth/stencil")
            .access_node_subrange(image_node, AccessType::TransferWrite, image_access_range)
            .record_cmd_buf(move |device, cmd_buf, bindings| unsafe {
                device.cmd_clear_depth_stencil_image(
                    cmd_buf,
                    *bindings[image_node],
                    vk::ImageLayout::TRANSFER_DST_OPTIMAL,
                    &vk::ClearDepthStencilValue { depth, stencil },
                    // Clear every mip level and array layer; the aspect comes
                    // from the image format (depth, stencil, or both).
                    &[vk::ImageSubresourceRange {
                        aspect_mask: format_aspect_mask(image_info.fmt),
                        level_count: image_info.mip_level_count,
                        layer_count: image_info.array_elements,
                        ..Default::default()
                    }],
                );
            })
            .submit_pass()
    }

    /// Copies as many bytes as both buffers can hold, starting at offset zero.
    pub fn copy_buffer(
        &mut self,
        src_node: impl Into<AnyBufferNode>,
        dst_node: impl Into<AnyBufferNode>,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();
        let src_info = self.node_info(src_node);
        let dst_info = self.node_info(dst_node);

        self.copy_buffer_region(
            src_node,
            dst_node,
            &vk::BufferCopy {
                src_offset: 0,
                dst_offset: 0,
                size: src_info.size.min(dst_info.size),
            },
        )
    }

    /// Copies a single region between two buffers.
    pub fn copy_buffer_region(
        &mut self,
        src_node: impl Into<AnyBufferNode>,
        dst_node: impl Into<AnyBufferNode>,
        region: &vk::BufferCopy,
    ) -> &mut Self {
        use std::slice::from_ref;

        self.copy_buffer_regions(src_node, dst_node, from_ref(region))
    }

    /// Copies multiple regions between two buffers in a single pass.
    pub fn copy_buffer_regions(
        &mut self,
        src_node: impl Into<AnyBufferNode>,
        dst_node: impl Into<AnyBufferNode>,
        regions: impl Into<Box<[vk::BufferCopy]>>,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();
        let regions: Box<[_]> = regions.into();

        // Derive the accessed byte ranges from the regions themselves.
        let (src_access_range, dst_access_range) = buffer_copy_subresources(&regions);

        self.begin_pass("copy buffer")
            .access_node_subrange(src_node, AccessType::TransferRead, src_access_range)
            .access_node_subrange(dst_node, AccessType::TransferWrite, dst_access_range)
            .record_cmd_buf(move |device, cmd_buf, bindings| unsafe {
                device.cmd_copy_buffer(cmd_buf, *bindings[src_node], *bindings[dst_node], &regions);
            })
            .submit_pass()
    }

    /// Copies a buffer's contents into the full extent of an image.
    pub fn copy_buffer_to_image(
        &mut self,
        src_node: impl Into<AnyBufferNode>,
        dst_node: impl Into<AnyImageNode>,
    ) -> &mut Self {
        let dst_node = dst_node.into();
        let dst_info = self.node_info(dst_node);

        self.copy_buffer_to_image_region(
            src_node,
            dst_node,
            &vk::BufferImageCopy {
                buffer_offset: 0,
                buffer_row_length: dst_info.width,
                buffer_image_height: dst_info.height,
                image_subresource: vk::ImageSubresourceLayers {
                    aspect_mask: format_aspect_mask(dst_info.fmt),
                    mip_level: 0,
                    base_array_layer: 0,
                    layer_count: 1,
                },
                image_offset: Default::default(),
                image_extent: vk::Extent3D {
                    depth: dst_info.depth,
                    height: dst_info.height,
                    width: dst_info.width,
                },
            },
        )
    }

    /// Copies a single region from a buffer into an image.
    pub fn copy_buffer_to_image_region(
        &mut self,
        src_node: impl Into<AnyBufferNode>,
        dst_node: impl Into<AnyImageNode>,
        region: &vk::BufferImageCopy,
    ) -> &mut Self {
        use std::slice::from_ref;

        self.copy_buffer_to_image_regions(src_node, dst_node, from_ref(region))
    }

    /// Copies multiple regions from a buffer into an image in a single pass.
    pub fn copy_buffer_to_image_regions(
        &mut self,
        src_node: impl Into<AnyBufferNode>,
        dst_node: impl Into<AnyImageNode>,
        regions: impl Into<Box<[vk::BufferImageCopy]>>,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();
        let dst_access_range = self.node_info(dst_node).default_view_info();
        let regions = regions.into();
        let src_access_range = buffer_image_copy_subresource(&regions);

        self.begin_pass("copy buffer to image")
            .access_node_subrange(src_node, AccessType::TransferRead, src_access_range)
            .access_node_subrange(dst_node, AccessType::TransferWrite, dst_access_range)
            .record_cmd_buf(move |device, cmd_buf, bindings| unsafe {
                device.cmd_copy_buffer_to_image(
                    cmd_buf,
                    *bindings[src_node],
                    *bindings[dst_node],
                    vk::ImageLayout::TRANSFER_DST_OPTIMAL,
                    &regions,
                );
            })
            .submit_pass()
    }

    /// Copies the overlapping extent of one image onto another.
    pub fn copy_image(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyImageNode>,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();
        let src_info = self.node_info(src_node);
        let dst_info = self.node_info(dst_node);

        self.copy_image_region(
            src_node,
            dst_node,
            &vk::ImageCopy {
                src_subresource: vk::ImageSubresourceLayers {
                    aspect_mask: format_aspect_mask(src_info.fmt),
                    mip_level: 0,
                    base_array_layer: 0,
                    // Cube images copy all six faces.
                    layer_count: if matches!(src_info.ty, ImageType::Cube | ImageType::CubeArray) {
                        6
                    } else {
                        1
                    },
                },
                src_offset: vk::Offset3D { x: 0, y: 0, z: 0 },
                dst_subresource: vk::ImageSubresourceLayers {
                    aspect_mask: format_aspect_mask(dst_info.fmt),
                    mip_level: 0,
                    base_array_layer: 0,
                    layer_count: if matches!(dst_info.ty, ImageType::Cube | ImageType::CubeArray) {
                        6
                    } else {
                        1
                    },
                },
                dst_offset: vk::Offset3D { x: 0, y: 0, z: 0 },
                extent: vk::Extent3D {
                    depth: src_info.depth.min(dst_info.depth).max(1),
                    height: src_info.height.min(dst_info.height).max(1),
                    // Fixed: clamp to at least 1 for consistency with depth
                    // and height.
                    width: src_info.width.min(dst_info.width).max(1),
                },
            },
        )
    }

    /// Copies a single region between two images.
    pub fn copy_image_region(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyImageNode>,
        region: &vk::ImageCopy,
    ) -> &mut Self {
        use std::slice::from_ref;

        self.copy_image_regions(src_node, dst_node, from_ref(region))
    }

    /// Copies multiple regions between two images in a single pass.
    pub fn copy_image_regions(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyImageNode>,
        regions: impl Into<Box<[vk::ImageCopy]>>,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();
        let src_access_range = self.node_info(src_node).default_view_info();
        let dst_access_range = self.node_info(dst_node).default_view_info();
        let regions = regions.into();

        self.begin_pass("copy image")
            .access_node_subrange(src_node, AccessType::TransferRead, src_access_range)
            .access_node_subrange(dst_node, AccessType::TransferWrite, dst_access_range)
            .record_cmd_buf(move |device, cmd_buf, bindings| unsafe {
                device.cmd_copy_image(
                    cmd_buf,
                    *bindings[src_node],
                    vk::ImageLayout::TRANSFER_SRC_OPTIMAL,
                    *bindings[dst_node],
                    vk::ImageLayout::TRANSFER_DST_OPTIMAL,
                    &regions,
                );
            })
            .submit_pass()
    }

    /// Copies an image's full extent into a buffer.
    pub fn copy_image_to_buffer(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyBufferNode>,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();
        let src_info = self.node_info(src_node);

        self.copy_image_to_buffer_region(
            src_node,
            dst_node,
            &vk::BufferImageCopy {
                buffer_offset: 0,
                buffer_row_length: src_info.width,
                buffer_image_height: src_info.height,
                image_subresource: vk::ImageSubresourceLayers {
                    aspect_mask: format_aspect_mask(src_info.fmt),
                    mip_level: 0,
                    base_array_layer: 0,
                    layer_count: 1,
                },
                image_offset: Default::default(),
                image_extent: vk::Extent3D {
                    depth: src_info.depth,
                    height: src_info.height,
                    width: src_info.width,
                },
            },
        )
    }

    /// Copies a single region from an image into a buffer.
    pub fn copy_image_to_buffer_region(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyBufferNode>,
        region: &vk::BufferImageCopy,
    ) -> &mut Self {
        use std::slice::from_ref;

        self.copy_image_to_buffer_regions(src_node, dst_node, from_ref(region))
    }

    /// Copies multiple regions from an image into a buffer in a single pass.
    pub fn copy_image_to_buffer_regions(
        &mut self,
        src_node: impl Into<AnyImageNode>,
        dst_node: impl Into<AnyBufferNode>,
        regions: impl Into<Box<[vk::BufferImageCopy]>>,
    ) -> &mut Self {
        let src_node = src_node.into();
        let dst_node = dst_node.into();
        let regions = regions.into();
        let src_subresource = self.node_info(src_node).default_view_info();
        let dst_subresource = buffer_image_copy_subresource(&regions);

        self.begin_pass("copy image to buffer")
            .access_node_subrange(src_node, AccessType::TransferRead, src_subresource)
            .access_node_subrange(dst_node, AccessType::TransferWrite, dst_subresource)
            .record_cmd_buf(move |device, cmd_buf, bindings| unsafe {
                device.cmd_copy_image_to_buffer(
                    cmd_buf,
                    *bindings[src_node],
                    vk::ImageLayout::TRANSFER_SRC_OPTIMAL,
                    *bindings[dst_node],
                    &regions,
                );
            })
            .submit_pass()
    }

    /// Fills an entire buffer with repeated copies of `data`.
    pub fn fill_buffer(&mut self, buffer_node: impl Into<AnyBufferNode>, data: u32) -> &mut Self {
        let buffer_node = buffer_node.into();
        let buffer_info = self.node_info(buffer_node);

        self.fill_buffer_region(buffer_node, data, 0..buffer_info.size)
    }

    /// Fills `region` of a buffer with repeated copies of `data`.
    pub fn fill_buffer_region(
        &mut self,
        buffer_node: impl Into<AnyBufferNode>,
        data: u32,
        region: Range<vk::DeviceSize>,
    ) -> &mut Self {
        let buffer_node = buffer_node.into();

        // Fixed: declare access to just the filled region instead of the whole
        // buffer, which forced unnecessary synchronization.
        let buffer_access_range = region.clone();

        self.begin_pass("fill buffer")
            .access_node_subrange(buffer_node, AccessType::TransferWrite, buffer_access_range)
            .record_cmd_buf(move |device, cmd_buf, bindings| unsafe {
                device.cmd_fill_buffer(
                    cmd_buf,
                    *bindings[buffer_node],
                    region.start,
                    region.end - region.start,
                    data,
                );
            })
            .submit_pass()
    }

    /// Index of the first pass that accesses `node`, if any.
    fn first_node_access_pass_index(&self, node: impl Node) -> Option<usize> {
        self.node_access_pass_index(node, self.passes.iter())
    }

    /// Returns the most recent write access to `node`, searching passes and
    /// executions in reverse submission order.
    pub(super) fn last_write(&self, node: impl Node) -> Option<AccessType> {
        let node_idx = node.index();

        self.passes
            .iter()
            .rev()
            .flat_map(|pass| pass.execs.iter().rev())
            .find_map(|exec| {
                // Only the "late" half of the access pair can be a write.
                exec.accesses.get(&node_idx).and_then(|[_early, late]| {
                    if is_write_access(late.access) {
                        Some(late.access)
                    } else {
                        None
                    }
                })
            })
    }

    /// Index (within `passes`) of the first pass that accesses `node`.
    fn node_access_pass_index<'a>(
        &self,
        node: impl Node,
        passes: impl Iterator<Item = &'a Pass>,
    ) -> Option<usize> {
        let node_idx = node.index();

        for (pass_idx, pass) in passes.enumerate() {
            for exec in pass.execs.iter() {
                if exec.accesses.contains_key(&node_idx) {
                    return Some(pass_idx);
                }
            }
        }

        None
    }

    /// Returns the information describing a bound node.
    pub fn node_info<N>(&self, node: N) -> <N as Information>::Info
    where
        N: Information,
    {
        node.get(self)
    }

    /// Finalizes this graph and hands it to a [`Resolver`] for scheduling.
    pub fn resolve(mut self) -> Resolver {
        // Drop the trailing execution of each pass before resolution.
        // NOTE(review): this relies on pass recording always leaving one
        // trailing empty execution per pass — confirm against `pass_ref.rs`.
        for pass in &mut self.passes {
            pass.execs.pop();
        }

        Resolver::new(self)
    }

    /// Unbinds a node from this graph, returning the underlying resource.
    pub fn unbind_node<N>(&mut self, node: N) -> <N as Edge<Self>>::Result
    where
        N: Edge<Self>,
        N: Unbind<Self, <N as Edge<Self>>::Result>,
    {
        node.unbind(self)
    }

    /// Writes `data` to the start of a buffer.
    pub fn update_buffer(
        &mut self,
        buffer_node: impl Into<AnyBufferNode>,
        data: &'static [u8],
    ) -> &mut Self {
        self.update_buffer_offset(buffer_node, data, 0)
    }

    /// Writes `data` to a buffer at the given byte offset.
    pub fn update_buffer_offset(
        &mut self,
        buffer_node: impl Into<AnyBufferNode>,
        data: &'static [u8],
        offset: vk::DeviceSize,
    ) -> &mut Self {
        let buffer_node = buffer_node.into();

        // Fixed: declare access to just the updated bytes instead of the
        // whole buffer.
        let buffer_access_range = offset..offset + data.len() as vk::DeviceSize;

        self.begin_pass("update buffer")
            .access_node_subrange(buffer_node, AccessType::TransferWrite, buffer_access_range)
            .record_cmd_buf(move |device, cmd_buf, bindings| unsafe {
                device.cmd_update_buffer(cmd_buf, *bindings[buffer_node], offset, data);
            })
            .submit_pass()
    }
}
/// The portion of a bound resource that an access covers.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum Subresource {
    // Acceleration structures are always accessed whole.
    AccelerationStructure,
    Image(ImageSubresource),
    Buffer(BufferSubresource),
}
impl Subresource {
    /// Extracts the buffer subresource.
    ///
    /// # Panics
    ///
    /// Panics if `self` is not the `Buffer` variant.
    fn unwrap_buffer(self) -> BufferSubresource {
        match self {
            Self::Buffer(subresource) => subresource,
            _ => unreachable!(),
        }
    }

    /// Extracts the image subresource.
    ///
    /// # Panics
    ///
    /// Panics if `self` is not the `Image` variant.
    fn unwrap_image(self) -> ImageSubresource {
        match self {
            Self::Image(subresource) => subresource,
            _ => unreachable!(),
        }
    }
}
// A unit subresource stands for a whole acceleration structure.
impl From<()> for Subresource {
    fn from(_: ()) -> Self {
        Self::AccelerationStructure
    }
}
impl From<ImageSubresource> for Subresource {
    fn from(subresource: ImageSubresource) -> Self {
        Self::Image(subresource)
    }
}
impl From<BufferSubresource> for Subresource {
    fn from(subresource: BufferSubresource) -> Self {
        Self::Buffer(subresource)
    }
}
/// An access type paired with the (optional) subresource it covers; `None`
/// means the whole resource.
#[derive(Clone, Copy, Debug)]
struct SubresourceAccess {
    access: AccessType,
    subresource: Option<Subresource>,
}