use super::descriptor::{DescriptorSet, ShaderStageFlags};
use super::device::DeviceInner;
use super::flags::{AccessFlags, AccessFlags2, PipelineStage, PipelineStage2};
use super::graphics_pipeline::GraphicsPipeline;
use super::image::{BufferImageCopy, Image, ImageBarrier};
use super::pipeline::{ComputePipeline, PipelineLayout};
use super::query::QueryPool;
use super::render_pass::{Framebuffer, RenderPass};
use super::{Buffer, Device, Error, Result, check};
use crate::raw::bindings::*;
use std::sync::Arc;
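/// Clear value for a framebuffer attachment: either an RGBA color or a depth/stencil pair.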
#[derive(Debug, Clone, Copy)]
pub enum ClearValue {
Color([f32; 4]),
DepthStencil { depth: f32, stencil: u32 },
}
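/// One copy region for [`CommandBufferRecording::copy_buffer`], mirroring `VkBufferCopy`.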
#[derive(Debug, Clone, Copy)]
pub struct BufferCopy {
pub src_offset: u64,
pub dst_offset: u64,
pub size: u64,
}
impl BufferCopy {
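/// Convenience constructor: copies `size` bytes from offset 0 of the source to offset 0 of the destination.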
pub const fn full(size: u64) -> Self {
Self {
src_offset: 0,
dst_offset: 0,
size,
}
}
}
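/// Owns a `VkCommandPool` and destroys it when dropped.
///
/// A minimal recording sketch (assuming a [`Device`] and a valid queue family
/// index are already available; crate paths and error handling abbreviated):
///
/// ```ignore
/// let pool = CommandPool::new(&device, queue_family_index)?;
/// let mut cmd = pool.allocate_primary()?;
/// let mut rec = cmd.begin()?;
/// // ... record commands through `rec` ...
/// rec.end()?;
/// ```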
pub struct CommandPool {
pub(crate) handle: VkCommandPool,
pub(crate) device: Arc<DeviceInner>,
}
impl CommandPool {
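/// Creates a command pool for `queue_family_index` with default (empty) creation flags,
/// so individual command buffers cannot be reset independently of the pool.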
pub fn new(device: &Device, queue_family_index: u32) -> Result<Self> {
let create = device
.inner
.dispatch
.vkCreateCommandPool
.ok_or(Error::MissingFunction("vkCreateCommandPool"))?;
let info = VkCommandPoolCreateInfo {
sType: VkStructureType::STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
queueFamilyIndex: queue_family_index,
..Default::default()
};
let mut handle: VkCommandPool = 0;
check(unsafe { create(device.inner.handle, &info, std::ptr::null(), &mut handle) })?;
Ok(Self {
handle,
device: Arc::clone(&device.inner),
})
}
pub fn raw(&self) -> VkCommandPool {
self.handle
}
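/// Allocates a single primary command buffer from this pool.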
pub fn allocate_primary(&self) -> Result<CommandBuffer> {
let allocate = self
.device
.dispatch
.vkAllocateCommandBuffers
.ok_or(Error::MissingFunction("vkAllocateCommandBuffers"))?;
let info = VkCommandBufferAllocateInfo {
sType: VkStructureType::STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
commandPool: self.handle,
level: VkCommandBufferLevel::COMMAND_BUFFER_LEVEL_PRIMARY,
commandBufferCount: 1,
..Default::default()
};
let mut handle: VkCommandBuffer = std::ptr::null_mut();
check(unsafe { allocate(self.device.handle, &info, &mut handle) })?;
Ok(CommandBuffer {
handle,
device: Arc::clone(&self.device),
pool: self.handle,
})
}
}
impl Drop for CommandPool {
fn drop(&mut self) {
if let Some(destroy) = self.device.dispatch.vkDestroyCommandPool {
unsafe { destroy(self.device.handle, self.handle, std::ptr::null()) };
}
}
}
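/// A primary command buffer allocated from a [`CommandPool`].
///
/// Only the raw pool handle is stored, so the pool must outlive this buffer;
/// the buffer is freed back to that pool on drop.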
pub struct CommandBuffer {
pub(crate) handle: VkCommandBuffer,
pub(crate) device: Arc<DeviceInner>,
#[allow(dead_code)]
pool: VkCommandPool,
}
impl CommandBuffer {
pub fn raw(&self) -> VkCommandBuffer {
self.handle
}
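/// Begins recording with default begin flags and returns a [`CommandBufferRecording`]
/// guard through which commands are recorded.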
pub fn begin(&mut self) -> Result<CommandBufferRecording<'_>> {
let begin = self
.device
.dispatch
.vkBeginCommandBuffer
.ok_or(Error::MissingFunction("vkBeginCommandBuffer"))?;
let info = VkCommandBufferBeginInfo {
sType: VkStructureType::STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
..Default::default()
};
check(unsafe { begin(self.handle, &info) })?;
Ok(CommandBufferRecording { buffer: self })
}
}
impl Drop for CommandBuffer {
fn drop(&mut self) {
if let Some(free) = self.device.dispatch.vkFreeCommandBuffers {
unsafe { free(self.device.handle, self.pool, 1, &self.handle) };
}
}
}
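/// RAII guard over a command buffer that is currently recording; all `vkCmd*`
/// wrappers live on this type. Call [`end`](Self::end) to finish recording and
/// observe errors; dropping the guard ends recording on a best-effort basis.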
pub struct CommandBufferRecording<'a> {
buffer: &'a mut CommandBuffer,
}
impl<'a> CommandBufferRecording<'a> {
pub fn fill_buffer(&mut self, buffer: &Buffer, dst_offset: u64, size: u64, data: u32) {
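debug_assert!(
dst_offset % 4 == 0,
"vkCmdFillBuffer dst_offset must be a multiple of 4"
);
debug_assert!(
size == !0u64 || size % 4 == 0,
"vkCmdFillBuffer size must be VK_WHOLE_SIZE or a multiple of 4"
);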
let cmd = self
.buffer
.device
.dispatch
.vkCmdFillBuffer
.expect("vkCmdFillBuffer is required by Vulkan 1.0");
unsafe { cmd(self.buffer.handle, buffer.handle, dst_offset, size, data) };
}
pub fn bind_compute_pipeline(&mut self, pipeline: &ComputePipeline) {
let cmd = self
.buffer
.device
.dispatch
.vkCmdBindPipeline
.expect("vkCmdBindPipeline is required by Vulkan 1.0");
unsafe {
cmd(
self.buffer.handle,
VkPipelineBindPoint::PIPELINE_BIND_POINT_COMPUTE,
pipeline.handle,
)
};
}
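/// Binds `descriptor_sets` starting at `first_set` on the compute bind point;
/// no dynamic offsets are passed.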
pub fn bind_compute_descriptor_sets(
&mut self,
layout: &PipelineLayout,
first_set: u32,
descriptor_sets: &[&DescriptorSet],
) {
let cmd = self
.buffer
.device
.dispatch
.vkCmdBindDescriptorSets
.expect("vkCmdBindDescriptorSets is required by Vulkan 1.0");
let raw: Vec<VkDescriptorSet> = descriptor_sets.iter().map(|s| s.handle).collect();
unsafe {
cmd(
self.buffer.handle,
VkPipelineBindPoint::PIPELINE_BIND_POINT_COMPUTE,
layout.handle,
first_set,
raw.len() as u32,
raw.as_ptr(),
0,
std::ptr::null(),
)
};
}
pub fn dispatch(&mut self, group_count_x: u32, group_count_y: u32, group_count_z: u32) {
let cmd = self
.buffer
.device
.dispatch
.vkCmdDispatch
.expect("vkCmdDispatch is required by Vulkan 1.0");
unsafe {
cmd(
self.buffer.handle,
group_count_x,
group_count_y,
group_count_z,
)
};
}
pub fn dispatch_indirect(&mut self, buffer: &Buffer, offset: u64) {
let cmd = self
.buffer
.device
.dispatch
.vkCmdDispatchIndirect
.expect("vkCmdDispatchIndirect is required by Vulkan 1.0");
unsafe { cmd(self.buffer.handle, buffer.handle, offset) };
}
pub fn copy_buffer(&mut self, src: &Buffer, dst: &Buffer, regions: &[BufferCopy]) {
debug_assert!(
!regions.is_empty(),
"vkCmdCopyBuffer requires at least one region"
);
let cmd = self
.buffer
.device
.dispatch
.vkCmdCopyBuffer
.expect("vkCmdCopyBuffer is required by Vulkan 1.0");
let raw: Vec<VkBufferCopy> = regions
.iter()
.map(|r| VkBufferCopy {
srcOffset: r.src_offset,
dstOffset: r.dst_offset,
size: r.size,
})
.collect();
unsafe {
cmd(
self.buffer.handle,
src.handle,
dst.handle,
raw.len() as u32,
raw.as_ptr(),
)
};
}
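/// Writes `bytes` into the push-constant range of `layout` for the given `stages`;
/// both `offset` and `bytes.len()` must be multiples of 4.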
pub fn push_constants(
&mut self,
layout: &PipelineLayout,
stages: ShaderStageFlags,
offset: u32,
bytes: &[u8],
) {
debug_assert!(
offset % 4 == 0,
"push constant offset must be a multiple of 4"
);
debug_assert!(
bytes.len() % 4 == 0,
"push constant size must be a multiple of 4"
);
let cmd = self
.buffer
.device
.dispatch
.vkCmdPushConstants
.expect("vkCmdPushConstants is required by Vulkan 1.0");
unsafe {
cmd(
self.buffer.handle,
layout.handle,
stages.0,
offset,
bytes.len() as u32,
bytes.as_ptr() as *const _,
)
};
}
pub fn reset_query_pool(&mut self, pool: &QueryPool, first_query: u32, query_count: u32) {
let cmd = self
.buffer
.device
.dispatch
.vkCmdResetQueryPool
.expect("vkCmdResetQueryPool is required by Vulkan 1.0");
unsafe { cmd(self.buffer.handle, pool.handle, first_query, query_count) };
}
pub fn write_timestamp(&mut self, pipeline_stage: PipelineStage, pool: &QueryPool, query: u32) {
let cmd = self
.buffer
.device
.dispatch
.vkCmdWriteTimestamp
.expect("vkCmdWriteTimestamp is required by Vulkan 1.0");
unsafe { cmd(self.buffer.handle, pipeline_stage.0, pool.handle, query) };
}
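/// Records a `vkCmdPipelineBarrier` with a single image memory barrier. The
/// subresource range is fixed to the first mip level and first array layer, and
/// queue family ownership is not transferred (`VK_QUEUE_FAMILY_IGNORED`).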
pub fn image_barrier(
&mut self,
src_stage: PipelineStage,
dst_stage: PipelineStage,
barrier: ImageBarrier<'_>,
) {
let cmd = self
.buffer
.device
.dispatch
.vkCmdPipelineBarrier
.expect("vkCmdPipelineBarrier is required by Vulkan 1.0");
let raw = VkImageMemoryBarrier {
sType: VkStructureType::STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
srcAccessMask: barrier.src_access.0,
dstAccessMask: barrier.dst_access.0,
oldLayout: barrier.old_layout.0,
newLayout: barrier.new_layout.0,
srcQueueFamilyIndex: !0u32, // VK_QUEUE_FAMILY_IGNORED
dstQueueFamilyIndex: !0u32,
image: barrier.image.handle,
subresourceRange: VkImageSubresourceRange {
aspectMask: barrier.aspect_mask,
baseMipLevel: 0,
levelCount: 1,
baseArrayLayer: 0,
layerCount: 1,
},
..Default::default()
};
unsafe {
cmd(
self.buffer.handle,
src_stage.0,
dst_stage.0,
0,
0,
std::ptr::null(),
0,
std::ptr::null(),
1,
&raw,
)
};
}
pub fn copy_buffer_to_image(
&mut self,
src: &Buffer,
dst: &Image,
dst_layout: super::ImageLayout,
regions: &[BufferImageCopy],
) {
debug_assert!(
!regions.is_empty(),
"vkCmdCopyBufferToImage requires at least one region"
);
let cmd = self
.buffer
.device
.dispatch
.vkCmdCopyBufferToImage
.expect("vkCmdCopyBufferToImage is required by Vulkan 1.0");
let raw: Vec<VkBufferImageCopy> = regions.iter().map(|r| r.to_raw()).collect();
unsafe {
cmd(
self.buffer.handle,
src.handle,
dst.handle,
dst_layout.0,
raw.len() as u32,
raw.as_ptr(),
)
};
}
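/// Records a global memory barrier through `vkCmdPipelineBarrier2`; returns
/// [`Error::MissingFunction`] when that entry point was not loaded.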
pub fn memory_barrier2(
&mut self,
src_stage: PipelineStage2,
dst_stage: PipelineStage2,
src_access: AccessFlags2,
dst_access: AccessFlags2,
) -> Result<()> {
let cmd = self
.buffer
.device
.dispatch
.vkCmdPipelineBarrier2
.ok_or(Error::MissingFunction("vkCmdPipelineBarrier2"))?;
let mb = VkMemoryBarrier2 {
sType: VkStructureType::STRUCTURE_TYPE_MEMORY_BARRIER_2,
srcStageMask: src_stage.0,
srcAccessMask: src_access.0,
dstStageMask: dst_stage.0,
dstAccessMask: dst_access.0,
..Default::default()
};
let info = VkDependencyInfo {
sType: VkStructureType::STRUCTURE_TYPE_DEPENDENCY_INFO,
memoryBarrierCount: 1,
pMemoryBarriers: &mb,
..Default::default()
};
unsafe { cmd(self.buffer.handle, &info) };
Ok(())
}
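/// Like [`image_barrier`](Self::image_barrier) but through `vkCmdPipelineBarrier2`;
/// the 32-bit access masks from `barrier` are widened to 64-bit `VkAccessFlags2` values,
/// and the same single-mip, single-layer subresource range applies.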
pub fn image_barrier2(
&mut self,
src_stage: PipelineStage2,
dst_stage: PipelineStage2,
barrier: ImageBarrier<'_>,
) -> Result<()> {
let cmd = self
.buffer
.device
.dispatch
.vkCmdPipelineBarrier2
.ok_or(Error::MissingFunction("vkCmdPipelineBarrier2"))?;
let ib = VkImageMemoryBarrier2 {
sType: VkStructureType::STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2,
srcStageMask: src_stage.0,
srcAccessMask: barrier.src_access.0 as u64,
dstStageMask: dst_stage.0,
dstAccessMask: barrier.dst_access.0 as u64,
oldLayout: barrier.old_layout.0,
newLayout: barrier.new_layout.0,
srcQueueFamilyIndex: !0u32,
dstQueueFamilyIndex: !0u32,
image: barrier.image.handle,
subresourceRange: VkImageSubresourceRange {
aspectMask: barrier.aspect_mask,
baseMipLevel: 0,
levelCount: 1,
baseArrayLayer: 0,
layerCount: 1,
},
..Default::default()
};
let info = VkDependencyInfo {
sType: VkStructureType::STRUCTURE_TYPE_DEPENDENCY_INFO,
imageMemoryBarrierCount: 1,
pImageMemoryBarriers: &ib,
..Default::default()
};
unsafe { cmd(self.buffer.handle, &info) };
Ok(())
}
pub fn copy_image_to_buffer(
&mut self,
src: &Image,
src_layout: super::ImageLayout,
dst: &Buffer,
regions: &[BufferImageCopy],
) {
debug_assert!(
!regions.is_empty(),
"vkCmdCopyImageToBuffer requires at least one region"
);
let cmd = self
.buffer
.device
.dispatch
.vkCmdCopyImageToBuffer
.expect("vkCmdCopyImageToBuffer is required by Vulkan 1.0");
let raw: Vec<VkBufferImageCopy> = regions.iter().map(|r| r.to_raw()).collect();
unsafe {
cmd(
self.buffer.handle,
src.handle,
src_layout.0,
dst.handle,
raw.len() as u32,
raw.as_ptr(),
)
};
}
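/// Records a global (all-resource) memory barrier between `src_stage` and `dst_stage`
/// via `vkCmdPipelineBarrier`.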
pub fn memory_barrier(
&mut self,
src_stage: PipelineStage,
dst_stage: PipelineStage,
src_access: AccessFlags,
dst_access: AccessFlags,
) {
let cmd = self
.buffer
.device
.dispatch
.vkCmdPipelineBarrier
.expect("vkCmdPipelineBarrier is required by Vulkan 1.0");
let barrier = VkMemoryBarrier {
sType: VkStructureType::STRUCTURE_TYPE_MEMORY_BARRIER,
srcAccessMask: src_access.0,
dstAccessMask: dst_access.0,
..Default::default()
};
unsafe {
cmd(
self.buffer.handle,
src_stage.0,
dst_stage.0,
0,
1,
&barrier,
0,
std::ptr::null(),
0,
std::ptr::null(),
)
};
}
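/// Begins `render_pass` on `framebuffer` with inline subpass contents. The render
/// area covers the whole framebuffer and `clear_colors` supplies color clear values only.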
pub fn begin_render_pass(
&mut self,
render_pass: &RenderPass,
framebuffer: &Framebuffer,
clear_colors: &[[f32; 4]],
) {
let cmd = self
.buffer
.device
.dispatch
.vkCmdBeginRenderPass
.expect("vkCmdBeginRenderPass is required by Vulkan 1.0");
let raw_clears: Vec<VkClearValue> = clear_colors
.iter()
.map(|c| VkClearValue {
color: VkClearColorValue { float32: *c },
})
.collect();
let info = VkRenderPassBeginInfo {
sType: VkStructureType::STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
renderPass: render_pass.handle,
framebuffer: framebuffer.handle,
renderArea: VkRect2D {
offset: VkOffset2D { x: 0, y: 0 },
extent: VkExtent2D {
width: framebuffer.width,
height: framebuffer.height,
},
},
clearValueCount: raw_clears.len() as u32,
pClearValues: if raw_clears.is_empty() {
std::ptr::null()
} else {
raw_clears.as_ptr()
},
..Default::default()
};
unsafe {
cmd(
self.buffer.handle,
&info,
VkSubpassContents::SUBPASS_CONTENTS_INLINE,
)
};
}
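/// Variant of [`begin_render_pass`](Self::begin_render_pass) that accepts mixed
/// [`ClearValue`]s so depth/stencil attachments can be cleared as well.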
pub fn begin_render_pass_ext(
&mut self,
render_pass: &RenderPass,
framebuffer: &Framebuffer,
clear_values: &[ClearValue],
) {
let cmd = self
.buffer
.device
.dispatch
.vkCmdBeginRenderPass
.expect("vkCmdBeginRenderPass is required by Vulkan 1.0");
let raw_clears: Vec<VkClearValue> = clear_values
.iter()
.map(|cv| match cv {
ClearValue::Color(c) => VkClearValue {
color: VkClearColorValue { float32: *c },
},
ClearValue::DepthStencil { depth, stencil } => VkClearValue {
depthStencil: VkClearDepthStencilValue {
depth: *depth,
stencil: *stencil,
},
},
})
.collect();
let info = VkRenderPassBeginInfo {
sType: VkStructureType::STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
renderPass: render_pass.handle,
framebuffer: framebuffer.handle,
renderArea: VkRect2D {
offset: VkOffset2D { x: 0, y: 0 },
extent: VkExtent2D {
width: framebuffer.width,
height: framebuffer.height,
},
},
clearValueCount: raw_clears.len() as u32,
pClearValues: if raw_clears.is_empty() {
std::ptr::null()
} else {
raw_clears.as_ptr()
},
..Default::default()
};
unsafe {
cmd(
self.buffer.handle,
&info,
VkSubpassContents::SUBPASS_CONTENTS_INLINE,
)
};
}
pub fn end_render_pass(&mut self) {
let cmd = self
.buffer
.device
.dispatch
.vkCmdEndRenderPass
.expect("vkCmdEndRenderPass is required by Vulkan 1.0");
unsafe { cmd(self.buffer.handle) };
}
pub fn bind_graphics_pipeline(&mut self, pipeline: &GraphicsPipeline) {
let cmd = self
.buffer
.device
.dispatch
.vkCmdBindPipeline
.expect("vkCmdBindPipeline is required by Vulkan 1.0");
unsafe {
cmd(
self.buffer.handle,
VkPipelineBindPoint::PIPELINE_BIND_POINT_GRAPHICS,
pipeline.handle,
)
};
}
pub fn bind_graphics_descriptor_sets(
&mut self,
layout: &PipelineLayout,
first_set: u32,
descriptor_sets: &[&DescriptorSet],
) {
let cmd = self
.buffer
.device
.dispatch
.vkCmdBindDescriptorSets
.expect("vkCmdBindDescriptorSets is required by Vulkan 1.0");
let raw: Vec<VkDescriptorSet> = descriptor_sets.iter().map(|s| s.handle).collect();
unsafe {
cmd(
self.buffer.handle,
VkPipelineBindPoint::PIPELINE_BIND_POINT_GRAPHICS,
layout.handle,
first_set,
raw.len() as u32,
raw.as_ptr(),
0,
std::ptr::null(),
)
};
}
pub fn bind_vertex_buffers(&mut self, first_binding: u32, buffers_offsets: &[(&Buffer, u64)]) {
let cmd = self
.buffer
.device
.dispatch
.vkCmdBindVertexBuffers
.expect("vkCmdBindVertexBuffers is required by Vulkan 1.0");
let raw_buffers: Vec<VkBuffer> = buffers_offsets.iter().map(|(b, _)| b.handle).collect();
let raw_offsets: Vec<u64> = buffers_offsets.iter().map(|(_, o)| *o).collect();
unsafe {
cmd(
self.buffer.handle,
first_binding,
raw_buffers.len() as u32,
raw_buffers.as_ptr(),
raw_offsets.as_ptr(),
)
};
}
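/// Binds an index buffer; an `index_type` of 0 selects `VK_INDEX_TYPE_UINT16`,
/// any other value selects `VK_INDEX_TYPE_UINT32`.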
pub fn bind_index_buffer(&mut self, buffer: &Buffer, offset: u64, index_type: u32) {
let cmd = self
.buffer
.device
.dispatch
.vkCmdBindIndexBuffer
.expect("vkCmdBindIndexBuffer is required by Vulkan 1.0");
let it = match index_type {
0 => VkIndexType::INDEX_TYPE_UINT16,
_ => VkIndexType::INDEX_TYPE_UINT32,
};
unsafe { cmd(self.buffer.handle, buffer.handle, offset, it) };
}
pub fn draw(
&mut self,
vertex_count: u32,
instance_count: u32,
first_vertex: u32,
first_instance: u32,
) {
let cmd = self
.buffer
.device
.dispatch
.vkCmdDraw
.expect("vkCmdDraw is required by Vulkan 1.0");
unsafe {
cmd(
self.buffer.handle,
vertex_count,
instance_count,
first_vertex,
first_instance,
)
};
}
pub fn draw_indexed(
&mut self,
index_count: u32,
instance_count: u32,
first_index: u32,
vertex_offset: i32,
first_instance: u32,
) {
let cmd = self
.buffer
.device
.dispatch
.vkCmdDrawIndexed
.expect("vkCmdDrawIndexed is required by Vulkan 1.0");
unsafe {
cmd(
self.buffer.handle,
index_count,
instance_count,
first_index,
vertex_offset,
first_instance,
)
};
}
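/// Sets viewport 0 to the given rectangle with a fixed depth range of 0.0 to 1.0.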
pub fn set_viewport(&mut self, x: f32, y: f32, width: f32, height: f32) {
let cmd = self
.buffer
.device
.dispatch
.vkCmdSetViewport
.expect("vkCmdSetViewport is required by Vulkan 1.0");
let viewport = VkViewport {
x,
y,
width,
height,
minDepth: 0.0,
maxDepth: 1.0,
};
unsafe { cmd(self.buffer.handle, 0, 1, &viewport) };
}
pub fn set_scissor(&mut self, x: i32, y: i32, width: u32, height: u32) {
let cmd = self
.buffer
.device
.dispatch
.vkCmdSetScissor
.expect("vkCmdSetScissor is required by Vulkan 1.0");
let scissor = VkRect2D {
offset: VkOffset2D { x, y },
extent: VkExtent2D { width, height },
};
unsafe { cmd(self.buffer.handle, 0, 1, &scissor) };
}
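/// Finishes recording via `vkEndCommandBuffer` and returns its result. The guard is
/// consumed (`std::mem::forget`) so `Drop` does not end the buffer a second time.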
pub fn end(self) -> Result<()> {
let end = self
.buffer
.device
.dispatch
.vkEndCommandBuffer
.ok_or(Error::MissingFunction("vkEndCommandBuffer"))?;
let result = unsafe { end(self.buffer.handle) };
std::mem::forget(self);
check(result)
}
}
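// Best-effort fallback: if the guard is dropped without `end()`, recording is still
// ended, but any error from vkEndCommandBuffer is ignored.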
impl<'a> Drop for CommandBufferRecording<'a> {
fn drop(&mut self) {
if let Some(end) = self.buffer.device.dispatch.vkEndCommandBuffer {
unsafe { end(self.buffer.handle) };
}
}
}