use core::{
self,
Buffer,
CommandPool,
DescriptorSet,
Event,
Image,
Pipeline,
PipelineLayout,
QueryPool,
};
use libc::c_void;
use std::cmp::Ordering;
use std::hash::{Hash, Hasher};
use std::ptr;
use std::sync::Arc;
use vks;
use {TryDestroyError, TryDestroyErrorKind, VulkanObject};
/// A Vulkan command buffer, reference-counted via `Arc`.
///
/// Cloning is cheap (a refcount bump on the shared `Inner`). When the last
/// clone is dropped, the underlying `VkCommandBuffer` is freed back to the
/// `CommandPool` it was allocated from (see `Drop for Inner`).
///
/// Equality, ordering and hashing are all derived and ultimately compare the
/// raw Vulkan handle (see the `Inner` trait impls at the bottom of this file).
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct CommandBuffer(Arc<Inner>);
impl VulkanObject for CommandBuffer {
    type NativeVulkanObject = vks::VkCommandBuffer;

    /// Exposes the raw `VkCommandBuffer` handle backing this object.
    #[inline]
    fn as_native_vulkan_object(&self) -> Self::NativeVulkanObject {
        self.handle()
    }

    /// Consumes this wrapper, destroying the command buffer if this is the
    /// only remaining reference.
    ///
    /// On failure the wrapper is handed back inside the error together with
    /// the observed reference count, so the caller can retry later.
    fn try_destroy(self) -> Result<(), TryDestroyError<Self>> {
        match Arc::strong_count(&self.0) {
            // Sole owner: dropping `self` will run `Inner::drop` and free the buffer.
            1 => Ok(()),
            // Still shared elsewhere: report how many references exist.
            refs => Err(TryDestroyError::new(self, TryDestroyErrorKind::InUse(Some(refs)))),
        }
    }
}
impl CommandBuffer {
/// Wraps a raw `VkCommandBuffer` that was allocated from `command_pool`.
///
/// The pool is stored alongside the handle so the buffer can be freed back
/// to it when the last clone is dropped (see `Drop for Inner`).
pub(crate) fn new(handle: vks::VkCommandBuffer, command_pool: CommandPool) -> Self {
CommandBuffer(Arc::new(Inner {
handle: handle,
command_pool: command_pool,
}))
}
/// Returns the raw Vulkan handle.
#[inline]
pub(crate) fn handle(&self) -> vks::VkCommandBuffer {
self.0.handle
}
/// Returns the device-level function-pointer loader of the owning pool.
#[inline]
pub(crate) fn loader(&self) -> &vks::DeviceProcAddrLoader {
self.0.command_pool.loader()
}
/// Begins recording this command buffer (`vkBeginCommandBuffer`).
///
/// The wrapper created from `begin_info` owns the translated Vulkan structs
/// and is kept alive on the stack until after the FFI call returns.
pub fn begin(&self, begin_info: &core::CommandBufferBeginInfo) -> Result<(), core::Error> {
let begin_info_wrapper = core::VkCommandBufferBeginInfoWrapper::new(begin_info, true);
let res = unsafe {
(self.loader().core.vkBeginCommandBuffer)(self.handle(), &begin_info_wrapper.vks_struct)
};
if res == vks::VK_SUCCESS {
Ok(())
}
else {
Err(res.into())
}
}
/// Ends recording (`vkEndCommandBuffer`).
pub fn end(&self) -> Result<(), core::Error> {
let res = unsafe {
(self.loader().core.vkEndCommandBuffer)(self.handle())
};
if res == vks::VK_SUCCESS {
Ok(())
}
else {
Err(res.into())
}
}
/// Resets the command buffer to the initial state (`vkResetCommandBuffer`).
pub fn reset(&self, flags: core::CommandBufferResetFlags) -> Result<(), core::Error> {
let res = unsafe {
(self.loader().core.vkResetCommandBuffer)(self.handle(), flags)
};
if res == vks::VK_SUCCESS {
Ok(())
}
else {
Err(res.into())
}
}
/// Records a pipeline bind (`vkCmdBindPipeline`).
pub fn bind_pipeline(&self, pipeline_bind_point: core::PipelineBindPoint, pipeline: &Pipeline) {
unsafe {
(self.loader().core.vkCmdBindPipeline)(self.handle(), pipeline_bind_point.into(), pipeline.handle());
}
}
/// Records a dynamic viewport update (`vkCmdSetViewport`).
///
/// The viewports are converted to their Vulkan representation into a
/// temporary `Vec` that outlives the FFI call.
pub fn set_viewport(&self, first_viewport: u32, viewports: &[core::Viewport]) {
let viewports: Vec<_> = viewports.iter().map(From::from).collect();
unsafe {
(self.loader().core.vkCmdSetViewport)(self.handle(), first_viewport, viewports.len() as u32, viewports.as_ptr());
}
}
/// Records a dynamic scissor update (`vkCmdSetScissor`).
pub fn set_scissor(&self, first_scissor: u32, scissors: &[core::Rect2D]) {
let scissors: Vec<_> = scissors.iter().map(From::from).collect();
unsafe {
(self.loader().core.vkCmdSetScissor)(self.handle(), first_scissor, scissors.len() as u32, scissors.as_ptr());
}
}
/// Records a dynamic line width update (`vkCmdSetLineWidth`).
pub fn set_line_width(&self, line_width: f32) {
unsafe {
(self.loader().core.vkCmdSetLineWidth)(self.handle(), line_width);
}
}
/// Records a dynamic depth bias update (`vkCmdSetDepthBias`).
pub fn set_depth_bias(&self, depth_bias_constant_factor: f32, depth_bias_clamp: f32, depth_bias_slope_factor: f32) {
unsafe {
(self.loader().core.vkCmdSetDepthBias)(self.handle(), depth_bias_constant_factor, depth_bias_clamp, depth_bias_slope_factor);
}
}
/// Records a dynamic blend-constants update (`vkCmdSetBlendConstants`).
///
/// NOTE(review): Vulkan reads exactly four floats through this pointer; the
/// slice length is not validated here, so a shorter slice would make the
/// driver read out of bounds — confirm callers always pass 4 values.
pub fn set_blend_constants(&self, blend_constants: &[f32]) {
unsafe {
(self.loader().core.vkCmdSetBlendConstants)(self.handle(), blend_constants.as_ptr());
}
}
/// Records a dynamic depth-bounds update (`vkCmdSetDepthBounds`).
pub fn set_depth_bounds(&self, min_depth_bounds: f32, max_depth_bounds: f32) {
unsafe {
(self.loader().core.vkCmdSetDepthBounds)(self.handle(), min_depth_bounds, max_depth_bounds);
}
}
/// Records a stencil compare-mask update (`vkCmdSetStencilCompareMask`).
pub fn set_stencil_compare_mask(&self, face_mask: core::StencilFaceFlags, compare_mask: u32) {
unsafe {
(self.loader().core.vkCmdSetStencilCompareMask)(self.handle(), face_mask, compare_mask);
}
}
/// Records a stencil write-mask update (`vkCmdSetStencilWriteMask`).
pub fn set_stencil_write_mask(&self, face_mask: core::StencilFaceFlags, write_mask: u32) {
unsafe {
(self.loader().core.vkCmdSetStencilWriteMask)(self.handle(), face_mask, write_mask);
}
}
/// Records a stencil reference update (`vkCmdSetStencilReference`).
pub fn set_stencil_reference(&self, face_mask: core::StencilFaceFlags, reference: u32) {
unsafe {
(self.loader().core.vkCmdSetStencilReference)(self.handle(), face_mask, reference);
}
}
/// Records a descriptor-set bind (`vkCmdBindDescriptorSets`).
///
/// `dynamic_offsets: None` is forwarded as count 0 / null pointer.
pub fn bind_descriptor_sets(&self, pipeline_bind_point: core::PipelineBindPoint, layout: &PipelineLayout, first_set: u32, descriptor_sets: &[DescriptorSet], dynamic_offsets: Option<&[u32]>) {
let descriptor_sets: Vec<_> = descriptor_sets.iter().map(DescriptorSet::handle).collect();
let (dynamic_offsets_count, dynamic_offsets_ptr) = match dynamic_offsets {
Some(dynamic_offsets) => (dynamic_offsets.len() as u32, dynamic_offsets.as_ptr()),
None => (0, ptr::null()),
};
unsafe {
(self.loader().core.vkCmdBindDescriptorSets)(self.handle(), pipeline_bind_point.into(), layout.handle(), first_set, descriptor_sets.len() as u32, descriptor_sets.as_ptr(), dynamic_offsets_count, dynamic_offsets_ptr);
}
}
/// Records an index-buffer bind (`vkCmdBindIndexBuffer`).
pub fn bind_index_buffer(&self, buffer: &Buffer, offset: u64, index_type: core::IndexType) {
unsafe {
(self.loader().core.vkCmdBindIndexBuffer)(self.handle(), buffer.handle(), offset, index_type.into());
}
}
/// Records a vertex-buffer bind (`vkCmdBindVertexBuffers`).
///
/// NOTE(review): the binding count is taken from `buffers.len()`, and the
/// driver reads that many entries from `offsets` as well; the two lengths
/// are not checked for equality here — confirm callers keep them matched.
pub fn bind_vertex_buffers(&self, first_binding: u32, buffers: &[Buffer], offsets: &[u64]) {
let buffers: Vec<_> = buffers.iter().map(Buffer::handle).collect();
unsafe {
(self.loader().core.vkCmdBindVertexBuffers)(self.handle(), first_binding, buffers.len() as u32, buffers.as_ptr(), offsets.as_ptr());
}
}
/// Records a non-indexed draw (`vkCmdDraw`).
pub fn draw(&self, vertex_count: u32, instance_count: u32, first_vertex: u32, first_instance: u32) {
unsafe {
(self.loader().core.vkCmdDraw)(self.handle(), vertex_count, instance_count, first_vertex, first_instance);
}
}
/// Records an indexed draw (`vkCmdDrawIndexed`).
pub fn draw_indexed(&self, index_count: u32, instance_count: u32, first_index: u32, vertex_offset: i32, first_instance: u32) {
unsafe {
(self.loader().core.vkCmdDrawIndexed)(self.handle(), index_count, instance_count, first_index, vertex_offset, first_instance);
}
}
/// Records an indirect draw (`vkCmdDrawIndirect`).
pub fn draw_indirect(&self, buffer: &Buffer, offset: u64, draw_count: u32, stride: u32) {
unsafe {
(self.loader().core.vkCmdDrawIndirect)(self.handle(), buffer.handle(), offset, draw_count, stride);
}
}
/// Records an indexed indirect draw (`vkCmdDrawIndexedIndirect`).
pub fn draw_indexed_indirect(&self, buffer: &Buffer, offset: u64, draw_count: u32, stride: u32) {
unsafe {
(self.loader().core.vkCmdDrawIndexedIndirect)(self.handle(), buffer.handle(), offset, draw_count, stride);
}
}
/// Records a compute dispatch (`vkCmdDispatch`).
pub fn dispatch(&self, group_count_x: u32, group_count_y: u32, group_count_z: u32) {
unsafe {
(self.loader().core.vkCmdDispatch)(self.handle(), group_count_x, group_count_y, group_count_z);
}
}
/// Records an indirect compute dispatch (`vkCmdDispatchIndirect`).
pub fn dispatch_indirect(&self, buffer: &Buffer, offset: u64) {
unsafe {
(self.loader().core.vkCmdDispatchIndirect)(self.handle(), buffer.handle(), offset);
}
}
/// Records a buffer-to-buffer copy (`vkCmdCopyBuffer`).
pub fn copy_buffer(&self, src_buffer: &Buffer, dst_buffer: &Buffer, regions: &[core::BufferCopy]) {
let regions: Vec<_> = regions.iter().map(From::from).collect();
unsafe {
(self.loader().core.vkCmdCopyBuffer)(self.handle(), src_buffer.handle(), dst_buffer.handle(), regions.len() as u32, regions.as_ptr());
}
}
/// Records an image-to-image copy (`vkCmdCopyImage`).
pub fn copy_image(&self, src_image: &Image, src_image_layout: core::ImageLayout, dst_image: &Image, dst_image_layout: core::ImageLayout, regions: &[core::ImageCopy]) {
let regions: Vec<_> = regions.iter().map(From::from).collect();
unsafe {
(self.loader().core.vkCmdCopyImage)(self.handle(), src_image.handle(), src_image_layout.into(), dst_image.handle(), dst_image_layout.into(), regions.len() as u32, regions.as_ptr());
}
}
/// Records a scaled/filtered image blit (`vkCmdBlitImage`).
pub fn blit_image(&self, src_image: &Image, src_image_layout: core::ImageLayout, dst_image: &Image, dst_image_layout: core::ImageLayout, regions: &[core::ImageBlit], filter: core::Filter) {
let regions: Vec<_> = regions.iter().map(From::from).collect();
unsafe {
(self.loader().core.vkCmdBlitImage)(self.handle(), src_image.handle(), src_image_layout.into(), dst_image.handle(), dst_image_layout.into(), regions.len() as u32, regions.as_ptr(), filter.into());
}
}
/// Records a buffer-to-image copy (`vkCmdCopyBufferToImage`).
pub fn copy_buffer_to_image(&self, src_buffer: &Buffer, dst_image: &Image, dst_image_layout: core::ImageLayout, regions: &[core::BufferImageCopy]) {
let regions: Vec<_> = regions.iter().map(From::from).collect();
unsafe {
(self.loader().core.vkCmdCopyBufferToImage)(self.handle(), src_buffer.handle(), dst_image.handle(), dst_image_layout.into(), regions.len() as u32, regions.as_ptr());
}
}
/// Records an image-to-buffer copy (`vkCmdCopyImageToBuffer`).
pub fn copy_image_to_buffer(&self, src_image: &Image, src_image_layout: core::ImageLayout, dst_buffer: &Buffer, regions: &[core::BufferImageCopy]) {
let regions: Vec<_> = regions.iter().map(From::from).collect();
unsafe {
(self.loader().core.vkCmdCopyImageToBuffer)(self.handle(), src_image.handle(), src_image_layout.into(), dst_buffer.handle(), regions.len() as u32, regions.as_ptr());
}
}
/// Records an inline buffer update (`vkCmdUpdateBuffer`).
///
/// NOTE(review): Vulkan requires `dst_offset` and the data size to be
/// multiples of 4, and the `*const u32` cast presumes the byte slice is
/// suitably aligned for the driver's read — neither is validated here;
/// confirm against callers.
pub fn update_buffer(&self, dst_buffer: &Buffer, dst_offset: u64, data: &[u8]) {
unsafe {
(self.loader().core.vkCmdUpdateBuffer)(self.handle(), dst_buffer.handle(), dst_offset, data.len() as u64, data.as_ptr() as *const u32);
}
}
/// Records a buffer fill with a repeated 32-bit value (`vkCmdFillBuffer`).
pub fn fill_buffer(&self, dst_buffer: &Buffer, dst_offset: u64, size: core::OptionalDeviceSize, data: u32) {
unsafe {
(self.loader().core.vkCmdFillBuffer)(self.handle(), dst_buffer.handle(), dst_offset, size.into(), data);
}
}
/// Records a color-image clear (`vkCmdClearColorImage`).
pub fn clear_color_image(&self, image: &Image, image_layout: core::ImageLayout, color: &core::ClearColorValue, ranges: &[core::ImageSubresourceRange]) {
// Convert to the Vulkan union/struct forms; both temporaries outlive the call.
let color = color.into();
let ranges: Vec<_> = ranges.iter().map(From::from).collect();
unsafe {
(self.loader().core.vkCmdClearColorImage)(self.handle(), image.handle(), image_layout.into(), &color, ranges.len() as u32, ranges.as_ptr());
}
}
/// Records a depth/stencil-image clear (`vkCmdClearDepthStencilImage`).
pub fn clear_depth_stencil_image(&self, image: &Image, image_layout: core::ImageLayout, depth_stencil: &core::ClearDepthStencilValue, ranges: &[core::ImageSubresourceRange]) {
let depth_stencil = depth_stencil.into();
let ranges: Vec<_> = ranges.iter().map(From::from).collect();
unsafe {
(self.loader().core.vkCmdClearDepthStencilImage)(self.handle(), image.handle(), image_layout.into(), &depth_stencil, ranges.len() as u32, ranges.as_ptr());
}
}
/// Records clears of regions within bound attachments (`vkCmdClearAttachments`).
pub fn clear_attachments(&self, attachments: &[core::ClearAttachment], rects: &[core::ClearRect]) {
let attachments: Vec<_> = attachments.iter().map(From::from).collect();
let rects: Vec<_> = rects.iter().map(From::from).collect();
unsafe {
(self.loader().core.vkCmdClearAttachments)(self.handle(), attachments.len() as u32, attachments.as_ptr(), rects.len() as u32, rects.as_ptr());
}
}
/// Records a multisample resolve (`vkCmdResolveImage`).
pub fn resolve_image(&self, src_image: &Image, src_image_layout: core::ImageLayout, dst_image: &Image, dst_image_layout: core::ImageLayout, regions: &[core::ImageResolve]) {
let regions: Vec<_> = regions.iter().map(From::from).collect();
unsafe {
(self.loader().core.vkCmdResolveImage)(self.handle(), src_image.handle(), src_image_layout.into(), dst_image.handle(), dst_image_layout.into(), regions.len() as u32, regions.as_ptr());
}
}
/// Records an event signal (`vkCmdSetEvent`).
pub fn set_event(&self, event: &Event, stage_mask: core::PipelineStageFlags) {
unsafe {
(self.loader().core.vkCmdSetEvent)(self.handle(), event.handle(), stage_mask);
}
}
/// Records an event reset (`vkCmdResetEvent`).
pub fn reset_event(&self, event: &Event, stage_mask: core::PipelineStageFlags) {
unsafe {
(self.loader().core.vkCmdResetEvent)(self.handle(), event.handle(), stage_mask);
}
}
/// Records a wait on one or more events (`vkCmdWaitEvents`).
///
/// Each optional barrier slice is translated into wrapper structs plus a
/// `Vec` of raw Vulkan structs. The pointer is taken from the `Vec` *before*
/// it is moved into the tuple — this is sound because moving a `Vec` does
/// not relocate its heap buffer. The wrapper/Vec tuple elements are never
/// read again (hence `#[allow(unused_variables)]`); they exist solely to
/// keep the structs alive until after the FFI call. `None` becomes
/// count 0 / null pointer.
pub fn wait_events(&self, events: &[Event], src_stage_mask: core::PipelineStageFlags, dst_stage_mask: core::PipelineStageFlags, memory_barriers: Option<&[core::MemoryBarrier]>, buffer_memory_barriers: Option<&[core::BufferMemoryBarrier]>, image_memory_barriers: Option<&[core::ImageMemoryBarrier]>) {
let events: Vec<_> = events.iter().map(Event::handle).collect();
#[allow(unused_variables)]
let (memory_barriers_count, memory_barriers_ptr, vk_memory_barriers, memory_barriers_wrappers) = match memory_barriers {
Some(memory_barriers) => {
let memory_barriers_wrappers: Vec<_> = memory_barriers.iter().map(|m| core::VkMemoryBarrierWrapper::new(m, true)).collect();
let vk_memory_barriers: Vec<_> = memory_barriers_wrappers.iter().map(|m| m.vks_struct).collect();
(memory_barriers.len() as u32, vk_memory_barriers.as_ptr(), Some(vk_memory_barriers), Some(memory_barriers_wrappers))
}
None => (0, ptr::null(), None, None),
};
#[allow(unused_variables)]
let (buffer_memory_barriers_count, buffer_memory_barriers_ptr, vk_buffer_memory_barriers, buffer_memory_barriers_wrappers) = match buffer_memory_barriers {
Some(buffer_memory_barriers) => {
let buffer_memory_barriers_wrappers: Vec<_> = buffer_memory_barriers.iter().map(|b| core::VkBufferMemoryBarrierWrapper::new(b, true)).collect();
let vk_buffer_memory_barriers: Vec<_> = buffer_memory_barriers_wrappers.iter().map(|b| b.vks_struct).collect();
(buffer_memory_barriers.len() as u32, vk_buffer_memory_barriers.as_ptr(), Some(vk_buffer_memory_barriers), Some(buffer_memory_barriers_wrappers))
}
None => (0, ptr::null(), None, None),
};
#[allow(unused_variables)]
let (image_memory_barriers_count, image_memory_barriers_ptr, vk_image_memory_barriers, image_memory_barriers_wrappers) = match image_memory_barriers {
Some(image_memory_barriers) => {
let image_memory_barriers_wrappers: Vec<_> = image_memory_barriers.iter().map(|i| core::VkImageMemoryBarrierWrapper::new(i, true)).collect();
let vk_image_memory_barriers: Vec<_> = image_memory_barriers_wrappers.iter().map(|i| i.vks_struct).collect();
(image_memory_barriers.len() as u32, vk_image_memory_barriers.as_ptr(), Some(vk_image_memory_barriers), Some(image_memory_barriers_wrappers))
}
None => (0, ptr::null(), None, None),
};
unsafe {
(self.loader().core.vkCmdWaitEvents)(self.handle(), events.len() as u32, events.as_ptr(), src_stage_mask, dst_stage_mask, memory_barriers_count, memory_barriers_ptr, buffer_memory_barriers_count, buffer_memory_barriers_ptr, image_memory_barriers_count, image_memory_barriers_ptr);
}
}
/// Records a pipeline barrier (`vkCmdPipelineBarrier`).
///
/// Barrier-slice handling is identical to `wait_events`: wrappers and raw
/// struct `Vec`s are kept alive in the tuples until after the FFI call,
/// and the pointers stay valid because a moved `Vec`'s heap buffer does
/// not relocate. `None` becomes count 0 / null pointer.
pub fn pipeline_barrier(&self, src_stage_mask: core::PipelineStageFlags, dst_stage_mask: core::PipelineStageFlags, dependency_flags: core::DependencyFlags, memory_barriers: Option<&[core::MemoryBarrier]>, buffer_memory_barriers: Option<&[core::BufferMemoryBarrier]>, image_memory_barriers: Option<&[core::ImageMemoryBarrier]>) {
#[allow(unused_variables)]
let (memory_barriers_count, memory_barriers_ptr, vk_memory_barriers, memory_barriers_wrappers) = match memory_barriers {
Some(memory_barriers) => {
let memory_barriers_wrappers: Vec<_> = memory_barriers.iter().map(|m| core::VkMemoryBarrierWrapper::new(m, true)).collect();
let vk_memory_barriers: Vec<_> = memory_barriers_wrappers.iter().map(|m| m.vks_struct).collect();
(memory_barriers.len() as u32, vk_memory_barriers.as_ptr(), Some(vk_memory_barriers), Some(memory_barriers_wrappers))
}
None => (0, ptr::null(), None, None),
};
#[allow(unused_variables)]
let (buffer_memory_barriers_count, buffer_memory_barriers_ptr, vk_buffer_memory_barriers, buffer_memory_barriers_wrappers) = match buffer_memory_barriers {
Some(buffer_memory_barriers) => {
let buffer_memory_barriers_wrappers: Vec<_> = buffer_memory_barriers.iter().map(|b| core::VkBufferMemoryBarrierWrapper::new(b, true)).collect();
let vk_buffer_memory_barriers: Vec<_> = buffer_memory_barriers_wrappers.iter().map(|b| b.vks_struct).collect();
(buffer_memory_barriers.len() as u32, vk_buffer_memory_barriers.as_ptr(), Some(vk_buffer_memory_barriers), Some(buffer_memory_barriers_wrappers))
}
None => (0, ptr::null(), None, None),
};
#[allow(unused_variables)]
let (image_memory_barriers_count, image_memory_barriers_ptr, vk_image_memory_barriers, image_memory_barriers_wrappers) = match image_memory_barriers {
Some(image_memory_barriers) => {
let image_memory_barriers_wrappers: Vec<_> = image_memory_barriers.iter().map(|i| core::VkImageMemoryBarrierWrapper::new(i, true)).collect();
let vk_image_memory_barriers: Vec<_> = image_memory_barriers_wrappers.iter().map(|i| i.vks_struct).collect();
(image_memory_barriers.len() as u32, vk_image_memory_barriers.as_ptr(), Some(vk_image_memory_barriers), Some(image_memory_barriers_wrappers))
}
None => (0, ptr::null(), None, None),
};
unsafe {
(self.loader().core.vkCmdPipelineBarrier)(self.handle(), src_stage_mask, dst_stage_mask, dependency_flags, memory_barriers_count, memory_barriers_ptr, buffer_memory_barriers_count, buffer_memory_barriers_ptr, image_memory_barriers_count, image_memory_barriers_ptr);
}
}
/// Begins a query (`vkCmdBeginQuery`).
pub fn begin_query(&self, query_pool: &QueryPool, query: u32, flags: core::QueryControlFlags) {
unsafe {
(self.loader().core.vkCmdBeginQuery)(self.handle(), query_pool.handle(), query, flags);
}
}
/// Ends a query (`vkCmdEndQuery`).
pub fn end_query(&self, query_pool: &QueryPool, query: u32) {
unsafe {
(self.loader().core.vkCmdEndQuery)(self.handle(), query_pool.handle(), query);
}
}
/// Resets a range of queries in a pool (`vkCmdResetQueryPool`).
pub fn reset_query_pool(&self, query_pool: &QueryPool, first_query: u32, query_count: u32) {
unsafe {
(self.loader().core.vkCmdResetQueryPool)(self.handle(), query_pool.handle(), first_query, query_count);
}
}
/// Writes a timestamp into a query (`vkCmdWriteTimestamp`).
pub fn write_timestamp(&self, pipeline_stage: core::PipelineStageFlagBits, query_pool: &QueryPool, query: u32) {
unsafe {
(self.loader().core.vkCmdWriteTimestamp)(self.handle(), pipeline_stage, query_pool.handle(), query);
}
}
/// Copies query results into a buffer (`vkCmdCopyQueryPoolResults`).
pub fn copy_query_pool_results(&self, query_pool: &QueryPool, first_query: u32, query_count: u32, dst_buffer: &Buffer, dst_offset: u64, stride: u64, flags: core::QueryResultFlags) {
unsafe {
(self.loader().core.vkCmdCopyQueryPoolResults)(self.handle(), query_pool.handle(), first_query, query_count, dst_buffer.handle(), dst_offset, stride, flags);
}
}
/// Records a push-constant update (`vkCmdPushConstants`).
///
/// `values` is passed as raw bytes with its length in bytes.
pub fn push_constants(&self, layout: &PipelineLayout, stage_flags: core::ShaderStageFlags, offset: u32, values: &[u8]) {
unsafe {
(self.loader().core.vkCmdPushConstants)(self.handle(), layout.handle(), stage_flags, offset, values.len() as u32, values.as_ptr() as *const c_void);
}
}
/// Begins a render pass (`vkCmdBeginRenderPass`).
///
/// The wrapper keeps the translated begin-info structs alive through the call.
pub fn begin_render_pass(&self, render_pass_begin: &core::RenderPassBeginInfo, contents: core::SubpassContents) {
let render_pass_begin_wrapper = core::VkRenderPassBeginInfoWrapper::new(render_pass_begin, true);
unsafe {
(self.loader().core.vkCmdBeginRenderPass)(self.handle(), &render_pass_begin_wrapper.vks_struct, contents.into());
}
}
/// Advances to the next subpass (`vkCmdNextSubpass`).
pub fn next_subpass(&self, contents: core::SubpassContents) {
unsafe {
(self.loader().core.vkCmdNextSubpass)(self.handle(), contents.into());
}
}
/// Ends the current render pass (`vkCmdEndRenderPass`).
pub fn end_render_pass(&self) {
unsafe {
(self.loader().core.vkCmdEndRenderPass)(self.handle());
}
}
/// Records execution of secondary command buffers (`vkCmdExecuteCommands`).
pub fn execute_commands(&self, command_buffers: &[CommandBuffer]) {
let command_buffers: Vec<_> = command_buffers.iter().map(CommandBuffer::handle).collect();
unsafe {
(self.loader().core.vkCmdExecuteCommands)(self.handle(), command_buffers.len() as u32, command_buffers.as_ptr());
}
}
}
/// Shared state behind `CommandBuffer`; dropped exactly once, when the last
/// `Arc` clone goes away.
#[derive(Debug)]
struct Inner {
// Raw Vulkan handle for this command buffer.
handle: vks::VkCommandBuffer,
// Keeps the owning pool (and through it the device/loader) alive for as
// long as this command buffer exists; also used to free the buffer on drop.
command_pool: CommandPool,
}
impl Drop for Inner {
/// Frees the command buffer back to the pool it was allocated from
/// (`vkFreeCommandBuffers` with a count of 1).
fn drop(&mut self) {
unsafe {
// SAFETY: `command_pool` is owned by this struct, so the pool (and its
// device) are still alive here; `handle` was allocated from that pool
// in `CommandBuffer::new` and is freed exactly once.
(self.command_pool.loader().core.vkFreeCommandBuffers)(self.command_pool.device_handle(), self.command_pool.handle(), 1, &self.handle);
}
}
}
// SAFETY NOTE(review): `Inner` only holds a raw Vulkan handle plus a
// `CommandPool`; these impls assert the raw handle is safe to move/share
// across threads. Vulkan command buffers require external synchronization
// for recording — presumably callers uphold that; confirm against the
// crate's threading contract.
unsafe impl Send for Inner { }
unsafe impl Sync for Inner { }
impl PartialEq for Inner {
/// Two `Inner`s are equal iff they wrap the same raw Vulkan handle.
#[inline]
fn eq(&self, other: &Self) -> bool {
self.handle == other.handle
}
}
impl Eq for Inner { }
impl PartialOrd for Inner {
    /// Delegates to `Ord::cmp` (which compares the raw handle values), so the
    /// two orderings can never disagree; the result is always `Some` because
    /// the handle ordering is total.
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // Canonical form when `Ord` is also implemented
        // (clippy: non_canonical_partial_ord_impl).
        Some(self.cmp(other))
    }
}
impl Ord for Inner {
/// Total order over the raw handle values; consistent with `PartialEq`,
/// which also compares `handle`.
#[inline]
fn cmp(&self, other: &Self) -> Ordering {
self.handle.cmp(&other.handle)
}
}
impl Hash for Inner {
/// Hashes only the raw handle, matching the `PartialEq` impl so equal
/// values hash identically.
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
self.handle.hash(state);
}
}