use std::cmp;
use std::mem;
use std::ops::Range;
use std::ptr;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use std::u32;
use smallvec::SmallVec;
use buffer::Buffer;
use buffer::BufferSlice;
use buffer::sys::UnsafeBuffer;
use buffer::traits::PipelineBarrierRequest as BufferPipelineBarrierRequest;
use command_buffer::pool::AllocatedCommandBuffer;
use command_buffer::pool::CommandPool;
use command_buffer::pool::CommandPoolFinished;
use descriptor::pipeline_layout::PipelineLayout;
use descriptor::descriptor_set::UnsafeDescriptorSet;
use descriptor::descriptor::ShaderStages;
use device::Device;
use format::ClearValue;
use format::FormatTy;
use framebuffer::RenderPass;
use framebuffer::Subpass;
use framebuffer::UnsafeRenderPass;
use framebuffer::traits::Framebuffer;
use image::Image;
use image::sys::Layout;
use image::sys::UnsafeImage;
use image::traits::PipelineBarrierRequest as ImagePipelineBarrierRequest;
use pipeline::ComputePipeline;
use pipeline::GraphicsPipeline;
use pipeline::input_assembly::IndexType;
use sync::AccessFlagBits;
use sync::PipelineStages;
use OomError;
use VulkanObject;
use VulkanPointers;
use check_errors;
use vk;
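/// Command buffer under construction.
///
/// Wraps a raw `vk::CommandBuffer` in the recording state, together with the pool it was
/// allocated from. Almost every recording method is `unsafe`: the caller is responsible for
/// synchronization, for respecting the validity rules of the corresponding Vulkan command, and
/// for keeping the referenced resources alive until execution has finished.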
pub struct UnsafeCommandBufferBuilder<P> where P: CommandPool {
cmd: Option<vk::CommandBuffer>,
pool: Option<P>,
device: Arc<Device>,
flags: Flags,
secondary_cb: bool,
within_render_pass: bool,
}
impl<P> UnsafeCommandBufferBuilder<P> where P: CommandPool {
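    /// Allocates a command buffer of the right level from `pool` and starts recording it by
    /// calling `vkBeginCommandBuffer`.
    ///
    /// A minimal sketch of the intended flow (illustrative only; `pool` is any `CommandPool`
    /// implementation and `kind` is built by the caller, with error handling elided):
    ///
    /// ```ignore
    /// let builder = UnsafeCommandBufferBuilder::new(pool, kind, Flags::OneTimeSubmit).unwrap();
    /// // ... record commands through the methods below ...
    /// let cmd_buf = builder.build().unwrap();
    /// ```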
pub fn new<R, F>(pool: P, kind: Kind<R, F>, flags: Flags)
-> Result<UnsafeCommandBufferBuilder<P>, OomError>
where R: RenderPass, F: Framebuffer
{
let secondary = match kind {
Kind::Primary => false,
Kind::Secondary | Kind::SecondaryRenderPass { .. } => true,
};
let cmd = try!(pool.alloc(secondary, 1)).next().unwrap();
        // Note: if this fails, the command buffer allocated above cannot be returned to the
        // pool here, because `pool` has already been moved into `already_allocated`.
        unsafe { UnsafeCommandBufferBuilder::already_allocated(pool, cmd, kind, flags) }
}
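    /// Wraps a command buffer that was already allocated and starts recording it by calling
    /// `vkBeginCommandBuffer` with the usage flags and inheritance info derived from `kind`
    /// and `flags`.
    ///
    /// # Safety
    ///
    /// `cmd` must have been allocated from `pool` with the level (primary or secondary)
    /// matching `kind`, and must not already be in the recording state.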
pub unsafe fn already_allocated<R, F>(pool: P, cmd: AllocatedCommandBuffer,
kind: Kind<R, F>, flags: Flags)
-> Result<UnsafeCommandBufferBuilder<P>, OomError>
where R: RenderPass, F: Framebuffer
{
let device = pool.device().clone();
let vk = device.pointers();
let cmd = cmd.internal_object();
let vk_flags = {
let a = match flags {
Flags::None => 0,
Flags::SimultaneousUse => vk::COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT,
Flags::OneTimeSubmit => vk::COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
};
let b = match kind {
Kind::Primary | Kind::Secondary => 0,
Kind::SecondaryRenderPass { .. } => {
vk::COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT
},
};
a | b
};
        let (rp, sp) = if let Kind::SecondaryRenderPass { ref subpass, .. } = kind {
(subpass.render_pass().inner().internal_object(), subpass.index())
} else {
(0, 0)
};
        let framebuffer = if let Kind::SecondaryRenderPass { framebuffer: Some(ref framebuffer), .. } = kind {
framebuffer.internal_object()
} else {
0
};
let inheritance = vk::CommandBufferInheritanceInfo {
sType: vk::STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
pNext: ptr::null(),
renderPass: rp,
subpass: sp,
framebuffer: framebuffer,
            occlusionQueryEnable: 0,
            queryFlags: 0,
            pipelineStatistics: 0,
        };
let infos = vk::CommandBufferBeginInfo {
sType: vk::STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
pNext: ptr::null(),
flags: vk_flags,
pInheritanceInfo: &inheritance,
};
try!(check_errors(vk.BeginCommandBuffer(cmd, &infos)));
Ok(UnsafeCommandBufferBuilder {
device: device.clone(),
pool: Some(pool),
cmd: Some(cmd),
flags: flags,
secondary_cb: match kind {
Kind::Primary => false,
Kind::Secondary | Kind::SecondaryRenderPass { .. } => true,
},
within_render_pass: match kind {
Kind::Primary | Kind::Secondary => false,
Kind::SecondaryRenderPass { .. } => true,
},
})
}
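    /// Ends recording by calling `vkEndCommandBuffer` and turns the builder into an
    /// `UnsafeCommandBuffer` that can be submitted.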
pub fn build(mut self) -> Result<UnsafeCommandBuffer<P>, OomError> {
unsafe {
let vk = self.device.pointers();
let cmd = self.cmd.take().unwrap();
try!(check_errors(vk.EndCommandBuffer(cmd)));
Ok(UnsafeCommandBuffer {
cmd: cmd,
device: self.device.clone(),
pool: self.pool.take().unwrap().finish(),
flags: self.flags,
already_submitted: AtomicBool::new(false),
secondary_cb: self.secondary_cb,
})
}
}
#[inline]
pub fn pool(&self) -> &P {
self.pool.as_ref().unwrap()
}
#[inline]
pub fn device(&self) -> &Arc<Device> {
&self.device
}
#[inline]
pub fn is_secondary(&self) -> bool {
self.secondary_cb
}
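    /// Adds a `vkCmdClearColorImage` command.
    ///
    /// The image must be in the `TRANSFER_DST_OPTIMAL` layout, or in the `GENERAL` layout if
    /// `general_layout` is true. Panics if `color` is not a color clear value. Ranges with
    /// zero mipmap levels or zero array layers are skipped, and the command is omitted
    /// entirely if nothing remains to clear.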
pub unsafe fn clear_color_image<I>(&mut self, image: &UnsafeImage, general_layout: bool,
color: ClearValue, ranges: I)
where I: Iterator<Item = ImageSubresourcesRange>
{
assert_eq!(image.device().internal_object(), self.device.internal_object());
let clear_value = match color {
ClearValue::None => panic!(),
ClearValue::Float(val) => {
debug_assert_eq!(image.format().ty(), FormatTy::Float);
vk::ClearColorValue::float32(val)
},
ClearValue::Int(val) => {
debug_assert_eq!(image.format().ty(), FormatTy::Sint);
vk::ClearColorValue::int32(val)
},
ClearValue::Uint(val) => {
debug_assert_eq!(image.format().ty(), FormatTy::Uint);
vk::ClearColorValue::uint32(val)
},
ClearValue::Depth(_) => panic!(),
ClearValue::Stencil(_) => panic!(),
ClearValue::DepthStencil(_) => panic!(),
};
let ranges: SmallVec<[_; 4]> = ranges.filter_map(|range| {
debug_assert!(range.first_mipmap_level + range.num_mipmap_levels <=
image.mipmap_levels());
debug_assert!(range.first_array_layer + range.num_array_layers <=
image.dimensions().array_layers());
if range.num_mipmap_levels == 0 {
return None;
}
if range.num_array_layers == 0 {
return None;
}
Some(vk::ImageSubresourceRange {
aspectMask: vk::IMAGE_ASPECT_COLOR_BIT,
baseMipLevel: range.first_mipmap_level,
levelCount: range.num_mipmap_levels,
baseArrayLayer: range.first_array_layer,
layerCount: range.num_array_layers,
})
}).collect();
if ranges.is_empty() {
return;
}
let layout = if general_layout { vk::IMAGE_LAYOUT_GENERAL }
else { vk::IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL };
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
vk.CmdClearColorImage(cmd, image.internal_object(), layout, &clear_value,
ranges.len() as u32, ranges.as_ptr());
}
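    /// Adds a `vkCmdClearDepthStencilImage` command.
    ///
    /// Same conventions as `clear_color_image`, except that `clear_value` must be a depth,
    /// stencil or depth-stencil clear value.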
    pub unsafe fn clear_depth_stencil_image<I>(&mut self, image: &UnsafeImage, general_layout: bool,
                                               clear_value: ClearValue, ranges: I)
where I: Iterator<Item = ImageSubresourcesRange>
{
assert_eq!(image.device().internal_object(), self.device.internal_object());
        let (clear_value, aspect_mask) = match clear_value {
ClearValue::None => panic!(),
ClearValue::Float(_) => panic!(),
ClearValue::Int(_) => panic!(),
ClearValue::Uint(_) => panic!(),
ClearValue::Depth(val) => {
debug_assert!(image.format().ty() == FormatTy::Depth ||
image.format().ty() == FormatTy::DepthStencil);
let clear = vk::ClearDepthStencilValue { depth: val, stencil: 0 };
let aspect = vk::IMAGE_ASPECT_DEPTH_BIT;
(clear, aspect)
},
ClearValue::Stencil(val) => {
debug_assert!(image.format().ty() == FormatTy::Stencil ||
image.format().ty() == FormatTy::DepthStencil);
let clear = vk::ClearDepthStencilValue { depth: 0.0, stencil: val };
let aspect = vk::IMAGE_ASPECT_STENCIL_BIT;
(clear, aspect)
},
ClearValue::DepthStencil((depth, stencil)) => {
debug_assert_eq!(image.format().ty(), FormatTy::DepthStencil);
let clear = vk::ClearDepthStencilValue { depth: depth, stencil: stencil };
let aspect = vk::IMAGE_ASPECT_DEPTH_BIT | vk::IMAGE_ASPECT_STENCIL_BIT;
(clear, aspect)
},
};
let ranges: SmallVec<[_; 4]> = ranges.filter_map(|range| {
debug_assert!(range.first_mipmap_level + range.num_mipmap_levels <=
image.mipmap_levels());
debug_assert!(range.first_array_layer + range.num_array_layers <=
image.dimensions().array_layers());
if range.num_mipmap_levels == 0 {
return None;
}
if range.num_array_layers == 0 {
return None;
}
Some(vk::ImageSubresourceRange {
aspectMask: aspect_mask,
baseMipLevel: range.first_mipmap_level,
levelCount: range.num_mipmap_levels,
baseArrayLayer: range.first_array_layer,
layerCount: range.num_array_layers,
})
}).collect();
if ranges.is_empty() {
return;
}
let layout = if general_layout { vk::IMAGE_LAYOUT_GENERAL }
else { vk::IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL };
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
vk.CmdClearDepthStencilImage(cmd, image.internal_object(), layout, &clear_value,
ranges.len() as u32, ranges.as_ptr());
}
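    /// Adds a `vkCmdClearAttachments` command, clearing regions of attachments of the current
    /// render pass.
    ///
    /// Each rect is expressed as `[(x, width), (y, height), (first_array_layer, num_layers)]`.
    /// Rects with a zero extent or zero layers are skipped, and the command is omitted if
    /// there is nothing to clear.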
pub unsafe fn clear_attachments<Ia, Ir>(&mut self, attachments: Ia, rects: Ir)
where Ia: Iterator<Item = (u32, ClearValue)>,
Ir: Iterator<Item = [(u32, u32); 3]>,
{
let rects: SmallVec<[_; 3]> = rects.filter_map(|rect| {
if rect[0].1 == 0 || rect[1].1 == 0 || rect[2].1 == 0 {
return None;
}
Some(vk::ClearRect {
rect: vk::Rect2D {
offset: vk::Offset2D {
x: rect[0].0 as i32,
y: rect[1].0 as i32,
},
extent: vk::Extent2D {
width: rect[0].1,
height: rect[1].1,
},
},
baseArrayLayer: rect[2].0,
layerCount: rect[2].1,
})
}).collect();
let attachments: SmallVec<[_; 8]> = attachments.map(|(attachment, clear_value)| {
let (clear_value, aspect_mask) = match clear_value {
ClearValue::None => panic!(),
ClearValue::Float(val) => {
let clear = vk::ClearValue::color(vk::ClearColorValue::float32(val));
let aspect = vk::IMAGE_ASPECT_COLOR_BIT;
(clear, aspect)
},
ClearValue::Int(val) => {
let clear = vk::ClearValue::color(vk::ClearColorValue::int32(val));
let aspect = vk::IMAGE_ASPECT_COLOR_BIT;
(clear, aspect)
},
ClearValue::Uint(val) => {
let clear = vk::ClearValue::color(vk::ClearColorValue::uint32(val));
let aspect = vk::IMAGE_ASPECT_COLOR_BIT;
(clear, aspect)
},
ClearValue::Depth(val) => {
let clear = vk::ClearValue::depth_stencil(vk::ClearDepthStencilValue {
depth: val, stencil: 0
});
let aspect = vk::IMAGE_ASPECT_DEPTH_BIT;
(clear, aspect)
},
ClearValue::Stencil(val) => {
let clear = vk::ClearValue::depth_stencil(vk::ClearDepthStencilValue {
depth: 0.0, stencil: val
});
let aspect = vk::IMAGE_ASPECT_STENCIL_BIT;
(clear, aspect)
},
ClearValue::DepthStencil((depth, stencil)) => {
let clear = vk::ClearValue::depth_stencil(vk::ClearDepthStencilValue {
depth: depth, stencil: stencil,
});
let aspect = vk::IMAGE_ASPECT_DEPTH_BIT | vk::IMAGE_ASPECT_STENCIL_BIT;
(clear, aspect)
},
};
vk::ClearAttachment {
aspectMask: aspect_mask,
colorAttachment: attachment,
clearValue: clear_value,
}
}).collect();
if rects.is_empty() || attachments.is_empty() {
return;
}
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
vk.CmdClearAttachments(cmd, attachments.len() as u32, attachments.as_ptr(),
rects.len() as u32, rects.as_ptr());
}
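    /// Adds a `vkCmdFillBuffer` command that fills `size` bytes of `buffer`, starting at
    /// `offset`, with the 32-bit value `data`.
    ///
    /// `offset` must be a multiple of 4. `size` must be a multiple of 4 as well, unless the
    /// fill reaches the end of the buffer, in which case `VK_WHOLE_SIZE` is used instead.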
pub unsafe fn fill_buffer(&mut self, buffer: &UnsafeBuffer, offset: usize, size: usize,
data: u32)
{
assert_eq!(buffer.device().internal_object(), self.device.internal_object());
debug_assert_eq!(offset % 4, 0);
debug_assert!(offset + size <= buffer.size());
let size = if offset + size == buffer.size() {
vk::WHOLE_SIZE
} else {
debug_assert_eq!(size % 4, 0);
size as vk::DeviceSize
};
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
        vk.CmdFillBuffer(cmd, buffer.internal_object(), offset as vk::DeviceSize, size, data);
}
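    /// Adds a `vkCmdUpdateBuffer` command that writes `data` into `buffer` at `offset`.
    ///
    /// The number of bytes written is the smaller of `size` and `mem::size_of_val(data)`; it
    /// must be a multiple of 4 and no larger than 65536 bytes.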
pub unsafe fn update_buffer<D: ?Sized>(&mut self, buffer: &UnsafeBuffer, offset: usize,
size: usize, data: &D)
where D: Copy + 'static
{
assert_eq!(buffer.device().internal_object(), self.device.internal_object());
let size = cmp::min(size, mem::size_of_val(data));
debug_assert_eq!(offset % 4, 0);
debug_assert_eq!(size % 4, 0);
debug_assert!(size <= 65536);
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
vk.CmdUpdateBuffer(cmd, buffer.internal_object(), offset as vk::DeviceSize,
size as vk::DeviceSize, data as *const D as *const _);
}
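    /// Adds a `vkCmdCopyBuffer` command that copies the given regions from `src` to `dest`.
    /// Regions with a size of zero are skipped, and the command is omitted entirely if no
    /// region remains.
    ///
    /// A hedged sketch of a single-region copy, assuming `src` and `dest` are `UnsafeBuffer`s
    /// created on the same device as this builder:
    ///
    /// ```ignore
    /// unsafe {
    ///     builder.copy_buffer(&src, &dest, Some(BufferCopyRegion {
    ///         source_offset: 0,
    ///         destination_offset: 0,
    ///         size: 1024,
    ///     }));
    /// }
    /// ```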
pub unsafe fn copy_buffer<I>(&mut self, src: &UnsafeBuffer, dest: &UnsafeBuffer, regions: I)
where I: IntoIterator<Item = BufferCopyRegion>
{
assert_eq!(src.device().internal_object(), self.device.internal_object());
assert_eq!(src.device().internal_object(), dest.device().internal_object());
let regions: SmallVec<[_; 4]> = {
let mut res = SmallVec::new();
for region in regions.into_iter() {
if region.size == 0 { continue; }
debug_assert!(region.source_offset < src.size());
debug_assert!(region.source_offset + region.size <= src.size());
debug_assert!(region.destination_offset < dest.size());
debug_assert!(region.destination_offset + region.size <= dest.size());
res.push(vk::BufferCopy {
srcOffset: region.source_offset as vk::DeviceSize,
dstOffset: region.destination_offset as vk::DeviceSize,
size: region.size as vk::DeviceSize,
});
}
res
};
if regions.is_empty() {
return;
}
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
vk.CmdCopyBuffer(cmd, src.internal_object(), dest.internal_object(), regions.len() as u32,
regions.as_ptr());
}
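    /// Adds a `vkCmdExecuteCommands` command that executes the given secondary command
    /// buffers inside this primary command buffer.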
pub unsafe fn execute_commands<'sec, I, SecP: 'sec>(&mut self, command_buffers: I)
where I: IntoIterator<Item = &'sec UnsafeCommandBuffer<SecP>>,
SecP: CommandPool
{
        let raw_cbs: SmallVec<[_; 16]> = command_buffers.into_iter().map(|cb| {
            assert_eq!(cb.device().internal_object(), self.device.internal_object());
            debug_assert!(cb.secondary_cb);
            cb.cmd
        }).collect();
        if raw_cbs.is_empty() {
            return;
        }
        let vk = self.device.pointers();
        let cmd = self.cmd.clone().take().unwrap();
        vk.CmdExecuteCommands(cmd, raw_cbs.len() as u32, raw_cbs.as_ptr());
}
#[inline]
pub fn pipeline_barrier(&mut self, barrier: PipelineBarrierBuilder) {
if barrier.src_stage_mask == 0 || barrier.dst_stage_mask == 0 {
debug_assert!(barrier.src_stage_mask == 0 && barrier.dst_stage_mask == 0);
debug_assert!(barrier.memory_barriers.is_empty());
debug_assert!(barrier.buffer_barriers.is_empty());
debug_assert!(barrier.image_barriers.is_empty());
return;
}
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
unsafe {
vk.CmdPipelineBarrier(cmd, barrier.src_stage_mask, barrier.dst_stage_mask,
barrier.dependency_flags, barrier.memory_barriers.len() as u32,
barrier.memory_barriers.as_ptr(),
barrier.buffer_barriers.len() as u32,
barrier.buffer_barriers.as_ptr(),
barrier.image_barriers.len() as u32,
barrier.image_barriers.as_ptr());
}
}
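    /// Adds a `vkCmdBeginRenderPass` command.
    ///
    /// `rect` is the render area as `[x_range, y_range]`, and `clear_values` provides one
    /// value per attachment, in order; `ClearValue::None` entries are encoded as dummy values
    /// for attachments that are not cleared. If `secondary` is true, the subpass contents are
    /// expected to be recorded in secondary command buffers.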
pub unsafe fn begin_render_pass<I, F>(&mut self, render_pass: &UnsafeRenderPass,
framebuffer: &F, clear_values: I,
rect: [Range<u32>; 2], secondary: bool)
where I: Iterator<Item = ClearValue>,
F: Framebuffer
{
let clear_values: SmallVec<[_; 12]> = clear_values.map(|clear_value| {
match clear_value {
ClearValue::None => {
vk::ClearValue::color(vk::ClearColorValue::float32([0.0; 4]))
},
ClearValue::Float(val) => {
vk::ClearValue::color(vk::ClearColorValue::float32(val))
},
ClearValue::Int(val) => {
vk::ClearValue::color(vk::ClearColorValue::int32(val))
},
ClearValue::Uint(val) => {
vk::ClearValue::color(vk::ClearColorValue::uint32(val))
},
ClearValue::Depth(val) => {
vk::ClearValue::depth_stencil(vk::ClearDepthStencilValue {
depth: val, stencil: 0
})
},
ClearValue::Stencil(val) => {
vk::ClearValue::depth_stencil(vk::ClearDepthStencilValue {
depth: 0.0, stencil: val
})
},
ClearValue::DepthStencil((depth, stencil)) => {
vk::ClearValue::depth_stencil(vk::ClearDepthStencilValue {
depth: depth, stencil: stencil,
})
},
}
}).collect();
assert!(rect[0].start <= rect[0].end);
assert!(rect[1].start <= rect[1].end);
let infos = vk::RenderPassBeginInfo {
sType: vk::STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
pNext: ptr::null(),
renderPass: render_pass.internal_object(),
framebuffer: framebuffer.internal_object(),
renderArea: vk::Rect2D {
offset: vk::Offset2D {
x: rect[0].start as i32,
y: rect[1].start as i32,
},
extent: vk::Extent2D {
width: rect[0].end - rect[0].start,
height: rect[1].end - rect[1].start,
},
},
clearValueCount: clear_values.len() as u32,
pClearValues: clear_values.as_ptr(),
};
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
vk.CmdBeginRenderPass(cmd, &infos,
if secondary { vk::SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS }
else { vk::SUBPASS_CONTENTS_INLINE });
}
#[inline]
pub unsafe fn next_subpass(&mut self, secondary: bool) {
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
vk.CmdNextSubpass(cmd, if secondary { vk::SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS }
else { vk::SUBPASS_CONTENTS_INLINE });
}
#[inline]
pub unsafe fn end_render_pass(&mut self) {
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
vk.CmdEndRenderPass(cmd);
}
#[inline]
pub unsafe fn bind_pipeline_graphics<V, L, R>(&mut self, pipeline: &GraphicsPipeline<V, L, R>) {
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
vk.CmdBindPipeline(cmd, vk::PIPELINE_BIND_POINT_GRAPHICS, pipeline.internal_object());
}
#[inline]
pub unsafe fn bind_pipeline_compute<L>(&mut self, pipeline: &ComputePipeline<L>) {
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
vk.CmdBindPipeline(cmd, vk::PIPELINE_BIND_POINT_COMPUTE, pipeline.internal_object());
}
#[inline]
pub unsafe fn draw(&mut self, vertex_count: u32, instance_count: u32, first_vertex: u32,
first_instance: u32)
{
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
vk.CmdDraw(cmd, vertex_count, instance_count, first_vertex, first_instance);
}
#[inline]
pub unsafe fn draw_indexed(&mut self, vertex_count: u32, instance_count: u32,
first_index: u32, vertex_offset: i32, first_instance: u32)
{
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
vk.CmdDrawIndexed(cmd, vertex_count, instance_count, first_index, vertex_offset,
first_instance);
}
#[inline]
pub unsafe fn draw_indirect(&mut self, buffer: &UnsafeBuffer, offset: usize, draw_count: u32,
stride: u32)
{
assert_eq!(buffer.device().internal_object(), self.device.internal_object());
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
vk.CmdDrawIndirect(cmd, buffer.internal_object(), offset as vk::DeviceSize, draw_count,
stride);
}
#[inline]
pub unsafe fn draw_indexed_indirect(&mut self, buffer: &UnsafeBuffer, offset: usize,
draw_count: u32, stride: u32)
{
assert_eq!(buffer.device().internal_object(), self.device.internal_object());
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
vk.CmdDrawIndexedIndirect(cmd, buffer.internal_object(), offset as vk::DeviceSize,
draw_count, stride);
}
#[inline]
pub unsafe fn dispatch(&mut self, x: u32, y: u32, z: u32) {
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
vk.CmdDispatch(cmd, x, y, z);
}
#[inline]
pub unsafe fn dispatch_indirect(&mut self, buffer: &UnsafeBuffer, offset: usize) {
assert_eq!(buffer.device().internal_object(), self.device.internal_object());
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
vk.CmdDispatchIndirect(cmd, buffer.internal_object(), offset as vk::DeviceSize);
}
#[inline]
pub unsafe fn bind_vertex_buffers<'a, I>(&mut self, first_binding: u32, buffers: I)
where I: IntoIterator<Item = (&'a UnsafeBuffer, usize)>
{
let mut raw_buffers: SmallVec<[_; 8]> = SmallVec::new();
let mut raw_offsets: SmallVec<[_; 8]> = SmallVec::new();
for (buf, off) in buffers {
assert_eq!(buf.device().internal_object(), self.device.internal_object());
raw_buffers.push(buf.internal_object());
raw_offsets.push(off as vk::DeviceSize);
}
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
vk.CmdBindVertexBuffers(cmd, first_binding, raw_buffers.len() as u32, raw_buffers.as_ptr(),
raw_offsets.as_ptr());
}
#[inline]
pub unsafe fn bind_index_buffer(&mut self, buffer: &UnsafeBuffer, offset: usize,
index_ty: IndexType)
{
assert_eq!(buffer.device().internal_object(), self.device.internal_object());
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
vk.CmdBindIndexBuffer(cmd, buffer.internal_object(), offset as vk::DeviceSize,
index_ty as u32);
}
#[inline]
pub unsafe fn bind_descriptor_sets<'a, L, Ides, Idyn>(&mut self, graphics_bind_point: bool,
layout: &L, first_set: u32,
descriptor_sets: Ides,
dynamic_offsets: Idyn)
where L: PipelineLayout,
Ides: IntoIterator<Item = &'a UnsafeDescriptorSet>,
Idyn: IntoIterator<Item = u32>
{
let bind_point = if graphics_bind_point { vk::PIPELINE_BIND_POINT_GRAPHICS }
else { vk::PIPELINE_BIND_POINT_COMPUTE };
assert_eq!(layout.inner().device().internal_object(), self.device.internal_object());
let descriptor_sets: SmallVec<[_; 16]> = descriptor_sets.into_iter().map(|set| {
assert_eq!(set.layout().device().internal_object(), self.device.internal_object());
set.internal_object()
}).collect();
let dynamic_offsets: SmallVec<[_; 64]> = dynamic_offsets.into_iter().collect();
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
vk.CmdBindDescriptorSets(cmd, bind_point, layout.inner().internal_object(), first_set,
descriptor_sets.len() as u32, descriptor_sets.as_ptr(),
dynamic_offsets.len() as u32, dynamic_offsets.as_ptr());
}
#[inline]
pub unsafe fn push_constants<L, D: ?Sized>(&mut self, layout: &L,
stages: ShaderStages, offset: usize, data: &D)
where L: PipelineLayout
{
assert_eq!(layout.inner().device().internal_object(), self.device.internal_object());
debug_assert!(offset <= u32::MAX as usize);
debug_assert!(mem::size_of_val(data) <= u32::MAX as usize);
let vk = self.device.pointers();
let cmd = self.cmd.clone().take().unwrap();
vk.CmdPushConstants(cmd, layout.inner().internal_object(), stages.into(), offset as u32,
mem::size_of_val(data) as u32, data as *const D as *const _);
}
}
unsafe impl<P> VulkanObject for UnsafeCommandBufferBuilder<P> where P: CommandPool {
type Object = vk::CommandBuffer;
#[inline]
fn internal_object(&self) -> vk::CommandBuffer {
self.cmd.unwrap()
}
}
impl<P> Drop for UnsafeCommandBufferBuilder<P> where P: CommandPool {
#[inline]
fn drop(&mut self) {
if let Some(cmd) = self.cmd {
unsafe {
let vk = self.device.pointers();
vk.EndCommandBuffer(cmd);
self.pool.as_ref().unwrap().free(self.secondary_cb, Some(cmd.into()).into_iter());
}
}
}
}
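/// Determines the kind of command buffer to create: a primary command buffer, a secondary
/// command buffer, or a secondary command buffer restricted to a given subpass (and
/// optionally a specific framebuffer).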
#[derive(Clone)]
pub enum Kind<'a, R: 'a, F: 'a> {
Primary,
Secondary,
SecondaryRenderPass {
subpass: Subpass<'a, R>,
framebuffer: Option<&'a F>,
},
}
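/// Usage flags passed to `vkBeginCommandBuffer` when recording starts.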
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum Flags {
None,
SimultaneousUse,
OneTimeSubmit,
}
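/// Range of mipmap levels and array layers of an image, as used by the clear commands.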
pub struct ImageSubresourcesRange {
pub first_mipmap_level: u32,
pub num_mipmap_levels: u32,
pub first_array_layer: u32,
pub num_array_layers: u32,
}
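/// One region copied by `copy_buffer`, expressed in bytes.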
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct BufferCopyRegion {
pub source_offset: usize,
pub destination_offset: usize,
pub size: usize,
}
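/// Prototype for a `vkCmdPipelineBarrier` command: accumulates stage masks, dependency flags
/// and memory/buffer/image barriers, and is consumed by
/// `UnsafeCommandBufferBuilder::pipeline_barrier`.
///
/// A minimal sketch of a pure execution dependency (the `PipelineStages` values `src_stages`
/// and `dst_stages`, and the command buffer `builder`, are assumed to be provided by the
/// caller):
///
/// ```ignore
/// let mut barrier = PipelineBarrierBuilder::new();
/// unsafe { barrier.add_execution_dependency(src_stages, dst_stages, true); }
/// if !barrier.is_empty() {
///     builder.pipeline_barrier(barrier);
/// }
/// ```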
pub struct PipelineBarrierBuilder {
src_stage_mask: vk::PipelineStageFlags,
dst_stage_mask: vk::PipelineStageFlags,
dependency_flags: vk::DependencyFlags,
memory_barriers: SmallVec<[vk::MemoryBarrier; 2]>,
buffer_barriers: SmallVec<[vk::BufferMemoryBarrier; 8]>,
image_barriers: SmallVec<[vk::ImageMemoryBarrier; 8]>,
}
impl PipelineBarrierBuilder {
#[inline]
pub fn new() -> PipelineBarrierBuilder {
PipelineBarrierBuilder {
src_stage_mask: 0,
dst_stage_mask: 0,
dependency_flags: vk::DEPENDENCY_BY_REGION_BIT,
memory_barriers: SmallVec::new(),
buffer_barriers: SmallVec::new(),
image_barriers: SmallVec::new(),
}
}
#[inline]
pub fn is_empty(&self) -> bool {
self.src_stage_mask == 0 || self.dst_stage_mask == 0
}
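    /// Merges another barrier builder into this one: stage masks are OR'ed together,
    /// dependency flags are AND'ed, and the individual barriers are concatenated.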
#[inline]
pub fn merge(&mut self, other: PipelineBarrierBuilder) {
self.src_stage_mask |= other.src_stage_mask;
self.dst_stage_mask |= other.dst_stage_mask;
self.dependency_flags &= other.dependency_flags;
self.memory_barriers.extend(other.memory_barriers.into_iter());
self.buffer_barriers.extend(other.buffer_barriers.into_iter());
self.image_barriers.extend(other.image_barriers.into_iter());
}
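    /// Adds an execution dependency: commands in the `dest` stages will wait for commands in
    /// the `source` stages. If `by_region` is false, the "by region" dependency flag is
    /// cleared for the whole barrier.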
#[inline]
pub unsafe fn add_execution_dependency(&mut self, source: PipelineStages, dest: PipelineStages,
by_region: bool)
{
if !by_region {
self.dependency_flags = 0;
}
self.src_stage_mask |= source.into();
self.dst_stage_mask |= dest.into();
}
pub unsafe fn add_memory_barrier(&mut self, source_stage: PipelineStages,
source_access: AccessFlagBits, dest_stage: PipelineStages,
dest_access: AccessFlagBits, by_region: bool)
{
self.add_execution_dependency(source_stage, dest_stage, by_region);
self.memory_barriers.push(vk::MemoryBarrier {
sType: vk::STRUCTURE_TYPE_MEMORY_BARRIER,
pNext: ptr::null(),
srcAccessMask: source_access.into(),
dstAccessMask: dest_access.into(),
});
}
pub unsafe fn add_buffer_barrier_request(&mut self, buffer: &UnsafeBuffer,
request: BufferPipelineBarrierRequest)
{
if !request.by_region {
self.dependency_flags = 0;
}
self.src_stage_mask |= request.source_stage.into();
self.dst_stage_mask |= request.destination_stages.into();
if let Some(memory_barrier) = request.memory_barrier {
            let (src_queue, dest_queue) = (vk::QUEUE_FAMILY_IGNORED, vk::QUEUE_FAMILY_IGNORED);
debug_assert!(memory_barrier.offset + memory_barrier.size <= buffer.size());
self.buffer_barriers.push(vk::BufferMemoryBarrier {
sType: vk::STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
pNext: ptr::null(),
srcAccessMask: memory_barrier.source_access.into(),
dstAccessMask: memory_barrier.destination_access.into(),
srcQueueFamilyIndex: src_queue,
dstQueueFamilyIndex: dest_queue,
buffer: buffer.internal_object(),
offset: memory_barrier.offset as vk::DeviceSize,
size: memory_barrier.size as vk::DeviceSize,
});
}
}
pub unsafe fn add_image_barrier_request(&mut self, image: &UnsafeImage,
request: ImagePipelineBarrierRequest)
{
if !request.by_region {
self.dependency_flags = 0;
}
self.src_stage_mask |= request.source_stage.into();
self.dst_stage_mask |= request.destination_stages.into();
if let Some(memory_barrier) = request.memory_barrier {
            let (src_queue, dest_queue) = (vk::QUEUE_FAMILY_IGNORED, vk::QUEUE_FAMILY_IGNORED);
debug_assert!(memory_barrier.first_mipmap +
memory_barrier.num_mipmaps <= image.mipmap_levels());
debug_assert!(memory_barrier.first_layer +
memory_barrier.num_layers <= image.dimensions().array_layers());
self.image_barriers.push(vk::ImageMemoryBarrier {
sType: vk::STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
pNext: ptr::null(),
srcAccessMask: memory_barrier.source_access.into(),
dstAccessMask: memory_barrier.destination_access.into(),
oldLayout: memory_barrier.old_layout as u32,
newLayout: memory_barrier.new_layout as u32,
srcQueueFamilyIndex: src_queue,
dstQueueFamilyIndex: dest_queue,
image: image.internal_object(),
subresourceRange: vk::ImageSubresourceRange {
                    // All aspect bits (color | depth | stencil | metadata); ideally this would be
                    // restricted to the aspects actually present in the image's format.
                    aspectMask: vk::IMAGE_ASPECT_COLOR_BIT | vk::IMAGE_ASPECT_DEPTH_BIT |
                                vk::IMAGE_ASPECT_STENCIL_BIT | vk::IMAGE_ASPECT_METADATA_BIT,
                    baseMipLevel: memory_barrier.first_mipmap,
levelCount: memory_barrier.num_mipmaps,
baseArrayLayer: memory_barrier.first_layer,
layerCount: memory_barrier.num_layers,
},
});
}
}
pub unsafe fn add_buffer_memory_barrier<'a, T: ?Sized, B>
(&mut self, buffer: BufferSlice<'a, T, B>, source_stage: PipelineStages,
source_access: AccessFlagBits, dest_stage: PipelineStages,
dest_access: AccessFlagBits, by_region: bool,
queue_transfer: Option<(u32, u32)>)
where B: Buffer
{
self.add_execution_dependency(source_stage, dest_stage, by_region);
debug_assert!(buffer.size() + buffer.offset() <= buffer.buffer().size());
let (src_queue, dest_queue) = if let Some((src_queue, dest_queue)) = queue_transfer {
(src_queue, dest_queue)
} else {
(vk::QUEUE_FAMILY_IGNORED, vk::QUEUE_FAMILY_IGNORED)
};
self.buffer_barriers.push(vk::BufferMemoryBarrier {
sType: vk::STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
pNext: ptr::null(),
srcAccessMask: source_access.into(),
dstAccessMask: dest_access.into(),
srcQueueFamilyIndex: src_queue,
dstQueueFamilyIndex: dest_queue,
buffer: buffer.buffer().inner().internal_object(),
offset: buffer.offset() as vk::DeviceSize,
size: buffer.size() as vk::DeviceSize,
});
}
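    /// Adds an image memory barrier for the given mipmap levels and array layers, optionally
    /// transferring ownership between queue families and transitioning the image from
    /// `current_layout` to `new_layout`.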
pub unsafe fn add_image_memory_barrier<I>(&mut self, image: &Arc<I>, mipmaps: Range<u32>,
layers: Range<u32>, source_stage: PipelineStages, source_access: AccessFlagBits,
dest_stage: PipelineStages, dest_access: AccessFlagBits, by_region: bool,
queue_transfer: Option<(u32, u32)>, current_layout: Layout, new_layout: Layout)
where I: Image
{
self.add_execution_dependency(source_stage, dest_stage, by_region);
debug_assert!(mipmaps.start < mipmaps.end);
debug_assert!(layers.start < layers.end);
debug_assert!(layers.end <= image.dimensions().array_layers());
let (src_queue, dest_queue) = if let Some((src_queue, dest_queue)) = queue_transfer {
(src_queue, dest_queue)
} else {
(vk::QUEUE_FAMILY_IGNORED, vk::QUEUE_FAMILY_IGNORED)
};
self.image_barriers.push(vk::ImageMemoryBarrier {
sType: vk::STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
pNext: ptr::null(),
srcAccessMask: source_access.into(),
dstAccessMask: dest_access.into(),
oldLayout: current_layout as u32,
newLayout: new_layout as u32,
srcQueueFamilyIndex: src_queue,
dstQueueFamilyIndex: dest_queue,
image: image.inner().internal_object(),
subresourceRange: vk::ImageSubresourceRange {
                // All aspect bits (color | depth | stencil | metadata); ideally this would be
                // restricted to the aspects actually present in the image's format.
                aspectMask: vk::IMAGE_ASPECT_COLOR_BIT | vk::IMAGE_ASPECT_DEPTH_BIT |
                            vk::IMAGE_ASPECT_STENCIL_BIT | vk::IMAGE_ASPECT_METADATA_BIT,
                baseMipLevel: mipmaps.start,
levelCount: mipmaps.end - mipmaps.start,
baseArrayLayer: layers.start,
layerCount: layers.end - layers.start,
},
});
}
}
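/// A command buffer that has finished being recorded and can now be submitted.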
pub struct UnsafeCommandBuffer<P> where P: CommandPool {
cmd: vk::CommandBuffer,
device: Arc<Device>,
pool: P::Finished,
flags: Flags,
already_submitted: AtomicBool,
secondary_cb: bool,
}
impl<P> UnsafeCommandBuffer<P> where P: CommandPool {
#[inline]
pub fn device(&self) -> &Arc<Device> {
&self.device
}
}
unsafe impl<P> Sync for UnsafeCommandBuffer<P> where P: CommandPool {}
unsafe impl<P> VulkanObject for UnsafeCommandBuffer<P> where P: CommandPool {
type Object = vk::CommandBuffer;
#[inline]
fn internal_object(&self) -> vk::CommandBuffer {
self.cmd
}
}
impl<P> Drop for UnsafeCommandBuffer<P> where P: CommandPool {
#[inline]
fn drop(&mut self) {
unsafe {
self.pool.free(self.secondary_cb, Some(self.cmd.into()).into_iter());
}
}
}