bedrock/
command.rs

1//! Vulkan Commands
2
3use vk::*;
4use {VkHandle, Device, DeviceChild};
5#[cfg(feature = "Implements")]
6use VkResultHandler;
7#[cfg(feature = "Implements")]
8use std::mem::{size_of, transmute};
9use std::ops::Range;
10use std::borrow::Borrow;
11use {Image, Buffer, ImageLayout};
12#[cfg(feature = "Implements")]
13use {Framebuffer, RenderPass, Pipeline, PipelineLayout, PipelineStageFlags, ShaderStage};
14#[cfg(feature = "Implements")]
15use {StencilFaceMask, FilterMode, Event};
16#[cfg(feature = "Implements")]
17use {QueryPipelineStatisticFlags, QueryPool, QueryResultFlags};
18
/// Opaque handle to a command pool object
///
/// Holds the raw `VkCommandPool` together with a clone of the owning `Device`
/// (see `DeviceChild`), keeping the device alive for as long as the pool exists.
#[derive(Clone)] pub struct CommandPool(VkCommandPool, ::Device);
/// Opaque handle to a command buffer object
///
/// `#[repr(C)]` newtype over the raw `VkCommandBuffer`, so slices of
/// `CommandBuffer` can be reinterpreted as arrays of raw handles for FFI
/// (see `CommandPool::free`).
#[repr(C)] #[derive(Clone, Copy)] pub struct CommandBuffer(VkCommandBuffer);
23
// Destruction (via the crate's common Drop macro) and raw-handle/device accessors.
#[cfg(feature = "Implements")] DeviceChildCommonDrop!{ for CommandPool[vkDestroyCommandPool] }
impl VkHandle for CommandPool   { type Handle = VkCommandPool;   fn native_ptr(&self) -> VkCommandPool   { self.0 } }
impl VkHandle for CommandBuffer { type Handle = VkCommandBuffer; fn native_ptr(&self) -> VkCommandBuffer { self.0 } }
impl DeviceChild for CommandPool { fn device(&self) -> &Device { &self.1 } }
28
/// The recording state of commandbuffers
///
/// `layout` caches the pipeline layout bound for each bind point (one slot per
/// `VkPipelineBindPoint` value, graphics and compute) so that later commands
/// such as descriptor-set binds and push constants can look it up.
#[cfg(feature = "Implements")]
pub struct CmdRecord<'d> { ptr: &'d CommandBuffer, layout: [Option<VkPipelineLayout>; 2] }
32
/// Implicitly closes the recording state when the guard is dropped.
/// # Panics
/// Panics if `vkEndCommandBuffer` reports an error for the recorded commands.
#[cfg(feature = "Implements")]
impl<'d> Drop for CmdRecord<'d>
{
	fn drop(&mut self)
	{
		// Only the FFI call itself needs the unsafe block; the result check is safe code.
		let res = unsafe { vkEndCommandBuffer(self.ptr.native_ptr()) };
		res.into_result().expect("Error closing command recording state");
	}
}
45
/// Following methods are enabled with [feature = "Implements"]
#[cfg(feature = "Implements")]
impl CommandPool
{
	/// Create a new command pool object
	/// # Failures
	/// On failure, this command returns
	///
	/// * `VK_ERROR_OUT_OF_HOST_MEMORY`
	/// * `VK_ERROR_OUT_OF_DEVICE_MEMORY`
	pub fn new(device: &Device, queue_family: u32, transient: bool, indiv_resettable: bool) -> ::Result<Self>
	{
		// Accumulate creation flags explicitly; clearer than chaining if-else
		// expressions with a binary `|` inside the struct literal.
		let mut flags = 0;
		if transient { flags |= VK_COMMAND_POOL_CREATE_TRANSIENT_BIT; }
		if indiv_resettable { flags |= VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT; }
		let cinfo = VkCommandPoolCreateInfo { queueFamilyIndex: queue_family, flags, .. Default::default() };
		let mut h = VK_NULL_HANDLE as _;
		unsafe
		{
			vkCreateCommandPool(device.native_ptr(), &cinfo, ::std::ptr::null(), &mut h)
				.into_result().map(|_| CommandPool(h, device.clone()))
		}
	}
	/// Allocate command buffers from an existing command pool
	/// # Failures
	/// On failure, this command returns
	///
	/// * `VK_ERROR_OUT_OF_HOST_MEMORY`
	/// * `VK_ERROR_OUT_OF_DEVICE_MEMORY`
	pub fn alloc(&self, count: u32, primary: bool) -> ::Result<Vec<CommandBuffer>>
	{
		let ainfo = VkCommandBufferAllocateInfo
		{
			commandBufferCount: count, level: if primary { VK_COMMAND_BUFFER_LEVEL_PRIMARY } else { VK_COMMAND_BUFFER_LEVEL_SECONDARY },
			commandPool: self.0, .. Default::default()
		};
		let mut hs = vec![VK_NULL_HANDLE as _; count as _];
		unsafe
		{
			// Wrap each raw handle individually instead of transmuting the Vec
			// wholesale: Vec's internal layout is unspecified, so
			// transmute::<Vec<VkCommandBuffer>, Vec<CommandBuffer>> is unsound
			// even though the element types are layout-compatible.
			vkAllocateCommandBuffers(self.1.native_ptr(), &ainfo, hs.as_mut_ptr()).into_result()
				.map(|_| hs.into_iter().map(CommandBuffer).collect())
		}
	}
	/// Resets a command pool
	/// # Safety
	/// Application cannot use command buffers after this call
	/// # Failures
	/// On failure, this command returns
	///
	/// * `VK_ERROR_OUT_OF_HOST_MEMORY`
	/// * `VK_ERROR_OUT_OF_DEVICE_MEMORY`
	pub fn reset(&self, release_resources: bool) -> ::Result<()>
	{
		let flags = if release_resources { VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT } else { 0 };
		unsafe { vkResetCommandPool(self.1.native_ptr(), self.0, flags).into_result() }
	}
	/// Free command buffers
	pub fn free(&self, buffers: &[CommandBuffer])
	{
		// CommandBuffer is #[repr(C)] over VkCommandBuffer, so the slice pointer
		// can be reinterpreted as a pointer to raw handles for the FFI call.
		unsafe { vkFreeCommandBuffers(self.1.native_ptr(), self.0, buffers.len() as _, buffers.as_ptr() as *const _) };
	}
}
109
/// Following methods are enabled with [feature = "Implements"]
#[cfg(feature = "Implements")]
impl CommandBuffer
{
	/// Start recording a primary command buffer
	///
	/// The returned `CmdRecord` calls `vkEndCommandBuffer` in its `Drop` impl,
	/// so the recording is closed implicitly when it goes out of scope.
	/// # Failures
	/// On failure, this command returns
	///
	/// * `VK_ERROR_OUT_OF_HOST_MEMORY`
	/// * `VK_ERROR_OUT_OF_DEVICE_MEMORY`
	pub fn begin(&self) -> ::Result<CmdRecord>
	{
		unsafe
		{
			// Default begin info: no usage flags, no inheritance info
			vkBeginCommandBuffer(self.0, &Default::default()).into_result()
				.map(|_| CmdRecord { ptr: self, layout: [None, None] })
		}
	}
	/// Start recording a primary command buffer that will be submitted once
	/// # Failures
	/// On failure, this command returns
	/// 
	/// * `VK_ERROR_OUT_OF_HOST_MEMORY`
	/// * `VK_ERROR_OUT_OF_DEVICE_MEMORY`
	pub fn begin_once(&self) -> ::Result<CmdRecord>
	{
		let info = VkCommandBufferBeginInfo { flags: VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, .. Default::default() };
		unsafe
		{
			vkBeginCommandBuffer(self.0, &info).into_result().map(|_| CmdRecord { ptr: self, layout: [None, None] })
		}
	}
	/// Start recording a secondary command buffer
	///
	/// When `renderpass` is `Some((framebuffer, render_pass, subpass))` the buffer
	/// is marked RENDER_PASS_CONTINUE and inherits that render pass state.
	/// `query` controls occlusion-query inheritance and pipeline statistics.
	/// # Failures
	/// On failure, this command returns
	///
	/// * `VK_ERROR_OUT_OF_HOST_MEMORY`
	/// * `VK_ERROR_OUT_OF_DEVICE_MEMORY`
	pub fn begin_inherit(&self, renderpass: Option<(&Framebuffer, &RenderPass, u32)>,
		query: Option<(OcclusionQuery, QueryPipelineStatisticFlags)>) -> ::Result<CmdRecord>
	{
		let flags = if renderpass.is_some() { VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT } else { 0 };
		// Null handles / subpass 0 when no render pass inheritance is requested
		let (fb, rp, s) = renderpass.map(|(f, r, s)| (f.native_ptr(), r.native_ptr(), s))
			.unwrap_or((VK_NULL_HANDLE as _, VK_NULL_HANDLE as _, 0));
		let (oq, psq) = query.map(|(o, p)| (o, p.0)).unwrap_or((OcclusionQuery::Disable, 0));
		// NOTE: `inherit` must stay alive until vkBeginCommandBuffer returns,
		// since `binfo.pInheritanceInfo` points at it.
		let inherit = VkCommandBufferInheritanceInfo
		{
			framebuffer: fb, renderPass: rp, subpass: s, occlusionQueryEnable: (oq != OcclusionQuery::Disable) as _,
			queryFlags: if oq == OcclusionQuery::Precise { VK_QUERY_CONTROL_PRECISE_BIT } else { 0 },
			pipelineStatistics: psq, .. Default::default()
		};
		let binfo = VkCommandBufferBeginInfo { pInheritanceInfo: &inherit, flags, .. Default::default() };
		unsafe
		{
			vkBeginCommandBuffer(self.0, &binfo).into_result().map(|_| CmdRecord { ptr: self, layout: [None, None] })
		}
	}
}
168
/// [feature = "Implements"] Graphics/Compute Commands: Pipeline Setup
#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
	/// Bind a pipeline object to a command buffer
	pub fn bind_graphics_pipeline(&mut self, pipeline: &Pipeline) -> &mut Self
	{
		unsafe { vkCmdBindPipeline(self.ptr.native_ptr(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline.native_ptr()) };
		return self;
	}
	/// Bind a pipeline object to a command buffer
	pub fn bind_compute_pipeline(&mut self, pipeline: &Pipeline) -> &mut Self
	{
		unsafe { vkCmdBindPipeline(self.ptr.native_ptr(), VK_PIPELINE_BIND_POINT_COMPUTE, pipeline.native_ptr()) };
		return self;
	}
	/// Bind a pipeline layout object to a command buffer
	///
	/// This only caches the layout inside this `CmdRecord` (no Vulkan command is
	/// recorded); it is read back by the descriptor-set and push-constant methods.
	pub fn bind_graphics_pipeline_layout(&mut self, layout: &PipelineLayout) -> &mut Self
	{
		// The cache is indexed by the VkPipelineBindPoint value
		self.layout[VK_PIPELINE_BIND_POINT_GRAPHICS as usize] = Some(layout.native_ptr());
		return self;
	}
	/// Bind a pipeline layout object to a command buffer
	///
	/// This only caches the layout inside this `CmdRecord` (no Vulkan command is
	/// recorded); it is read back by the descriptor-set and push-constant methods.
	pub fn bind_compute_pipeline_layout(&mut self, layout: &PipelineLayout) -> &mut Self
	{
		self.layout[VK_PIPELINE_BIND_POINT_COMPUTE as usize] = Some(layout.native_ptr());
		return self;
	}
	/// Bind a pipeline object and a pipeline layout object to a command buffer
	pub fn bind_graphics_pipeline_pair(&mut self, pipeline: &Pipeline, layout: &PipelineLayout) -> &mut Self
	{
		self.bind_graphics_pipeline_layout(layout).bind_graphics_pipeline(pipeline)
	}
	/// Bind a pipeline object and a pipeline layout object to a command buffer
	pub fn bind_compute_pipeline_pair(&mut self, pipeline: &Pipeline, layout: &PipelineLayout) -> &mut Self
	{
		self.bind_compute_pipeline_layout(layout).bind_compute_pipeline(pipeline)
	}
	// Panics if no graphics pipeline layout has been bound on this record
	fn current_pipeline_layout_g(&self) -> VkPipelineLayout
	{
		self.layout[VK_PIPELINE_BIND_POINT_GRAPHICS as usize].expect("Pipeline is not bound for Graphics")
	}
	// Panics if no compute pipeline layout has been bound on this record
	fn current_pipeline_layout_c(&self) -> VkPipelineLayout
	{
		self.layout[VK_PIPELINE_BIND_POINT_COMPUTE as usize].expect("Pipeline is not bound for Compute")
	}
	/// Binds descriptor sets to a command buffer
	///
	/// # Panics
	/// Panics if a graphics pipeline layout has not been bound beforehand.
	pub fn bind_graphics_descriptor_sets(&mut self, first: u32,
		descriptor_sets: &[VkDescriptorSet], dynamic_offsets: &[u32]) -> &mut Self
	{
		unsafe
		{
			vkCmdBindDescriptorSets(self.ptr.native_ptr(), VK_PIPELINE_BIND_POINT_GRAPHICS,
				self.current_pipeline_layout_g(),
				first, descriptor_sets.len() as _, descriptor_sets.as_ptr(),
				dynamic_offsets.len() as _, dynamic_offsets.as_ptr())
		};
		return self;
	}
	/// Binds descriptor sets to a command buffer
	///
	/// # Panics
	/// Panics if a compute pipeline layout has not been bound beforehand.
	pub fn bind_compute_descriptor_sets(&mut self, first: u32,
		descriptor_sets:&[VkDescriptorSet], dynamic_offsets: &[u32]) -> &mut Self
	{
		unsafe
		{
			vkCmdBindDescriptorSets(self.ptr.native_ptr(), VK_PIPELINE_BIND_POINT_COMPUTE,
				self.current_pipeline_layout_c(),
				first, descriptor_sets.len() as _, descriptor_sets.as_ptr(),
				dynamic_offsets.len() as _, dynamic_offsets.as_ptr())
		};
		return self;
	}
	/// Update the value of push constant
	///
	/// # Panics
	/// Panics if a graphics pipeline layout has not been bound beforehand.
	pub fn push_graphics_constant<T>(&mut self, stage: ShaderStage, offset: u32, value: &T) -> &mut Self
	{
		unsafe
		{
			// `value` is passed by pointer with its byte size; T must be plain data
			vkCmdPushConstants(self.ptr.native_ptr(), self.current_pipeline_layout_g(),
				stage.0, offset, size_of::<T>() as _, value as *const T as *const _);
		}
		return self;
	}
	/// Update the value of push constant
	///
	/// # Panics
	/// Panics if a compute pipeline layout has not been bound beforehand.
	pub fn push_compute_constant<T>(&mut self, stage: ShaderStage, offset: u32, value: &T) -> &mut Self
	{
		unsafe
		{
			vkCmdPushConstants(self.ptr.native_ptr(), self.current_pipeline_layout_c(),
				stage.0, offset, size_of::<T>() as _, value as *const T as *const _);
		}
		return self;
	}

	/// Push descriptor updates into a command buffer
	///
	/// # Panics
	/// Panics if a graphics pipeline layout has not been bound beforehand.
	#[cfg(feature = "VK_KHR_push_descriptor")]
	pub fn push_graphics_descriptor_set(&mut self, set: u32, writes: &[DescriptorSetWriteInfo]) -> &mut Self
	{
		// save flatten results
		// NOTE: `wt` owns the flattened image/buffer info Vecs; the raw pointers
		// stored in `w` below point into `wt`, so `wt` must outlive the FFI call.
		let wt = writes.iter().map(|x|
		{
			let (ty, cnt, iv, bv, bvv) = x.3.decomposite();
			let ivs = iv.iter().map(|&(s, v, l)| VkDescriptorImageInfo
			{
				sampler: s.unwrap_or(VK_NULL_HANDLE as _), imageView: v, imageLayout: l as _
			}).collect::<Vec<_>>();
			let bvs = bv.iter()
				.map(|&(b, ref r)| VkDescriptorBufferInfo { buffer: b, offset: r.start as _, range: r.len() as _ })
				.collect::<Vec<_>>();
			(x.0, x.1, x.2, ty, cnt, ivs, bvs, bvv)
		}).collect::<Vec<_>>();
		let w = wt.iter().map(|&(set, binding, array, dty, count, ref iv, ref bv, ref bvv)| VkWriteDescriptorSet
		{
			dstSet: set, dstBinding: binding, dstArrayElement: array, descriptorType: dty as _, descriptorCount: count,
			pImageInfo: iv.as_ptr(), pBufferInfo: bv.as_ptr(), pTexelBufferView: bvv.as_ptr(), .. Default::default()
		}).collect::<Vec<_>>();
		unsafe
		{
			vkCmdPushDescriptorSetKHR(self.ptr.native_ptr(), VK_PIPELINE_BIND_POINT_GRAPHICS,
				self.current_pipeline_layout_g(), set, w.len() as _, w.as_ptr())
		};
		return self;
	}
	/// Push descriptor updates into a command buffer
	///
	/// # Panics
	/// Panics if a compute pipeline layout has not been bound beforehand.
	#[cfg(feature = "VK_KHR_push_descriptor")]
	pub fn push_compute_descriptor_set(&mut self, set: u32, writes: &[DescriptorSetWriteInfo]) -> &mut Self
	{
		// save flatten results
		// NOTE: same lifetime requirement as the graphics variant above —
		// `wt` backs the pointers in `w` until the FFI call completes.
		let wt = writes.iter().map(|x|
		{
			let (ty, cnt, iv, bv, bvv) = x.3.decomposite();
			let ivs = iv.iter().map(|&(s, v, l)| VkDescriptorImageInfo
			{
				sampler: s.unwrap_or(VK_NULL_HANDLE as _), imageView: v, imageLayout: l as _
			}).collect::<Vec<_>>();
			let bvs = bv.iter()
				.map(|&(b, ref r)| VkDescriptorBufferInfo { buffer: b, offset: r.start as _, range: r.len() as _ })
				.collect::<Vec<_>>();
			(x.0, x.1, x.2, ty, cnt, ivs, bvs, bvv)
		}).collect::<Vec<_>>();
		let w = wt.iter().map(|&(set, binding, array, dty, count, ref iv, ref bv, ref bvv)| VkWriteDescriptorSet
		{
			dstSet: set, dstBinding: binding, dstArrayElement: array, descriptorType: dty as _, descriptorCount: count,
			pImageInfo: iv.as_ptr(), pBufferInfo: bv.as_ptr(), pTexelBufferView: bvv.as_ptr(), .. Default::default()
		}).collect::<Vec<_>>();
		unsafe
		{
			vkCmdPushDescriptorSetKHR(self.ptr.native_ptr(), VK_PIPELINE_BIND_POINT_COMPUTE,
				self.current_pipeline_layout_c(), set, w.len() as _, w.as_ptr())
		};
		return self;
	}
}
321
/// [feature = "Implements"] Graphics Commands: Updating dynamic states
#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
	/// Set the viewport on a command buffer
	pub fn set_viewport(&mut self, first: u32, viewports: &[VkViewport]) -> &mut Self
	{
		unsafe { vkCmdSetViewport(self.ptr.native_ptr(), first, viewports.len() as _, viewports.as_ptr()) };
		return self;
	}
	/// Set the dynamic scissor rectangles on a command buffer
	pub fn set_scissor(&mut self, first: u32, scissors: &[VkRect2D]) -> &mut Self
	{
		unsafe { vkCmdSetScissor(self.ptr.native_ptr(), first, scissors.len() as _, scissors.as_ptr()) };
		return self;
	}
	/// Set the dynamic line width state
	// Fixed: returned `&Self` while every sibling setter returns `&mut Self`,
	// which broke method chaining after this call. `&mut Self` coerces to `&Self`,
	// so existing callers remain valid.
	pub fn set_line_width(&mut self, w: f32) -> &mut Self
	{
		unsafe { vkCmdSetLineWidth(self.ptr.native_ptr(), w) };
		return self;
	}
	/// Set the depth bias dynamic state
	pub fn set_depth_bias(&mut self, constant_factor: f32, clamp: f32, slope_factor: f32) -> &mut Self
	{
		unsafe { vkCmdSetDepthBias(self.ptr.native_ptr(), constant_factor, clamp, slope_factor) };
		return self;
	}
	/// Set the values of blend constants
	pub fn set_blend_constants(&mut self, blend_constants: [f32; 4]) -> &mut Self
	{
		unsafe { vkCmdSetBlendConstants(self.ptr.native_ptr(), blend_constants) };
		return self;
	}
	/// Set the depth bounds test values for a command buffer
	pub fn set_depth_bounds(&mut self, bounds: Range<f32>) -> &mut Self
	{
		unsafe { vkCmdSetDepthBounds(self.ptr.native_ptr(), bounds.start, bounds.end) };
		return self;
	}
	/// Set the stencil compare mask dynamic state
	pub fn set_stencil_compare_mask(&mut self, face_mask: StencilFaceMask, compare_mask: u32) -> &mut Self
	{
		unsafe { vkCmdSetStencilCompareMask(self.ptr.native_ptr(), face_mask as _, compare_mask) };
		return self;
	}
	/// Set the stencil write mask dynamic state
	pub fn set_stencil_write_mask(&mut self, face_mask: StencilFaceMask, write_mask: u32) -> &mut Self
	{
		unsafe { vkCmdSetStencilWriteMask(self.ptr.native_ptr(), face_mask as _, write_mask) };
		return self;
	}
	/// Set the stencil reference dynamic state
	pub fn set_stencil_reference(&mut self, face_mask: StencilFaceMask, reference: u32) -> &mut Self
	{
		unsafe { vkCmdSetStencilReference(self.ptr.native_ptr(), face_mask as _, reference) };
		return self;
	}
	/// [feature = "VK_EXT_sample_locations"]
	/// Set the sample locations state
	#[cfg(feature = "VK_EXT_sample_locations")]
	pub fn set_sample_locations(&mut self, info: &VkSampleLocationsInfoEXT) -> &mut Self
	{
		unsafe { vkCmdSetSampleLocationsEXT(self.ptr.native_ptr(), info as _); }
		return self;
	}
}
389
/// [feature = "Implements"] Graphics Commands: Binding Buffers
#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
	/// Bind an index buffer to a command buffer
	pub fn bind_index_buffer(&mut self, buffer: &Buffer, offset: usize, index_type: IndexType) -> &mut Self
	{
		let cb = self.ptr.native_ptr();
		unsafe { vkCmdBindIndexBuffer(cb, buffer.native_ptr(), offset as _, index_type as _) };
		self
	}
	/// Bind vertex buffers to a command buffer
	pub fn bind_vertex_buffers(&mut self, first: u32, buffers: &[(&Buffer, usize)]) -> &mut Self
	{
		// Split the (buffer, offset) pairs into the two parallel arrays the API expects
		let mut handles = Vec::with_capacity(buffers.len());
		let mut offsets = Vec::with_capacity(buffers.len());
		for &(b, o) in buffers
		{
			handles.push(b.native_ptr());
			offsets.push(o as VkDeviceSize);
		}
		unsafe { vkCmdBindVertexBuffers(self.ptr.native_ptr(), first, handles.len() as _, handles.as_ptr(), offsets.as_ptr()) };
		self
	}
}
409
/// [feature = "Implements"] Graphics Commands: Inside a Render Pass
#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
	/// Draw primitives
	pub fn draw(&mut self, vertex_count: u32, instance_count: u32, first_vertex: u32, first_instance: u32) -> &mut Self
	{
		let cb = self.ptr.native_ptr();
		unsafe { vkCmdDraw(cb, vertex_count, instance_count, first_vertex, first_instance) };
		self
	}
	/// Issue an indexed draw into a command buffer
	pub fn draw_indexed(&mut self, index_count: u32, instance_count: u32,
		first_index: u32, vertex_offset: i32, first_instance: u32) -> &mut Self
	{
		let cb = self.ptr.native_ptr();
		unsafe { vkCmdDrawIndexed(cb, index_count, instance_count, first_index, vertex_offset, first_instance) };
		self
	}
	/// Issue an indirect draw into a command buffer
	pub fn draw_indirect(&mut self, buffer: &Buffer, offset: usize, draw_count: u32, stride: u32) -> &mut Self
	{
		let cb = self.ptr.native_ptr();
		unsafe { vkCmdDrawIndirect(cb, buffer.native_ptr(), offset as _, draw_count, stride) };
		self
	}
	/// Perform an indexed indirect draw
	pub fn draw_indexed_indirect(&mut self, buffer: &Buffer, offset: usize, draw_count: u32, stride: u32) -> &mut Self
	{
		let cb = self.ptr.native_ptr();
		unsafe { vkCmdDrawIndexedIndirect(cb, buffer.native_ptr(), offset as _, draw_count, stride) };
		self
	}
}
447
/// [feature = "Implements"] Compute Commands: Dispatching kernels
#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
	/// Dispatch compute work items
	pub fn dispatch(&mut self, group_count_x: u32, group_count_y: u32, group_count_z: u32) -> &mut Self
	{
		let cb = self.ptr.native_ptr();
		unsafe { vkCmdDispatch(cb, group_count_x, group_count_y, group_count_z) };
		self
	}
	/// Dispatch compute work items using indirect parameters
	pub fn dispatch_indirect(&mut self, buffer: &Buffer, offset: usize) -> &mut Self
	{
		let cb = self.ptr.native_ptr();
		unsafe { vkCmdDispatchIndirect(cb, buffer.native_ptr(), offset as _) };
		self
	}
}
465
/// [feature = "Implements"] Transfer Commands: Copying resources
#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
	/// Copy data between buffer regions
	pub fn copy_buffer(&mut self, src: &Buffer, dst: &Buffer, regions: &[VkBufferCopy]) -> &mut Self
	{
		let cb = self.ptr.native_ptr();
		unsafe { vkCmdCopyBuffer(cb, src.native_ptr(), dst.native_ptr(), regions.len() as _, regions.as_ptr()) };
		self
	}
	/// Copy data between images
	pub fn copy_image(&mut self, src: &Image, src_layout: ImageLayout,
		dst: &Image, dst_layout: ImageLayout, regions: &[VkImageCopy]) -> &mut Self
	{
		let cb = self.ptr.native_ptr();
		unsafe
		{
			vkCmdCopyImage(cb, src.native_ptr(), src_layout as _, dst.native_ptr(), dst_layout as _,
				regions.len() as _, regions.as_ptr())
		};
		self
	}
	/// Copy regions of an image, potentially performing format conversion
	pub fn blit_image(&mut self, src: &Image, src_layout: ImageLayout, dst: &Image, dst_layout: ImageLayout,
		regions: &[VkImageBlit], filter: FilterMode) -> &mut Self
	{
		let cb = self.ptr.native_ptr();
		unsafe
		{
			vkCmdBlitImage(cb, src.native_ptr(), src_layout as _, dst.native_ptr(), dst_layout as _,
				regions.len() as _, regions.as_ptr(), filter as _)
		};
		self
	}
	/// Copy data from a buffer into an image
	pub fn copy_buffer_to_image(&mut self, src_buffer: &Buffer, dst_image: &Image, dst_layout: ImageLayout,
		regions: &[VkBufferImageCopy]) -> &mut Self
	{
		let cb = self.ptr.native_ptr();
		unsafe
		{
			vkCmdCopyBufferToImage(cb, src_buffer.native_ptr(), dst_image.native_ptr(), dst_layout as _,
				regions.len() as _, regions.as_ptr())
		};
		self
	}
	/// Copy image data into a buffer
	pub fn copy_image_to_buffer(&mut self, src_image: &Image, src_layout: ImageLayout, dst_buffer: &Buffer,
		regions: &[VkBufferImageCopy]) -> &mut Self
	{
		let cb = self.ptr.native_ptr();
		unsafe
		{
			vkCmdCopyImageToBuffer(cb, src_image.native_ptr(), src_layout as _, dst_buffer.native_ptr(),
				regions.len() as _, regions.as_ptr())
		};
		self
	}
	/// Update a buffer's contents from host memory
	pub fn update_buffer<T>(&mut self, dst: &Buffer, dst_offset: usize, size: usize, data: &T) -> &mut Self
	{
		// Reject reads past the end of the source value
		assert!(size <= size_of::<T>(), "Updated size exceeds size of datatype");
		let src = data as *const T as *const _;
		unsafe { vkCmdUpdateBuffer(self.ptr.native_ptr(), dst.native_ptr(), dst_offset as _, size as _, src) };
		self
	}
}
536
/// [feature = "Implements"] Graphics/Compute Commands: Transfer-like(clearing/filling) commands
#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
	/// Fill a region of a buffer with a fixed value.  
	/// `size` is number of bytes to fill
	pub fn fill_buffer(&mut self, dst: &Buffer, dst_offset: usize, size: usize, data: u32) -> &mut Self
	{
		let cb = self.ptr.native_ptr();
		unsafe { vkCmdFillBuffer(cb, dst.native_ptr(), dst_offset as _, size as _, data) };
		self
	}
	/// Clear regions of a color image
	pub fn clear_color_image<T: ClearColorValue>(&mut self, image: &Image, layout: ImageLayout,
		color: &T, ranges: &[VkImageSubresourceRange]) -> &mut Self
	{
		let cb = self.ptr.native_ptr();
		unsafe
		{
			vkCmdClearColorImage(cb, image.native_ptr(), layout as _, color.represent(),
				ranges.len() as _, ranges.as_ptr())
		};
		self
	}
	/// Fill regions of a combined depth/stencil image
	pub fn clear_depth_stencil_image(&mut self, image: &Image, layout: ImageLayout, depth: f32, stencil: u32,
		ranges: &[VkImageSubresourceRange]) -> &mut Self
	{
		let dsv = VkClearDepthStencilValue { depth, stencil };
		unsafe
		{
			vkCmdClearDepthStencilImage(self.ptr.native_ptr(), image.native_ptr(), layout as _, &dsv,
				ranges.len() as _, ranges.as_ptr())
		};
		self
	}
	/// Clear regions within currently bound framebuffer attachments
	pub fn clear_attachments(&mut self, attachments: &[VkClearAttachment], rects: &[VkClearRect]) -> &mut Self
	{
		let cb = self.ptr.native_ptr();
		unsafe
		{
			vkCmdClearAttachments(cb, attachments.len() as _, attachments.as_ptr(),
				rects.len() as _, rects.as_ptr())
		};
		self
	}
}
581
/// [feature = "Implements"] Graphics Commands: Executing Subcommands
#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
	/// Execute a secondary command buffer from a primary command buffer
	/// # Safety
	/// 
	/// Caller must be primary buffer and in the render pass when executing secondary command buffer
	pub unsafe fn execute_commands(&mut self, buffers: &[VkCommandBuffer]) -> &mut Self
	{
		// `unsafe fn` body: no inner block required for the FFI call
		vkCmdExecuteCommands(self.ptr.native_ptr(), buffers.len() as _, buffers.as_ptr());
		self
	}
}
596
/// [feature = "Implements"] Graphics Commands: Resolving an image to another image
#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
	/// Resolve regions of an image
	pub fn resolve_image(&mut self, src: &Image, src_layout: ImageLayout, dst: &Image, dst_layout: ImageLayout,
		regions: &[VkImageResolve]) -> &mut Self
	{
		let cb = self.ptr.native_ptr();
		unsafe
		{
			vkCmdResolveImage(cb, src.native_ptr(), src_layout as _, dst.native_ptr(), dst_layout as _,
				regions.len() as _, regions.as_ptr())
		};
		self
	}
}
613
/// [feature = "Implements"] Graphics/Compute Commands: Synchronization between command buffers/queues
#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
	/// Set an event object to signaled state
	pub fn set_event(&mut self, event: &Event, stage_mask: PipelineStageFlags) -> &mut Self
	{
		unsafe { vkCmdSetEvent(self.ptr.native_ptr(), event.0, stage_mask.0) };
		self
	}
	/// Reset an event object to non-signaled state
	pub fn reset_event(&mut self, event: &Event, stage_mask: PipelineStageFlags) -> &mut Self
	{
		unsafe { vkCmdResetEvent(self.ptr.native_ptr(), event.0, stage_mask.0) };
		self
	}
	/// Wait for one or more events and insert a set of memory
	pub fn wait_events(&mut self, events: &[&Event],
		src_stage_mask: PipelineStageFlags, dst_stage_mask: PipelineStageFlags,
		memory_barriers: &[VkMemoryBarrier], buffer_memory_barriers: &[VkBufferMemoryBarrier],
		image_memory_barriers: &[VkImageMemoryBarrier]) -> &mut Self
	{
		// Flatten the wrapper references into the raw handle array the API expects
		let native_events: Vec<_> = events.iter().map(|e| e.0).collect();
		unsafe
		{
			vkCmdWaitEvents(self.ptr.native_ptr(), native_events.len() as _, native_events.as_ptr(),
				src_stage_mask.0, dst_stage_mask.0,
				memory_barriers.len() as _, memory_barriers.as_ptr(),
				buffer_memory_barriers.len() as _, buffer_memory_barriers.as_ptr(),
				image_memory_barriers.len() as _, image_memory_barriers.as_ptr())
		};
		self
	}
	/// Insert a memory dependency
	pub fn pipeline_barrier(&mut self, src_stage_mask: PipelineStageFlags, dst_stage_mask: PipelineStageFlags,
		by_region: bool, memory_barriers: &[VkMemoryBarrier], buffer_memory_barriers: &[BufferMemoryBarrier],
		image_memory_barriers: &[ImageMemoryBarrier]) -> &mut Self
	{
		let dependency_flags = if by_region { VK_DEPENDENCY_BY_REGION_BIT } else { 0 };
		unsafe
		{
			// The wrapper barrier slices are pointer-cast to their raw Vk counterparts
			vkCmdPipelineBarrier(self.ptr.native_ptr(), src_stage_mask.0, dst_stage_mask.0, dependency_flags,
				memory_barriers.len() as _, memory_barriers.as_ptr(),
				buffer_memory_barriers.len() as _, buffer_memory_barriers.as_ptr() as _,
				image_memory_barriers.len() as _, image_memory_barriers.as_ptr() as _)
		};
		self
	}
}
660
/// [feature = "Implements"] Graphics/Compute Commands: Querying
#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
	/// Begin a query
	pub fn begin_query(&mut self, pool: &QueryPool, query: u32, precise_query: bool) -> &mut Self
	{
		let control = if precise_query { VK_QUERY_CONTROL_PRECISE_BIT } else { 0 };
		unsafe { vkCmdBeginQuery(self.ptr.native_ptr(), pool.0, query, control) };
		self
	}
	/// Ends a query
	pub fn end_query(&mut self, pool: &QueryPool, query: u32) -> &mut Self
	{
		unsafe { vkCmdEndQuery(self.ptr.native_ptr(), pool.0, query) };
		self
	}
	/// Reset queries in a query pool
	pub fn reset_query_pool(&mut self, pool: &QueryPool, range: Range<u32>) -> &mut Self
	{
		let count = range.end - range.start;
		unsafe { vkCmdResetQueryPool(self.ptr.native_ptr(), pool.0, range.start, count) };
		self
	}
	/// Write a device timestamp into a query object
	pub fn write_timestamp(&mut self, stage: PipelineStageFlags, pool: &QueryPool, query: u32) -> &mut Self
	{
		unsafe { vkCmdWriteTimestamp(self.ptr.native_ptr(), stage.0, pool.0, query) };
		self
	}
	/// Copy the results of queries in a query pool to a buffer object
	pub fn copy_query_pool_results(&mut self, pool: &QueryPool, range: Range<u32>, dst: &Buffer, dst_offset: usize,
		stride: usize, wide_result: bool, flags: QueryResultFlags) -> &mut Self
	{
		let count = range.end - range.start;
		let result_flags = flags.0 | if wide_result { VK_QUERY_RESULT_64_BIT } else { 0 };
		unsafe
		{
			vkCmdCopyQueryPoolResults(self.ptr.native_ptr(), pool.0, range.start, count,
				dst.native_ptr(), dst_offset as _, stride as _, result_flags)
		};
		self
	}
}
704
/// [feature = "Implements"] Graphics Commands: Manipulating with Render Passes
#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
	/// Begin a new render pass
	pub fn begin_render_pass(&mut self, pass: &RenderPass, framebuffer: &Framebuffer, render_area: VkRect2D,
		clear_values: &[ClearValue], inline_commands: bool) -> &mut Self
	{
		// Convert the wrapper clear values into the raw union representation;
		// this Vec must stay alive until vkCmdBeginRenderPass returns.
		let cvalues: Vec<VkClearValue> = clear_values.iter().map(|cv| match *cv
		{
			ClearValue::Color(ref col) => VkClearValue { color: VkClearColorValue { float32: col.clone() } },
			ClearValue::DepthStencil(depth, stencil) =>
				VkClearValue { depthStencil: VkClearDepthStencilValue { depth, stencil } }
		}).collect();
		let contents = if inline_commands { VK_SUBPASS_CONTENTS_INLINE }
			else { VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS };
		let binfo = VkRenderPassBeginInfo
		{
			renderPass: pass.native_ptr(), framebuffer: framebuffer.native_ptr(), renderArea: render_area,
			clearValueCount: cvalues.len() as _, pClearValues: cvalues.as_ptr(), .. Default::default()
		};
		unsafe { vkCmdBeginRenderPass(self.ptr.native_ptr(), &binfo, contents) };
		self
	}
	/// Transition to the next subpass of a render pass
	pub fn next_subpass(&mut self, inline_commands: bool) -> &mut Self
	{
		let contents = if inline_commands { VK_SUBPASS_CONTENTS_INLINE }
			else { VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS };
		unsafe { vkCmdNextSubpass(self.ptr.native_ptr(), contents) };
		self
	}
	/// End the current render pass
	pub fn end_render_pass(&mut self) -> &mut Self
	{
		unsafe { vkCmdEndRenderPass(self.ptr.native_ptr()) };
		self
	}
}
744
/// The trait representation of `VkClearColorValue`
pub trait ClearColorValue
{
	/// Borrow `self` reinterpreted as the raw `VkClearColorValue` union
	fn represent(&self) -> &VkClearColorValue;
}
// NOTE(review): these reference transmutes assume VkClearColorValue is a union
// over [f32; 4]/[i32; 4]/[u32; 4] with identical size/alignment — confirm against
// the vk module's definition.
impl ClearColorValue for [f32; 4] { fn represent(&self) -> &VkClearColorValue { unsafe { ::std::mem::transmute(self) } } }
impl ClearColorValue for [i32; 4] { fn represent(&self) -> &VkClearColorValue { unsafe { ::std::mem::transmute(self) } } }
impl ClearColorValue for [u32; 4] { fn represent(&self) -> &VkClearColorValue { unsafe { ::std::mem::transmute(self) } } }
753
/// The enum representation of `VkClearValue`
///
/// Converted into the raw `VkClearValue` union by `CmdRecord::begin_render_pass`.
pub enum ClearValue
{
	/// Color Value: r, g, b, a
	Color([f32; 4]),
	/// Depth and Stencil Value: depth, stencil
	DepthStencil(f32, u32)
}
762
/// Type of index buffer indices
///
/// Discriminants mirror the raw `VkIndexType` values so the enum can be
/// passed straight through with an `as _` cast (see `bind_index_buffer`).
#[repr(C)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum IndexType
{
	/// Indices are 16-bit unsigned integer values
	U16 = VK_INDEX_TYPE_UINT16 as _,
	/// Indices are 32-bit unsigned integer values
	U32 = VK_INDEX_TYPE_UINT32 as _
}
772
/// Enabling or disabling the occlusion query
///
/// Used by `CommandBuffer::begin_inherit`: `Disable` leaves occlusion queries
/// off; `Enable` and `Precise` turn them on, with `Precise` additionally
/// setting `VK_QUERY_CONTROL_PRECISE_BIT`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum OcclusionQuery
{
	Disable, Enable,
	/// `VK_QUERY_CONTROL_PRECISE_BIT`
	Precise
}
781
/// Access Types
///
/// Pairs the read-side and write-side `VkAccessFlags` bits for one kind of access.
pub struct AccessFlags { pub read: VkAccessFlags, pub write: VkAccessFlags }
784impl AccessFlags
785{
786	/// Specifies read access to an indirect command structure read as part of an indirect drawing or dispatch command.
787	pub const INDIRECT_COMMAND_READ: VkAccessFlags = VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
788	/// Specifies read access to an index buffer as part of an indexed drawing command, bound by `vkCmdBindIndexBuffer`.
789	pub const INDEX_READ: VkAccessFlags = VK_ACCESS_INDEX_READ_BIT;
790	/// Specifies read access to a vertex buffer as part of a drawing command, bound by `vkCmdBindVertexBuffers`.
791	pub const VERTEX_ATTRIBUTE_READ: VkAccessFlags = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
792	/// Specifies read access to a [uniform buffer](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#descriptorsets-uniformbuffer).
793	pub const UNIFORM_READ: VkAccessFlags = VK_ACCESS_UNIFORM_READ_BIT;
794	/// Specifies read access to an [input attachment](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#renderpass) within a render pass during fragment shading.
795	pub const INPUT_ATTACHMENT_READ: VkAccessFlags = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
796	/// Specifies read/write access to a [storage buffer](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#descriptorsets-storagebuffer),
797	/// [uniform texel buffer](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#descriptorsets-uniformtexelbuffer)(read only),
798	/// [storage texel buffer](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#descriptorsets-storagetexelbuffer),
799	/// [samples image](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#descriptorsets-sampledimage)(read only),
800	/// or [storage image](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#descriptorsets-storageimage).
801	pub const SHADER: Self = AccessFlags { read: VK_ACCESS_SHADER_READ_BIT, write: VK_ACCESS_SHADER_WRITE_BIT };
802	/// - `read`: Specifies read access to a [color attachment](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#renderpass),
803	///   such as via [blending](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#framebuffer-blending),
804	///   [logic operations](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#framebuffer-logicop),
805	///   or via certain [subpass load operations](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#framebuffer-logicop).
806	/// - `write`: specifies write access to a [color or resolve attachment](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#renderpass)
807	///   during a [render pass](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#renderpass)
808	///   or via certain [subpass load and store operations](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#renderpass-load-store-ops).
809	pub const COLOR_ATTACHMENT: Self = AccessFlags
810	{
811		read: VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, write: VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
812	};
813	/// - `read`: Specifies read access to a [depth/stencil attachment](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#renderpass),
814	///   via [depth or stencil operations](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#fragops-ds-state)
815	///   or via certain [subpass load operations](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#renderpass-load-store-ops).
816	/// - `write`: Specifies write access to a [depth/stencil attachment](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#renderpass),
817	///   via [depth or stencil operations](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#fragops-ds-state)
818	///   or via certain [subpass load and store operations](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#renderpass-load-store-ops).
819	pub const DEPTH_STENCIL_ATTACHMENT: Self = AccessFlags
820	{
821		read: VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT, write: VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
822	};
823	/// Specifies read/write access to an image or buffer in a [clear](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#clears)(write only)
824	/// or [copy](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#copies) operation.
825	pub const TRANSFER: Self = AccessFlags { read: VK_ACCESS_TRANSFER_READ_BIT, write: VK_ACCESS_TRANSFER_WRITE_BIT };
826	/// Specifies read/write access by a host operation.
827	/// Accesses of this type are not performed through a resource, but directly on memory.
828	pub const HOST: Self = AccessFlags { read: VK_ACCESS_HOST_READ_BIT, write: VK_ACCESS_HOST_WRITE_BIT };
829	/// Specifies read/write access via non-specific entities.
830	/// These entities include the Vulkan device and host, but *may* also include entities external to the Vulkan device
831	/// or otherwise not part of the core Vulkan pipeline.
832	/// 
833	/// - When the `write` mask included in a source access mask, all writes that are performed by entities known to the
834	///   Vulkan device are made available.
835	/// - When included in a destination access mask, makes all available writes visible to all future read accesses on
836	///   entities known to the Vulkan device.
837	pub const MEMORY: Self = AccessFlags { read: VK_ACCESS_MEMORY_READ_BIT, write: VK_ACCESS_MEMORY_WRITE_BIT };
838}
839
840use std::mem::replace;
841/// Image Subresource Slice
842#[derive(Clone)]
843pub struct ImageSubref<'d>(pub &'d Image, pub VkImageSubresourceRange);
844impl<'d> ImageSubref<'d>
845{
846	/// Construct a slice for the Color aspect(`VK_IMAGE_ASPECT_COLOR_BIT`)
847	pub fn color<Levels, Layers>(image: &'d Image, mip_levels: Levels, array_layers: Layers) -> Self
848		where Levels: ::AnalogNumRange<u32>, Layers: ::AnalogNumRange<u32>
849	{
850		ImageSubref(image, VkImageSubresourceRange
851		{
852			aspectMask: VK_IMAGE_ASPECT_COLOR_BIT,
853			baseMipLevel: mip_levels.begin(), baseArrayLayer: array_layers.begin(),
854			levelCount: mip_levels.count(), layerCount: array_layers.count()
855		})
856	}
857	/// Construct a slice for the Stencil aspect(`VK_IMAGE_ASPECT_STENCIL_BIT`)
858	pub fn stencil<Levels, Layers>(image: &'d Image, mip_levels: Levels, array_layers: Layers) -> Self
859		where Levels: ::AnalogNumRange<u32>, Layers: ::AnalogNumRange<u32>
860	{
861		ImageSubref(image, VkImageSubresourceRange
862		{
863			aspectMask: VK_IMAGE_ASPECT_STENCIL_BIT,
864			baseMipLevel: mip_levels.begin(), baseArrayLayer: array_layers.begin(),
865			levelCount: mip_levels.count(), layerCount: array_layers.count()
866		})
867	}
868}
869
870/// Wrapper object of `VkImageMemoryBarrier`, derscribes a memory barrier of an image.
871#[derive(Clone)]
872pub struct ImageMemoryBarrier(VkImageMemoryBarrier);
873impl ImageMemoryBarrier
874{
875	/// Construct a new barrier descriptor
876	pub fn new(img: &ImageSubref, old_layout: ImageLayout, new_layout: ImageLayout) -> Self
877	{
878		ImageMemoryBarrier(VkImageMemoryBarrier
879		{
880			image: img.0.native_ptr(), subresourceRange: img.1.clone(),
881			oldLayout: old_layout as _, newLayout: new_layout as _,
882			srcAccessMask: old_layout.default_access_mask(),
883			dstAccessMask: new_layout.default_access_mask(), .. Default::default()
884		})
885	}
886	/// Construct a new barrier descriptor from discrete pair of resource and subresource range
887	pub fn new_raw<SR>(res: &Image, subres: &SR, old: ImageLayout, new: ImageLayout) -> Self
888		where SR: Borrow<VkImageSubresourceRange>
889	{
890		ImageMemoryBarrier(VkImageMemoryBarrier
891		{
892			image: res.native_ptr(), subresourceRange: subres.borrow().clone(),
893			oldLayout: old as _, newLayout: new as _,
894			srcAccessMask: old.default_access_mask(), dstAccessMask: new.default_access_mask(), .. Default::default()
895		})
896	}
897	/// Update the source access mask
898	pub fn src_access_mask(mut self, mask: VkAccessFlags) -> Self
899	{
900		self.0.srcAccessMask = mask; return self;
901	}
902	/// Update the destination access mask
903	pub fn dest_access_mask(mut self, mask: VkAccessFlags) -> Self
904	{
905		self.0.dstAccessMask = mask; return self;
906	}
907	/// Flip access masks and image layouts
908	pub fn flip(mut self) -> Self
909	{
910		self.0.dstAccessMask = replace(&mut self.0.srcAccessMask, self.0.dstAccessMask);
911		self.0.newLayout = replace(&mut self.0.oldLayout, self.0.newLayout);
912		return self;
913	}
914}
915/// Wrapper object of `VkBufferMemoryBarrier`, describes a memory barrier of a buffer.
916#[derive(Clone)]
917pub struct BufferMemoryBarrier(VkBufferMemoryBarrier);
918impl BufferMemoryBarrier
919{
920	/// Construct a new buffer descriptor
921	pub fn new(buf: &Buffer, range: Range<usize>, src_access_mask: VkAccessFlags, dst_access_mask: VkAccessFlags)
922		-> Self
923	{
924		BufferMemoryBarrier(VkBufferMemoryBarrier
925		{
926			buffer: buf.native_ptr(), offset: range.start as _, size: (range.end - range.start) as _,
927			srcAccessMask: src_access_mask, dstAccessMask: dst_access_mask, .. Default::default()
928		})
929	}
930	/// Update the source access mask
931	pub fn src_access_mask(mut self, mask: VkAccessFlags) -> Self
932	{
933		self.0.srcAccessMask = mask; return self;
934	}
935	/// Update the destination access mask
936	pub fn dest_access_mask(mut self, mask: VkAccessFlags) -> Self
937	{
938		self.0.dstAccessMask = mask; return self;
939	}
940	/// Flip access masks
941	pub fn flip(mut self) -> Self
942	{
943		self.0.dstAccessMask = replace(&mut self.0.srcAccessMask, self.0.dstAccessMask);
944		return self;
945	}
946}