use vk::*;
use {VkHandle, Device, DeviceChild};
#[cfg(feature = "Implements")]
use VkResultHandler;
#[cfg(feature = "Implements")]
use std::mem::{size_of, transmute};
use std::ops::Range;
use std::borrow::Borrow;
use {Image, Buffer, ImageLayout};
#[cfg(feature = "Implements")]
use {Framebuffer, RenderPass, Pipeline, PipelineLayout, PipelineStageFlags, ShaderStage};
#[cfg(feature = "Implements")]
use {StencilFaceMask, FilterMode, Event};
#[cfg(feature = "Implements")]
use {QueryPipelineStatisticFlags, QueryPool, QueryResultFlags};

#[derive(Clone)] pub struct CommandPool(VkCommandPool, ::Device);
#[repr(C)] #[derive(Clone, Copy)] pub struct CommandBuffer(VkCommandBuffer);

#[cfg(feature = "Implements")] DeviceChildCommonDrop!{ for CommandPool[vkDestroyCommandPool] }
impl VkHandle for CommandPool { type Handle = VkCommandPool; fn native_ptr(&self) -> VkCommandPool { self.0 } }
impl VkHandle for CommandBuffer { type Handle = VkCommandBuffer; fn native_ptr(&self) -> VkCommandBuffer { self.0 } }
impl DeviceChild for CommandPool { fn device(&self) -> &Device { &self.1 } }

#[cfg(feature = "Implements")]
pub struct CmdRecord<'d> { ptr: &'d CommandBuffer, layout: [Option<VkPipelineLayout>; 2] }

#[cfg(feature = "Implements")]
impl<'d> Drop for CmdRecord<'d>
{
    fn drop(&mut self)
    {
        unsafe
        {
            vkEndCommandBuffer(self.ptr.native_ptr()).into_result().expect("Error closing command recording state");
        }
    }
}

#[cfg(feature = "Implements")]
impl CommandPool
{
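    /// Creates a new command pool for `queue_family` on `device`.
    /// `transient` marks command buffers from this pool as short-lived, and
    /// `indiv_resettable` allows resetting buffers individually.
    ///
    /// A minimal sketch (hypothetical: assumes an already-created `Device`
    /// named `device` and that queue family 0 exists):
    ///
    /// ```ignore
    /// let pool = CommandPool::new(&device, 0, false, true)?;
    /// ```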
    pub fn new(device: &Device, queue_family: u32, transient: bool, indiv_resettable: bool) -> ::Result<Self>
    {
        let cinfo = VkCommandPoolCreateInfo
        {
            queueFamilyIndex: queue_family,
            flags: if transient { VK_COMMAND_POOL_CREATE_TRANSIENT_BIT } else { 0 }
                | if indiv_resettable { VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT } else { 0 },
            .. Default::default()
        };
        let mut h = VK_NULL_HANDLE as _;
        unsafe
        {
            vkCreateCommandPool(device.native_ptr(), &cinfo, ::std::ptr::null(), &mut h)
                .into_result().map(|_| CommandPool(h, device.clone()))
        }
    }
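    /// Allocates `count` command buffers from this pool: primary level when
    /// `primary` is true, secondary level otherwise.
    ///
    /// A minimal sketch (hypothetical `pool` created by `CommandPool::new`):
    ///
    /// ```ignore
    /// let buffers = pool.alloc(2, true)?; // two primary-level command buffers
    /// ```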
    pub fn alloc(&self, count: u32, primary: bool) -> ::Result<Vec<CommandBuffer>>
    {
        let ainfo = VkCommandBufferAllocateInfo
        {
            commandBufferCount: count,
            level: if primary { VK_COMMAND_BUFFER_LEVEL_PRIMARY } else { VK_COMMAND_BUFFER_LEVEL_SECONDARY },
            commandPool: self.0, .. Default::default()
        };
        let mut hs = vec![VK_NULL_HANDLE as _; count as _];
        unsafe
        {
            // CommandBuffer is a #[repr(C)] newtype over VkCommandBuffer, so the
            // filled Vec<VkCommandBuffer> can be transmuted into Vec<CommandBuffer>.
            vkAllocateCommandBuffers(self.1.native_ptr(), &ainfo, hs.as_mut_ptr()).into_result().map(|_| transmute(hs))
        }
    }
    pub fn reset(&self, release_resources: bool) -> ::Result<()>
    {
        let flags = if release_resources { VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT } else { 0 };
        unsafe { vkResetCommandPool(self.1.native_ptr(), self.0, flags).into_result() }
    }
    pub fn free(&self, buffers: &[CommandBuffer])
    {
        unsafe { vkFreeCommandBuffers(self.1.native_ptr(), self.0, buffers.len() as _, buffers.as_ptr() as *const _) };
    }
}

#[cfg(feature = "Implements")]
impl CommandBuffer
{
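    /// Starts recording this command buffer. Recording is closed automatically
    /// when the returned `CmdRecord` is dropped (its `Drop` impl calls
    /// `vkEndCommandBuffer`).
    ///
    /// A minimal sketch (hypothetical `cb` obtained from `CommandPool::alloc`
    /// and a `viewport` value prepared by the caller):
    ///
    /// ```ignore
    /// {
    ///     let mut rec = cb.begin()?;
    ///     rec.set_viewport(0, &[viewport]); // record commands here
    /// } // recording ends when `rec` drops
    /// ```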
    pub fn begin(&self) -> ::Result<CmdRecord>
    {
        unsafe
        {
            vkBeginCommandBuffer(self.0, &Default::default()).into_result()
                .map(|_| CmdRecord { ptr: self, layout: [None, None] })
        }
    }
    pub fn begin_once(&self) -> ::Result<CmdRecord>
    {
        let info = VkCommandBufferBeginInfo { flags: VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, .. Default::default() };
        unsafe
        {
            vkBeginCommandBuffer(self.0, &info).into_result().map(|_| CmdRecord { ptr: self, layout: [None, None] })
        }
    }
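    /// Starts recording a secondary command buffer that inherits render pass
    /// state from a primary one.
    ///
    /// A minimal sketch (hypothetical: `fb` and `rp` come from the caller's
    /// framebuffer/render pass setup, targeting subpass 0 with queries disabled):
    ///
    /// ```ignore
    /// let rec = cb.begin_inherit(Some((&fb, &rp, 0)), None)?;
    /// ```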
    pub fn begin_inherit(&self, renderpass: Option<(&Framebuffer, &RenderPass, u32)>,
        query: Option<(OcclusionQuery, QueryPipelineStatisticFlags)>) -> ::Result<CmdRecord>
    {
        let flags = if renderpass.is_some() { VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT } else { 0 };
        let (fb, rp, s) = renderpass.map(|(f, r, s)| (f.native_ptr(), r.native_ptr(), s))
            .unwrap_or((VK_NULL_HANDLE as _, VK_NULL_HANDLE as _, 0));
        let (oq, psq) = query.map(|(o, p)| (o, p.0)).unwrap_or((OcclusionQuery::Disable, 0));
        let inherit = VkCommandBufferInheritanceInfo
        {
            framebuffer: fb, renderPass: rp, subpass: s, occlusionQueryEnable: (oq != OcclusionQuery::Disable) as _,
            queryFlags: if oq == OcclusionQuery::Precise { VK_QUERY_CONTROL_PRECISE_BIT } else { 0 },
            pipelineStatistics: psq, .. Default::default()
        };
        let binfo = VkCommandBufferBeginInfo { pInheritanceInfo: &inherit, flags, .. Default::default() };
        unsafe
        {
            vkBeginCommandBuffer(self.0, &binfo).into_result().map(|_| CmdRecord { ptr: self, layout: [None, None] })
        }
    }
}

#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
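    /// Binds `pipeline` to the graphics bind point. Binding the pipeline
    /// together with its layout via `bind_graphics_pipeline_pair` also makes
    /// the layout available to later descriptor and push-constant commands.
    ///
    /// A minimal chaining sketch (hypothetical `rec` recording in progress,
    /// plus `pipeline`, `layout` and a descriptor set handle `set`):
    ///
    /// ```ignore
    /// rec.bind_graphics_pipeline_pair(&pipeline, &layout)
    ///     .bind_graphics_descriptor_sets(0, &[set], &[]);
    /// ```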
    pub fn bind_graphics_pipeline(&mut self, pipeline: &Pipeline) -> &mut Self
    {
        unsafe { vkCmdBindPipeline(self.ptr.native_ptr(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline.native_ptr()) };
        return self;
    }
    pub fn bind_compute_pipeline(&mut self, pipeline: &Pipeline) -> &mut Self
    {
        unsafe { vkCmdBindPipeline(self.ptr.native_ptr(), VK_PIPELINE_BIND_POINT_COMPUTE, pipeline.native_ptr()) };
        return self;
    }
    pub fn bind_graphics_pipeline_layout(&mut self, layout: &PipelineLayout) -> &mut Self
    {
        // The `layout` array is indexed by pipeline bind point (graphics = 0, compute = 1).
        self.layout[VK_PIPELINE_BIND_POINT_GRAPHICS as usize] = Some(layout.native_ptr());
        return self;
    }
    pub fn bind_compute_pipeline_layout(&mut self, layout: &PipelineLayout) -> &mut Self
    {
        self.layout[VK_PIPELINE_BIND_POINT_COMPUTE as usize] = Some(layout.native_ptr());
        return self;
    }
    pub fn bind_graphics_pipeline_pair(&mut self, pipeline: &Pipeline, layout: &PipelineLayout) -> &mut Self
    {
        self.bind_graphics_pipeline_layout(layout).bind_graphics_pipeline(pipeline)
    }
    pub fn bind_compute_pipeline_pair(&mut self, pipeline: &Pipeline, layout: &PipelineLayout) -> &mut Self
    {
        self.bind_compute_pipeline_layout(layout).bind_compute_pipeline(pipeline)
    }
    fn current_pipeline_layout_g(&self) -> VkPipelineLayout
    {
        self.layout[VK_PIPELINE_BIND_POINT_GRAPHICS as usize].expect("No pipeline layout is bound for graphics")
    }
    fn current_pipeline_layout_c(&self) -> VkPipelineLayout
    {
        self.layout[VK_PIPELINE_BIND_POINT_COMPUTE as usize].expect("No pipeline layout is bound for compute")
    }
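    /// Binds descriptor sets for graphics, starting at set number `first`.
    /// A graphics pipeline layout must have been bound beforehand.
    ///
    /// A minimal sketch (hypothetical `VkDescriptorSet` handle `set`, no
    /// dynamic offsets):
    ///
    /// ```ignore
    /// rec.bind_graphics_descriptor_sets(0, &[set], &[]);
    /// ```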
    pub fn bind_graphics_descriptor_sets(&mut self, first: u32,
        descriptor_sets: &[VkDescriptorSet], dynamic_offsets: &[u32]) -> &mut Self
    {
        unsafe
        {
            vkCmdBindDescriptorSets(self.ptr.native_ptr(), VK_PIPELINE_BIND_POINT_GRAPHICS,
                self.current_pipeline_layout_g(),
                first, descriptor_sets.len() as _, descriptor_sets.as_ptr(),
                dynamic_offsets.len() as _, dynamic_offsets.as_ptr())
        };
        return self;
    }
    pub fn bind_compute_descriptor_sets(&mut self, first: u32,
        descriptor_sets: &[VkDescriptorSet], dynamic_offsets: &[u32]) -> &mut Self
    {
        unsafe
        {
            vkCmdBindDescriptorSets(self.ptr.native_ptr(), VK_PIPELINE_BIND_POINT_COMPUTE,
                self.current_pipeline_layout_c(),
                first, descriptor_sets.len() as _, descriptor_sets.as_ptr(),
                dynamic_offsets.len() as _, dynamic_offsets.as_ptr())
        };
        return self;
    }
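    /// Pushes `value` into the graphics push-constant range at byte `offset`
    /// of the bound pipeline layout.
    ///
    /// A minimal sketch (hypothetical: assumes a `ShaderStage::VERTEX` constant
    /// and a layout declaring a `[f32; 4]` range at offset 0):
    ///
    /// ```ignore
    /// rec.push_graphics_constant(ShaderStage::VERTEX, 0, &[1.0f32, 0.0, 0.0, 1.0]);
    /// ```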
    pub fn push_graphics_constant<T>(&mut self, stage: ShaderStage, offset: u32, value: &T) -> &mut Self
    {
        unsafe
        {
            vkCmdPushConstants(self.ptr.native_ptr(), self.current_pipeline_layout_g(),
                stage.0, offset, size_of::<T>() as _, value as *const T as *const _);
        }
        return self;
    }
    pub fn push_compute_constant<T>(&mut self, stage: ShaderStage, offset: u32, value: &T) -> &mut Self
    {
        unsafe
        {
            vkCmdPushConstants(self.ptr.native_ptr(), self.current_pipeline_layout_c(),
                stage.0, offset, size_of::<T>() as _, value as *const T as *const _);
        }
        return self;
    }

    #[cfg(feature = "VK_KHR_push_descriptor")]
    pub fn push_graphics_descriptor_set(&mut self, set: u32, writes: &[DescriptorSetWriteInfo]) -> &mut Self
    {
        // Collect the per-write payloads into owned Vecs first, so the raw
        // pointers stored in each VkWriteDescriptorSet stay valid for the FFI call.
        let wt = writes.iter().map(|x|
        {
            let (ty, cnt, iv, bv, bvv) = x.3.decomposite();
            let ivs = iv.iter().map(|&(s, v, l)| VkDescriptorImageInfo
            {
                sampler: s.unwrap_or(VK_NULL_HANDLE as _), imageView: v, imageLayout: l as _
            }).collect::<Vec<_>>();
            let bvs = bv.iter()
                .map(|&(b, ref r)| VkDescriptorBufferInfo { buffer: b, offset: r.start as _, range: r.len() as _ })
                .collect::<Vec<_>>();
            (x.0, x.1, x.2, ty, cnt, ivs, bvs, bvv)
        }).collect::<Vec<_>>();
        let w = wt.iter().map(|&(set, binding, array, dty, count, ref iv, ref bv, ref bvv)| VkWriteDescriptorSet
        {
            dstSet: set, dstBinding: binding, dstArrayElement: array, descriptorType: dty as _, descriptorCount: count,
            pImageInfo: iv.as_ptr(), pBufferInfo: bv.as_ptr(), pTexelBufferView: bvv.as_ptr(), .. Default::default()
        }).collect::<Vec<_>>();
        unsafe
        {
            vkCmdPushDescriptorSetKHR(self.ptr.native_ptr(), VK_PIPELINE_BIND_POINT_GRAPHICS,
                self.current_pipeline_layout_g(), set, w.len() as _, w.as_ptr())
        };
        return self;
    }
    #[cfg(feature = "VK_KHR_push_descriptor")]
    pub fn push_compute_descriptor_set(&mut self, set: u32, writes: &[DescriptorSetWriteInfo]) -> &mut Self
    {
        // Same two-pass collection as push_graphics_descriptor_set, for the compute bind point.
        let wt = writes.iter().map(|x|
        {
            let (ty, cnt, iv, bv, bvv) = x.3.decomposite();
            let ivs = iv.iter().map(|&(s, v, l)| VkDescriptorImageInfo
            {
                sampler: s.unwrap_or(VK_NULL_HANDLE as _), imageView: v, imageLayout: l as _
            }).collect::<Vec<_>>();
            let bvs = bv.iter()
                .map(|&(b, ref r)| VkDescriptorBufferInfo { buffer: b, offset: r.start as _, range: r.len() as _ })
                .collect::<Vec<_>>();
            (x.0, x.1, x.2, ty, cnt, ivs, bvs, bvv)
        }).collect::<Vec<_>>();
        let w = wt.iter().map(|&(set, binding, array, dty, count, ref iv, ref bv, ref bvv)| VkWriteDescriptorSet
        {
            dstSet: set, dstBinding: binding, dstArrayElement: array, descriptorType: dty as _, descriptorCount: count,
            pImageInfo: iv.as_ptr(), pBufferInfo: bv.as_ptr(), pTexelBufferView: bvv.as_ptr(), .. Default::default()
        }).collect::<Vec<_>>();
        unsafe
        {
            vkCmdPushDescriptorSetKHR(self.ptr.native_ptr(), VK_PIPELINE_BIND_POINT_COMPUTE,
                self.current_pipeline_layout_c(), set, w.len() as _, w.as_ptr())
        };
        return self;
    }
}

#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
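    /// Sets the dynamic viewport state, starting at viewport index `first`.
    /// Only valid when the bound pipeline declares the matching dynamic state.
    ///
    /// A minimal sketch (a single 640x480 viewport):
    ///
    /// ```ignore
    /// rec.set_viewport(0, &[VkViewport
    /// {
    ///     x: 0.0, y: 0.0, width: 640.0, height: 480.0, minDepth: 0.0, maxDepth: 1.0
    /// }]);
    /// ```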
    pub fn set_viewport(&mut self, first: u32, viewports: &[VkViewport]) -> &mut Self
    {
        unsafe { vkCmdSetViewport(self.ptr.native_ptr(), first, viewports.len() as _, viewports.as_ptr()) };
        return self;
    }
    pub fn set_scissor(&mut self, first: u32, scissors: &[VkRect2D]) -> &mut Self
    {
        unsafe { vkCmdSetScissor(self.ptr.native_ptr(), first, scissors.len() as _, scissors.as_ptr()) };
        return self;
    }
    pub fn set_line_width(&mut self, w: f32) -> &mut Self
    {
        unsafe { vkCmdSetLineWidth(self.ptr.native_ptr(), w) };
        return self;
    }
    pub fn set_depth_bias(&mut self, constant_factor: f32, clamp: f32, slope_factor: f32) -> &mut Self
    {
        unsafe { vkCmdSetDepthBias(self.ptr.native_ptr(), constant_factor, clamp, slope_factor) };
        return self;
    }
    pub fn set_blend_constants(&mut self, blend_constants: [f32; 4]) -> &mut Self
    {
        unsafe { vkCmdSetBlendConstants(self.ptr.native_ptr(), blend_constants) };
        return self;
    }
    pub fn set_depth_bounds(&mut self, bounds: Range<f32>) -> &mut Self
    {
        unsafe { vkCmdSetDepthBounds(self.ptr.native_ptr(), bounds.start, bounds.end) };
        return self;
    }
    pub fn set_stencil_compare_mask(&mut self, face_mask: StencilFaceMask, compare_mask: u32) -> &mut Self
    {
        unsafe { vkCmdSetStencilCompareMask(self.ptr.native_ptr(), face_mask as _, compare_mask) };
        return self;
    }
    pub fn set_stencil_write_mask(&mut self, face_mask: StencilFaceMask, write_mask: u32) -> &mut Self
    {
        unsafe { vkCmdSetStencilWriteMask(self.ptr.native_ptr(), face_mask as _, write_mask) };
        return self;
    }
    pub fn set_stencil_reference(&mut self, face_mask: StencilFaceMask, reference: u32) -> &mut Self
    {
        unsafe { vkCmdSetStencilReference(self.ptr.native_ptr(), face_mask as _, reference) };
        return self;
    }
    #[cfg(feature = "VK_EXT_sample_locations")]
    pub fn set_sample_locations(&mut self, info: &VkSampleLocationsInfoEXT) -> &mut Self
    {
        unsafe { vkCmdSetSampleLocationsEXT(self.ptr.native_ptr(), info as _) };
        return self;
    }
}

#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
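    /// Binds an index buffer at byte `offset` with the given index width.
    ///
    /// A minimal sketch pairing it with a vertex buffer binding (hypothetical
    /// `vbuf`/`ibuf` buffers holding vertices and `u16` indices):
    ///
    /// ```ignore
    /// rec.bind_vertex_buffers(0, &[(&vbuf, 0)])
    ///     .bind_index_buffer(&ibuf, 0, IndexType::U16);
    /// ```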
    pub fn bind_index_buffer(&mut self, buffer: &Buffer, offset: usize, index_type: IndexType) -> &mut Self
    {
        unsafe { vkCmdBindIndexBuffer(self.ptr.native_ptr(), buffer.native_ptr(), offset as _, index_type as _) };
        return self;
    }
    pub fn bind_vertex_buffers(&mut self, first: u32, buffers: &[(&Buffer, usize)]) -> &mut Self
    {
        let (bufs, ofs): (Vec<_>, Vec<_>) =
            buffers.iter().map(|&(b, o)| (b.native_ptr(), o as VkDeviceSize)).unzip();
        unsafe { vkCmdBindVertexBuffers(self.ptr.native_ptr(), first, bufs.len() as _, bufs.as_ptr(), ofs.as_ptr()) };
        return self;
    }
}

#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
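    /// Records a non-indexed draw.
    ///
    /// A minimal sketch (one triangle, one instance, using previously bound
    /// vertex buffers):
    ///
    /// ```ignore
    /// rec.draw(3, 1, 0, 0);
    /// ```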
    pub fn draw(&mut self, vertex_count: u32, instance_count: u32, first_vertex: u32, first_instance: u32) -> &mut Self
    {
        unsafe { vkCmdDraw(self.ptr.native_ptr(), vertex_count, instance_count, first_vertex, first_instance) };
        return self;
    }
    pub fn draw_indexed(&mut self, index_count: u32, instance_count: u32,
        first_index: u32, vertex_offset: i32, first_instance: u32) -> &mut Self
    {
        unsafe
        {
            vkCmdDrawIndexed(self.ptr.native_ptr(), index_count, instance_count,
                first_index, vertex_offset, first_instance)
        };
        return self;
    }
    pub fn draw_indirect(&mut self, buffer: &Buffer, offset: usize, draw_count: u32, stride: u32) -> &mut Self
    {
        unsafe { vkCmdDrawIndirect(self.ptr.native_ptr(), buffer.native_ptr(), offset as _, draw_count, stride) };
        return self;
    }
    pub fn draw_indexed_indirect(&mut self, buffer: &Buffer, offset: usize, draw_count: u32, stride: u32) -> &mut Self
    {
        unsafe
        {
            vkCmdDrawIndexedIndirect(self.ptr.native_ptr(), buffer.native_ptr(), offset as _, draw_count, stride)
        };
        return self;
    }
}

#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
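    /// Records a compute dispatch with the given workgroup counts.
    ///
    /// A minimal sketch (hypothetical: a 64x64 grid of workgroups for the
    /// bound compute pipeline):
    ///
    /// ```ignore
    /// rec.dispatch(64, 64, 1);
    /// ```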
    pub fn dispatch(&mut self, group_count_x: u32, group_count_y: u32, group_count_z: u32) -> &mut Self
    {
        unsafe { vkCmdDispatch(self.ptr.native_ptr(), group_count_x, group_count_y, group_count_z) };
        return self;
    }
    pub fn dispatch_indirect(&mut self, buffer: &Buffer, offset: usize) -> &mut Self
    {
        unsafe { vkCmdDispatchIndirect(self.ptr.native_ptr(), buffer.native_ptr(), offset as _) };
        return self;
    }
}

#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
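    /// Copies regions between two buffers.
    ///
    /// A minimal sketch copying the first 1024 bytes (hypothetical `src`/`dst`
    /// buffers of sufficient size):
    ///
    /// ```ignore
    /// rec.copy_buffer(&src, &dst, &[VkBufferCopy { srcOffset: 0, dstOffset: 0, size: 1024 }]);
    /// ```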
    pub fn copy_buffer(&mut self, src: &Buffer, dst: &Buffer, regions: &[VkBufferCopy]) -> &mut Self
    {
        unsafe
        {
            vkCmdCopyBuffer(self.ptr.native_ptr(), src.native_ptr(),
                dst.native_ptr(), regions.len() as _, regions.as_ptr())
        };
        return self;
    }
    pub fn copy_image(&mut self, src: &Image, src_layout: ImageLayout,
        dst: &Image, dst_layout: ImageLayout, regions: &[VkImageCopy]) -> &mut Self
    {
        unsafe
        {
            vkCmdCopyImage(self.ptr.native_ptr(), src.native_ptr(), src_layout as _,
                dst.native_ptr(), dst_layout as _, regions.len() as _, regions.as_ptr())
        };
        return self;
    }
    pub fn blit_image(&mut self, src: &Image, src_layout: ImageLayout, dst: &Image, dst_layout: ImageLayout,
        regions: &[VkImageBlit], filter: FilterMode) -> &mut Self
    {
        unsafe
        {
            vkCmdBlitImage(self.ptr.native_ptr(), src.native_ptr(), src_layout as _, dst.native_ptr(), dst_layout as _,
                regions.len() as _, regions.as_ptr(), filter as _)
        };
        return self;
    }
    pub fn copy_buffer_to_image(&mut self, src_buffer: &Buffer, dst_image: &Image, dst_layout: ImageLayout,
        regions: &[VkBufferImageCopy]) -> &mut Self
    {
        unsafe
        {
            vkCmdCopyBufferToImage(self.ptr.native_ptr(), src_buffer.native_ptr(),
                dst_image.native_ptr(), dst_layout as _, regions.len() as _, regions.as_ptr())
        };
        return self;
    }
    pub fn copy_image_to_buffer(&mut self, src_image: &Image, src_layout: ImageLayout, dst_buffer: &Buffer,
        regions: &[VkBufferImageCopy]) -> &mut Self
    {
        unsafe
        {
            vkCmdCopyImageToBuffer(self.ptr.native_ptr(), src_image.native_ptr(), src_layout as _,
                dst_buffer.native_ptr(), regions.len() as _, regions.as_ptr())
        };
        return self;
    }
    pub fn update_buffer<T>(&mut self, dst: &Buffer, dst_offset: usize, size: usize, data: &T) -> &mut Self
    {
        assert!(size <= size_of::<T>(), "Updated size exceeds size of datatype");
        unsafe
        {
            vkCmdUpdateBuffer(self.ptr.native_ptr(), dst.native_ptr(), dst_offset as _, size as _,
                data as *const T as *const _)
        };
        return self;
    }
}

#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
    pub fn fill_buffer(&mut self, dst: &Buffer, dst_offset: usize, size: usize, data: u32) -> &mut Self
    {
        unsafe { vkCmdFillBuffer(self.ptr.native_ptr(), dst.native_ptr(), dst_offset as _, size as _, data) };
        return self;
    }
    pub fn clear_color_image<T: ClearColorValue>(&mut self, image: &Image, layout: ImageLayout,
        color: &T, ranges: &[VkImageSubresourceRange]) -> &mut Self
    {
        unsafe
        {
            vkCmdClearColorImage(self.ptr.native_ptr(), image.native_ptr(), layout as _,
                color.represent(), ranges.len() as _, ranges.as_ptr())
        };
        return self;
    }
    pub fn clear_depth_stencil_image(&mut self, image: &Image, layout: ImageLayout, depth: f32, stencil: u32,
        ranges: &[VkImageSubresourceRange]) -> &mut Self
    {
        unsafe
        {
            vkCmdClearDepthStencilImage(self.ptr.native_ptr(), image.native_ptr(),
                layout as _, &VkClearDepthStencilValue { depth, stencil }, ranges.len() as _, ranges.as_ptr())
        };
        return self;
    }
    pub fn clear_attachments(&mut self, attachments: &[VkClearAttachment], rects: &[VkClearRect]) -> &mut Self
    {
        unsafe
        {
            vkCmdClearAttachments(self.ptr.native_ptr(), attachments.len() as _,
                attachments.as_ptr(), rects.len() as _, rects.as_ptr())
        };
        return self;
    }
}

#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
    /// # Safety
    /// Every handle in `buffers` must refer to a valid secondary command buffer
    /// that has finished recording and is compatible with the current render pass state.
    pub unsafe fn execute_commands(&mut self, buffers: &[VkCommandBuffer]) -> &mut Self
    {
        vkCmdExecuteCommands(self.ptr.native_ptr(), buffers.len() as _, buffers.as_ptr());
        return self;
    }
}

#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
    pub fn resolve_image(&mut self, src: &Image, src_layout: ImageLayout, dst: &Image, dst_layout: ImageLayout,
        regions: &[VkImageResolve]) -> &mut Self
    {
        unsafe
        {
            vkCmdResolveImage(self.ptr.native_ptr(), src.native_ptr(), src_layout as _,
                dst.native_ptr(), dst_layout as _, regions.len() as _, regions.as_ptr())
        };
        return self;
    }
}

#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
    pub fn set_event(&mut self, event: &Event, stage_mask: PipelineStageFlags) -> &mut Self
    {
        unsafe { vkCmdSetEvent(self.ptr.native_ptr(), event.0, stage_mask.0) };
        return self;
    }
    pub fn reset_event(&mut self, event: &Event, stage_mask: PipelineStageFlags) -> &mut Self
    {
        unsafe { vkCmdResetEvent(self.ptr.native_ptr(), event.0, stage_mask.0) };
        return self;
    }
    pub fn wait_events(&mut self, events: &[&Event],
        src_stage_mask: PipelineStageFlags, dst_stage_mask: PipelineStageFlags,
        memory_barriers: &[VkMemoryBarrier], buffer_memory_barriers: &[VkBufferMemoryBarrier],
        image_memory_barriers: &[VkImageMemoryBarrier]) -> &mut Self
    {
        let evs = events.iter().map(|x| x.0).collect::<Vec<_>>();
        unsafe
        {
            vkCmdWaitEvents(self.ptr.native_ptr(), evs.len() as _, evs.as_ptr(), src_stage_mask.0, dst_stage_mask.0,
                memory_barriers.len() as _, memory_barriers.as_ptr(),
                buffer_memory_barriers.len() as _, buffer_memory_barriers.as_ptr(),
                image_memory_barriers.len() as _, image_memory_barriers.as_ptr())
        };
        return self;
    }
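    /// Records a pipeline barrier between `src_stage_mask` and `dst_stage_mask`,
    /// optionally restricted to framebuffer-local regions via `by_region`.
    ///
    /// A minimal sketch transitioning an image from a transfer-destination to a
    /// shader-read layout (hypothetical `image`; the stage and layout constant
    /// names here are assumptions, not verified against this crate):
    ///
    /// ```ignore
    /// rec.pipeline_barrier(PipelineStageFlags::TRANSFER, PipelineStageFlags::FRAGMENT_SHADER, false,
    ///     &[], &[], &[ImageMemoryBarrier::new(&ImageSubref::color(&image, 0 .. 1, 0 .. 1),
    ///         ImageLayout::TransferDestOpt, ImageLayout::ShaderReadOnlyOpt)]);
    /// ```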
    pub fn pipeline_barrier(&mut self, src_stage_mask: PipelineStageFlags, dst_stage_mask: PipelineStageFlags,
        by_region: bool, memory_barriers: &[VkMemoryBarrier], buffer_memory_barriers: &[BufferMemoryBarrier],
        image_memory_barriers: &[ImageMemoryBarrier]) -> &mut Self
    {
        unsafe
        {
            // BufferMemoryBarrier/ImageMemoryBarrier are #[repr(C)] newtypes, so their
            // slices can be passed directly as the corresponding Vk*MemoryBarrier arrays.
            vkCmdPipelineBarrier(self.ptr.native_ptr(), src_stage_mask.0, dst_stage_mask.0,
                if by_region { VK_DEPENDENCY_BY_REGION_BIT } else { 0 },
                memory_barriers.len() as _, memory_barriers.as_ptr(),
                buffer_memory_barriers.len() as _, buffer_memory_barriers.as_ptr() as _,
                image_memory_barriers.len() as _, image_memory_barriers.as_ptr() as _)
        };
        return self;
    }
}

#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
    pub fn begin_query(&mut self, pool: &QueryPool, query: u32, precise_query: bool) -> &mut Self
    {
        unsafe
        {
            vkCmdBeginQuery(self.ptr.native_ptr(), pool.0, query,
                if precise_query { VK_QUERY_CONTROL_PRECISE_BIT } else { 0 })
        };
        return self;
    }
    pub fn end_query(&mut self, pool: &QueryPool, query: u32) -> &mut Self
    {
        unsafe { vkCmdEndQuery(self.ptr.native_ptr(), pool.0, query) };
        return self;
    }
    pub fn reset_query_pool(&mut self, pool: &QueryPool, range: Range<u32>) -> &mut Self
    {
        unsafe { vkCmdResetQueryPool(self.ptr.native_ptr(), pool.0, range.start, range.end - range.start) };
        return self;
    }
    pub fn write_timestamp(&mut self, stage: PipelineStageFlags, pool: &QueryPool, query: u32) -> &mut Self
    {
        unsafe { vkCmdWriteTimestamp(self.ptr.native_ptr(), stage.0, pool.0, query) };
        return self;
    }
    pub fn copy_query_pool_results(&mut self, pool: &QueryPool, range: Range<u32>, dst: &Buffer, dst_offset: usize,
        stride: usize, wide_result: bool, flags: QueryResultFlags) -> &mut Self
    {
        unsafe
        {
            vkCmdCopyQueryPoolResults(self.ptr.native_ptr(), pool.0, range.start, range.end - range.start,
                dst.native_ptr(), dst_offset as _, stride as _,
                flags.0 | if wide_result { VK_QUERY_RESULT_64_BIT } else { 0 })
        };
        return self;
    }
}

#[cfg(feature = "Implements")]
impl<'d> CmdRecord<'d>
{
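    /// Begins a render pass on `framebuffer`, clearing attachments with
    /// `clear_values`; `inline_commands` selects inline recording over
    /// secondary command buffers.
    ///
    /// A minimal sketch (hypothetical `rp`/`fb` pair with one color attachment):
    ///
    /// ```ignore
    /// rec.begin_render_pass(&rp, &fb,
    ///     VkRect2D { offset: VkOffset2D { x: 0, y: 0 }, extent: VkExtent2D { width: 640, height: 480 } },
    ///     &[ClearValue::Color([0.0, 0.0, 0.0, 1.0])], true)
    ///     .draw(3, 1, 0, 0)
    ///     .end_render_pass();
    /// ```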
    pub fn begin_render_pass(&mut self, pass: &RenderPass, framebuffer: &Framebuffer, render_area: VkRect2D,
        clear_values: &[ClearValue], inline_commands: bool) -> &mut Self
    {
        let cvalues = clear_values.iter().map(|x| match x
        {
            &ClearValue::Color(ref color) => VkClearValue { color: VkClearColorValue { float32: color.clone() } },
            &ClearValue::DepthStencil(depth, stencil) =>
                VkClearValue { depthStencil: VkClearDepthStencilValue { depth, stencil } }
        }).collect::<Vec<_>>();
        let binfo = VkRenderPassBeginInfo
        {
            renderPass: pass.native_ptr(), framebuffer: framebuffer.native_ptr(), renderArea: render_area,
            clearValueCount: cvalues.len() as _, pClearValues: cvalues.as_ptr(), .. Default::default()
        };
        unsafe
        {
            vkCmdBeginRenderPass(self.ptr.native_ptr(), &binfo,
                if inline_commands { VK_SUBPASS_CONTENTS_INLINE } else { VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS })
        };
        return self;
    }
    pub fn next_subpass(&mut self, inline_commands: bool) -> &mut Self
    {
        unsafe
        {
            vkCmdNextSubpass(self.ptr.native_ptr(),
                if inline_commands { VK_SUBPASS_CONTENTS_INLINE } else { VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS })
        };
        return self;
    }
    pub fn end_render_pass(&mut self) -> &mut Self { unsafe { vkCmdEndRenderPass(self.ptr.native_ptr()) }; return self; }
}

pub trait ClearColorValue
{
    fn represent(&self) -> &VkClearColorValue;
}
impl ClearColorValue for [f32; 4] { fn represent(&self) -> &VkClearColorValue { unsafe { ::std::mem::transmute(self) } } }
impl ClearColorValue for [i32; 4] { fn represent(&self) -> &VkClearColorValue { unsafe { ::std::mem::transmute(self) } } }
impl ClearColorValue for [u32; 4] { fn represent(&self) -> &VkClearColorValue { unsafe { ::std::mem::transmute(self) } } }

pub enum ClearValue
{
    Color([f32; 4]),
    DepthStencil(f32, u32)
}

#[repr(C)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum IndexType
{
    U16 = VK_INDEX_TYPE_UINT16 as _,
    U32 = VK_INDEX_TYPE_UINT32 as _
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum OcclusionQuery
{
    Disable, Enable, Precise
}

pub struct AccessFlags { pub read: VkAccessFlags, pub write: VkAccessFlags }
impl AccessFlags
{
    pub const INDIRECT_COMMAND_READ: VkAccessFlags = VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
    pub const INDEX_READ: VkAccessFlags = VK_ACCESS_INDEX_READ_BIT;
    pub const VERTEX_ATTRIBUTE_READ: VkAccessFlags = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
    pub const UNIFORM_READ: VkAccessFlags = VK_ACCESS_UNIFORM_READ_BIT;
    pub const INPUT_ATTACHMENT_READ: VkAccessFlags = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
    pub const SHADER: Self = AccessFlags { read: VK_ACCESS_SHADER_READ_BIT, write: VK_ACCESS_SHADER_WRITE_BIT };
    pub const COLOR_ATTACHMENT: Self = AccessFlags
    {
        read: VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, write: VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
    };
    pub const DEPTH_STENCIL_ATTACHMENT: Self = AccessFlags
    {
        read: VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT, write: VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
    };
    pub const TRANSFER: Self = AccessFlags { read: VK_ACCESS_TRANSFER_READ_BIT, write: VK_ACCESS_TRANSFER_WRITE_BIT };
    pub const HOST: Self = AccessFlags { read: VK_ACCESS_HOST_READ_BIT, write: VK_ACCESS_HOST_WRITE_BIT };
    pub const MEMORY: Self = AccessFlags { read: VK_ACCESS_MEMORY_READ_BIT, write: VK_ACCESS_MEMORY_WRITE_BIT };
}

use std::mem::replace;
#[derive(Clone)]
pub struct ImageSubref<'d>(pub &'d Image, pub VkImageSubresourceRange);
impl<'d> ImageSubref<'d>
{
    pub fn color<Levels, Layers>(image: &'d Image, mip_levels: Levels, array_layers: Layers) -> Self
        where Levels: ::AnalogNumRange<u32>, Layers: ::AnalogNumRange<u32>
    {
        ImageSubref(image, VkImageSubresourceRange
        {
            aspectMask: VK_IMAGE_ASPECT_COLOR_BIT,
            baseMipLevel: mip_levels.begin(), baseArrayLayer: array_layers.begin(),
            levelCount: mip_levels.count(), layerCount: array_layers.count()
        })
    }
    pub fn stencil<Levels, Layers>(image: &'d Image, mip_levels: Levels, array_layers: Layers) -> Self
        where Levels: ::AnalogNumRange<u32>, Layers: ::AnalogNumRange<u32>
    {
        ImageSubref(image, VkImageSubresourceRange
        {
            aspectMask: VK_IMAGE_ASPECT_STENCIL_BIT,
            baseMipLevel: mip_levels.begin(), baseArrayLayer: array_layers.begin(),
            levelCount: mip_levels.count(), layerCount: array_layers.count()
        })
    }
}

#[repr(C)] #[derive(Clone)]
pub struct ImageMemoryBarrier(VkImageMemoryBarrier);
impl ImageMemoryBarrier
{
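    /// Creates a barrier describing a layout transition for the subresources
    /// selected by `img`, with access masks defaulted from the two layouts.
    ///
    /// A minimal sketch (hypothetical `image`; the `ImageLayout` variant names
    /// are assumptions, not verified against this crate):
    ///
    /// ```ignore
    /// let barrier = ImageMemoryBarrier::new(&ImageSubref::color(&image, 0 .. 1, 0 .. 1),
    ///     ImageLayout::Preinitialized, ImageLayout::TransferDestOpt);
    /// ```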
    pub fn new(img: &ImageSubref, old_layout: ImageLayout, new_layout: ImageLayout) -> Self
    {
        ImageMemoryBarrier(VkImageMemoryBarrier
        {
            image: img.0.native_ptr(), subresourceRange: img.1.clone(),
            oldLayout: old_layout as _, newLayout: new_layout as _,
            srcAccessMask: old_layout.default_access_mask(),
            dstAccessMask: new_layout.default_access_mask(), .. Default::default()
        })
    }
    pub fn new_raw<SR>(res: &Image, subres: &SR, old: ImageLayout, new: ImageLayout) -> Self
        where SR: Borrow<VkImageSubresourceRange>
    {
        ImageMemoryBarrier(VkImageMemoryBarrier
        {
            image: res.native_ptr(), subresourceRange: subres.borrow().clone(),
            oldLayout: old as _, newLayout: new as _,
            srcAccessMask: old.default_access_mask(), dstAccessMask: new.default_access_mask(), .. Default::default()
        })
    }
    pub fn src_access_mask(mut self, mask: VkAccessFlags) -> Self
    {
        self.0.srcAccessMask = mask;
        return self;
    }
    pub fn dest_access_mask(mut self, mask: VkAccessFlags) -> Self
    {
        self.0.dstAccessMask = mask;
        return self;
    }
    pub fn flip(mut self) -> Self
    {
        // Swap source/destination access masks and old/new layouts, yielding the reverse transition.
        self.0.dstAccessMask = replace(&mut self.0.srcAccessMask, self.0.dstAccessMask);
        self.0.newLayout = replace(&mut self.0.oldLayout, self.0.newLayout);
        return self;
    }
}
#[repr(C)] #[derive(Clone)]
pub struct BufferMemoryBarrier(VkBufferMemoryBarrier);
impl BufferMemoryBarrier
{
    pub fn new(buf: &Buffer, range: Range<usize>, src_access_mask: VkAccessFlags, dst_access_mask: VkAccessFlags)
        -> Self
    {
        BufferMemoryBarrier(VkBufferMemoryBarrier
        {
            buffer: buf.native_ptr(), offset: range.start as _, size: (range.end - range.start) as _,
            srcAccessMask: src_access_mask, dstAccessMask: dst_access_mask, .. Default::default()
        })
    }
    pub fn src_access_mask(mut self, mask: VkAccessFlags) -> Self
    {
        self.0.srcAccessMask = mask;
        return self;
    }
    pub fn dest_access_mask(mut self, mask: VkAccessFlags) -> Self
    {
        self.0.dstAccessMask = mask;
        return self;
    }
    pub fn flip(mut self) -> Self
    {
        // Swap source/destination access masks, yielding the reverse barrier.
        self.0.dstAccessMask = replace(&mut self.0.srcAccessMask, self.0.dstAccessMask);
        return self;
    }
}