use std::sync::Arc;
use smallvec::SmallVec;
use vks;
use ::{VdResult, Device, Handle, CommandPool, CommandBufferUsageFlags, CommandBufferBeginInfo,
    DeviceSize, PipelineStageFlags, DependencyFlags, MemoryBarrier, BufferMemoryBarrier,
    ImageMemoryBarrier, BufferImageCopy, ImageLayout, BufferCopy, CommandBufferResetFlags,
    PipelineBindPoint, Viewport, Rect2d, StencilFaceFlags, DebugMarkerMarkerInfoExt,
    DescriptorSetHandle, QueryResultFlags, ShaderStageFlags, RenderPassBeginInfo, SubpassContents,
    ImageCopy, IndexType, ImageBlit, Filter, ClearColorValue, ImageSubresourceRange,
    ClearDepthStencilValue, ClearAttachment, ImageResolve, QueryControlFlags, ClearRect,
    BufferHandle, EventHandle, Buffer, Image, Event, QueryPool, PipelineLayout, DescriptorSet,
    PipelineHandle};


/// An opaque handle to a Vulkan command buffer object.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
#[repr(C)]
pub struct CommandBufferHandle(pub(crate) vks::VkCommandBuffer);

impl CommandBufferHandle {
    /// Returns the raw `vks::VkCommandBuffer` handle.
    pub fn to_raw(&self) -> vks::VkCommandBuffer {
        self.0
    }
}

unsafe impl Handle for CommandBufferHandle {
    type Target = CommandBufferHandle;

    #[inline]
    fn handle(&self) -> Self::Target {
        *self
    }
}

#[derive(Debug)]
struct Inner {
    handle: CommandBufferHandle,
    command_pool: CommandPool,
}

impl Drop for Inner {
    fn drop(&mut self) {
        // Free this buffer back to the pool that owns it once the last
        // `CommandBuffer` clone is dropped.
        unsafe {
            self.command_pool.device().free_command_buffers(self.command_pool.handle(),
                &[self.handle]);
        }
    }
}


/// A command buffer.
///
/// Cloning is shallow: clones share the same underlying buffer, which is
/// freed back to its pool when the last clone is dropped.
#[derive(Debug, Clone)]
pub struct CommandBuffer {
    inner: Arc<Inner>,
}

impl CommandBuffer {
    /// Wraps an already-allocated handle together with the pool that owns it.
    pub(crate) fn from_parts(command_pool: CommandPool, handle: CommandBufferHandle)
            -> VdResult<CommandBuffer> {
        Ok(CommandBuffer {
            inner: Arc::new(Inner {
                command_pool,
                handle,
            })
        })
    }

    /// Returns this command buffer's handle.
    #[inline]
    pub fn handle(&self) -> CommandBufferHandle {
        self.inner.handle
    }

    /// Returns the device this command buffer's pool was created with.
    #[inline]
    pub fn device(&self) -> &Device {
        self.inner.command_pool.device()
    }

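    /// Begins recording, putting this command buffer into the recording
    /// state (vkBeginCommandBuffer).
    ///
    /// ## Example
    ///
    /// A minimal sketch of the recording lifecycle. The `command_buffer`
    /// binding and the exact flag constant spelling are assumed here:
    ///
    /// ```rust,ignore
    /// command_buffer.begin(CommandBufferUsageFlags::ONE_TIME_SUBMIT)?;
    /// // ... record commands ...
    /// command_buffer.end()?;
    /// ```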
    #[inline]
    pub fn begin(&self, flags: CommandBufferUsageFlags) -> VdResult<()> {
        let begin_info = CommandBufferBeginInfo::builder()
            .flags(flags)
            .build();

        unsafe {
            self.inner.command_pool.device().begin_command_buffer(self.inner.handle, &begin_info)
        }
    }

    /// Finishes recording (vkEndCommandBuffer).
    #[inline]
    pub fn end(&self) -> VdResult<()> {
        unsafe {
            self.inner.command_pool.device().end_command_buffer(self.inner.handle)
        }
    }

    /// Resets this command buffer to the initial state (vkResetCommandBuffer).
    #[inline]
    pub fn reset(&self, flags: CommandBufferResetFlags) -> VdResult<()> {
        unsafe { self.device().cmd_reset_command_buffer(self.handle(), flags) }
    }

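    /// Binds a graphics or compute pipeline (vkCmdBindPipeline).
    ///
    /// ## Example
    ///
    /// A sketch of a typical indexed draw sequence recorded between `begin`
    /// and `end`; the `pipeline`, `pipeline_layout`, `descriptor_set`,
    /// `vertex_buffer`, and `index_buffer` bindings (and the enum variant
    /// spellings) are assumed:
    ///
    /// ```rust,ignore
    /// cmd_buf.bind_pipeline(PipelineBindPoint::Graphics, &pipeline);
    /// cmd_buf.bind_descriptor_sets(PipelineBindPoint::Graphics, &pipeline_layout,
    ///     0, &[&descriptor_set], &[]);
    /// cmd_buf.bind_vertex_buffers(0, &[&vertex_buffer], &[0]);
    /// cmd_buf.bind_index_buffer(&index_buffer, 0, IndexType::Uint32);
    /// cmd_buf.draw_indexed(index_count, 1, 0, 0, 0);
    /// ```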
    #[inline]
    pub fn bind_pipeline<P>(&self, pipeline_bind_point: PipelineBindPoint,
            pipeline: &P) where P: Handle<Target=PipelineHandle> {
        unsafe { self.device().cmd_bind_pipeline(self.handle(), pipeline_bind_point,
            pipeline.handle()); }
    }

    /// Sets the viewport dynamic state (vkCmdSetViewport).
    #[inline]
    pub fn set_viewport(&self, first_viewport: u32, viewports: &[Viewport]) {
        unsafe { self.device().cmd_set_viewport(self.handle(), first_viewport, viewports); }
    }

    /// Sets the scissor rectangle dynamic state (vkCmdSetScissor).
    #[inline]
    pub fn set_scissor(&self, first_scissor: u32, scissors: &[Rect2d]) {
        unsafe { self.device().cmd_set_scissor(self.handle(), first_scissor, scissors); }
    }

    /// Sets the line width dynamic state (vkCmdSetLineWidth).
    #[inline]
    pub fn set_line_width(&self, line_width: f32) {
        unsafe { self.device().cmd_set_line_width(self.handle(), line_width); }
    }

    /// Sets the depth bias dynamic state (vkCmdSetDepthBias).
    #[inline]
    pub fn set_depth_bias(&self, depth_bias_constant_factor: f32, depth_bias_clamp: f32,
            depth_bias_slope_factor: f32) {
        unsafe { self.device().cmd_set_depth_bias(self.handle(),
            depth_bias_constant_factor, depth_bias_clamp, depth_bias_slope_factor); }
    }

    /// Sets the blend constants dynamic state (vkCmdSetBlendConstants).
    #[inline]
    pub fn set_blend_constants(&self, blend_constants: [f32; 4]) {
        unsafe { self.device().cmd_set_blend_constants(self.handle(), blend_constants); }
    }

    /// Sets the depth bounds dynamic state (vkCmdSetDepthBounds).
    #[inline]
    pub fn set_depth_bounds(&self, min_depth_bounds: f32, max_depth_bounds: f32) {
        unsafe { self.device().cmd_set_depth_bounds(self.handle(), min_depth_bounds, max_depth_bounds); }
    }

    /// Sets the stencil compare mask dynamic state (vkCmdSetStencilCompareMask).
    #[inline]
    pub fn set_stencil_compare_mask(&self, face_mask: StencilFaceFlags, compare_mask: u32) {
        unsafe { self.device().cmd_set_stencil_compare_mask(self.handle(), face_mask, compare_mask); }
    }

    /// Sets the stencil write mask dynamic state (vkCmdSetStencilWriteMask).
    #[inline]
    pub fn set_stencil_write_mask(&self, face_mask: StencilFaceFlags, write_mask: u32) {
        unsafe { self.device().cmd_set_stencil_write_mask(self.handle(), face_mask, write_mask); }
    }

    /// Sets the stencil reference dynamic state (vkCmdSetStencilReference).
    #[inline]
    pub fn set_stencil_reference(&self, face_mask: StencilFaceFlags, reference: u32) {
        unsafe { self.device().cmd_set_stencil_reference(self.handle(), face_mask, reference); }
    }

    /// Binds descriptor sets to this command buffer (vkCmdBindDescriptorSets).
    #[inline]
    pub fn bind_descriptor_sets(&self, pipeline_bind_point: PipelineBindPoint,
            layout: &PipelineLayout, first_set: u32, descriptor_sets: &[&DescriptorSet],
            dynamic_offsets: &[u32]) {
        let ds_handles: SmallVec<[DescriptorSetHandle; 16]> = descriptor_sets.iter()
            .map(|ds| ds.handle()).collect();
        unsafe {
            self.device().cmd_bind_descriptor_sets(self.handle(), pipeline_bind_point,
                layout.handle(), first_set, &ds_handles, dynamic_offsets);
        }
    }

    /// Binds an index buffer (vkCmdBindIndexBuffer).
    #[inline]
    pub fn bind_index_buffer(&self, buffer: &Buffer, offset: u64, index_type: IndexType) {
        unsafe { self.device().cmd_bind_index_buffer(self.handle(), buffer.handle(),
            offset, index_type); }
    }

    /// Binds vertex buffers (vkCmdBindVertexBuffers).
    #[inline]
    pub fn bind_vertex_buffers(&self, first_binding: u32, buffers: &[&Buffer], offsets: &[u64]) {
        let buffer_handles: SmallVec<[BufferHandle; 16]> = buffers.iter()
            .map(|b| b.handle()).collect();
        unsafe { self.device().cmd_bind_vertex_buffers(self.handle(),
            first_binding, &buffer_handles, offsets); }
    }

    /// Draws unindexed primitives (vkCmdDraw).
    #[inline]
    pub fn draw(&self, vertex_count: u32, instance_count: u32, first_vertex: u32,
            first_instance: u32) {
        unsafe { self.device().cmd_draw(self.handle(), vertex_count, instance_count,
            first_vertex, first_instance); }
    }

    /// Draws indexed primitives (vkCmdDrawIndexed).
    #[inline]
    pub fn draw_indexed(&self, index_count: u32, instance_count: u32, first_index: u32,
            vertex_offset: i32, first_instance: u32) {
        unsafe { self.device().cmd_draw_indexed(self.handle(), index_count,
            instance_count, first_index, vertex_offset, first_instance); }
    }

    /// Draws primitives with parameters sourced from a buffer
    /// (vkCmdDrawIndirect).
    #[inline]
    pub unsafe fn draw_indirect(&self, buffer: &Buffer, offset: u64, draw_count: u32,
            stride: u32) {
        self.device().cmd_draw_indirect(self.handle(),
            buffer.handle(), offset, draw_count, stride);
    }

    /// Draws indexed primitives with parameters sourced from a buffer
    /// (vkCmdDrawIndexedIndirect).
    #[inline]
    pub unsafe fn draw_indexed_indirect(&self, buffer: &Buffer, offset: u64, draw_count: u32,
            stride: u32) {
        self.device().cmd_draw_indexed_indirect(self.handle(),
            buffer.handle(), offset, draw_count, stride);
    }

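    /// Dispatches compute work groups (vkCmdDispatch).
    ///
    /// ## Example
    ///
    /// A sketch covering a `width` x `height` image with 16x16 work groups,
    /// rounding up; the compute pipeline binding and enum variant spelling
    /// are assumed:
    ///
    /// ```rust,ignore
    /// cmd_buf.bind_pipeline(PipelineBindPoint::Compute, &compute_pipeline);
    /// cmd_buf.dispatch((width + 15) / 16, (height + 15) / 16, 1);
    /// ```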
    #[inline]
    pub fn dispatch(&self, group_count_x: u32, group_count_y: u32, group_count_z: u32) {
        unsafe {
            self.device().cmd_dispatch(self.handle(), group_count_x, group_count_y, group_count_z);
        }
    }

    /// Dispatches compute work groups with counts sourced from a buffer
    /// (vkCmdDispatchIndirect).
    #[inline]
    pub unsafe fn dispatch_indirect(&self, buffer: &Buffer, offset: u64) {
        self.device().cmd_dispatch_indirect(self.handle(), buffer.handle(), offset);
    }

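    /// Copies a series of regions between two buffers (vkCmdCopyBuffer).
    ///
    /// Unsafe: the caller must uphold Vulkan's valid-usage rules for the
    /// buffers and regions involved.
    ///
    /// ## Example
    ///
    /// A sketch of a staging upload; `staging` and `device_local` are assumed
    /// `Buffer`s of at least `size` bytes, and the `BufferCopy` builder
    /// methods are assumed to mirror the Vulkan field names:
    ///
    /// ```rust,ignore
    /// let region = BufferCopy::builder()
    ///     .src_offset(0)
    ///     .dst_offset(0)
    ///     .size(size)
    ///     .build();
    /// unsafe { cmd_buf.copy_buffer(&staging, &device_local, &[region]); }
    /// ```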
    #[inline]
    pub unsafe fn copy_buffer(&self, src_buffer: &Buffer, dst_buffer: &Buffer,
            regions: &[BufferCopy]) {
        self.device().cmd_copy_buffer(self.handle(), src_buffer.handle(),
            dst_buffer.handle(), regions);
    }

    /// Copies regions between images (vkCmdCopyImage).
    #[inline]
    pub unsafe fn copy_image(&self, src_image: &Image, src_image_layout: ImageLayout,
            dst_image: &Image, dst_image_layout: ImageLayout, regions: &[ImageCopy]) {
        self.device().cmd_copy_image(self.handle(),
            src_image.handle(), src_image_layout, dst_image.handle(), dst_image_layout, regions);
    }

    /// Copies regions between images, potentially performing format
    /// conversion and scaling with the given filter (vkCmdBlitImage).
    #[inline]
    pub unsafe fn blit_image(&self, src_image: &Image, src_image_layout: ImageLayout,
            dst_image: &Image, dst_image_layout: ImageLayout, regions: &[ImageBlit],
            filter: Filter) {
        self.device().cmd_blit_image(self.handle(), src_image.handle(), src_image_layout,
            dst_image.handle(), dst_image_layout, regions, filter);
    }

    /// Copies data from a buffer into an image (vkCmdCopyBufferToImage).
    #[inline]
    pub unsafe fn copy_buffer_to_image(&self, src_buffer: &Buffer, dst_image: &Image,
            dst_image_layout: ImageLayout, regions: &[BufferImageCopy]) {
        self.device().cmd_copy_buffer_to_image(self.handle(), src_buffer.handle(),
            dst_image.handle(), dst_image_layout, regions);
    }

    /// Copies image data into a buffer (vkCmdCopyImageToBuffer).
    #[inline]
    pub unsafe fn copy_image_to_buffer(&self, src_image: &Image, src_image_layout: ImageLayout,
            dst_buffer: &Buffer, regions: &[BufferImageCopy]) {
        self.device().cmd_copy_image_to_buffer(self.handle(),
            src_image.handle(), src_image_layout, dst_buffer.handle(), regions);
    }

    /// Updates a buffer's contents inline from host data (vkCmdUpdateBuffer).
    #[inline]
    pub unsafe fn update_buffer(&self, dst_buffer: &Buffer, dst_offset: u64, data: &[u8]) {
        self.device().cmd_update_buffer(self.handle(),
            dst_buffer.handle(), dst_offset, data);
    }

    /// Fills a buffer region with a repeated 32-bit value (vkCmdFillBuffer).
    #[inline]
    pub unsafe fn fill_buffer(&self, dst_buffer: &Buffer, dst_offset: u64,
            size: Option<DeviceSize>, data: u32) {
        self.device().cmd_fill_buffer(self.handle(),
            dst_buffer.handle(), dst_offset, size, data);
    }

    /// Clears regions of a color image (vkCmdClearColorImage).
    #[inline]
    pub unsafe fn clear_color_image(&self, image: &Image, image_layout: ImageLayout,
            color: &ClearColorValue, ranges: &[ImageSubresourceRange]) {
        self.device().cmd_clear_color_image(self.handle(),
            image.handle(), image_layout, color, ranges);
    }

    /// Clears regions of a depth/stencil image (vkCmdClearDepthStencilImage).
    #[inline]
    pub unsafe fn clear_depth_stencil_image(&self, image: &Image, image_layout: ImageLayout,
            depth_stencil: &ClearDepthStencilValue, ranges: &[ImageSubresourceRange]) {
        self.device().cmd_clear_depth_stencil_image(self.handle(),
            image.handle(), image_layout, depth_stencil, ranges);
    }

    /// Clears regions within bound framebuffer attachments
    /// (vkCmdClearAttachments).
    #[inline]
    pub fn clear_attachments(&self, attachments: &[ClearAttachment], rects: &[ClearRect]) {
        unsafe { self.device().cmd_clear_attachments(self.handle(), attachments, rects); }
    }

    /// Resolves regions of a multisample image into a non-multisample image
    /// (vkCmdResolveImage).
    #[inline]
    pub unsafe fn resolve_image(&self, src_image: &Image, src_image_layout: ImageLayout,
            dst_image: &Image, dst_image_layout: ImageLayout, regions: &[ImageResolve]) {
        self.device().cmd_resolve_image(self.handle(), src_image.handle(), src_image_layout,
            dst_image.handle(), dst_image_layout, regions);
    }

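    /// Signals an event from within the pipeline once the given stages
    /// complete (vkCmdSetEvent).
    ///
    /// ## Example
    ///
    /// A sketch pairing `set_event` with a later `wait_events`; the stage
    /// flag constant spellings are assumed:
    ///
    /// ```rust,ignore
    /// cmd_buf.set_event(&event, PipelineStageFlags::TRANSFER);
    /// // ... unrelated work that may overlap ...
    /// cmd_buf.wait_events(&[&event], PipelineStageFlags::TRANSFER,
    ///     PipelineStageFlags::FRAGMENT_SHADER, &[], &[], &[]);
    /// ```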
    #[inline]
    pub fn set_event(&self, event: &Event, stage_mask: PipelineStageFlags) {
        unsafe { self.device().cmd_set_event(self.handle(),
            event.handle(), stage_mask); }
    }

    /// Resets an event to unsignaled from within the pipeline
    /// (vkCmdResetEvent).
    #[inline]
    pub fn reset_event(&self, event: &Event, stage_mask: PipelineStageFlags) {
        unsafe { self.device().cmd_reset_event(self.handle(), event.handle(), stage_mask); }
    }

    /// Waits for one or more events to become signaled, applying the given
    /// barriers (vkCmdWaitEvents).
    #[inline]
    pub fn wait_events(&self, events: &[&Event],
            src_stage_mask: PipelineStageFlags, dst_stage_mask: PipelineStageFlags,
            memory_barriers: &[MemoryBarrier],
            buffer_memory_barriers: &[BufferMemoryBarrier],
            image_memory_barriers: &[ImageMemoryBarrier]) {
        let event_handles: SmallVec<[EventHandle; 16]> = events.iter()
            .map(|e| e.handle()).collect();
        unsafe { self.device().cmd_wait_events(self.handle(), &event_handles, src_stage_mask,
            dst_stage_mask, memory_barriers, buffer_memory_barriers, image_memory_barriers); }
    }

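    /// Records a pipeline barrier: an execution and memory dependency between
    /// commands recorded before and after it (vkCmdPipelineBarrier).
    ///
    /// ## Example
    ///
    /// A sketch transitioning a freshly created image into a transfer-ready
    /// layout; `AccessFlags`, the builder method names, and the flag constant
    /// spellings are assumed:
    ///
    /// ```rust,ignore
    /// let barrier = ImageMemoryBarrier::builder()
    ///     .src_access_mask(AccessFlags::empty())
    ///     .dst_access_mask(AccessFlags::TRANSFER_WRITE)
    ///     .old_layout(ImageLayout::Undefined)
    ///     .new_layout(ImageLayout::TransferDstOptimal)
    ///     .image(&texture)
    ///     .subresource_range(range)
    ///     .build();
    /// cmd_buf.pipeline_barrier(PipelineStageFlags::TOP_OF_PIPE,
    ///     PipelineStageFlags::TRANSFER, DependencyFlags::empty(),
    ///     &[], &[], &[barrier]);
    /// ```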
    #[inline]
    pub fn pipeline_barrier(&self, src_stage_mask: PipelineStageFlags,
            dst_stage_mask: PipelineStageFlags,
            dependency_flags: DependencyFlags, memory_barriers: &[MemoryBarrier],
            buffer_memory_barriers: &[BufferMemoryBarrier],
            image_memory_barriers: &[ImageMemoryBarrier]) {
        unsafe {
            self.device().cmd_pipeline_barrier(self.handle(), src_stage_mask,
                dst_stage_mask, dependency_flags, memory_barriers, buffer_memory_barriers,
                image_memory_barriers);
        }
    }

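    /// Begins a query (vkCmdBeginQuery).
    ///
    /// ## Example
    ///
    /// A sketch scoping an occlusion query around a draw call; `query_pool`
    /// is assumed to have been created with an occlusion query type:
    ///
    /// ```rust,ignore
    /// cmd_buf.begin_query(&query_pool, 0, QueryControlFlags::empty());
    /// cmd_buf.draw(vertex_count, 1, 0, 0);
    /// cmd_buf.end_query(&query_pool, 0);
    /// ```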
    #[inline]
    pub fn begin_query(&self, query_pool: &QueryPool, query: u32, flags: QueryControlFlags) {
        unsafe { self.device().cmd_begin_query(self.handle(), query_pool.handle(), query, flags); }
    }

    /// Ends a query (vkCmdEndQuery).
    #[inline]
    pub fn end_query(&self, query_pool: &QueryPool, query: u32) {
        unsafe { self.device().cmd_end_query(self.handle(), query_pool.handle(), query); }
    }

    /// Resets a range of queries in a query pool (vkCmdResetQueryPool).
    #[inline]
    pub fn reset_query_pool(&self, query_pool: &QueryPool, first_query: u32, query_count: u32) {
        unsafe { self.device().cmd_reset_query_pool(self.handle(),
            query_pool.handle(), first_query, query_count); }
    }

    /// Writes a timestamp into a query pool after the given pipeline stage
    /// completes (vkCmdWriteTimestamp).
    #[inline]
    pub fn write_timestamp(&self, pipeline_stage: PipelineStageFlags, query_pool: &QueryPool,
            query: u32) {
        unsafe { self.device().cmd_write_timestamp(self.handle(),
            pipeline_stage, query_pool.handle(), query); }
    }

    /// Copies query results into a buffer (vkCmdCopyQueryPoolResults).
    #[inline]
    pub unsafe fn copy_query_pool_results(&self, query_pool: &QueryPool, first_query: u32,
            query_count: u32, dst_buffer: &Buffer, dst_offset: u64, stride: u64,
            flags: QueryResultFlags) {
        self.device().cmd_copy_query_pool_results(self.handle(), query_pool.handle(),
            first_query, query_count, dst_buffer.handle(), dst_offset, stride, flags);
    }

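    /// Updates push constant values (vkCmdPushConstants).
    ///
    /// ## Example
    ///
    /// A sketch pushing a 4x4 `f32` matrix to the vertex stage; the matrix
    /// source, the raw byte-slice conversion, and the flag constant spelling
    /// are illustrative only:
    ///
    /// ```rust,ignore
    /// let mvp: [f32; 16] = compute_mvp();
    /// let bytes: &[u8] = unsafe {
    ///     ::std::slice::from_raw_parts(mvp.as_ptr() as *const u8, 64)
    /// };
    /// cmd_buf.push_constants(&pipeline_layout, ShaderStageFlags::VERTEX, 0, bytes);
    /// ```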
    #[inline]
    pub fn push_constants(&self, layout: &PipelineLayout, stage_flags: ShaderStageFlags,
            offset: u32, values: &[u8]) {
        unsafe { self.device().cmd_push_constants(self.handle(), layout.handle(),
            stage_flags, offset, values); }
    }

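    /// Begins a render pass instance (vkCmdBeginRenderPass).
    ///
    /// ## Example
    ///
    /// A sketch of a complete render pass scope; `begin_info` is an assumed
    /// `RenderPassBeginInfo` naming the render pass, framebuffer, render
    /// area, and clear values, and the `SubpassContents` variant spelling is
    /// assumed:
    ///
    /// ```rust,ignore
    /// cmd_buf.begin_render_pass(&begin_info, SubpassContents::Inline);
    /// // ... draws ...
    /// cmd_buf.end_render_pass();
    /// ```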
    #[inline]
    pub fn begin_render_pass(&self, render_pass_begin: &RenderPassBeginInfo,
            contents: SubpassContents) {
        unsafe { self.device().cmd_begin_render_pass(self.handle(),
            render_pass_begin, contents); }
    }

    /// Transitions to the next subpass of the current render pass
    /// (vkCmdNextSubpass).
    #[inline]
    pub fn next_subpass(&self, contents: SubpassContents) {
        unsafe { self.device().cmd_next_subpass(self.handle(), contents); }
    }

    /// Ends the current render pass instance (vkCmdEndRenderPass).
    #[inline]
    pub fn end_render_pass(&self) {
        unsafe { self.device().cmd_end_render_pass(self.handle()); }
    }

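    /// Executes pre-recorded secondary command buffers from a primary
    /// command buffer (vkCmdExecuteCommands).
    ///
    /// ## Example
    ///
    /// A sketch replaying two assumed secondary buffers inside a render pass
    /// begun with secondary-command-buffer contents:
    ///
    /// ```rust,ignore
    /// primary.execute_commands(&[&secondary_a, &secondary_b]);
    /// ```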
    #[inline]
    pub fn execute_commands(&self, command_buffers: &[&CommandBuffer]) {
        let command_buffer_handles: SmallVec<[CommandBufferHandle; 16]> = command_buffers.iter()
            .map(|cb| cb.handle()).collect();
        unsafe { self.device().cmd_execute_commands(self.handle(), &command_buffer_handles); }
    }

    /// Opens a debug marker region (vkCmdDebugMarkerBeginEXT).
    #[inline]
    pub fn debug_marker_begin_ext(&self, marker_info: &DebugMarkerMarkerInfoExt) {
        unsafe { self.device().cmd_debug_marker_begin_ext(self.handle(), marker_info); }
    }

    /// Closes the current debug marker region (vkCmdDebugMarkerEndEXT).
    #[inline]
    pub fn debug_marker_end_ext(&self) {
        unsafe { self.device().cmd_debug_marker_end_ext(self.handle()); }
    }

    /// Inserts a standalone debug marker (vkCmdDebugMarkerInsertEXT).
    #[inline]
    pub fn debug_marker_insert_ext(&self, marker_info: &DebugMarkerMarkerInfoExt) {
        unsafe { self.device().cmd_debug_marker_insert_ext(self.handle(), marker_info); }
    }
}

unsafe impl<'h> Handle for &'h CommandBuffer {
    type Target = CommandBufferHandle;

    #[inline]
    fn handle(&self) -> Self::Target {
        self.inner.handle
    }
}