use super::conv;

use arrayvec::ArrayVec;
use ash::{extensions::ext, vk};

use std::{mem, ops::Range, slice};

const ALLOCATION_GRANULARITY: u32 = 16;
const DST_IMAGE_LAYOUT: vk::ImageLayout = vk::ImageLayout::TRANSFER_DST_OPTIMAL;

impl super::Texture {
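    /// Maps `BufferTextureCopy` regions into Vulkan `vk::BufferImageCopy`
    /// structs, clamping each extent to the texture's maximum copy size and
    /// converting the buffer layout from bytes and rows into Vulkan's
    /// block-based units.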
    fn map_buffer_copies<T>(&self, regions: T) -> impl Iterator<Item = vk::BufferImageCopy>
    where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let (block_width, block_height) = self.format.block_dimensions();
        let format = self.format;
        let copy_size = self.copy_size;
        regions.map(move |r| {
            let extent = r.texture_base.max_copy_size(&copy_size).min(&r.size);
            let (image_subresource, image_offset) = conv::map_subresource_layers(&r.texture_base);
            vk::BufferImageCopy {
                buffer_offset: r.buffer_layout.offset,
                buffer_row_length: r.buffer_layout.bytes_per_row.map_or(0, |bpr| {
                    let block_size = format
                        .block_size(Some(r.texture_base.aspect.map()))
                        .unwrap();
                    block_width * (bpr / block_size)
                }),
                buffer_image_height: r
                    .buffer_layout
                    .rows_per_image
                    .map_or(0, |rpi| rpi * block_height),
                image_subresource,
                image_offset,
                image_extent: conv::map_copy_extent(&extent),
            }
        })
    }
}

impl super::DeviceShared {
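    /// Returns the `DebugUtils` extension functions if the instance was
    /// created with debug utils enabled, and `None` otherwise.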
    fn debug_messenger(&self) -> Option<&ext::DebugUtils> {
        Some(&self.instance.debug_utils.as_ref()?.extension)
    }
}

impl super::CommandEncoder {
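    /// If the pass that just ended requested an end-of-pass timestamp, writes
    /// it now and clears the pending query.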
    fn write_pass_end_timestamp_if_requested(&mut self) {
        if let Some((query_set, index)) = self.end_of_pass_timer_query.take() {
            unsafe {
                self.device.raw.cmd_write_timestamp(
                    self.active,
                    vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                    query_set,
                    index,
                );
            }
        }
    }
}

impl crate::CommandEncoder<super::Api> for super::CommandEncoder {
    unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
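        // Refill the free list in batches of `ALLOCATION_GRANULARITY` so we
        // don't call into the driver for every new encoder.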
        if self.free.is_empty() {
            let vk_info = vk::CommandBufferAllocateInfo::builder()
                .command_pool(self.raw)
                .command_buffer_count(ALLOCATION_GRANULARITY)
                .build();
            let cmd_buf_vec = unsafe { self.device.raw.allocate_command_buffers(&vk_info)? };
            self.free.extend(cmd_buf_vec);
        }
        let raw = self.free.pop().unwrap();

        unsafe {
            self.device.set_object_name(
                vk::ObjectType::COMMAND_BUFFER,
                raw,
                label.unwrap_or_default(),
            )
        };

        self.rpass_debug_marker_active = false;

        let vk_info = vk::CommandBufferBeginInfo::builder()
            .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT)
            .build();
        unsafe { self.device.raw.begin_command_buffer(raw, &vk_info) }?;
        self.active = raw;

        Ok(())
    }

    unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
        let raw = self.active;
        self.active = vk::CommandBuffer::null();
        unsafe { self.device.raw.end_command_buffer(raw) }?;
        Ok(super::CommandBuffer { raw })
    }

    unsafe fn discard_encoding(&mut self) {
        self.discarded.push(self.active);
        self.active = vk::CommandBuffer::null();
    }

    unsafe fn reset_all<I>(&mut self, cmd_bufs: I)
    where
        I: Iterator<Item = super::CommandBuffer>,
    {
        self.temp.clear();
        self.free
            .extend(cmd_bufs.into_iter().map(|cmd_buf| cmd_buf.raw));
        self.free.append(&mut self.discarded);
        let _ = unsafe {
            self.device
                .raw
                .reset_command_pool(self.raw, vk::CommandPoolResetFlags::default())
        };
    }

    unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::BufferBarrier<'a, super::Api>>,
    {
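        // Pipeline barriers need non-empty stage masks, so start from the
        // "no real work" placeholder stages and OR in the stages contributed
        // by each barrier.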
        let mut src_stages = vk::PipelineStageFlags::TOP_OF_PIPE;
        let mut dst_stages = vk::PipelineStageFlags::BOTTOM_OF_PIPE;
        let vk_barriers = &mut self.temp.buffer_barriers;
        vk_barriers.clear();

        for bar in barriers {
            let (src_stage, src_access) = conv::map_buffer_usage_to_barrier(bar.usage.start);
            src_stages |= src_stage;
            let (dst_stage, dst_access) = conv::map_buffer_usage_to_barrier(bar.usage.end);
            dst_stages |= dst_stage;

            vk_barriers.push(
                vk::BufferMemoryBarrier::builder()
                    .buffer(bar.buffer.raw)
                    .size(vk::WHOLE_SIZE)
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access)
                    .build(),
            )
        }

        if !vk_barriers.is_empty() {
            unsafe {
                self.device.raw.cmd_pipeline_barrier(
                    self.active,
                    src_stages,
                    dst_stages,
                    vk::DependencyFlags::empty(),
                    &[],
                    vk_barriers,
                    &[],
                )
            };
        }
    }

    unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::TextureBarrier<'a, super::Api>>,
    {
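        // Stage masks start out empty here; each barrier ORs in its stages,
        // so they are non-zero whenever `vk_barriers` ends up non-empty.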
        let mut src_stages = vk::PipelineStageFlags::empty();
        let mut dst_stages = vk::PipelineStageFlags::empty();
        let vk_barriers = &mut self.temp.image_barriers;
        vk_barriers.clear();

        for bar in barriers {
            let range = conv::map_subresource_range_combined_aspect(
                &bar.range,
                bar.texture.format,
                &self.device.private_caps,
            );
            let (src_stage, src_access) = conv::map_texture_usage_to_barrier(bar.usage.start);
            let src_layout = conv::derive_image_layout(bar.usage.start, bar.texture.format);
            src_stages |= src_stage;
            let (dst_stage, dst_access) = conv::map_texture_usage_to_barrier(bar.usage.end);
            let dst_layout = conv::derive_image_layout(bar.usage.end, bar.texture.format);
            dst_stages |= dst_stage;

            vk_barriers.push(
                vk::ImageMemoryBarrier::builder()
                    .image(bar.texture.raw)
                    .subresource_range(range)
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access)
                    .old_layout(src_layout)
                    .new_layout(dst_layout)
                    .build(),
            );
        }

        if !vk_barriers.is_empty() {
            unsafe {
                self.device.raw.cmd_pipeline_barrier(
                    self.active,
                    src_stages,
                    dst_stages,
                    vk::DependencyFlags::empty(),
                    &[],
                    &[],
                    vk_barriers,
                )
            };
        }
    }

    unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
        let range_size = range.end - range.start;
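        // Some drivers mishandle `vkCmdFillBuffer` when the fill is at least
        // 4096 bytes and the offset is not 16-byte aligned. On those drivers,
        // split the fill into an unaligned prefix and an aligned remainder.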
        if self.device.workarounds.contains(
            super::Workarounds::FORCE_FILL_BUFFER_WITH_SIZE_GREATER_4096_ALIGNED_OFFSET_16,
        ) && range_size >= 4096
            && range.start % 16 != 0
        {
            let rounded_start = wgt::math::align_to(range.start, 16);
            let prefix_size = rounded_start - range.start;

            unsafe {
                self.device.raw.cmd_fill_buffer(
                    self.active,
                    buffer.raw,
                    range.start,
                    prefix_size,
                    0,
                )
            };

            let suffix_size = range.end - rounded_start;

            unsafe {
                self.device.raw.cmd_fill_buffer(
                    self.active,
                    buffer.raw,
                    rounded_start,
                    suffix_size,
                    0,
                )
            };
        } else {
            unsafe {
                self.device
                    .raw
                    .cmd_fill_buffer(self.active, buffer.raw, range.start, range_size, 0)
            };
        }
    }

    unsafe fn copy_buffer_to_buffer<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferCopy>,
    {
        let vk_regions_iter = regions.map(|r| vk::BufferCopy {
            src_offset: r.src_offset,
            dst_offset: r.dst_offset,
            size: r.size.get(),
        });

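        // Collect the regions into a stack-allocated SmallVec; copies rarely
        // have more than a handful of regions, so this usually avoids a heap
        // allocation.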
        unsafe {
            self.device.raw.cmd_copy_buffer(
                self.active,
                src.raw,
                dst.raw,
                &smallvec::SmallVec::<[vk::BufferCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_texture_to_texture<T>(
        &mut self,
        src: &super::Texture,
        src_usage: crate::TextureUses,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let src_layout = conv::derive_image_layout(src_usage, src.format);

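        // Clamp each region to what both the source and the destination can
        // actually supply, so the copy never addresses texels out of bounds.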
        let vk_regions_iter = regions.map(|r| {
            let (src_subresource, src_offset) = conv::map_subresource_layers(&r.src_base);
            let (dst_subresource, dst_offset) = conv::map_subresource_layers(&r.dst_base);
            let extent = r
                .size
                .min(&r.src_base.max_copy_size(&src.copy_size))
                .min(&r.dst_base.max_copy_size(&dst.copy_size));
            vk::ImageCopy {
                src_subresource,
                src_offset,
                dst_subresource,
                dst_offset,
                extent: conv::map_copy_extent(&extent),
            }
        });

        unsafe {
            self.device.raw.cmd_copy_image(
                self.active,
                src.raw,
                src_layout,
                dst.raw,
                DST_IMAGE_LAYOUT,
                &smallvec::SmallVec::<[vk::ImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_buffer_to_texture<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let vk_regions_iter = dst.map_buffer_copies(regions);

        unsafe {
            self.device.raw.cmd_copy_buffer_to_image(
                self.active,
                src.raw,
                dst.raw,
                DST_IMAGE_LAYOUT,
                &smallvec::SmallVec::<[vk::BufferImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_texture_to_buffer<T>(
        &mut self,
        src: &super::Texture,
        src_usage: crate::TextureUses,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let src_layout = conv::derive_image_layout(src_usage, src.format);
        let vk_regions_iter = src.map_buffer_copies(regions);

        unsafe {
            self.device.raw.cmd_copy_image_to_buffer(
                self.active,
                src.raw,
                src_layout,
                dst.raw,
                &smallvec::SmallVec::<[vk::BufferImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
        unsafe {
            self.device.raw.cmd_begin_query(
                self.active,
                set.raw,
                index,
                vk::QueryControlFlags::empty(),
            )
        };
    }
    unsafe fn end_query(&mut self, set: &super::QuerySet, index: u32) {
        unsafe { self.device.raw.cmd_end_query(self.active, set.raw, index) };
    }
    unsafe fn write_timestamp(&mut self, set: &super::QuerySet, index: u32) {
        unsafe {
            self.device.raw.cmd_write_timestamp(
                self.active,
                vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                set.raw,
                index,
            )
        };
    }
    unsafe fn reset_queries(&mut self, set: &super::QuerySet, range: Range<u32>) {
        unsafe {
            self.device.raw.cmd_reset_query_pool(
                self.active,
                set.raw,
                range.start,
                range.end - range.start,
            )
        };
    }
    unsafe fn copy_query_results(
        &mut self,
        set: &super::QuerySet,
        range: Range<u32>,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        stride: wgt::BufferSize,
    ) {
        unsafe {
            self.device.raw.cmd_copy_query_pool_results(
                self.active,
                set.raw,
                range.start,
                range.end - range.start,
                buffer.raw,
                offset,
                stride.get(),
                vk::QueryResultFlags::TYPE_64 | vk::QueryResultFlags::WAIT,
            )
        };
    }

    unsafe fn begin_render_pass(&mut self, desc: &crate::RenderPassDescriptor<super::Api>) {
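        // Build hashable keys describing the pass and framebuffer; the device
        // caches the raw Vulkan render pass and framebuffer objects keyed on
        // these, so equivalent passes reuse the same objects.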
        let mut vk_clear_values =
            ArrayVec::<vk::ClearValue, { super::MAX_TOTAL_ATTACHMENTS }>::new();
        let mut vk_image_views = ArrayVec::<vk::ImageView, { super::MAX_TOTAL_ATTACHMENTS }>::new();
        let mut rp_key = super::RenderPassKey::default();
        let mut fb_key = super::FramebufferKey {
            attachments: ArrayVec::default(),
            extent: desc.extent,
            sample_count: desc.sample_count,
        };
        let caps = &self.device.private_caps;

        for cat in desc.color_attachments {
            if let Some(cat) = cat.as_ref() {
                vk_clear_values.push(vk::ClearValue {
                    color: unsafe { cat.make_vk_clear_color() },
                });
                vk_image_views.push(cat.target.view.raw);
                let color = super::ColorAttachmentKey {
                    base: cat.target.make_attachment_key(cat.ops, caps),
                    resolve: cat.resolve_target.as_ref().map(|target| {
                        target.make_attachment_key(crate::AttachmentOps::STORE, caps)
                    }),
                };

                rp_key.colors.push(Some(color));
                fb_key.attachments.push(cat.target.view.attachment.clone());
                if let Some(ref at) = cat.resolve_target {
                    vk_clear_values.push(unsafe { mem::zeroed() });
                    vk_image_views.push(at.view.raw);
                    fb_key.attachments.push(at.view.attachment.clone());
                }

                if let Some(multiview) = desc.multiview {
                    assert_eq!(cat.target.view.layers, multiview);
                    if let Some(ref resolve_target) = cat.resolve_target {
                        assert_eq!(resolve_target.view.layers, multiview);
                    }
                }
            } else {
                rp_key.colors.push(None);
            }
        }
        if let Some(ref ds) = desc.depth_stencil_attachment {
            vk_clear_values.push(vk::ClearValue {
                depth_stencil: vk::ClearDepthStencilValue {
                    depth: ds.clear_value.0,
                    stencil: ds.clear_value.1,
                },
            });
            vk_image_views.push(ds.target.view.raw);
            rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
                base: ds.target.make_attachment_key(ds.depth_ops, caps),
                stencil_ops: ds.stencil_ops,
            });
            fb_key.attachments.push(ds.target.view.attachment.clone());

            if let Some(multiview) = desc.multiview {
                assert_eq!(ds.target.view.layers, multiview);
            }
        }
        rp_key.sample_count = fb_key.sample_count;
        rp_key.multiview = desc.multiview;

        let render_area = vk::Rect2D {
            offset: vk::Offset2D { x: 0, y: 0 },
            extent: vk::Extent2D {
                width: desc.extent.width,
                height: desc.extent.height,
            },
        };
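        // Flip the Y axis with a negative viewport height; drivers that
        // report `flip_y_requires_shift` also need the origin moved down by
        // the full height.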
        let vk_viewports = [vk::Viewport {
            x: 0.0,
            y: if self.device.private_caps.flip_y_requires_shift {
                desc.extent.height as f32
            } else {
                0.0
            },
            width: desc.extent.width as f32,
            height: -(desc.extent.height as f32),
            min_depth: 0.0,
            max_depth: 1.0,
        }];

        let raw_pass = self.device.make_render_pass(rp_key).unwrap();
        let raw_framebuffer = self
            .device
            .make_framebuffer(fb_key, raw_pass, desc.label)
            .unwrap();

        let mut vk_info = vk::RenderPassBeginInfo::builder()
            .render_pass(raw_pass)
            .render_area(render_area)
            .clear_values(&vk_clear_values)
            .framebuffer(raw_framebuffer);
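        // With VK_KHR_imageless_framebuffer, the attachment views are supplied
        // here at begin time via `RenderPassAttachmentBeginInfo` rather than
        // baked into the framebuffer.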
        let mut vk_attachment_info = if caps.imageless_framebuffers {
            Some(
                vk::RenderPassAttachmentBeginInfo::builder()
                    .attachments(&vk_image_views)
                    .build(),
            )
        } else {
            None
        };
        if let Some(attachment_info) = vk_attachment_info.as_mut() {
            vk_info = vk_info.push_next(attachment_info);
        }

        if let Some(label) = desc.label {
            unsafe { self.begin_debug_marker(label) };
            self.rpass_debug_marker_active = true;
        }

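        // The beginning-of-pass timestamp is written immediately; the
        // end-of-pass write is stashed and recorded by `end_render_pass`.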
        if let Some(timestamp_writes) = desc.timestamp_writes.as_ref() {
            if let Some(index) = timestamp_writes.beginning_of_pass_write_index {
                unsafe {
                    self.write_timestamp(timestamp_writes.query_set, index);
                }
            }
            self.end_of_pass_timer_query = timestamp_writes
                .end_of_pass_write_index
                .map(|index| (timestamp_writes.query_set.raw, index));
        }

        unsafe {
            self.device
                .raw
                .cmd_set_viewport(self.active, 0, &vk_viewports);
            self.device
                .raw
                .cmd_set_scissor(self.active, 0, &[render_area]);
            self.device.raw.cmd_begin_render_pass(
                self.active,
                &vk_info,
                vk::SubpassContents::INLINE,
            );
        };

        self.bind_point = vk::PipelineBindPoint::GRAPHICS;
    }
    unsafe fn end_render_pass(&mut self) {
        unsafe {
            self.device.raw.cmd_end_render_pass(self.active);
        }

        self.write_pass_end_timestamp_if_requested();

        if self.rpass_debug_marker_active {
            unsafe {
                self.end_debug_marker();
            }
            self.rpass_debug_marker_active = false;
        }
    }

    unsafe fn set_bind_group(
        &mut self,
        layout: &super::PipelineLayout,
        index: u32,
        group: &super::BindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
        let sets = [*group.set.raw()];
        unsafe {
            self.device.raw.cmd_bind_descriptor_sets(
                self.active,
                self.bind_point,
                layout.raw,
                index,
                &sets,
                dynamic_offsets,
            )
        };
    }
    unsafe fn set_push_constants(
        &mut self,
        layout: &super::PipelineLayout,
        stages: wgt::ShaderStages,
        offset: u32,
        data: &[u32],
    ) {
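        // `cmd_push_constants` takes raw bytes, so reinterpret the `u32`
        // slice as a byte slice (4 bytes per element).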
        unsafe {
            self.device.raw.cmd_push_constants(
                self.active,
                layout.raw,
                conv::map_shader_stage(stages),
                offset,
                slice::from_raw_parts(data.as_ptr() as _, data.len() * 4),
            )
        };
    }

    unsafe fn insert_debug_marker(&mut self, label: &str) {
        if let Some(ext) = self.device.debug_messenger() {
            let cstr = self.temp.make_c_str(label);
            let vk_label = vk::DebugUtilsLabelEXT::builder().label_name(cstr).build();
            unsafe { ext.cmd_insert_debug_utils_label(self.active, &vk_label) };
        }
    }
    unsafe fn begin_debug_marker(&mut self, group_label: &str) {
        if let Some(ext) = self.device.debug_messenger() {
            let cstr = self.temp.make_c_str(group_label);
            let vk_label = vk::DebugUtilsLabelEXT::builder().label_name(cstr).build();
            unsafe { ext.cmd_begin_debug_utils_label(self.active, &vk_label) };
        }
    }
    unsafe fn end_debug_marker(&mut self) {
        if let Some(ext) = self.device.debug_messenger() {
            unsafe { ext.cmd_end_debug_utils_label(self.active) };
        }
    }

    unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
        unsafe {
            self.device.raw.cmd_bind_pipeline(
                self.active,
                vk::PipelineBindPoint::GRAPHICS,
                pipeline.raw,
            )
        };
    }

    unsafe fn set_index_buffer<'a>(
        &mut self,
        binding: crate::BufferBinding<'a, super::Api>,
        format: wgt::IndexFormat,
    ) {
        unsafe {
            self.device.raw.cmd_bind_index_buffer(
                self.active,
                binding.buffer.raw,
                binding.offset,
                conv::map_index_format(format),
            )
        };
    }
    unsafe fn set_vertex_buffer<'a>(
        &mut self,
        index: u32,
        binding: crate::BufferBinding<'a, super::Api>,
    ) {
        let vk_buffers = [binding.buffer.raw];
        let vk_offsets = [binding.offset];
        unsafe {
            self.device
                .raw
                .cmd_bind_vertex_buffers(self.active, index, &vk_buffers, &vk_offsets)
        };
    }
    unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth_range: Range<f32>) {
        let vk_viewports = [vk::Viewport {
            x: rect.x,
            y: if self.device.private_caps.flip_y_requires_shift {
                rect.y + rect.h
            } else {
                rect.y
            },
            width: rect.w,
            height: -rect.h,
            min_depth: depth_range.start,
            max_depth: depth_range.end,
        }];
        unsafe {
            self.device
                .raw
                .cmd_set_viewport(self.active, 0, &vk_viewports)
        };
    }
    unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
        let vk_scissors = [vk::Rect2D {
            offset: vk::Offset2D {
                x: rect.x as i32,
                y: rect.y as i32,
            },
            extent: vk::Extent2D {
                width: rect.w,
                height: rect.h,
            },
        }];
        unsafe {
            self.device
                .raw
                .cmd_set_scissor(self.active, 0, &vk_scissors)
        };
    }
    unsafe fn set_stencil_reference(&mut self, value: u32) {
        unsafe {
            self.device.raw.cmd_set_stencil_reference(
                self.active,
                vk::StencilFaceFlags::FRONT_AND_BACK,
                value,
            )
        };
    }
    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
        unsafe { self.device.raw.cmd_set_blend_constants(self.active, color) };
    }

    unsafe fn draw(
        &mut self,
        start_vertex: u32,
        vertex_count: u32,
        start_instance: u32,
        instance_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw(
                self.active,
                vertex_count,
                instance_count,
                start_vertex,
                start_instance,
            )
        };
    }
    unsafe fn draw_indexed(
        &mut self,
        start_index: u32,
        index_count: u32,
        base_vertex: i32,
        start_instance: u32,
        instance_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw_indexed(
                self.active,
                index_count,
                instance_count,
                start_index,
                base_vertex,
                start_instance,
            )
        };
    }
    unsafe fn draw_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw_indirect(
                self.active,
                buffer.raw,
                offset,
                draw_count,
                mem::size_of::<wgt::DrawIndirectArgs>() as u32,
            )
        };
    }
    unsafe fn draw_indexed_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw_indexed_indirect(
                self.active,
                buffer.raw,
                offset,
                draw_count,
                mem::size_of::<wgt::DrawIndexedIndirectArgs>() as u32,
            )
        };
    }
    unsafe fn draw_indirect_count(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &super::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
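        // Requires VK_KHR_draw_indirect_count (or Vulkan 1.2); the function
        // pointers are only present when the corresponding feature was
        // enabled at device creation.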
        let stride = mem::size_of::<wgt::DrawIndirectArgs>() as u32;
        match self.device.extension_fns.draw_indirect_count {
            Some(ref t) => {
                unsafe {
                    t.cmd_draw_indirect_count(
                        self.active,
                        buffer.raw,
                        offset,
                        count_buffer.raw,
                        count_offset,
                        max_count,
                        stride,
                    )
                };
            }
            None => panic!("Feature `DRAW_INDIRECT_COUNT` not enabled"),
        }
    }
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &super::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        let stride = mem::size_of::<wgt::DrawIndexedIndirectArgs>() as u32;
        match self.device.extension_fns.draw_indirect_count {
            Some(ref t) => {
                unsafe {
                    t.cmd_draw_indexed_indirect_count(
                        self.active,
                        buffer.raw,
                        offset,
                        count_buffer.raw,
                        count_offset,
                        max_count,
                        stride,
                    )
                };
            }
            None => panic!("Feature `DRAW_INDIRECT_COUNT` not enabled"),
        }
    }

    unsafe fn begin_compute_pass(&mut self, desc: &crate::ComputePassDescriptor<'_, super::Api>) {
        self.bind_point = vk::PipelineBindPoint::COMPUTE;
        if let Some(label) = desc.label {
            unsafe { self.begin_debug_marker(label) };
            self.rpass_debug_marker_active = true;
        }

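        // As in render passes, only the beginning-of-pass timestamp is
        // written here; the end-of-pass write is deferred to
        // `end_compute_pass`.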
        if let Some(timestamp_writes) = desc.timestamp_writes.as_ref() {
            if let Some(index) = timestamp_writes.beginning_of_pass_write_index {
                unsafe {
                    self.write_timestamp(timestamp_writes.query_set, index);
                }
            }
            self.end_of_pass_timer_query = timestamp_writes
                .end_of_pass_write_index
                .map(|index| (timestamp_writes.query_set.raw, index));
        }
    }
    unsafe fn end_compute_pass(&mut self) {
        self.write_pass_end_timestamp_if_requested();

        if self.rpass_debug_marker_active {
            unsafe { self.end_debug_marker() };
            self.rpass_debug_marker_active = false;
        }
    }

    unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
        unsafe {
            self.device.raw.cmd_bind_pipeline(
                self.active,
                vk::PipelineBindPoint::COMPUTE,
                pipeline.raw,
            )
        };
    }

    unsafe fn dispatch(&mut self, count: [u32; 3]) {
        unsafe {
            self.device
                .raw
                .cmd_dispatch(self.active, count[0], count[1], count[2])
        };
    }
    unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
        unsafe {
            self.device
                .raw
                .cmd_dispatch_indirect(self.active, buffer.raw, offset)
        }
    }
}

#[test]
fn check_dst_image_layout() {
    assert_eq!(
        conv::derive_image_layout(crate::TextureUses::COPY_DST, wgt::TextureFormat::Rgba8Unorm),
        DST_IMAGE_LAYOUT
    );
}