// wgpu_hal/vulkan/command.rs — Vulkan backend command buffer recording.
1use super::conv;
2use arrayvec::ArrayVec;
3use ash::vk;
4use core::{mem, ops::Range};
5use hashbrown::hash_map::Entry;
6
7const ALLOCATION_GRANULARITY: u32 = 16;
8const DST_IMAGE_LAYOUT: vk::ImageLayout = vk::ImageLayout::TRANSFER_DST_OPTIMAL;
9
10impl super::Texture {
11    fn map_buffer_copies<T>(&self, regions: T) -> impl Iterator<Item = vk::BufferImageCopy>
12    where
13        T: Iterator<Item = crate::BufferTextureCopy>,
14    {
15        let (block_width, block_height) = self.format.block_dimensions();
16        let format = self.format;
17        let copy_size = self.copy_size;
18        regions.map(move |r| {
19            let extent = r.texture_base.max_copy_size(&copy_size).min(&r.size);
20            let (image_subresource, image_offset) = conv::map_subresource_layers(&r.texture_base);
21            vk::BufferImageCopy {
22                buffer_offset: r.buffer_layout.offset,
23                buffer_row_length: r.buffer_layout.bytes_per_row.map_or(0, |bpr| {
24                    let block_size = format
25                        .block_copy_size(Some(r.texture_base.aspect.map()))
26                        .unwrap();
27                    block_width * (bpr / block_size)
28                }),
29                buffer_image_height: r
30                    .buffer_layout
31                    .rows_per_image
32                    .map_or(0, |rpi| rpi * block_height),
33                image_subresource,
34                image_offset,
35                image_extent: conv::map_copy_extent(&extent),
36            }
37        })
38    }
39}
40
41impl super::CommandEncoder {
42    fn write_pass_end_timestamp_if_requested(&mut self) {
43        if let Some((query_set, index)) = self.end_of_pass_timer_query.take() {
44            unsafe {
45                self.device.raw.cmd_write_timestamp(
46                    self.active,
47                    vk::PipelineStageFlags::BOTTOM_OF_PIPE,
48                    query_set,
49                    index,
50                );
51            }
52        }
53    }
54
55    fn make_framebuffer(
56        &mut self,
57        key: super::FramebufferKey,
58    ) -> Result<vk::Framebuffer, crate::DeviceError> {
59        Ok(match self.framebuffers.entry(key) {
60            Entry::Occupied(e) => *e.get(),
61            Entry::Vacant(e) => {
62                let super::FramebufferKey {
63                    raw_pass,
64                    ref attachment_views,
65                    attachment_identities: _,
66                    extent,
67                } = *e.key();
68
69                let vk_info = vk::FramebufferCreateInfo::default()
70                    .render_pass(raw_pass)
71                    .width(extent.width)
72                    .height(extent.height)
73                    .layers(extent.depth_or_array_layers)
74                    .attachments(attachment_views);
75
76                let raw = unsafe { self.device.raw.create_framebuffer(&vk_info, None).unwrap() };
77                *e.insert(raw)
78            }
79        })
80    }
81
82    fn make_temp_texture_view(
83        &mut self,
84        key: super::TempTextureViewKey,
85    ) -> Result<super::IdentifiedTextureView, crate::DeviceError> {
86        Ok(match self.temp_texture_views.entry(key) {
87            Entry::Occupied(e) => *e.get(),
88            Entry::Vacant(e) => {
89                let super::TempTextureViewKey {
90                    texture,
91                    texture_identity: _,
92                    format,
93                    mip_level,
94                    depth_slice,
95                } = *e.key();
96
97                let vk_info = vk::ImageViewCreateInfo::default()
98                    .image(texture)
99                    .view_type(vk::ImageViewType::TYPE_2D)
100                    .format(format)
101                    .subresource_range(vk::ImageSubresourceRange {
102                        aspect_mask: vk::ImageAspectFlags::COLOR,
103                        base_mip_level: mip_level,
104                        level_count: 1,
105                        base_array_layer: depth_slice,
106                        layer_count: 1,
107                    });
108                let raw = unsafe { self.device.raw.create_image_view(&vk_info, None) }
109                    .map_err(super::map_host_device_oom_and_ioca_err)?;
110
111                let identity = self.device.texture_view_identity_factory.next();
112
113                *e.insert(super::IdentifiedTextureView { raw, identity })
114            }
115        })
116    }
117}
118
119impl crate::CommandEncoder for super::CommandEncoder {
120    type A = super::Api;
121
122    unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
123        if self.free.is_empty() {
124            let vk_info = vk::CommandBufferAllocateInfo::default()
125                .command_pool(self.raw)
126                .command_buffer_count(ALLOCATION_GRANULARITY);
127            let cmd_buf_vec = unsafe {
128                self.device
129                    .raw
130                    .allocate_command_buffers(&vk_info)
131                    .map_err(super::map_host_device_oom_err)?
132            };
133            self.free.extend(cmd_buf_vec);
134        }
135        let raw = self.free.pop().unwrap();
136
137        // Set the name unconditionally, since there might be a
138        // previous name assigned to this.
139        unsafe { self.device.set_object_name(raw, label.unwrap_or_default()) };
140
141        // Reset this in case the last renderpass was never ended.
142        self.rpass_debug_marker_active = false;
143
144        let vk_info = vk::CommandBufferBeginInfo::default()
145            .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT);
146        unsafe { self.device.raw.begin_command_buffer(raw, &vk_info) }
147            .map_err(super::map_host_device_oom_err)?;
148        self.active = raw;
149
150        Ok(())
151    }
152
153    unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
154        let raw = self.active;
155        self.active = vk::CommandBuffer::null();
156        unsafe { self.device.raw.end_command_buffer(raw) }.map_err(map_err)?;
157        fn map_err(err: vk::Result) -> crate::DeviceError {
158            // We don't use VK_KHR_video_encode_queue
159            // VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR
160            super::map_host_device_oom_err(err)
161        }
162        Ok(super::CommandBuffer { raw })
163    }
164
165    unsafe fn discard_encoding(&mut self) {
166        // Safe use requires this is not called in the "closed" state, so the buffer
167        // shouldn't be null. Assert this to make sure we're not pushing null
168        // buffers to the discard pile.
169        assert_ne!(self.active, vk::CommandBuffer::null());
170
171        self.discarded.push(self.active);
172        self.active = vk::CommandBuffer::null();
173    }
174
175    unsafe fn reset_all<I>(&mut self, cmd_bufs: I)
176    where
177        I: Iterator<Item = super::CommandBuffer>,
178    {
179        self.temp.clear();
180        self.free
181            .extend(cmd_bufs.into_iter().map(|cmd_buf| cmd_buf.raw));
182        self.free.append(&mut self.discarded);
183        // Delete framebuffers from the framebuffer cache
184        for (_, framebuffer) in self.framebuffers.drain() {
185            unsafe { self.device.raw.destroy_framebuffer(framebuffer, None) };
186        }
187        let _ = unsafe {
188            self.device
189                .raw
190                .reset_command_pool(self.raw, vk::CommandPoolResetFlags::default())
191        };
192    }
193
194    unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
195    where
196        T: Iterator<Item = crate::BufferBarrier<'a, super::Buffer>>,
197    {
198        //Note: this is done so that we never end up with empty stage flags
199        let mut src_stages = vk::PipelineStageFlags::TOP_OF_PIPE;
200        let mut dst_stages = vk::PipelineStageFlags::BOTTOM_OF_PIPE;
201        let vk_barriers = &mut self.temp.buffer_barriers;
202        vk_barriers.clear();
203
204        for bar in barriers {
205            let (src_stage, src_access) = conv::map_buffer_usage_to_barrier(bar.usage.from);
206            src_stages |= src_stage;
207            let (dst_stage, dst_access) = conv::map_buffer_usage_to_barrier(bar.usage.to);
208            dst_stages |= dst_stage;
209
210            vk_barriers.push(
211                vk::BufferMemoryBarrier::default()
212                    .buffer(bar.buffer.raw)
213                    .size(vk::WHOLE_SIZE)
214                    .src_access_mask(src_access)
215                    .dst_access_mask(dst_access),
216            )
217        }
218
219        if !vk_barriers.is_empty() {
220            unsafe {
221                self.device.raw.cmd_pipeline_barrier(
222                    self.active,
223                    src_stages,
224                    dst_stages,
225                    vk::DependencyFlags::empty(),
226                    &[],
227                    vk_barriers,
228                    &[],
229                )
230            };
231        }
232    }
233
234    unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
235    where
236        T: Iterator<Item = crate::TextureBarrier<'a, super::Texture>>,
237    {
238        let mut src_stages = vk::PipelineStageFlags::empty();
239        let mut dst_stages = vk::PipelineStageFlags::empty();
240        let vk_barriers = &mut self.temp.image_barriers;
241        vk_barriers.clear();
242
243        for bar in barriers {
244            let range = conv::map_subresource_range_combined_aspect(
245                &bar.range,
246                bar.texture.format,
247                &self.device.private_caps,
248            );
249            let (src_stage, src_access) = conv::map_texture_usage_to_barrier(bar.usage.from);
250            let src_layout = conv::derive_image_layout(bar.usage.from, bar.texture.format);
251            src_stages |= src_stage;
252            let (dst_stage, dst_access) = conv::map_texture_usage_to_barrier(bar.usage.to);
253            let dst_layout = conv::derive_image_layout(bar.usage.to, bar.texture.format);
254            dst_stages |= dst_stage;
255
256            vk_barriers.push(
257                vk::ImageMemoryBarrier::default()
258                    .image(bar.texture.raw)
259                    .subresource_range(range)
260                    .src_access_mask(src_access)
261                    .dst_access_mask(dst_access)
262                    .old_layout(src_layout)
263                    .new_layout(dst_layout),
264            );
265        }
266
267        if !vk_barriers.is_empty() {
268            unsafe {
269                self.device.raw.cmd_pipeline_barrier(
270                    self.active,
271                    src_stages,
272                    dst_stages,
273                    vk::DependencyFlags::empty(),
274                    &[],
275                    &[],
276                    vk_barriers,
277                )
278            };
279        }
280    }
281
282    unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
283        let range_size = range.end - range.start;
284        if self.device.workarounds.contains(
285            super::Workarounds::FORCE_FILL_BUFFER_WITH_SIZE_GREATER_4096_ALIGNED_OFFSET_16,
286        ) && range_size >= 4096
287            && !range.start.is_multiple_of(16)
288        {
289            let rounded_start = wgt::math::align_to(range.start, 16);
290            let prefix_size = rounded_start - range.start;
291
292            unsafe {
293                self.device.raw.cmd_fill_buffer(
294                    self.active,
295                    buffer.raw,
296                    range.start,
297                    prefix_size,
298                    0,
299                )
300            };
301
302            // This will never be zero, as rounding can only add up to 12 bytes, and the total size is 4096.
303            let suffix_size = range.end - rounded_start;
304
305            unsafe {
306                self.device.raw.cmd_fill_buffer(
307                    self.active,
308                    buffer.raw,
309                    rounded_start,
310                    suffix_size,
311                    0,
312                )
313            };
314        } else {
315            unsafe {
316                self.device
317                    .raw
318                    .cmd_fill_buffer(self.active, buffer.raw, range.start, range_size, 0)
319            };
320        }
321    }
322
323    unsafe fn copy_buffer_to_buffer<T>(
324        &mut self,
325        src: &super::Buffer,
326        dst: &super::Buffer,
327        regions: T,
328    ) where
329        T: Iterator<Item = crate::BufferCopy>,
330    {
331        let vk_regions_iter = regions.map(|r| vk::BufferCopy {
332            src_offset: r.src_offset,
333            dst_offset: r.dst_offset,
334            size: r.size.get(),
335        });
336
337        unsafe {
338            self.device.raw.cmd_copy_buffer(
339                self.active,
340                src.raw,
341                dst.raw,
342                &smallvec::SmallVec::<[vk::BufferCopy; 32]>::from_iter(vk_regions_iter),
343            )
344        };
345    }
346
347    unsafe fn copy_texture_to_texture<T>(
348        &mut self,
349        src: &super::Texture,
350        src_usage: wgt::TextureUses,
351        dst: &super::Texture,
352        regions: T,
353    ) where
354        T: Iterator<Item = crate::TextureCopy>,
355    {
356        let src_layout = conv::derive_image_layout(src_usage, src.format);
357
358        let vk_regions_iter = regions.map(|r| {
359            let (src_subresource, src_offset) = conv::map_subresource_layers(&r.src_base);
360            let (dst_subresource, dst_offset) = conv::map_subresource_layers(&r.dst_base);
361            let extent = r
362                .size
363                .min(&r.src_base.max_copy_size(&src.copy_size))
364                .min(&r.dst_base.max_copy_size(&dst.copy_size));
365            vk::ImageCopy {
366                src_subresource,
367                src_offset,
368                dst_subresource,
369                dst_offset,
370                extent: conv::map_copy_extent(&extent),
371            }
372        });
373
374        unsafe {
375            self.device.raw.cmd_copy_image(
376                self.active,
377                src.raw,
378                src_layout,
379                dst.raw,
380                DST_IMAGE_LAYOUT,
381                &smallvec::SmallVec::<[vk::ImageCopy; 32]>::from_iter(vk_regions_iter),
382            )
383        };
384    }
385
386    unsafe fn copy_buffer_to_texture<T>(
387        &mut self,
388        src: &super::Buffer,
389        dst: &super::Texture,
390        regions: T,
391    ) where
392        T: Iterator<Item = crate::BufferTextureCopy>,
393    {
394        let vk_regions_iter = dst.map_buffer_copies(regions);
395
396        unsafe {
397            self.device.raw.cmd_copy_buffer_to_image(
398                self.active,
399                src.raw,
400                dst.raw,
401                DST_IMAGE_LAYOUT,
402                &smallvec::SmallVec::<[vk::BufferImageCopy; 32]>::from_iter(vk_regions_iter),
403            )
404        };
405    }
406
407    unsafe fn copy_texture_to_buffer<T>(
408        &mut self,
409        src: &super::Texture,
410        src_usage: wgt::TextureUses,
411        dst: &super::Buffer,
412        regions: T,
413    ) where
414        T: Iterator<Item = crate::BufferTextureCopy>,
415    {
416        let src_layout = conv::derive_image_layout(src_usage, src.format);
417        let vk_regions_iter = src.map_buffer_copies(regions);
418
419        unsafe {
420            self.device.raw.cmd_copy_image_to_buffer(
421                self.active,
422                src.raw,
423                src_layout,
424                dst.raw,
425                &smallvec::SmallVec::<[vk::BufferImageCopy; 32]>::from_iter(vk_regions_iter),
426            )
427        };
428    }
429
430    unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
431        unsafe {
432            self.device.raw.cmd_begin_query(
433                self.active,
434                set.raw,
435                index,
436                vk::QueryControlFlags::empty(),
437            )
438        };
439    }
440    unsafe fn end_query(&mut self, set: &super::QuerySet, index: u32) {
441        unsafe { self.device.raw.cmd_end_query(self.active, set.raw, index) };
442    }
443    unsafe fn write_timestamp(&mut self, set: &super::QuerySet, index: u32) {
444        unsafe {
445            self.device.raw.cmd_write_timestamp(
446                self.active,
447                vk::PipelineStageFlags::BOTTOM_OF_PIPE,
448                set.raw,
449                index,
450            )
451        };
452    }
    /// Reads the compacted size of `acceleration_structure` into `buffer`.
    ///
    /// Resets the structure's dedicated compacted-size query, records the
    /// `ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR` query write, then copies
    /// the single 64-bit result into `buffer` at offset 0.
    ///
    /// # Panics
    /// Panics if the `RAY_TRACING` feature is not enabled, or if the
    /// acceleration structure has no `compacted_size_query` pool.
    unsafe fn read_acceleration_structure_compact_size(
        &mut self,
        acceleration_structure: &super::AccelerationStructure,
        buffer: &super::Buffer,
    ) {
        let ray_tracing_functions = self
            .device
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");
        let query_pool = acceleration_structure
            .compacted_size_query
            .as_ref()
            .unwrap();
        unsafe {
            // The query must be reset before it can be written again.
            self.device
                .raw
                .cmd_reset_query_pool(self.active, *query_pool, 0, 1);
            ray_tracing_functions
                .acceleration_structure
                .cmd_write_acceleration_structures_properties(
                    self.active,
                    &[acceleration_structure.raw],
                    vk::QueryType::ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR,
                    *query_pool,
                    0,
                );
            // Copy one 64-bit result; WAIT makes the copy block until the
            // query result is available.
            self.device.raw.cmd_copy_query_pool_results(
                self.active,
                *query_pool,
                0,
                1,
                buffer.raw,
                0,
                wgt::QUERY_SIZE as vk::DeviceSize,
                vk::QueryResultFlags::TYPE_64 | vk::QueryResultFlags::WAIT,
            )
        };
    }
493    unsafe fn reset_queries(&mut self, set: &super::QuerySet, range: Range<u32>) {
494        unsafe {
495            self.device.raw.cmd_reset_query_pool(
496                self.active,
497                set.raw,
498                range.start,
499                range.end - range.start,
500            )
501        };
502    }
503    unsafe fn copy_query_results(
504        &mut self,
505        set: &super::QuerySet,
506        range: Range<u32>,
507        buffer: &super::Buffer,
508        offset: wgt::BufferAddress,
509        stride: wgt::BufferSize,
510    ) {
511        unsafe {
512            self.device.raw.cmd_copy_query_pool_results(
513                self.active,
514                set.raw,
515                range.start,
516                range.end - range.start,
517                buffer.raw,
518                offset,
519                stride.get(),
520                vk::QueryResultFlags::TYPE_64 | vk::QueryResultFlags::WAIT,
521            )
522        };
523    }
524
    /// Records acceleration-structure builds for every descriptor into the
    /// active command buffer.
    ///
    /// The geometry and range data is first collected into owned storage,
    /// because `vk::AccelerationStructureBuildGeometryInfoKHR` holds raw
    /// pointers into the geometry arrays; that storage must stay alive and
    /// unmoved until `cmd_build_acceleration_structures` is recorded, which
    /// is why the pointers are wired up in a second pass at the end.
    ///
    /// # Panics
    /// Panics if the `RAY_TRACING` feature is not enabled, or if any entry is
    /// missing a required buffer.
    unsafe fn build_acceleration_structures<'a, T>(&mut self, descriptor_count: u32, descriptors: T)
    where
        super::Api: 'a,
        T: IntoIterator<
            Item = crate::BuildAccelerationStructureDescriptor<
                'a,
                super::Buffer,
                super::AccelerationStructure,
            >,
        >,
    {
        // Inline SmallVec capacities; builds larger than this spill to the heap.
        const CAPACITY_OUTER: usize = 8;
        const CAPACITY_INNER: usize = 1;
        let descriptor_count = descriptor_count as usize;

        let ray_tracing_functions = self
            .device
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");

        // Resolve a geometry buffer to its GPU device address. Every geometry
        // kind requires a buffer, so `None` here is a caller bug.
        let get_device_address = |buffer: Option<&super::Buffer>| unsafe {
            match buffer {
                Some(buffer) => ray_tracing_functions
                    .buffer_device_address
                    .get_buffer_device_address(
                        &vk::BufferDeviceAddressInfo::default().buffer(buffer.raw),
                    ),
                None => panic!("Buffers are required to build acceleration structures"),
            }
        };

        // storage to all the data required for cmd_build_acceleration_structures
        let mut ranges_storage = smallvec::SmallVec::<
            [smallvec::SmallVec<[vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER]>;
                CAPACITY_OUTER],
        >::with_capacity(descriptor_count);
        let mut geometries_storage = smallvec::SmallVec::<
            [smallvec::SmallVec<[vk::AccelerationStructureGeometryKHR; CAPACITY_INNER]>;
                CAPACITY_OUTER],
        >::with_capacity(descriptor_count);

        // pointers to all the data required for cmd_build_acceleration_structures
        let mut geometry_infos = smallvec::SmallVec::<
            [vk::AccelerationStructureBuildGeometryInfoKHR; CAPACITY_OUTER],
        >::with_capacity(descriptor_count);
        let mut ranges_ptrs = smallvec::SmallVec::<
            [&[vk::AccelerationStructureBuildRangeInfoKHR]; CAPACITY_OUTER],
        >::with_capacity(descriptor_count);

        for desc in descriptors {
            let (geometries, ranges) = match *desc.entries {
                // Top-level structure: a single instances geometry.
                crate::AccelerationStructureEntries::Instances(ref instances) => {
                    let instance_data = vk::AccelerationStructureGeometryInstancesDataKHR::default(
                    // TODO: Code is so large that rustfmt refuses to treat this... :(
                    )
                    .data(vk::DeviceOrHostAddressConstKHR {
                        device_address: get_device_address(instances.buffer),
                    });

                    let geometry = vk::AccelerationStructureGeometryKHR::default()
                        .geometry_type(vk::GeometryTypeKHR::INSTANCES)
                        .geometry(vk::AccelerationStructureGeometryDataKHR {
                            instances: instance_data,
                        });

                    let range = vk::AccelerationStructureBuildRangeInfoKHR::default()
                        .primitive_count(instances.count)
                        .primitive_offset(instances.offset);

                    (smallvec::smallvec![geometry], smallvec::smallvec![range])
                }
                // Bottom-level structure: triangle geometries, optionally
                // indexed and/or transformed.
                crate::AccelerationStructureEntries::Triangles(ref in_geometries) => {
                    let mut ranges = smallvec::SmallVec::<
                        [vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    let mut geometries = smallvec::SmallVec::<
                        [vk::AccelerationStructureGeometryKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    for triangles in in_geometries {
                        let mut triangle_data =
                            vk::AccelerationStructureGeometryTrianglesDataKHR::default()
                                // IndexType::NONE_KHR is not set by default (due to being provided by VK_KHR_acceleration_structure) but unless there is an
                                // index buffer we need to have IndexType::NONE_KHR as our index type.
                                .index_type(vk::IndexType::NONE_KHR)
                                .vertex_data(vk::DeviceOrHostAddressConstKHR {
                                    // Start the vertex data at `first_vertex`.
                                    device_address: get_device_address(triangles.vertex_buffer)
                                        + (triangles.first_vertex as u64 * triangles.vertex_stride),
                                })
                                .vertex_format(conv::map_vertex_format(triangles.vertex_format))
                                .max_vertex(triangles.vertex_count)
                                .vertex_stride(triangles.vertex_stride);

                        let mut range = vk::AccelerationStructureBuildRangeInfoKHR::default();

                        if let Some(ref indices) = triangles.indices {
                            triangle_data = triangle_data
                                .index_data(vk::DeviceOrHostAddressConstKHR {
                                    device_address: get_device_address(indices.buffer),
                                })
                                .index_type(conv::map_index_format(indices.format));

                            // Three indices per triangle.
                            range = range
                                .primitive_count(indices.count / 3)
                                .primitive_offset(indices.offset);
                        } else {
                            // Non-indexed: every three vertices form a triangle.
                            range = range.primitive_count(triangles.vertex_count / 3);
                        }

                        if let Some(ref transform) = triangles.transform {
                            let transform_device_address = unsafe {
                                ray_tracing_functions
                                    .buffer_device_address
                                    .get_buffer_device_address(
                                        &vk::BufferDeviceAddressInfo::default()
                                            .buffer(transform.buffer.raw),
                                    )
                            };
                            triangle_data =
                                triangle_data.transform_data(vk::DeviceOrHostAddressConstKHR {
                                    device_address: transform_device_address,
                                });

                            range = range.transform_offset(transform.offset);
                        }

                        let geometry = vk::AccelerationStructureGeometryKHR::default()
                            .geometry_type(vk::GeometryTypeKHR::TRIANGLES)
                            .geometry(vk::AccelerationStructureGeometryDataKHR {
                                triangles: triangle_data,
                            })
                            .flags(conv::map_acceleration_structure_geometry_flags(
                                triangles.flags,
                            ));

                        geometries.push(geometry);
                        ranges.push(range);
                    }
                    (geometries, ranges)
                }
                // Bottom-level structure: axis-aligned bounding boxes.
                crate::AccelerationStructureEntries::AABBs(ref in_geometries) => {
                    let mut ranges = smallvec::SmallVec::<
                        [vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    let mut geometries = smallvec::SmallVec::<
                        [vk::AccelerationStructureGeometryKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    for aabb in in_geometries {
                        let aabbs_data = vk::AccelerationStructureGeometryAabbsDataKHR::default()
                            .data(vk::DeviceOrHostAddressConstKHR {
                                device_address: get_device_address(aabb.buffer),
                            })
                            .stride(aabb.stride);

                        let range = vk::AccelerationStructureBuildRangeInfoKHR::default()
                            .primitive_count(aabb.count)
                            .primitive_offset(aabb.offset);

                        let geometry = vk::AccelerationStructureGeometryKHR::default()
                            .geometry_type(vk::GeometryTypeKHR::AABBS)
                            .geometry(vk::AccelerationStructureGeometryDataKHR {
                                aabbs: aabbs_data,
                            })
                            .flags(conv::map_acceleration_structure_geometry_flags(aabb.flags));

                        geometries.push(geometry);
                        ranges.push(range);
                    }
                    (geometries, ranges)
                }
            };

            // Keep the per-descriptor arrays alive; `geometry_info` will point
            // into them once all pushes are done.
            ranges_storage.push(ranges);
            geometries_storage.push(geometries);

            let scratch_device_address = unsafe {
                ray_tracing_functions
                    .buffer_device_address
                    .get_buffer_device_address(
                        &vk::BufferDeviceAddressInfo::default().buffer(desc.scratch_buffer.raw),
                    )
            };
            // Instances imply a top-level structure; triangles/AABBs imply
            // bottom-level.
            let ty = match *desc.entries {
                crate::AccelerationStructureEntries::Instances(_) => {
                    vk::AccelerationStructureTypeKHR::TOP_LEVEL
                }
                _ => vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL,
            };
            let mut geometry_info = vk::AccelerationStructureBuildGeometryInfoKHR::default()
                .ty(ty)
                .mode(conv::map_acceleration_structure_build_mode(desc.mode))
                .flags(conv::map_acceleration_structure_flags(desc.flags))
                .dst_acceleration_structure(desc.destination_acceleration_structure.raw)
                .scratch_data(vk::DeviceOrHostAddressKHR {
                    device_address: scratch_device_address + desc.scratch_buffer_offset,
                });

            // Updates rebuild from a source structure; fall back to updating
            // the destination in place when no explicit source is given.
            if desc.mode == crate::AccelerationStructureBuildMode::Update {
                geometry_info.src_acceleration_structure = desc
                    .source_acceleration_structure
                    .unwrap_or(desc.destination_acceleration_structure)
                    .raw;
            }

            geometry_infos.push(geometry_info);
        }

        // All storage vectors are fully populated now and will no longer
        // reallocate, so it is safe to hand out raw pointers/slices into them.
        for (i, geometry_info) in geometry_infos.iter_mut().enumerate() {
            geometry_info.geometry_count = geometries_storage[i].len() as u32;
            geometry_info.p_geometries = geometries_storage[i].as_ptr();
            ranges_ptrs.push(&ranges_storage[i]);
        }

        unsafe {
            ray_tracing_functions
                .acceleration_structure
                .cmd_build_acceleration_structures(self.active, &geometry_infos, &ranges_ptrs);
        }
    }
745
746    unsafe fn place_acceleration_structure_barrier(
747        &mut self,
748        barrier: crate::AccelerationStructureBarrier,
749    ) {
750        let (src_stage, src_access) = conv::map_acceleration_structure_usage_to_barrier(
751            barrier.usage.from,
752            self.device.features,
753        );
754        let (dst_stage, dst_access) = conv::map_acceleration_structure_usage_to_barrier(
755            barrier.usage.to,
756            self.device.features,
757        );
758
759        unsafe {
760            self.device.raw.cmd_pipeline_barrier(
761                self.active,
762                src_stage | vk::PipelineStageFlags::TOP_OF_PIPE,
763                dst_stage | vk::PipelineStageFlags::BOTTOM_OF_PIPE,
764                vk::DependencyFlags::empty(),
765                &[vk::MemoryBarrier::default()
766                    .src_access_mask(src_access)
767                    .dst_access_mask(dst_access)],
768                &[],
769                &[],
770            )
771        };
772    }
773
    // Intentionally a no-op on Vulkan: this backend does not need to record
    // explicit dependencies between command buffers and the acceleration
    // structures they reference.
    unsafe fn set_acceleration_structure_dependencies(
        _command_buffers: &[&super::CommandBuffer],
        _dependencies: &[&super::AccelerationStructure],
    ) {
    }
779    // render
780
    // Begins a Vulkan render pass: assembles cache keys for the render pass and
    // framebuffer, gathers clear values in attachment order, sets the default
    // viewport/scissor, and records `vkCmdBeginRenderPass`.
    //
    // Errors: propagates `DeviceError` from temporary texture-view creation.
    unsafe fn begin_render_pass(
        &mut self,
        desc: &crate::RenderPassDescriptor<super::QuerySet, super::TextureView>,
    ) -> Result<(), crate::DeviceError> {
        // Clear values must be pushed in the same order attachments are added
        // to the keys below — Vulkan matches them by attachment index.
        let mut vk_clear_values =
            ArrayVec::<vk::ClearValue, { super::MAX_TOTAL_ATTACHMENTS }>::new();
        let mut rp_key = super::RenderPassKey {
            colors: ArrayVec::default(),
            depth_stencil: None,
            sample_count: desc.sample_count,
            multiview_mask: desc.multiview_mask,
        };
        let mut fb_key = super::FramebufferKey {
            // Filled in once the render pass has been resolved from the cache.
            raw_pass: vk::RenderPass::null(),
            attachment_views: ArrayVec::default(),
            attachment_identities: ArrayVec::default(),
            extent: desc.extent,
        };

        for cat in desc.color_attachments {
            if let Some(cat) = cat.as_ref() {
                // 3D color targets are rendered through a temporary view keyed
                // by the selected depth slice (which must be present here).
                let color_view = if cat.target.view.dimension == wgt::TextureViewDimension::D3 {
                    let key = super::TempTextureViewKey {
                        texture: cat.target.view.raw_texture,
                        texture_identity: cat.target.view.texture_identity,
                        format: cat.target.view.raw_format,
                        mip_level: cat.target.view.base_mip_level,
                        depth_slice: cat.depth_slice.unwrap(),
                    };
                    self.make_temp_texture_view(key)?
                } else {
                    cat.target.view.identified_raw_view()
                };

                vk_clear_values.push(vk::ClearValue {
                    color: unsafe { cat.make_vk_clear_color() },
                });
                let color = super::ColorAttachmentKey {
                    base: cat.target.make_attachment_key(cat.ops),
                    resolve: cat.resolve_target.as_ref().map(|target| {
                        target.make_attachment_key(
                            crate::AttachmentOps::LOAD_CLEAR | crate::AttachmentOps::STORE,
                        )
                    }),
                };

                rp_key.colors.push(Some(color));
                fb_key.push_view(color_view);
                if let Some(ref at) = cat.resolve_target {
                    // Placeholder clear value to keep indices aligned with the
                    // resolve attachment; its contents are never used as a clear.
                    vk_clear_values.push(unsafe { mem::zeroed() });
                    fb_key.push_view(at.view.identified_raw_view());
                }
            } else {
                // Hole in the color attachment list; preserved in the key.
                rp_key.colors.push(None);
            }
        }
        if let Some(ref ds) = desc.depth_stencil_attachment {
            vk_clear_values.push(vk::ClearValue {
                depth_stencil: vk::ClearDepthStencilValue {
                    depth: ds.clear_value.0,
                    stencil: ds.clear_value.1,
                },
            });
            rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
                base: ds.target.make_attachment_key(ds.depth_ops),
                stencil_ops: ds.stencil_ops,
            });
            fb_key.push_view(ds.target.view.identified_raw_view());
        }

        let render_area = vk::Rect2D {
            offset: vk::Offset2D { x: 0, y: 0 },
            extent: vk::Extent2D {
                width: desc.extent.width,
                height: desc.extent.height,
            },
        };
        // Default full-extent viewport: anchored at the bottom edge with a
        // negative height to flip the Y axis.
        let vk_viewports = [vk::Viewport {
            x: 0.0,
            y: desc.extent.height as f32,
            width: desc.extent.width as f32,
            height: -(desc.extent.height as f32),
            min_depth: 0.0,
            max_depth: 1.0,
        }];

        let raw_pass = self.device.make_render_pass(rp_key).unwrap();
        fb_key.raw_pass = raw_pass;
        let raw_framebuffer = self.make_framebuffer(fb_key).unwrap();

        let vk_info = vk::RenderPassBeginInfo::default()
            .render_pass(raw_pass)
            .render_area(render_area)
            .clear_values(&vk_clear_values)
            .framebuffer(raw_framebuffer);

        if let Some(label) = desc.label {
            unsafe { self.begin_debug_marker(label) };
            self.rpass_debug_marker_active = true;
        }

        // Start timestamp if any (before all other commands but after debug marker)
        if let Some(timestamp_writes) = desc.timestamp_writes.as_ref() {
            if let Some(index) = timestamp_writes.beginning_of_pass_write_index {
                unsafe {
                    self.write_timestamp(timestamp_writes.query_set, index);
                }
            }
            // The end-of-pass timestamp is deferred; `end_render_pass` writes it.
            self.end_of_pass_timer_query = timestamp_writes
                .end_of_pass_write_index
                .map(|index| (timestamp_writes.query_set.raw, index));
        }

        unsafe {
            self.device
                .raw
                .cmd_set_viewport(self.active, 0, &vk_viewports);
            self.device
                .raw
                .cmd_set_scissor(self.active, 0, &[render_area]);
            self.device.raw.cmd_begin_render_pass(
                self.active,
                &vk_info,
                vk::SubpassContents::INLINE,
            );
        };

        // Subsequent descriptor-set binds target the graphics bind point.
        self.bind_point = vk::PipelineBindPoint::GRAPHICS;

        Ok(())
    }
912    unsafe fn end_render_pass(&mut self) {
913        unsafe {
914            self.device.raw.cmd_end_render_pass(self.active);
915        }
916
917        // After all other commands but before debug marker, so this is still seen as part of this pass.
918        self.write_pass_end_timestamp_if_requested();
919
920        if self.rpass_debug_marker_active {
921            unsafe {
922                self.end_debug_marker();
923            }
924            self.rpass_debug_marker_active = false;
925        }
926    }
927
928    unsafe fn set_bind_group(
929        &mut self,
930        layout: &super::PipelineLayout,
931        index: u32,
932        group: &super::BindGroup,
933        dynamic_offsets: &[wgt::DynamicOffset],
934    ) {
935        let sets = [*group.set.raw()];
936        unsafe {
937            self.device.raw.cmd_bind_descriptor_sets(
938                self.active,
939                self.bind_point,
940                layout.raw,
941                index,
942                &sets,
943                dynamic_offsets,
944            )
945        };
946    }
947    unsafe fn set_immediates(
948        &mut self,
949        layout: &super::PipelineLayout,
950        offset_bytes: u32,
951        data: &[u32],
952    ) {
953        unsafe {
954            self.device.raw.cmd_push_constants(
955                self.active,
956                layout.raw,
957                vk::ShaderStageFlags::ALL,
958                offset_bytes,
959                bytemuck::cast_slice(data),
960            )
961        };
962    }
963
964    unsafe fn insert_debug_marker(&mut self, label: &str) {
965        if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
966            let cstr = self.temp.make_c_str(label);
967            let vk_label = vk::DebugUtilsLabelEXT::default().label_name(cstr);
968            unsafe { ext.cmd_insert_debug_utils_label(self.active, &vk_label) };
969        }
970    }
971    unsafe fn begin_debug_marker(&mut self, group_label: &str) {
972        if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
973            let cstr = self.temp.make_c_str(group_label);
974            let vk_label = vk::DebugUtilsLabelEXT::default().label_name(cstr);
975            unsafe { ext.cmd_begin_debug_utils_label(self.active, &vk_label) };
976        }
977    }
978    unsafe fn end_debug_marker(&mut self) {
979        if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
980            unsafe { ext.cmd_end_debug_utils_label(self.active) };
981        }
982    }
983
984    unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
985        unsafe {
986            self.current_pipeline_is_multiview = pipeline.is_multiview;
987            self.device.raw.cmd_bind_pipeline(
988                self.active,
989                vk::PipelineBindPoint::GRAPHICS,
990                pipeline.raw,
991            )
992        };
993    }
994
995    unsafe fn set_index_buffer<'a>(
996        &mut self,
997        binding: crate::BufferBinding<'a, super::Buffer>,
998        format: wgt::IndexFormat,
999    ) {
1000        unsafe {
1001            self.device.raw.cmd_bind_index_buffer(
1002                self.active,
1003                binding.buffer.raw,
1004                binding.offset,
1005                conv::map_index_format(format),
1006            )
1007        };
1008    }
1009    unsafe fn set_vertex_buffer<'a>(
1010        &mut self,
1011        index: u32,
1012        binding: crate::BufferBinding<'a, super::Buffer>,
1013    ) {
1014        let vk_buffers = [binding.buffer.raw];
1015        let vk_offsets = [binding.offset];
1016        unsafe {
1017            self.device
1018                .raw
1019                .cmd_bind_vertex_buffers(self.active, index, &vk_buffers, &vk_offsets)
1020        };
1021    }
1022    unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth_range: Range<f32>) {
1023        let vk_viewports = [vk::Viewport {
1024            x: rect.x,
1025            y: rect.y + rect.h,
1026            width: rect.w,
1027            height: -rect.h, // flip Y
1028            min_depth: depth_range.start,
1029            max_depth: depth_range.end,
1030        }];
1031        unsafe {
1032            self.device
1033                .raw
1034                .cmd_set_viewport(self.active, 0, &vk_viewports)
1035        };
1036    }
1037    unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
1038        let vk_scissors = [vk::Rect2D {
1039            offset: vk::Offset2D {
1040                x: rect.x as i32,
1041                y: rect.y as i32,
1042            },
1043            extent: vk::Extent2D {
1044                width: rect.w,
1045                height: rect.h,
1046            },
1047        }];
1048        unsafe {
1049            self.device
1050                .raw
1051                .cmd_set_scissor(self.active, 0, &vk_scissors)
1052        };
1053    }
1054    unsafe fn set_stencil_reference(&mut self, value: u32) {
1055        unsafe {
1056            self.device.raw.cmd_set_stencil_reference(
1057                self.active,
1058                vk::StencilFaceFlags::FRONT_AND_BACK,
1059                value,
1060            )
1061        };
1062    }
1063    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
1064        unsafe { self.device.raw.cmd_set_blend_constants(self.active, color) };
1065    }
1066
1067    unsafe fn draw(
1068        &mut self,
1069        first_vertex: u32,
1070        vertex_count: u32,
1071        first_instance: u32,
1072        instance_count: u32,
1073    ) {
1074        if self.current_pipeline_is_multiview
1075            && (first_instance as u64 + instance_count as u64 - 1)
1076                > self.device.private_caps.multiview_instance_index_limit as u64
1077        {
1078            panic!("This vulkan device is affected by [#8333](https://github.com/gfx-rs/wgpu/issues/8333)");
1079        }
1080        unsafe {
1081            self.device.raw.cmd_draw(
1082                self.active,
1083                vertex_count,
1084                instance_count,
1085                first_vertex,
1086                first_instance,
1087            )
1088        };
1089    }
1090    unsafe fn draw_indexed(
1091        &mut self,
1092        first_index: u32,
1093        index_count: u32,
1094        base_vertex: i32,
1095        first_instance: u32,
1096        instance_count: u32,
1097    ) {
1098        if self.current_pipeline_is_multiview
1099            && (first_instance as u64 + instance_count as u64 - 1)
1100                > self.device.private_caps.multiview_instance_index_limit as u64
1101        {
1102            panic!("This vulkan device is affected by [#8333](https://github.com/gfx-rs/wgpu/issues/8333)");
1103        }
1104        unsafe {
1105            self.device.raw.cmd_draw_indexed(
1106                self.active,
1107                index_count,
1108                instance_count,
1109                first_index,
1110                base_vertex,
1111                first_instance,
1112            )
1113        };
1114    }
1115    unsafe fn draw_mesh_tasks(
1116        &mut self,
1117        group_count_x: u32,
1118        group_count_y: u32,
1119        group_count_z: u32,
1120    ) {
1121        if let Some(ref t) = self.device.extension_fns.mesh_shading {
1122            unsafe {
1123                t.cmd_draw_mesh_tasks(self.active, group_count_x, group_count_y, group_count_z);
1124            };
1125        } else {
1126            panic!("Feature `MESH_SHADING` not enabled");
1127        }
1128    }
1129    unsafe fn draw_indirect(
1130        &mut self,
1131        buffer: &super::Buffer,
1132        offset: wgt::BufferAddress,
1133        draw_count: u32,
1134    ) {
1135        if draw_count >= 1
1136            && self.device.private_caps.multi_draw_indirect
1137            && draw_count <= self.device.private_caps.max_draw_indirect_count
1138        {
1139            unsafe {
1140                self.device.raw.cmd_draw_indirect(
1141                    self.active,
1142                    buffer.raw,
1143                    offset,
1144                    draw_count,
1145                    size_of::<wgt::DrawIndirectArgs>() as u32,
1146                )
1147            };
1148        } else {
1149            for i in 0..draw_count {
1150                let indirect_offset = offset
1151                    + i as wgt::BufferAddress
1152                        * size_of::<wgt::DrawIndirectArgs>() as wgt::BufferAddress;
1153                unsafe {
1154                    self.device.raw.cmd_draw_indirect(
1155                        self.active,
1156                        buffer.raw,
1157                        indirect_offset,
1158                        1,
1159                        size_of::<wgt::DrawIndirectArgs>() as u32,
1160                    )
1161                };
1162            }
1163        }
1164    }
1165    unsafe fn draw_indexed_indirect(
1166        &mut self,
1167        buffer: &super::Buffer,
1168        offset: wgt::BufferAddress,
1169        draw_count: u32,
1170    ) {
1171        if draw_count >= 1
1172            && self.device.private_caps.multi_draw_indirect
1173            && draw_count <= self.device.private_caps.max_draw_indirect_count
1174        {
1175            unsafe {
1176                self.device.raw.cmd_draw_indexed_indirect(
1177                    self.active,
1178                    buffer.raw,
1179                    offset,
1180                    draw_count,
1181                    size_of::<wgt::DrawIndexedIndirectArgs>() as u32,
1182                )
1183            };
1184        } else {
1185            for i in 0..draw_count {
1186                let indirect_offset = offset
1187                    + i as wgt::BufferAddress
1188                        * size_of::<wgt::DrawIndexedIndirectArgs>() as wgt::BufferAddress;
1189                unsafe {
1190                    self.device.raw.cmd_draw_indexed_indirect(
1191                        self.active,
1192                        buffer.raw,
1193                        indirect_offset,
1194                        1,
1195                        size_of::<wgt::DrawIndexedIndirectArgs>() as u32,
1196                    )
1197                };
1198            }
1199        }
1200    }
1201    unsafe fn draw_mesh_tasks_indirect(
1202        &mut self,
1203        buffer: &<Self::A as crate::Api>::Buffer,
1204        offset: wgt::BufferAddress,
1205        draw_count: u32,
1206    ) {
1207        if let Some(ref t) = self.device.extension_fns.mesh_shading {
1208            unsafe {
1209                t.cmd_draw_mesh_tasks_indirect(
1210                    self.active,
1211                    buffer.raw,
1212                    offset,
1213                    draw_count,
1214                    size_of::<wgt::DispatchIndirectArgs>() as u32,
1215                );
1216            };
1217        } else {
1218            panic!("Feature `MESH_SHADING` not enabled");
1219        }
1220    }
1221    unsafe fn draw_indirect_count(
1222        &mut self,
1223        buffer: &super::Buffer,
1224        offset: wgt::BufferAddress,
1225        count_buffer: &super::Buffer,
1226        count_offset: wgt::BufferAddress,
1227        max_count: u32,
1228    ) {
1229        let stride = size_of::<wgt::DrawIndirectArgs>() as u32;
1230        match self.device.extension_fns.draw_indirect_count {
1231            Some(ref t) => {
1232                unsafe {
1233                    t.cmd_draw_indirect_count(
1234                        self.active,
1235                        buffer.raw,
1236                        offset,
1237                        count_buffer.raw,
1238                        count_offset,
1239                        max_count,
1240                        stride,
1241                    )
1242                };
1243            }
1244            None => panic!("Feature `DRAW_INDIRECT_COUNT` not enabled"),
1245        }
1246    }
1247    unsafe fn draw_indexed_indirect_count(
1248        &mut self,
1249        buffer: &super::Buffer,
1250        offset: wgt::BufferAddress,
1251        count_buffer: &super::Buffer,
1252        count_offset: wgt::BufferAddress,
1253        max_count: u32,
1254    ) {
1255        let stride = size_of::<wgt::DrawIndexedIndirectArgs>() as u32;
1256        match self.device.extension_fns.draw_indirect_count {
1257            Some(ref t) => {
1258                unsafe {
1259                    t.cmd_draw_indexed_indirect_count(
1260                        self.active,
1261                        buffer.raw,
1262                        offset,
1263                        count_buffer.raw,
1264                        count_offset,
1265                        max_count,
1266                        stride,
1267                    )
1268                };
1269            }
1270            None => panic!("Feature `DRAW_INDIRECT_COUNT` not enabled"),
1271        }
1272    }
1273    unsafe fn draw_mesh_tasks_indirect_count(
1274        &mut self,
1275        buffer: &<Self::A as crate::Api>::Buffer,
1276        offset: wgt::BufferAddress,
1277        count_buffer: &super::Buffer,
1278        count_offset: wgt::BufferAddress,
1279        max_count: u32,
1280    ) {
1281        if self.device.extension_fns.draw_indirect_count.is_none() {
1282            panic!("Feature `DRAW_INDIRECT_COUNT` not enabled");
1283        }
1284        if let Some(ref t) = self.device.extension_fns.mesh_shading {
1285            unsafe {
1286                t.cmd_draw_mesh_tasks_indirect_count(
1287                    self.active,
1288                    buffer.raw,
1289                    offset,
1290                    count_buffer.raw,
1291                    count_offset,
1292                    max_count,
1293                    size_of::<wgt::DispatchIndirectArgs>() as u32,
1294                );
1295            };
1296        } else {
1297            panic!("Feature `MESH_SHADING` not enabled");
1298        }
1299    }
1300
1301    // compute
1302
1303    unsafe fn begin_compute_pass(
1304        &mut self,
1305        desc: &crate::ComputePassDescriptor<'_, super::QuerySet>,
1306    ) {
1307        self.bind_point = vk::PipelineBindPoint::COMPUTE;
1308        if let Some(label) = desc.label {
1309            unsafe { self.begin_debug_marker(label) };
1310            self.rpass_debug_marker_active = true;
1311        }
1312
1313        if let Some(timestamp_writes) = desc.timestamp_writes.as_ref() {
1314            if let Some(index) = timestamp_writes.beginning_of_pass_write_index {
1315                unsafe {
1316                    self.write_timestamp(timestamp_writes.query_set, index);
1317                }
1318            }
1319            self.end_of_pass_timer_query = timestamp_writes
1320                .end_of_pass_write_index
1321                .map(|index| (timestamp_writes.query_set.raw, index));
1322        }
1323    }
1324    unsafe fn end_compute_pass(&mut self) {
1325        self.write_pass_end_timestamp_if_requested();
1326
1327        if self.rpass_debug_marker_active {
1328            unsafe { self.end_debug_marker() };
1329            self.rpass_debug_marker_active = false
1330        }
1331    }
1332
1333    unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
1334        unsafe {
1335            self.device.raw.cmd_bind_pipeline(
1336                self.active,
1337                vk::PipelineBindPoint::COMPUTE,
1338                pipeline.raw,
1339            )
1340        };
1341    }
1342
1343    unsafe fn dispatch(&mut self, count: [u32; 3]) {
1344        unsafe {
1345            self.device
1346                .raw
1347                .cmd_dispatch(self.active, count[0], count[1], count[2])
1348        };
1349    }
1350    unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
1351        unsafe {
1352            self.device
1353                .raw
1354                .cmd_dispatch_indirect(self.active, buffer.raw, offset)
1355        }
1356    }
1357
1358    unsafe fn copy_acceleration_structure_to_acceleration_structure(
1359        &mut self,
1360        src: &super::AccelerationStructure,
1361        dst: &super::AccelerationStructure,
1362        copy: wgt::AccelerationStructureCopy,
1363    ) {
1364        let ray_tracing_functions = self
1365            .device
1366            .extension_fns
1367            .ray_tracing
1368            .as_ref()
1369            .expect("Feature `RAY_TRACING` not enabled");
1370
1371        let mode = match copy {
1372            wgt::AccelerationStructureCopy::Clone => vk::CopyAccelerationStructureModeKHR::CLONE,
1373            wgt::AccelerationStructureCopy::Compact => {
1374                vk::CopyAccelerationStructureModeKHR::COMPACT
1375            }
1376        };
1377
1378        unsafe {
1379            ray_tracing_functions
1380                .acceleration_structure
1381                .cmd_copy_acceleration_structure(
1382                    self.active,
1383                    &vk::CopyAccelerationStructureInfoKHR {
1384                        s_type: vk::StructureType::COPY_ACCELERATION_STRUCTURE_INFO_KHR,
1385                        p_next: core::ptr::null(),
1386                        src: src.raw,
1387                        dst: dst.raw,
1388                        mode,
1389                        _marker: Default::default(),
1390                    },
1391                );
1392        }
1393    }
1394}
1395
#[test]
fn check_dst_image_layout() {
    // The constant must agree with the layout derived for COPY_DST usage.
    let derived =
        conv::derive_image_layout(wgt::TextureUses::COPY_DST, wgt::TextureFormat::Rgba8Unorm);
    assert_eq!(derived, DST_IMAGE_LAYOUT);
}