wgpu_hal/vulkan/device.rs

use super::conv;

use arrayvec::ArrayVec;
use ash::{extensions::khr, vk};
use parking_lot::Mutex;

use std::{
    borrow::Cow,
    collections::{hash_map::Entry, BTreeMap},
    ffi::{CStr, CString},
    num::NonZeroU32,
    ptr,
    sync::Arc,
};

impl super::DeviceShared {
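    /// Attaches a human-readable `name` to `object` through
    /// `VK_EXT_debug_utils`; a no-op when the instance was created without the
    /// debug-utils extension loaded.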
    pub(super) unsafe fn set_object_name(
        &self,
        object_type: vk::ObjectType,
        object: impl vk::Handle,
        name: &str,
    ) {
        let extension = match self.instance.debug_utils {
            Some(ref debug_utils) => &debug_utils.extension,
            None => return,
        };

        // Keep variables outside the if-else block to ensure they do not
        // go out of scope while we hold a pointer to them
        let mut buffer: [u8; 64] = [0u8; 64];
        let buffer_vec: Vec<u8>;

        // Append a null terminator to the string
        let name_bytes = if name.len() < buffer.len() {
            // Common case, string is very small. Allocate a copy on the stack.
            buffer[..name.len()].copy_from_slice(name.as_bytes());
            // Add null terminator
            buffer[name.len()] = 0;
            &buffer[..name.len() + 1]
        } else {
            // Less common case, the string is large.
            // This requires a heap allocation.
            buffer_vec = name
                .as_bytes()
                .iter()
                .cloned()
                .chain(std::iter::once(0))
                .collect();
            &buffer_vec
        };
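        // Illustrative sizes: a 10-byte name fits the 64-byte stack buffer and
        // becomes `&buffer[..11]` (payload plus NUL terminator); a 100-byte
        // name takes the `buffer_vec` heap path instead.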

        let name = unsafe { CStr::from_bytes_with_nul_unchecked(name_bytes) };

        let _result = unsafe {
            extension.set_debug_utils_object_name(
                self.raw.handle(),
                &vk::DebugUtilsObjectNameInfoEXT::builder()
                    .object_type(object_type)
                    .object_handle(object.as_raw())
                    .object_name(name),
            )
        };
    }

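    /// Returns a render pass compatible with `key`, creating and caching it on
    /// first use; later calls with an equal key return the cached handle.
    ///
    /// A minimal usage sketch (assuming already-built keys; not a compilable
    /// doctest):
    ///
    /// ```ignore
    /// let pass = shared.make_render_pass(rp_key)?;
    /// let fbuf = shared.make_framebuffer(fb_key, pass, None)?;
    /// ```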
    pub fn make_render_pass(
        &self,
        key: super::RenderPassKey,
    ) -> Result<vk::RenderPass, crate::DeviceError> {
        Ok(match self.render_passes.lock().entry(key) {
            Entry::Occupied(e) => *e.get(),
            Entry::Vacant(e) => {
                let mut vk_attachments = Vec::new();
                let mut color_refs = Vec::with_capacity(e.key().colors.len());
                let mut resolve_refs = Vec::with_capacity(color_refs.capacity());
                let mut ds_ref = None;
                let samples = vk::SampleCountFlags::from_raw(e.key().sample_count);
                let unused = vk::AttachmentReference {
                    attachment: vk::ATTACHMENT_UNUSED,
                    layout: vk::ImageLayout::UNDEFINED,
                };
                for cat in e.key().colors.iter() {
                    let (color_ref, resolve_ref) = if let Some(cat) = cat.as_ref() {
                        let color_ref = vk::AttachmentReference {
                            attachment: vk_attachments.len() as u32,
                            layout: cat.base.layout,
                        };
                        vk_attachments.push({
                            let (load_op, store_op) = conv::map_attachment_ops(cat.base.ops);
                            vk::AttachmentDescription::builder()
                                .format(cat.base.format)
                                .samples(samples)
                                .load_op(load_op)
                                .store_op(store_op)
                                .initial_layout(cat.base.layout)
                                .final_layout(cat.base.layout)
                                .build()
                        });
                        let resolve_ref = if let Some(ref rat) = cat.resolve {
                            let (load_op, store_op) = conv::map_attachment_ops(rat.ops);
                            let vk_attachment = vk::AttachmentDescription::builder()
                                .format(rat.format)
                                .samples(vk::SampleCountFlags::TYPE_1)
                                .load_op(load_op)
                                .store_op(store_op)
                                .initial_layout(rat.layout)
                                .final_layout(rat.layout)
                                .build();
                            vk_attachments.push(vk_attachment);

                            vk::AttachmentReference {
                                attachment: vk_attachments.len() as u32 - 1,
                                layout: rat.layout,
                            }
                        } else {
                            unused
                        };

                        (color_ref, resolve_ref)
                    } else {
                        (unused, unused)
                    };

                    color_refs.push(color_ref);
                    resolve_refs.push(resolve_ref);
                }

                if let Some(ref ds) = e.key().depth_stencil {
                    ds_ref = Some(vk::AttachmentReference {
                        attachment: vk_attachments.len() as u32,
                        layout: ds.base.layout,
                    });
                    let (load_op, store_op) = conv::map_attachment_ops(ds.base.ops);
                    let (stencil_load_op, stencil_store_op) =
                        conv::map_attachment_ops(ds.stencil_ops);
                    let vk_attachment = vk::AttachmentDescription::builder()
                        .format(ds.base.format)
                        .samples(samples)
                        .load_op(load_op)
                        .store_op(store_op)
                        .stencil_load_op(stencil_load_op)
                        .stencil_store_op(stencil_store_op)
                        .initial_layout(ds.base.layout)
                        .final_layout(ds.base.layout)
                        .build();
                    vk_attachments.push(vk_attachment);
                }

                let vk_subpasses = [{
                    let mut vk_subpass = vk::SubpassDescription::builder()
                        .pipeline_bind_point(vk::PipelineBindPoint::GRAPHICS)
                        .color_attachments(&color_refs)
                        .resolve_attachments(&resolve_refs);

                    if self
                        .workarounds
                        .contains(super::Workarounds::EMPTY_RESOLVE_ATTACHMENT_LISTS)
                        && resolve_refs.is_empty()
                    {
                        vk_subpass.p_resolve_attachments = ptr::null();
                    }

                    if let Some(ref reference) = ds_ref {
                        vk_subpass = vk_subpass.depth_stencil_attachment(reference)
                    }
                    vk_subpass.build()
                }];

                let mut vk_info = vk::RenderPassCreateInfo::builder()
                    .attachments(&vk_attachments)
                    .subpasses(&vk_subpasses);

                let mut multiview_info;
                let mask;
                if let Some(multiview) = e.key().multiview {
                    // Sanity checks, better to panic here than cause a driver crash
                    assert!(multiview.get() <= 8);
                    assert!(multiview.get() > 1);

                    // Right now we enable all bits on the view masks and correlation masks.
                    // This means we're rendering to all views in the subpass, and that all views
                    // can be rendered concurrently.
                    mask = [(1 << multiview.get()) - 1];
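                    // E.g. `multiview.get() == 4` yields `mask = [0b1111]`,
                    // i.e. view indices 0 through 3 are all enabled.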

                    // On Vulkan 1.1 or later, this is an alias for core functionality
                    multiview_info = vk::RenderPassMultiviewCreateInfoKHR::builder()
                        .view_masks(&mask)
                        .correlation_masks(&mask)
                        .build();
                    vk_info = vk_info.push_next(&mut multiview_info);
                }

                let raw = unsafe { self.raw.create_render_pass(&vk_info, None)? };

                *e.insert(raw)
            }
        })
    }

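    /// Returns a framebuffer for `key` and `raw_pass`, creating and caching it
    /// on first use. With imageless framebuffers only attachment metadata
    /// (usage, flags, extent, formats) is recorded here; the concrete image
    /// views are supplied later, when the render pass begins.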
    pub fn make_framebuffer(
        &self,
        key: super::FramebufferKey,
        raw_pass: vk::RenderPass,
        pass_label: crate::Label,
    ) -> Result<vk::Framebuffer, crate::DeviceError> {
        Ok(match self.framebuffers.lock().entry(key) {
            Entry::Occupied(e) => *e.get(),
            Entry::Vacant(e) => {
                let vk_views = e
                    .key()
                    .attachments
                    .iter()
                    .map(|at| at.raw)
                    .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();
                let vk_view_formats = e
                    .key()
                    .attachments
                    .iter()
                    .map(|at| self.private_caps.map_texture_format(at.view_format))
                    .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();
                let vk_view_formats_list = e
                    .key()
                    .attachments
                    .iter()
                    .map(|at| at.raw_view_formats.clone())
                    .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();

                let vk_image_infos = e
                    .key()
                    .attachments
                    .iter()
                    .enumerate()
                    .map(|(i, at)| {
                        let mut info = vk::FramebufferAttachmentImageInfo::builder()
                            .usage(conv::map_texture_usage(at.view_usage))
                            .flags(at.raw_image_flags)
                            .width(e.key().extent.width)
                            .height(e.key().extent.height)
                            .layer_count(e.key().extent.depth_or_array_layers);
                        // https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VkRenderPassBeginInfo.html#VUID-VkRenderPassBeginInfo-framebuffer-03214
                        if vk_view_formats_list[i].is_empty() {
                            info = info.view_formats(&vk_view_formats[i..i + 1]);
                        } else {
                            info = info.view_formats(&vk_view_formats_list[i]);
                        };
                        info.build()
                    })
                    .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();

                let mut vk_attachment_info = vk::FramebufferAttachmentsCreateInfo::builder()
                    .attachment_image_infos(&vk_image_infos)
                    .build();
                let mut vk_info = vk::FramebufferCreateInfo::builder()
                    .render_pass(raw_pass)
                    .width(e.key().extent.width)
                    .height(e.key().extent.height)
                    .layers(e.key().extent.depth_or_array_layers);

                if self.private_caps.imageless_framebuffers {
                    //TODO: https://github.com/MaikKlein/ash/issues/450
                    vk_info = vk_info
                        .flags(vk::FramebufferCreateFlags::IMAGELESS_KHR)
                        .push_next(&mut vk_attachment_info);
                    vk_info.attachment_count = e.key().attachments.len() as u32;
                } else {
                    vk_info = vk_info.attachments(&vk_views);
                }

                *e.insert(unsafe {
                    let raw = self.raw.create_framebuffer(&vk_info, None).unwrap();
                    if let Some(label) = pass_label {
                        self.set_object_name(vk::ObjectType::FRAMEBUFFER, raw, label);
                    }
                    raw
                })
            }
        })
    }

    fn make_memory_ranges<'a, I: 'a + Iterator<Item = crate::MemoryRange>>(
        &self,
        buffer: &'a super::Buffer,
        ranges: I,
    ) -> Option<impl 'a + Iterator<Item = vk::MappedMemoryRange>> {
        let block = buffer.block.as_ref()?.lock();
        let mask = self.private_caps.non_coherent_map_mask;
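        // `mask` is the non-coherent atom size minus one; offsets are rounded
        // down and sizes rounded up to that atom, as flush/invalidate require.
        // E.g. with a 64-byte atom (mask 63) and a block offset of 0, the
        // range `10..100` becomes offset 0 with size 128.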
        Some(ranges.map(move |range| {
            vk::MappedMemoryRange::builder()
                .memory(*block.memory())
                .offset((block.offset() + range.start) & !mask)
                .size((range.end - range.start + mask) & !mask)
                .build()
        }))
    }

    unsafe fn free_resources(&self) {
        for &raw in self.render_passes.lock().values() {
            unsafe { self.raw.destroy_render_pass(raw, None) };
        }
        for &raw in self.framebuffers.lock().values() {
            unsafe { self.raw.destroy_framebuffer(raw, None) };
        }
        if self.handle_is_owned {
            unsafe { self.raw.destroy_device(None) };
        }
    }
}

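// `gpu-alloc` drives suballocation policy; this impl is the thin backend that
// performs the actual `vkAllocateMemory`/`vkMapMemory` calls and translates
// Vulkan error codes into `gpu_alloc` error types.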
impl gpu_alloc::MemoryDevice<vk::DeviceMemory> for super::DeviceShared {
    unsafe fn allocate_memory(
        &self,
        size: u64,
        memory_type: u32,
        flags: gpu_alloc::AllocationFlags,
    ) -> Result<vk::DeviceMemory, gpu_alloc::OutOfMemory> {
        let mut info = vk::MemoryAllocateInfo::builder()
            .allocation_size(size)
            .memory_type_index(memory_type);

        let mut info_flags;

        if flags.contains(gpu_alloc::AllocationFlags::DEVICE_ADDRESS) {
            info_flags = vk::MemoryAllocateFlagsInfo::builder()
                .flags(vk::MemoryAllocateFlags::DEVICE_ADDRESS);
            info = info.push_next(&mut info_flags);
        }

        match unsafe { self.raw.allocate_memory(&info, None) } {
            Ok(memory) => Ok(memory),
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_alloc::OutOfMemory::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
                Err(gpu_alloc::OutOfMemory::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_TOO_MANY_OBJECTS) => panic!("Too many objects"),
            Err(err) => panic!("Unexpected Vulkan error: `{err}`"),
        }
    }

    unsafe fn deallocate_memory(&self, memory: vk::DeviceMemory) {
        unsafe { self.raw.free_memory(memory, None) };
    }

    unsafe fn map_memory(
        &self,
        memory: &mut vk::DeviceMemory,
        offset: u64,
        size: u64,
    ) -> Result<ptr::NonNull<u8>, gpu_alloc::DeviceMapError> {
        match unsafe {
            self.raw
                .map_memory(*memory, offset, size, vk::MemoryMapFlags::empty())
        } {
            Ok(ptr) => Ok(ptr::NonNull::new(ptr as *mut u8)
                .expect("Pointer to memory mapping must not be null")),
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_alloc::DeviceMapError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
                Err(gpu_alloc::DeviceMapError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_MEMORY_MAP_FAILED) => Err(gpu_alloc::DeviceMapError::MapFailed),
            Err(err) => panic!("Unexpected Vulkan error: `{err}`"),
        }
    }

    unsafe fn unmap_memory(&self, memory: &mut vk::DeviceMemory) {
        unsafe { self.raw.unmap_memory(*memory) };
    }

    unsafe fn invalidate_memory_ranges(
        &self,
        _ranges: &[gpu_alloc::MappedMemoryRange<'_, vk::DeviceMemory>],
    ) -> Result<(), gpu_alloc::OutOfMemory> {
        // should never be called
        unimplemented!()
    }

    unsafe fn flush_memory_ranges(
        &self,
        _ranges: &[gpu_alloc::MappedMemoryRange<'_, vk::DeviceMemory>],
    ) -> Result<(), gpu_alloc::OutOfMemory> {
        // should never be called
        unimplemented!()
    }
}

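// The same pattern for `gpu-descriptor`: pool creation, set allocation, and
// freeing are forwarded to Vulkan, with error codes mapped onto that crate's
// error types.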
impl
    gpu_descriptor::DescriptorDevice<vk::DescriptorSetLayout, vk::DescriptorPool, vk::DescriptorSet>
    for super::DeviceShared
{
    unsafe fn create_descriptor_pool(
        &self,
        descriptor_count: &gpu_descriptor::DescriptorTotalCount,
        max_sets: u32,
        flags: gpu_descriptor::DescriptorPoolCreateFlags,
    ) -> Result<vk::DescriptorPool, gpu_descriptor::CreatePoolError> {
        //Note: ignoring other types, since they can't appear here
        let unfiltered_counts = [
            (vk::DescriptorType::SAMPLER, descriptor_count.sampler),
            (
                vk::DescriptorType::SAMPLED_IMAGE,
                descriptor_count.sampled_image,
            ),
            (
                vk::DescriptorType::STORAGE_IMAGE,
                descriptor_count.storage_image,
            ),
            (
                vk::DescriptorType::UNIFORM_BUFFER,
                descriptor_count.uniform_buffer,
            ),
            (
                vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC,
                descriptor_count.uniform_buffer_dynamic,
            ),
            (
                vk::DescriptorType::STORAGE_BUFFER,
                descriptor_count.storage_buffer,
            ),
            (
                vk::DescriptorType::STORAGE_BUFFER_DYNAMIC,
                descriptor_count.storage_buffer_dynamic,
            ),
        ];

        let filtered_counts = unfiltered_counts
            .iter()
            .cloned()
            .filter(|&(_, count)| count != 0)
            .map(|(ty, count)| vk::DescriptorPoolSize {
                ty,
                descriptor_count: count,
            })
            .collect::<ArrayVec<_, 8>>();

        let mut vk_flags =
            if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND) {
                vk::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND
            } else {
                vk::DescriptorPoolCreateFlags::empty()
            };
        if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET) {
            vk_flags |= vk::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET;
        }
        let vk_info = vk::DescriptorPoolCreateInfo::builder()
            .max_sets(max_sets)
            .flags(vk_flags)
            .pool_sizes(&filtered_counts)
            .build();

        match unsafe { self.raw.create_descriptor_pool(&vk_info, None) } {
            Ok(pool) => Ok(pool),
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
                Err(gpu_descriptor::CreatePoolError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_descriptor::CreatePoolError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_FRAGMENTATION) => {
                Err(gpu_descriptor::CreatePoolError::Fragmentation)
            }
            Err(other) => {
                log::error!("create_descriptor_pool: {:?}", other);
                Err(gpu_descriptor::CreatePoolError::OutOfHostMemory)
            }
        }
    }

    unsafe fn destroy_descriptor_pool(&self, pool: vk::DescriptorPool) {
        unsafe { self.raw.destroy_descriptor_pool(pool, None) }
    }

    unsafe fn alloc_descriptor_sets<'a>(
        &self,
        pool: &mut vk::DescriptorPool,
        layouts: impl ExactSizeIterator<Item = &'a vk::DescriptorSetLayout>,
        sets: &mut impl Extend<vk::DescriptorSet>,
    ) -> Result<(), gpu_descriptor::DeviceAllocationError> {
        let result = unsafe {
            self.raw.allocate_descriptor_sets(
                &vk::DescriptorSetAllocateInfo::builder()
                    .descriptor_pool(*pool)
                    .set_layouts(
                        &smallvec::SmallVec::<[vk::DescriptorSetLayout; 32]>::from_iter(
                            layouts.cloned(),
                        ),
                    )
                    .build(),
            )
        };

        match result {
            Ok(vk_sets) => {
                sets.extend(vk_sets);
                Ok(())
            }
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY)
            | Err(vk::Result::ERROR_OUT_OF_POOL_MEMORY) => {
                Err(gpu_descriptor::DeviceAllocationError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_descriptor::DeviceAllocationError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_FRAGMENTED_POOL) => {
                Err(gpu_descriptor::DeviceAllocationError::FragmentedPool)
            }
            Err(other) => {
                log::error!("allocate_descriptor_sets: {:?}", other);
                Err(gpu_descriptor::DeviceAllocationError::OutOfHostMemory)
            }
        }
    }

    unsafe fn dealloc_descriptor_sets<'a>(
        &self,
        pool: &mut vk::DescriptorPool,
        sets: impl Iterator<Item = vk::DescriptorSet>,
    ) {
        let result = unsafe {
            self.raw.free_descriptor_sets(
                *pool,
                &smallvec::SmallVec::<[vk::DescriptorSet; 32]>::from_iter(sets),
            )
        };
        match result {
            Ok(()) => {}
            Err(err) => log::error!("free_descriptor_sets: {:?}", err),
        }
    }
}

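/// A shader stage prepared for pipeline creation.
///
/// `_entry_point` owns the `CString` whose pointer is embedded in
/// `create_info`, so it must outlive the pipeline-creation call.
/// `temp_raw_module` is `Some` for modules compiled on the fly from naga IR;
/// the caller destroys it once the pipeline has been created.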
struct CompiledStage {
    create_info: vk::PipelineShaderStageCreateInfo,
    _entry_point: CString,
    temp_raw_module: Option<vk::ShaderModule>,
}

impl super::Device {
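    /// Creates the swapchain for `surface`, handing `provided_old_swapchain`
    /// to Vulkan as `oldSwapchain` so resources can be reused. The old
    /// swapchain is destroyed afterwards whether or not creation succeeded.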
    pub(super) unsafe fn create_swapchain(
        &self,
        surface: &mut super::Surface,
        config: &crate::SurfaceConfiguration,
        provided_old_swapchain: Option<super::Swapchain>,
    ) -> Result<super::Swapchain, crate::SurfaceError> {
        profiling::scope!("Device::create_swapchain");
        let functor = khr::Swapchain::new(&surface.instance.raw, &self.shared.raw);

        let old_swapchain = match provided_old_swapchain {
            Some(osc) => osc.raw,
            None => vk::SwapchainKHR::null(),
        };

        let color_space = if config.format == wgt::TextureFormat::Rgba16Float {
            // Enable wide color gamut mode
            // Vulkan swapchain for Android only supports DISPLAY_P3_NONLINEAR_EXT and EXTENDED_SRGB_LINEAR_EXT
            vk::ColorSpaceKHR::EXTENDED_SRGB_LINEAR_EXT
        } else {
            vk::ColorSpaceKHR::SRGB_NONLINEAR
        };

        let original_format = self.shared.private_caps.map_texture_format(config.format);
        let mut raw_flags = vk::SwapchainCreateFlagsKHR::empty();
        let mut raw_view_formats: Vec<vk::Format> = vec![];
        let mut wgt_view_formats = vec![];
        if !config.view_formats.is_empty() {
            raw_flags |= vk::SwapchainCreateFlagsKHR::MUTABLE_FORMAT;
            raw_view_formats = config
                .view_formats
                .iter()
                .map(|f| self.shared.private_caps.map_texture_format(*f))
                .collect();
            raw_view_formats.push(original_format);

            wgt_view_formats = config.view_formats.clone();
            wgt_view_formats.push(config.format);
        }

        let mut info = vk::SwapchainCreateInfoKHR::builder()
            .flags(raw_flags)
            .surface(surface.raw)
            .min_image_count(config.swap_chain_size)
            .image_format(original_format)
            .image_color_space(color_space)
            .image_extent(vk::Extent2D {
                width: config.extent.width,
                height: config.extent.height,
            })
            .image_array_layers(config.extent.depth_or_array_layers)
            .image_usage(conv::map_texture_usage(config.usage))
            .image_sharing_mode(vk::SharingMode::EXCLUSIVE)
            .pre_transform(vk::SurfaceTransformFlagsKHR::IDENTITY)
            .composite_alpha(conv::map_composite_alpha_mode(config.composite_alpha_mode))
            .present_mode(conv::map_present_mode(config.present_mode))
            .clipped(true)
            .old_swapchain(old_swapchain);

        let mut format_list_info = vk::ImageFormatListCreateInfo::builder();
        if !raw_view_formats.is_empty() {
            format_list_info = format_list_info.view_formats(&raw_view_formats);
            info = info.push_next(&mut format_list_info);
        }

        let result = {
            profiling::scope!("vkCreateSwapchainKHR");
            unsafe { functor.create_swapchain(&info, None) }
        };

        // Destroy the retired swapchain before possibly bailing out with an error.
        if old_swapchain != vk::SwapchainKHR::null() {
            unsafe { functor.destroy_swapchain(old_swapchain, None) }
        }

        let raw = match result {
            Ok(swapchain) => swapchain,
            Err(error) => {
                return Err(match error {
                    vk::Result::ERROR_SURFACE_LOST_KHR => crate::SurfaceError::Lost,
                    vk::Result::ERROR_NATIVE_WINDOW_IN_USE_KHR => {
                        crate::SurfaceError::Other("Native window is in use")
                    }
                    other => crate::DeviceError::from(other).into(),
                })
            }
        };

        let images =
            unsafe { functor.get_swapchain_images(raw) }.map_err(crate::DeviceError::from)?;

        let vk_info = vk::FenceCreateInfo::builder().build();
        let fence = unsafe { self.shared.raw.create_fence(&vk_info, None) }
            .map_err(crate::DeviceError::from)?;

        Ok(super::Swapchain {
            raw,
            raw_flags,
            functor,
            device: Arc::clone(&self.shared),
            fence,
            images,
            config: config.clone(),
            view_formats: wgt_view_formats,
        })
    }

    /// # Safety
    ///
    /// - `vk_image` must be created respecting `desc`
    /// - If `drop_guard` is `Some`, the application must manually destroy the image handle. This
    ///   can be done inside the `Drop` impl of `drop_guard`.
    /// - If the `ImageCreateFlags` does not contain `MUTABLE_FORMAT`, the `view_formats` of `desc` must be empty.
    pub unsafe fn texture_from_raw(
        vk_image: vk::Image,
        desc: &crate::TextureDescriptor,
        drop_guard: Option<crate::DropGuard>,
    ) -> super::Texture {
        let mut raw_flags = vk::ImageCreateFlags::empty();
        let mut view_formats = vec![];
        for tf in desc.view_formats.iter() {
            if *tf == desc.format {
                continue;
            }
            view_formats.push(*tf);
        }
        if !view_formats.is_empty() {
            raw_flags |=
                vk::ImageCreateFlags::MUTABLE_FORMAT | vk::ImageCreateFlags::EXTENDED_USAGE;
            view_formats.push(desc.format)
        }

        super::Texture {
            raw: vk_image,
            drop_guard,
            block: None,
            usage: desc.usage,
            format: desc.format,
            raw_flags,
            copy_size: desc.copy_extent(),
            view_formats,
        }
    }

    /// # Safety
    ///
    /// - `vk_buffer`'s memory must be managed by the caller
    /// - Externally imported buffers can't be mapped by `wgpu`
    pub unsafe fn buffer_from_raw(vk_buffer: vk::Buffer) -> super::Buffer {
        super::Buffer {
            raw: vk_buffer,
            block: None,
        }
    }

    fn create_shader_module_impl(
        &self,
        spv: &[u32],
    ) -> Result<vk::ShaderModule, crate::DeviceError> {
        let vk_info = vk::ShaderModuleCreateInfo::builder()
            .flags(vk::ShaderModuleCreateFlags::empty())
            .code(spv);

        let raw = unsafe {
            profiling::scope!("vkCreateShaderModule");
            self.shared.raw.create_shader_module(&vk_info, None)?
        };
        Ok(raw)
    }

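    /// Lowers a `ProgrammableStage` into a Vulkan shader stage, compiling naga
    /// IR to SPIR-V on the fly when the module is not already raw SPIR-V.
    /// `naga_options` is only cloned when per-stage overrides (disabled bounds
    /// checks, a binding map, or debug info) are actually needed.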
    fn compile_stage(
        &self,
        stage: &crate::ProgrammableStage<super::Api>,
        naga_stage: naga::ShaderStage,
        binding_map: &naga::back::spv::BindingMap,
    ) -> Result<CompiledStage, crate::PipelineError> {
        let stage_flags = crate::auxil::map_naga_stage(naga_stage);
        let vk_module = match *stage.module {
            super::ShaderModule::Raw(raw) => raw,
            super::ShaderModule::Intermediate {
                ref naga_shader,
                runtime_checks,
            } => {
                let pipeline_options = naga::back::spv::PipelineOptions {
                    entry_point: stage.entry_point.to_string(),
                    shader_stage: naga_stage,
                };
                let needs_temp_options = !runtime_checks
                    || !binding_map.is_empty()
                    || naga_shader.debug_source.is_some();
                let mut temp_options;
                let options = if needs_temp_options {
                    temp_options = self.naga_options.clone();
                    if !runtime_checks {
                        temp_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
                            index: naga::proc::BoundsCheckPolicy::Unchecked,
                            buffer: naga::proc::BoundsCheckPolicy::Unchecked,
                            image_load: naga::proc::BoundsCheckPolicy::Unchecked,
                            image_store: naga::proc::BoundsCheckPolicy::Unchecked,
                            binding_array: naga::proc::BoundsCheckPolicy::Unchecked,
                        };
                    }
                    if !binding_map.is_empty() {
                        temp_options.binding_map = binding_map.clone();
                    }

                    if let Some(ref debug) = naga_shader.debug_source {
                        temp_options.debug_info = Some(naga::back::spv::DebugInfo {
                            source_code: &debug.source_code,
                            file_name: debug.file_name.as_ref().as_ref(),
                        })
                    }

                    &temp_options
                } else {
                    &self.naga_options
                };
                let spv = {
                    profiling::scope!("naga::spv::write_vec");
                    naga::back::spv::write_vec(
                        &naga_shader.module,
                        &naga_shader.info,
                        options,
                        Some(&pipeline_options),
                    )
                }
                .map_err(|e| crate::PipelineError::Linkage(stage_flags, format!("{e}")))?;
                self.create_shader_module_impl(&spv)?
            }
        };

        let entry_point = CString::new(stage.entry_point).unwrap();
        let create_info = vk::PipelineShaderStageCreateInfo::builder()
            .stage(conv::map_shader_stage(stage_flags))
            .module(vk_module)
            .name(&entry_point)
            .build();

        Ok(CompiledStage {
            create_info,
            _entry_point: entry_point,
            temp_raw_module: match *stage.module {
                super::ShaderModule::Raw(_) => None,
                super::ShaderModule::Intermediate { .. } => Some(vk_module),
            },
        })
    }

    /// Returns the queue family index of the device's internal queue.
    ///
    /// This is useful for constructing memory barriers needed for queue family ownership transfer when
    /// external memory is involved (from/to `VK_QUEUE_FAMILY_EXTERNAL_KHR` and `VK_QUEUE_FAMILY_FOREIGN_EXT`
    /// for example).
    pub fn queue_family_index(&self) -> u32 {
        self.shared.family_index
    }

    pub fn queue_index(&self) -> u32 {
        self.shared.queue_index
    }

    pub fn raw_device(&self) -> &ash::Device {
        &self.shared.raw
    }

    pub fn raw_physical_device(&self) -> ash::vk::PhysicalDevice {
        self.shared.physical_device
    }

    pub fn raw_queue(&self) -> ash::vk::Queue {
        self.shared.raw_queue
    }

    pub fn enabled_device_extensions(&self) -> &[&'static CStr] {
        &self.shared.enabled_extensions
    }

    pub fn shared_instance(&self) -> &super::InstanceShared {
        &self.shared.instance
    }
}

impl crate::Device<super::Api> for super::Device {
    unsafe fn exit(self, queue: super::Queue) {
        unsafe { self.mem_allocator.into_inner().cleanup(&*self.shared) };
        unsafe { self.desc_allocator.into_inner().cleanup(&*self.shared) };
        for &sem in queue.relay_semaphores.iter() {
            unsafe { self.shared.raw.destroy_semaphore(sem, None) };
        }
        unsafe { self.shared.free_resources() };
    }

    unsafe fn create_buffer(
        &self,
        desc: &crate::BufferDescriptor,
    ) -> Result<super::Buffer, crate::DeviceError> {
        let vk_info = vk::BufferCreateInfo::builder()
            .size(desc.size)
            .usage(conv::map_buffer_usage(desc.usage))
            .sharing_mode(vk::SharingMode::EXCLUSIVE);

        let raw = unsafe { self.shared.raw.create_buffer(&vk_info, None)? };
        let req = unsafe { self.shared.raw.get_buffer_memory_requirements(raw) };

        let mut alloc_usage = if desc
            .usage
            .intersects(crate::BufferUses::MAP_READ | crate::BufferUses::MAP_WRITE)
        {
            let mut flags = gpu_alloc::UsageFlags::HOST_ACCESS;
            //TODO: find a way to use `crate::MemoryFlags::PREFER_COHERENT`
            flags.set(
                gpu_alloc::UsageFlags::DOWNLOAD,
                desc.usage.contains(crate::BufferUses::MAP_READ),
            );
            flags.set(
                gpu_alloc::UsageFlags::UPLOAD,
                desc.usage.contains(crate::BufferUses::MAP_WRITE),
            );
            flags
        } else {
            gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS
        };
        alloc_usage.set(
            gpu_alloc::UsageFlags::TRANSIENT,
            desc.memory_flags.contains(crate::MemoryFlags::TRANSIENT),
        );
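
        // Illustratively: a `MAP_WRITE | COPY_SRC` staging buffer requests
        // `HOST_ACCESS | UPLOAD` (plus `TRANSIENT` when asked for), while a
        // GPU-only buffer requests `FAST_DEVICE_ACCESS`.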

        let block = unsafe {
            self.mem_allocator.lock().alloc(
                &*self.shared,
                gpu_alloc::Request {
                    size: req.size,
                    align_mask: req.alignment - 1,
                    usage: alloc_usage,
                    memory_types: req.memory_type_bits & self.valid_ash_memory_types,
                },
            )?
        };

        unsafe {
            self.shared
                .raw
                .bind_buffer_memory(raw, *block.memory(), block.offset())?
        };

        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::BUFFER, raw, label)
            };
        }

        Ok(super::Buffer {
            raw,
            block: Some(Mutex::new(block)),
        })
    }
    unsafe fn destroy_buffer(&self, buffer: super::Buffer) {
        unsafe { self.shared.raw.destroy_buffer(buffer.raw, None) };
        if let Some(block) = buffer.block {
            unsafe {
                self.mem_allocator
                    .lock()
                    .dealloc(&*self.shared, block.into_inner())
            };
        }
    }

    unsafe fn map_buffer(
        &self,
        buffer: &super::Buffer,
        range: crate::MemoryRange,
    ) -> Result<crate::BufferMapping, crate::DeviceError> {
        if let Some(ref block) = buffer.block {
            let size = range.end - range.start;
            let mut block = block.lock();
            let ptr = unsafe { block.map(&*self.shared, range.start, size as usize)? };
            let is_coherent = block
                .props()
                .contains(gpu_alloc::MemoryPropertyFlags::HOST_COHERENT);
            Ok(crate::BufferMapping { ptr, is_coherent })
        } else {
            Err(crate::DeviceError::OutOfMemory)
        }
    }
    unsafe fn unmap_buffer(&self, buffer: &super::Buffer) -> Result<(), crate::DeviceError> {
        if let Some(ref block) = buffer.block {
            unsafe { block.lock().unmap(&*self.shared) };
            Ok(())
        } else {
            Err(crate::DeviceError::OutOfMemory)
        }
    }

    unsafe fn flush_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
    where
        I: Iterator<Item = crate::MemoryRange>,
    {
        if let Some(vk_ranges) = self.shared.make_memory_ranges(buffer, ranges) {
            unsafe {
                self.shared
                    .raw
                    .flush_mapped_memory_ranges(
                        &smallvec::SmallVec::<[vk::MappedMemoryRange; 32]>::from_iter(vk_ranges),
                    )
            }
            .unwrap();
        }
    }
    unsafe fn invalidate_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
    where
        I: Iterator<Item = crate::MemoryRange>,
    {
        if let Some(vk_ranges) = self.shared.make_memory_ranges(buffer, ranges) {
            unsafe {
                self.shared
                    .raw
                    .invalidate_mapped_memory_ranges(&smallvec::SmallVec::<
                        [vk::MappedMemoryRange; 32],
                    >::from_iter(vk_ranges))
            }
            .unwrap();
        }
    }

    unsafe fn create_texture(
        &self,
        desc: &crate::TextureDescriptor,
    ) -> Result<super::Texture, crate::DeviceError> {
        let copy_size = desc.copy_extent();

        let mut raw_flags = vk::ImageCreateFlags::empty();
        if desc.is_cube_compatible() {
            raw_flags |= vk::ImageCreateFlags::CUBE_COMPATIBLE;
        }

        let original_format = self.shared.private_caps.map_texture_format(desc.format);
        let mut vk_view_formats = vec![];
        let mut wgt_view_formats = vec![];
        if !desc.view_formats.is_empty() {
            raw_flags |= vk::ImageCreateFlags::MUTABLE_FORMAT;
            wgt_view_formats = desc.view_formats.clone();
            wgt_view_formats.push(desc.format);

            if self.shared.private_caps.image_format_list {
                vk_view_formats = desc
                    .view_formats
                    .iter()
                    .map(|f| self.shared.private_caps.map_texture_format(*f))
                    .collect();
                vk_view_formats.push(original_format)
            }
        }
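        // Note: when `VK_KHR_image_format_list` is unavailable we still set
        // `MUTABLE_FORMAT`; the explicit format list attached below is an
        // optimization hint, not a requirement.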

        let mut vk_info = vk::ImageCreateInfo::builder()
            .flags(raw_flags)
            .image_type(conv::map_texture_dimension(desc.dimension))
            .format(original_format)
            .extent(conv::map_copy_extent(&copy_size))
            .mip_levels(desc.mip_level_count)
            .array_layers(desc.array_layer_count())
            .samples(vk::SampleCountFlags::from_raw(desc.sample_count))
            .tiling(vk::ImageTiling::OPTIMAL)
            .usage(conv::map_texture_usage(desc.usage))
            .sharing_mode(vk::SharingMode::EXCLUSIVE)
            .initial_layout(vk::ImageLayout::UNDEFINED);

        let mut format_list_info = vk::ImageFormatListCreateInfo::builder();
        if !vk_view_formats.is_empty() {
            format_list_info = format_list_info.view_formats(&vk_view_formats);
            vk_info = vk_info.push_next(&mut format_list_info);
        }

        let raw = unsafe { self.shared.raw.create_image(&vk_info, None)? };
        let req = unsafe { self.shared.raw.get_image_memory_requirements(raw) };

        let block = unsafe {
            self.mem_allocator.lock().alloc(
                &*self.shared,
                gpu_alloc::Request {
                    size: req.size,
                    align_mask: req.alignment - 1,
                    usage: gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS,
                    memory_types: req.memory_type_bits & self.valid_ash_memory_types,
                },
            )?
        };

        unsafe {
            self.shared
                .raw
                .bind_image_memory(raw, *block.memory(), block.offset())?
        };

        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::IMAGE, raw, label)
            };
        }

        Ok(super::Texture {
            raw,
            drop_guard: None,
            block: Some(block),
            usage: desc.usage,
            format: desc.format,
            raw_flags,
            copy_size,
            view_formats: wgt_view_formats,
        })
    }
    unsafe fn destroy_texture(&self, texture: super::Texture) {
        if texture.drop_guard.is_none() {
            unsafe { self.shared.raw.destroy_image(texture.raw, None) };
        }
        if let Some(block) = texture.block {
            unsafe { self.mem_allocator.lock().dealloc(&*self.shared, block) };
        }
    }

    unsafe fn create_texture_view(
        &self,
        texture: &super::Texture,
        desc: &crate::TextureViewDescriptor,
    ) -> Result<super::TextureView, crate::DeviceError> {
        let subresource_range = conv::map_subresource_range(&desc.range, desc.format);
        let mut vk_info = vk::ImageViewCreateInfo::builder()
            .flags(vk::ImageViewCreateFlags::empty())
            .image(texture.raw)
            .view_type(conv::map_view_dimension(desc.dimension))
            .format(self.shared.private_caps.map_texture_format(desc.format))
            .subresource_range(subresource_range);
        let layers =
            NonZeroU32::new(subresource_range.layer_count).expect("Unexpected zero layer count");

        let mut image_view_info;
        let view_usage = if self.shared.private_caps.image_view_usage && !desc.usage.is_empty() {
            image_view_info = vk::ImageViewUsageCreateInfo::builder()
                .usage(conv::map_texture_usage(desc.usage))
                .build();
            vk_info = vk_info.push_next(&mut image_view_info);
            desc.usage
        } else {
            texture.usage
        };

        let raw = unsafe { self.shared.raw.create_image_view(&vk_info, None) }?;

        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::IMAGE_VIEW, raw, label)
            };
        }

        let attachment = super::FramebufferAttachment {
            raw: if self.shared.private_caps.imageless_framebuffers {
                vk::ImageView::null()
            } else {
                raw
            },
            raw_image_flags: texture.raw_flags,
            view_usage,
            view_format: desc.format,
            raw_view_formats: texture
                .view_formats
                .iter()
                .map(|tf| self.shared.private_caps.map_texture_format(*tf))
                .collect(),
        };

        Ok(super::TextureView {
            raw,
            layers,
            attachment,
        })
    }
    unsafe fn destroy_texture_view(&self, view: super::TextureView) {
        if !self.shared.private_caps.imageless_framebuffers {
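            // Cached framebuffers that reference this view would dangle once it
            // is destroyed, so destroy and evict those cache entries first.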
            let mut fbuf_lock = self.shared.framebuffers.lock();
            for (key, &raw_fbuf) in fbuf_lock.iter() {
                if key.attachments.iter().any(|at| at.raw == view.raw) {
                    unsafe { self.shared.raw.destroy_framebuffer(raw_fbuf, None) };
                }
            }
            fbuf_lock.retain(|key, _| !key.attachments.iter().any(|at| at.raw == view.raw));
        }
        unsafe { self.shared.raw.destroy_image_view(view.raw, None) };
    }

    unsafe fn create_sampler(
        &self,
        desc: &crate::SamplerDescriptor,
    ) -> Result<super::Sampler, crate::DeviceError> {
        let mut vk_info = vk::SamplerCreateInfo::builder()
            .flags(vk::SamplerCreateFlags::empty())
            .mag_filter(conv::map_filter_mode(desc.mag_filter))
            .min_filter(conv::map_filter_mode(desc.min_filter))
            .mipmap_mode(conv::map_mip_filter_mode(desc.mipmap_filter))
            .address_mode_u(conv::map_address_mode(desc.address_modes[0]))
            .address_mode_v(conv::map_address_mode(desc.address_modes[1]))
            .address_mode_w(conv::map_address_mode(desc.address_modes[2]))
            .min_lod(desc.lod_clamp.start)
            .max_lod(desc.lod_clamp.end);

        if let Some(fun) = desc.compare {
            vk_info = vk_info
                .compare_enable(true)
                .compare_op(conv::map_comparison(fun));
        }

        if desc.anisotropy_clamp != 1 {
            // We only enable anisotropy if it is supported, and wgpu-hal interface guarantees
            // the clamp is in the range [1, 16] which is always supported if anisotropy is.
            vk_info = vk_info
                .anisotropy_enable(true)
                .max_anisotropy(desc.anisotropy_clamp as f32);
        }

        if let Some(color) = desc.border_color {
            vk_info = vk_info.border_color(conv::map_border_color(color));
        }

        let raw = unsafe { self.shared.raw.create_sampler(&vk_info, None)? };

        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::SAMPLER, raw, label)
            };
        }

        Ok(super::Sampler { raw })
    }
    unsafe fn destroy_sampler(&self, sampler: super::Sampler) {
        unsafe { self.shared.raw.destroy_sampler(sampler.raw, None) };
    }

    unsafe fn create_command_encoder(
        &self,
        desc: &crate::CommandEncoderDescriptor<super::Api>,
    ) -> Result<super::CommandEncoder, crate::DeviceError> {
        let vk_info = vk::CommandPoolCreateInfo::builder()
            .queue_family_index(desc.queue.family_index)
            .flags(vk::CommandPoolCreateFlags::TRANSIENT)
            .build();
        let raw = unsafe { self.shared.raw.create_command_pool(&vk_info, None)? };

        Ok(super::CommandEncoder {
            raw,
            device: Arc::clone(&self.shared),
            active: vk::CommandBuffer::null(),
            bind_point: vk::PipelineBindPoint::default(),
            temp: super::Temp::default(),
            free: Vec::new(),
            discarded: Vec::new(),
            rpass_debug_marker_active: false,
            end_of_pass_timer_query: None,
        })
    }
    unsafe fn destroy_command_encoder(&self, cmd_encoder: super::CommandEncoder) {
        unsafe {
            // `vkDestroyCommandPool` also frees any command buffers allocated
            // from that pool, so there's no need to explicitly call
            // `vkFreeCommandBuffers` on `cmd_encoder`'s `free` and `discarded`
            // fields.
            self.shared.raw.destroy_command_pool(cmd_encoder.raw, None);
        }
    }

    unsafe fn create_bind_group_layout(
        &self,
        desc: &crate::BindGroupLayoutDescriptor,
    ) -> Result<super::BindGroupLayout, crate::DeviceError> {
        let mut desc_count = gpu_descriptor::DescriptorTotalCount::default();
        let mut types = Vec::new();
        for entry in desc.entries {
            let count = entry.count.map_or(1, |c| c.get());
            if entry.binding as usize >= types.len() {
                types.resize(
                    entry.binding as usize + 1,
                    (vk::DescriptorType::INPUT_ATTACHMENT, 0),
                );
            }
            types[entry.binding as usize] = (
                conv::map_binding_type(entry.ty),
                entry.count.map_or(1, |c| c.get()),
            );

            match entry.ty {
                wgt::BindingType::Buffer {
                    ty,
                    has_dynamic_offset,
                    ..
                } => match ty {
                    wgt::BufferBindingType::Uniform => {
                        if has_dynamic_offset {
                            desc_count.uniform_buffer_dynamic += count;
                        } else {
                            desc_count.uniform_buffer += count;
                        }
                    }
                    wgt::BufferBindingType::Storage { .. } => {
                        if has_dynamic_offset {
                            desc_count.storage_buffer_dynamic += count;
                        } else {
                            desc_count.storage_buffer += count;
                        }
                    }
                },
                wgt::BindingType::Sampler { .. } => {
                    desc_count.sampler += count;
                }
                wgt::BindingType::Texture { .. } => {
                    desc_count.sampled_image += count;
                }
                wgt::BindingType::StorageTexture { .. } => {
                    desc_count.storage_image += count;
                }
            }
        }

        //Note: not bothering with on stack array here as it's low frequency
        let vk_bindings = desc
            .entries
            .iter()
            .map(|entry| vk::DescriptorSetLayoutBinding {
                binding: entry.binding,
                descriptor_type: types[entry.binding as usize].0,
                descriptor_count: types[entry.binding as usize].1,
                stage_flags: conv::map_shader_stage(entry.visibility),
                p_immutable_samplers: ptr::null(),
            })
            .collect::<Vec<_>>();

        let vk_info = vk::DescriptorSetLayoutCreateInfo::builder().bindings(&vk_bindings);

        let binding_arrays = desc
            .entries
            .iter()
            .enumerate()
            .filter_map(|(idx, entry)| entry.count.map(|count| (idx as u32, count)))
            .collect();

        let mut binding_flag_info;
        let binding_flag_vec;

        let partially_bound = desc
            .flags
            .contains(crate::BindGroupLayoutFlags::PARTIALLY_BOUND);

        let vk_info = if partially_bound {
            binding_flag_vec = desc
                .entries
                .iter()
                .map(|entry| {
                    let mut flags = vk::DescriptorBindingFlags::empty();

                    if partially_bound && entry.count.is_some() {
                        flags |= vk::DescriptorBindingFlags::PARTIALLY_BOUND;
                    }

                    flags
                })
                .collect::<Vec<_>>();

            binding_flag_info = vk::DescriptorSetLayoutBindingFlagsCreateInfo::builder()
                .binding_flags(&binding_flag_vec);

            vk_info.push_next(&mut binding_flag_info)
        } else {
            vk_info
        };

        let raw = unsafe {
            self.shared
                .raw
                .create_descriptor_set_layout(&vk_info, None)?
        };

        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::DESCRIPTOR_SET_LAYOUT, raw, label)
            };
        }

        Ok(super::BindGroupLayout {
            raw,
            desc_count,
            types: types.into_boxed_slice(),
            binding_arrays,
        })
    }
    unsafe fn destroy_bind_group_layout(&self, bg_layout: super::BindGroupLayout) {
        unsafe {
            self.shared
                .raw
                .destroy_descriptor_set_layout(bg_layout.raw, None)
        };
    }

    unsafe fn create_pipeline_layout(
        &self,
        desc: &crate::PipelineLayoutDescriptor<super::Api>,
    ) -> Result<super::PipelineLayout, crate::DeviceError> {
        //Note: not bothering with on stack array here as it's low frequency
        let vk_set_layouts = desc
            .bind_group_layouts
            .iter()
            .map(|bgl| bgl.raw)
            .collect::<Vec<_>>();
        let vk_push_constant_ranges = desc
            .push_constant_ranges
            .iter()
            .map(|pcr| vk::PushConstantRange {
                stage_flags: conv::map_shader_stage(pcr.stages),
                offset: pcr.range.start,
                size: pcr.range.end - pcr.range.start,
            })
            .collect::<Vec<_>>();

        let vk_info = vk::PipelineLayoutCreateInfo::builder()
            .flags(vk::PipelineLayoutCreateFlags::empty())
            .set_layouts(&vk_set_layouts)
            .push_constant_ranges(&vk_push_constant_ranges);

        let raw = {
            profiling::scope!("vkCreatePipelineLayout");
            unsafe { self.shared.raw.create_pipeline_layout(&vk_info, None)? }
        };

        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::PIPELINE_LAYOUT, raw, label)
            };
        }

        let mut binding_arrays = BTreeMap::new();
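        // E.g. a hypothetical WGSL binding `@group(1) @binding(2)` declared as
        // `binding_array<texture_2d<f32>, 64>` is recorded as
        // `ResourceBinding { group: 1, binding: 2 }` mapping to
        // `BindingInfo { binding_array_size: Some(64) }` for naga's SPIR-V backend.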
1380        for (group, &layout) in desc.bind_group_layouts.iter().enumerate() {
1381            for &(binding, binding_array_size) in &layout.binding_arrays {
1382                binding_arrays.insert(
1383                    naga::ResourceBinding {
1384                        group: group as u32,
1385                        binding,
1386                    },
1387                    naga::back::spv::BindingInfo {
1388                        binding_array_size: Some(binding_array_size.get()),
1389                    },
1390                );
1391            }
1392        }
1393
1394        Ok(super::PipelineLayout {
1395            raw,
1396            binding_arrays,
1397        })
1398    }
1399    unsafe fn destroy_pipeline_layout(&self, pipeline_layout: super::PipelineLayout) {
1400        unsafe {
1401            self.shared
1402                .raw
1403                .destroy_pipeline_layout(pipeline_layout.raw, None)
1404        };
1405    }
1406
1407    unsafe fn create_bind_group(
1408        &self,
1409        desc: &crate::BindGroupDescriptor<super::Api>,
1410    ) -> Result<super::BindGroup, crate::DeviceError> {
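        // Descriptor sets come from the shared gpu-descriptor allocator, which
        // manages VkDescriptorPools internally; each bind group gets one set.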
        let mut vk_sets = unsafe {
            self.desc_allocator.lock().allocate(
                &*self.shared,
                &desc.layout.raw,
                gpu_descriptor::DescriptorSetLayoutCreateFlags::empty(),
                &desc.layout.desc_count,
                1,
            )?
        };

        let set = vk_sets.pop().unwrap();
        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::DESCRIPTOR_SET, *set.raw(), label)
            };
        }

        let mut writes = Vec::with_capacity(desc.entries.len());
        let mut buffer_infos = Vec::with_capacity(desc.buffers.len());
        let mut sampler_infos = Vec::with_capacity(desc.samplers.len());
        let mut image_infos = Vec::with_capacity(desc.textures.len());
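        // Note: each `write` below captures raw pointers into these Vecs via
        // `build()`. The capacities above are upper bounds (entries are
        // expected to index disjoint ranges of the resource arrays), so the
        // `extend` calls never reallocate and invalidate pointers already
        // recorded in `writes`.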
        for entry in desc.entries {
            let (ty, size) = desc.layout.types[entry.binding as usize];
            if size == 0 {
                continue; // empty slot
            }
            let mut write = vk::WriteDescriptorSet::builder()
                .dst_set(*set.raw())
                .dst_binding(entry.binding)
                .descriptor_type(ty);
            write = match ty {
                vk::DescriptorType::SAMPLER => {
                    let index = sampler_infos.len();
                    let start = entry.resource_index;
                    let end = start + entry.count;
                    sampler_infos.extend(desc.samplers[start as usize..end as usize].iter().map(
                        |binding| {
                            vk::DescriptorImageInfo::builder()
                                .sampler(binding.raw)
                                .build()
                        },
                    ));
                    write.image_info(&sampler_infos[index..])
                }
                vk::DescriptorType::SAMPLED_IMAGE | vk::DescriptorType::STORAGE_IMAGE => {
                    let index = image_infos.len();
                    let start = entry.resource_index;
                    let end = start + entry.count;
                    image_infos.extend(desc.textures[start as usize..end as usize].iter().map(
                        |binding| {
                            let layout = conv::derive_image_layout(
                                binding.usage,
                                binding.view.attachment.view_format,
                            );
                            vk::DescriptorImageInfo::builder()
                                .image_view(binding.view.raw)
                                .image_layout(layout)
                                .build()
                        },
                    ));
                    write.image_info(&image_infos[index..])
                }
                vk::DescriptorType::UNIFORM_BUFFER
                | vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC
                | vk::DescriptorType::STORAGE_BUFFER
                | vk::DescriptorType::STORAGE_BUFFER_DYNAMIC => {
                    let index = buffer_infos.len();
                    let start = entry.resource_index;
                    let end = start + entry.count;
                    buffer_infos.extend(desc.buffers[start as usize..end as usize].iter().map(
                        |binding| {
                            vk::DescriptorBufferInfo::builder()
                                .buffer(binding.buffer.raw)
                                .offset(binding.offset)
                                .range(binding.size.map_or(vk::WHOLE_SIZE, wgt::BufferSize::get))
                                .build()
                        },
                    ));
                    write.buffer_info(&buffer_infos[index..])
                }
                _ => unreachable!(),
            };
            writes.push(write.build());
        }

        unsafe { self.shared.raw.update_descriptor_sets(&writes, &[]) };
        Ok(super::BindGroup { set })
    }
    unsafe fn destroy_bind_group(&self, group: super::BindGroup) {
        unsafe {
            self.desc_allocator
                .lock()
                .free(&*self.shared, Some(group.set))
        };
    }

    unsafe fn create_shader_module(
        &self,
        desc: &crate::ShaderModuleDescriptor,
        shader: crate::ShaderInput,
    ) -> Result<super::ShaderModule, crate::ShaderError> {
        let spv = match shader {
            crate::ShaderInput::Naga(naga_shader) => {
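                // Some drivers mishandle SPIR-V modules containing multiple
                // entry points (see Workarounds::SEPARATE_ENTRY_POINTS); for
                // those, keep the naga IR and emit one module per stage at
                // pipeline creation time instead.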
                if self
                    .shared
                    .workarounds
                    .contains(super::Workarounds::SEPARATE_ENTRY_POINTS)
                {
                    return Ok(super::ShaderModule::Intermediate {
                        naga_shader,
                        runtime_checks: desc.runtime_checks,
                    });
                }
                let mut naga_options = self.naga_options.clone();
                naga_options.debug_info =
                    naga_shader
                        .debug_source
                        .as_ref()
                        .map(|d| naga::back::spv::DebugInfo {
                            source_code: d.source_code.as_ref(),
                            file_name: d.file_name.as_ref().as_ref(),
                        });
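                // When the caller opts out of runtime checks, disable every
                // bounds-check policy so naga emits no clamping or guard code.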
                if !desc.runtime_checks {
                    naga_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
                        index: naga::proc::BoundsCheckPolicy::Unchecked,
                        buffer: naga::proc::BoundsCheckPolicy::Unchecked,
                        image_load: naga::proc::BoundsCheckPolicy::Unchecked,
                        image_store: naga::proc::BoundsCheckPolicy::Unchecked,
                        binding_array: naga::proc::BoundsCheckPolicy::Unchecked,
                    };
                }
                Cow::Owned(
                    naga::back::spv::write_vec(
                        &naga_shader.module,
                        &naga_shader.info,
                        &naga_options,
                        None,
                    )
                    .map_err(|e| crate::ShaderError::Compilation(format!("{e}")))?,
                )
            }
            crate::ShaderInput::SpirV(spv) => Cow::Borrowed(spv),
        };

        let raw = self.create_shader_module_impl(&spv)?;

        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::SHADER_MODULE, raw, label)
            };
        }

        Ok(super::ShaderModule::Raw(raw))
    }
    unsafe fn destroy_shader_module(&self, module: super::ShaderModule) {
        match module {
            super::ShaderModule::Raw(raw) => {
                unsafe { self.shared.raw.destroy_shader_module(raw, None) };
            }
            super::ShaderModule::Intermediate { .. } => {}
        }
    }

    unsafe fn create_render_pipeline(
        &self,
        desc: &crate::RenderPipelineDescriptor<super::Api>,
    ) -> Result<super::RenderPipeline, crate::PipelineError> {
        let dynamic_states = [
            vk::DynamicState::VIEWPORT,
            vk::DynamicState::SCISSOR,
            vk::DynamicState::BLEND_CONSTANTS,
            vk::DynamicState::STENCIL_REFERENCE,
        ];
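        // The key records only attachment formats, layouts, and sample counts,
        // which is what Vulkan render pass compatibility cares about; the
        // cached pass it yields need not be the exact pass used at draw time.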
        let mut compatible_rp_key = super::RenderPassKey {
            sample_count: desc.multisample.count,
            multiview: desc.multiview,
            ..Default::default()
        };
        let mut stages = ArrayVec::<_, 2>::new();
        let mut vertex_buffers = Vec::with_capacity(desc.vertex_buffers.len());
        let mut vertex_attributes = Vec::new();

        for (i, vb) in desc.vertex_buffers.iter().enumerate() {
            vertex_buffers.push(vk::VertexInputBindingDescription {
                binding: i as u32,
                stride: vb.array_stride as u32,
                input_rate: match vb.step_mode {
                    wgt::VertexStepMode::Vertex => vk::VertexInputRate::VERTEX,
                    wgt::VertexStepMode::Instance => vk::VertexInputRate::INSTANCE,
                },
            });
            for at in vb.attributes {
                vertex_attributes.push(vk::VertexInputAttributeDescription {
                    location: at.shader_location,
                    binding: i as u32,
                    format: conv::map_vertex_format(at.format),
                    offset: at.offset as u32,
                });
            }
        }

        let vk_vertex_input = vk::PipelineVertexInputStateCreateInfo::builder()
            .vertex_binding_descriptions(&vertex_buffers)
            .vertex_attribute_descriptions(&vertex_attributes)
            .build();

        let vk_input_assembly = vk::PipelineInputAssemblyStateCreateInfo::builder()
            .topology(conv::map_topology(desc.primitive.topology))
            .primitive_restart_enable(desc.primitive.strip_index_format.is_some())
            .build();

        let compiled_vs = self.compile_stage(
            &desc.vertex_stage,
            naga::ShaderStage::Vertex,
            &desc.layout.binding_arrays,
        )?;
        stages.push(compiled_vs.create_info);
        let compiled_fs = match desc.fragment_stage {
            Some(ref stage) => {
                let compiled = self.compile_stage(
                    stage,
                    naga::ShaderStage::Fragment,
                    &desc.layout.binding_arrays,
                )?;
                stages.push(compiled.create_info);
                Some(compiled)
            }
            None => None,
        };

        let mut vk_rasterization = vk::PipelineRasterizationStateCreateInfo::builder()
            .polygon_mode(conv::map_polygon_mode(desc.primitive.polygon_mode))
            .front_face(conv::map_front_face(desc.primitive.front_face))
            .line_width(1.0)
            .depth_clamp_enable(desc.primitive.unclipped_depth);
        if let Some(face) = desc.primitive.cull_mode {
            vk_rasterization = vk_rasterization.cull_mode(conv::map_cull_face(face))
        }
        let mut vk_rasterization_conservative_state =
            vk::PipelineRasterizationConservativeStateCreateInfoEXT::builder()
                .conservative_rasterization_mode(vk::ConservativeRasterizationModeEXT::OVERESTIMATE)
                .build();
        if desc.primitive.conservative {
            vk_rasterization = vk_rasterization.push_next(&mut vk_rasterization_conservative_state);
        }

        let mut vk_depth_stencil = vk::PipelineDepthStencilStateCreateInfo::builder();
        if let Some(ref ds) = desc.depth_stencil {
            let vk_format = self.shared.private_caps.map_texture_format(ds.format);
            let vk_layout = if ds.is_read_only(desc.primitive.cull_mode) {
                vk::ImageLayout::DEPTH_STENCIL_READ_ONLY_OPTIMAL
            } else {
                vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL
            };
            compatible_rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
                base: super::AttachmentKey::compatible(vk_format, vk_layout),
                stencil_ops: crate::AttachmentOps::all(),
            });

            if ds.is_depth_enabled() {
                vk_depth_stencil = vk_depth_stencil
                    .depth_test_enable(true)
                    .depth_write_enable(ds.depth_write_enabled)
                    .depth_compare_op(conv::map_comparison(ds.depth_compare));
            }
            if ds.stencil.is_enabled() {
                let s = &ds.stencil;
                let front = conv::map_stencil_face(&s.front, s.read_mask, s.write_mask);
                let back = conv::map_stencil_face(&s.back, s.read_mask, s.write_mask);
                vk_depth_stencil = vk_depth_stencil
                    .stencil_test_enable(true)
                    .front(front)
                    .back(back);
            }

            if ds.bias.is_enabled() {
                vk_rasterization = vk_rasterization
                    .depth_bias_enable(true)
                    .depth_bias_constant_factor(ds.bias.constant as f32)
                    .depth_bias_clamp(ds.bias.clamp)
                    .depth_bias_slope_factor(ds.bias.slope_scale);
            }
        }

        let vk_viewport = vk::PipelineViewportStateCreateInfo::builder()
            .flags(vk::PipelineViewportStateCreateFlags::empty())
            .scissor_count(1)
            .viewport_count(1)
            .build();

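        // Vulkan takes the sample mask as an array of 32-bit words, so the
        // 64-bit wgpu mask is split into its low and high halves.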
        let vk_sample_mask = [
            desc.multisample.mask as u32,
            (desc.multisample.mask >> 32) as u32,
        ];
        let vk_multisample = vk::PipelineMultisampleStateCreateInfo::builder()
            .rasterization_samples(vk::SampleCountFlags::from_raw(desc.multisample.count))
            .alpha_to_coverage_enable(desc.multisample.alpha_to_coverage_enabled)
            .sample_mask(&vk_sample_mask)
            .build();

        let mut vk_attachments = Vec::with_capacity(desc.color_targets.len());
        for cat in desc.color_targets {
            let (key, attachment) = if let Some(cat) = cat.as_ref() {
                let mut vk_attachment = vk::PipelineColorBlendAttachmentState::builder()
                    .color_write_mask(vk::ColorComponentFlags::from_raw(cat.write_mask.bits()));
                if let Some(ref blend) = cat.blend {
                    let (color_op, color_src, color_dst) = conv::map_blend_component(&blend.color);
                    let (alpha_op, alpha_src, alpha_dst) = conv::map_blend_component(&blend.alpha);
                    vk_attachment = vk_attachment
                        .blend_enable(true)
                        .color_blend_op(color_op)
                        .src_color_blend_factor(color_src)
                        .dst_color_blend_factor(color_dst)
                        .alpha_blend_op(alpha_op)
                        .src_alpha_blend_factor(alpha_src)
                        .dst_alpha_blend_factor(alpha_dst);
                }

                let vk_format = self.shared.private_caps.map_texture_format(cat.format);
                (
                    Some(super::ColorAttachmentKey {
                        base: super::AttachmentKey::compatible(
                            vk_format,
                            vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,
                        ),
                        resolve: None,
                    }),
                    vk_attachment.build(),
                )
            } else {
                (None, vk::PipelineColorBlendAttachmentState::default())
            };

            compatible_rp_key.colors.push(key);
            vk_attachments.push(attachment);
        }

        let vk_color_blend = vk::PipelineColorBlendStateCreateInfo::builder()
            .attachments(&vk_attachments)
            .build();

        let vk_dynamic_state = vk::PipelineDynamicStateCreateInfo::builder()
            .dynamic_states(&dynamic_states)
            .build();

        let raw_pass = self
            .shared
            .make_render_pass(compatible_rp_key)
            .map_err(crate::DeviceError::from)?;

        let vk_infos = [{
            vk::GraphicsPipelineCreateInfo::builder()
                .layout(desc.layout.raw)
                .stages(&stages)
                .vertex_input_state(&vk_vertex_input)
                .input_assembly_state(&vk_input_assembly)
                .rasterization_state(&vk_rasterization)
                .viewport_state(&vk_viewport)
                .multisample_state(&vk_multisample)
                .depth_stencil_state(&vk_depth_stencil)
                .color_blend_state(&vk_color_blend)
                .dynamic_state(&vk_dynamic_state)
                .render_pass(raw_pass)
                .build()
        }];

        let mut raw_vec = {
            profiling::scope!("vkCreateGraphicsPipelines");
            unsafe {
                self.shared
                    .raw
                    .create_graphics_pipelines(vk::PipelineCache::null(), &vk_infos, None)
                    .map_err(|(_, e)| crate::DeviceError::from(e))
            }?
        };

        let raw = raw_vec.pop().unwrap();
        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::PIPELINE, raw, label)
            };
        }

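        // Stage modules compiled on the fly (see the SEPARATE_ENTRY_POINTS
        // workaround) are no longer needed once the pipeline object exists.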
        if let Some(raw_module) = compiled_vs.temp_raw_module {
            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
        }
        if let Some(CompiledStage {
            temp_raw_module: Some(raw_module),
            ..
        }) = compiled_fs
        {
            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
        }

        Ok(super::RenderPipeline { raw })
    }
    unsafe fn destroy_render_pipeline(&self, pipeline: super::RenderPipeline) {
        unsafe { self.shared.raw.destroy_pipeline(pipeline.raw, None) };
    }

    unsafe fn create_compute_pipeline(
        &self,
        desc: &crate::ComputePipelineDescriptor<super::Api>,
    ) -> Result<super::ComputePipeline, crate::PipelineError> {
        let compiled = self.compile_stage(
            &desc.stage,
            naga::ShaderStage::Compute,
            &desc.layout.binding_arrays,
        )?;

        let vk_infos = [{
            vk::ComputePipelineCreateInfo::builder()
                .layout(desc.layout.raw)
                .stage(compiled.create_info)
                .build()
        }];

        let mut raw_vec = {
            profiling::scope!("vkCreateComputePipelines");
            unsafe {
                self.shared
                    .raw
                    .create_compute_pipelines(vk::PipelineCache::null(), &vk_infos, None)
                    .map_err(|(_, e)| crate::DeviceError::from(e))
            }?
        };

        let raw = raw_vec.pop().unwrap();
        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::PIPELINE, raw, label)
            };
        }

        if let Some(raw_module) = compiled.temp_raw_module {
            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
        }

        Ok(super::ComputePipeline { raw })
    }
    unsafe fn destroy_compute_pipeline(&self, pipeline: super::ComputePipeline) {
        unsafe { self.shared.raw.destroy_pipeline(pipeline.raw, None) };
    }

    unsafe fn create_query_set(
        &self,
        desc: &wgt::QuerySetDescriptor<crate::Label>,
    ) -> Result<super::QuerySet, crate::DeviceError> {
        let (vk_type, pipeline_statistics) = match desc.ty {
            wgt::QueryType::Occlusion => (
                vk::QueryType::OCCLUSION,
                vk::QueryPipelineStatisticFlags::empty(),
            ),
            wgt::QueryType::PipelineStatistics(statistics) => (
                vk::QueryType::PIPELINE_STATISTICS,
                conv::map_pipeline_statistics(statistics),
            ),
            wgt::QueryType::Timestamp => (
                vk::QueryType::TIMESTAMP,
                vk::QueryPipelineStatisticFlags::empty(),
            ),
        };

        let vk_info = vk::QueryPoolCreateInfo::builder()
            .query_type(vk_type)
            .query_count(desc.count)
            .pipeline_statistics(pipeline_statistics)
            .build();

        let raw = unsafe { self.shared.raw.create_query_pool(&vk_info, None) }?;
        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::QUERY_POOL, raw, label)
            };
        }

        Ok(super::QuerySet { raw })
    }
    unsafe fn destroy_query_set(&self, set: super::QuerySet) {
        unsafe { self.shared.raw.destroy_query_pool(set.raw, None) };
    }

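    // A Fence is backed by a timeline semaphore when the device supports them
    // (a single monotonically increasing payload tracks every value);
    // otherwise it is emulated with a pool of binary VkFences, one per
    // signaled value.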
    unsafe fn create_fence(&self) -> Result<super::Fence, crate::DeviceError> {
        Ok(if self.shared.private_caps.timeline_semaphores {
            let mut sem_type_info =
                vk::SemaphoreTypeCreateInfo::builder().semaphore_type(vk::SemaphoreType::TIMELINE);
            let vk_info = vk::SemaphoreCreateInfo::builder().push_next(&mut sem_type_info);
            let raw = unsafe { self.shared.raw.create_semaphore(&vk_info, None) }?;
            super::Fence::TimelineSemaphore(raw)
        } else {
            super::Fence::FencePool {
                last_completed: 0,
                active: Vec::new(),
                free: Vec::new(),
            }
        })
    }
    unsafe fn destroy_fence(&self, fence: super::Fence) {
        match fence {
            super::Fence::TimelineSemaphore(raw) => {
                unsafe { self.shared.raw.destroy_semaphore(raw, None) };
            }
            super::Fence::FencePool {
                active,
                free,
                last_completed: _,
            } => {
                for (_, raw) in active {
                    unsafe { self.shared.raw.destroy_fence(raw, None) };
                }
                for raw in free {
                    unsafe { self.shared.raw.destroy_fence(raw, None) };
                }
            }
        }
    }
    unsafe fn get_fence_value(
        &self,
        fence: &super::Fence,
    ) -> Result<crate::FenceValue, crate::DeviceError> {
        fence.get_latest(
            &self.shared.raw,
            self.shared.extension_fns.timeline_semaphore.as_ref(),
        )
    }
    unsafe fn wait(
        &self,
        fence: &super::Fence,
        wait_value: crate::FenceValue,
        timeout_ms: u32,
    ) -> Result<bool, crate::DeviceError> {
        let timeout_ns = timeout_ms as u64 * super::MILLIS_TO_NANOS;
        match *fence {
            super::Fence::TimelineSemaphore(raw) => {
                let semaphores = [raw];
                let values = [wait_value];
                let vk_info = vk::SemaphoreWaitInfo::builder()
                    .semaphores(&semaphores)
                    .values(&values);
                let result = match self.shared.extension_fns.timeline_semaphore {
                    Some(super::ExtensionFn::Extension(ref ext)) => unsafe {
                        ext.wait_semaphores(&vk_info, timeout_ns)
                    },
                    Some(super::ExtensionFn::Promoted) => unsafe {
                        self.shared.raw.wait_semaphores(&vk_info, timeout_ns)
                    },
                    None => unreachable!(),
                };
                match result {
                    Ok(()) => Ok(true),
                    Err(vk::Result::TIMEOUT) => Ok(false),
                    Err(other) => Err(other.into()),
                }
            }
            super::Fence::FencePool {
                last_completed,
                ref active,
                free: _,
            } => {
                if wait_value <= last_completed {
                    Ok(true)
                } else {
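                    // Any active fence recorded at `value >= wait_value`
                    // proves the wait value has been reached once it signals.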
                    match active.iter().find(|&&(value, _)| value >= wait_value) {
                        Some(&(_, raw)) => {
                            match unsafe {
                                self.shared.raw.wait_for_fences(&[raw], true, timeout_ns)
                            } {
                                Ok(()) => Ok(true),
                                Err(vk::Result::TIMEOUT) => Ok(false),
                                Err(other) => Err(other.into()),
                            }
                        }
                        None => {
                            log::error!("No signals reached value {}", wait_value);
                            Err(crate::DeviceError::Lost)
                        }
                    }
                }
            }
        }
    }

    unsafe fn start_capture(&self) -> bool {
        #[cfg(feature = "renderdoc")]
        {
            // RenderDoc requires us to pass it the pointer that the VkInstance handle _points to_ (its dispatch table).
            let raw_vk_instance =
                ash::vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
            let raw_vk_instance_dispatch_table = unsafe { *raw_vk_instance };
            unsafe {
                self.render_doc
                    .start_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
            }
        }
        #[cfg(not(feature = "renderdoc"))]
        false
    }
    unsafe fn stop_capture(&self) {
        #[cfg(feature = "renderdoc")]
        {
            // RenderDoc requires us to pass it the pointer that the VkInstance handle _points to_ (its dispatch table).
            let raw_vk_instance =
                ash::vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
            let raw_vk_instance_dispatch_table = unsafe { *raw_vk_instance };

            unsafe {
                self.render_doc
                    .end_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
            }
        }
    }
}

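// Translate allocator errors into wgpu-hal DeviceErrors: genuine out-of-memory
// conditions become OutOfMemory; unexpected gpu-alloc failures are logged and
// reported as a lost device.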
impl From<gpu_alloc::AllocationError> for crate::DeviceError {
    fn from(error: gpu_alloc::AllocationError) -> Self {
        use gpu_alloc::AllocationError as Ae;
        match error {
            Ae::OutOfDeviceMemory | Ae::OutOfHostMemory => Self::OutOfMemory,
            _ => {
                log::error!("memory allocation: {:?}", error);
                Self::Lost
            }
        }
    }
}
impl From<gpu_alloc::MapError> for crate::DeviceError {
    fn from(error: gpu_alloc::MapError) -> Self {
        use gpu_alloc::MapError as Me;
        match error {
            Me::OutOfDeviceMemory | Me::OutOfHostMemory => Self::OutOfMemory,
            _ => {
                log::error!("memory mapping: {:?}", error);
                Self::Lost
            }
        }
    }
}
impl From<gpu_descriptor::AllocationError> for crate::DeviceError {
    fn from(error: gpu_descriptor::AllocationError) -> Self {
        log::error!("descriptor allocation: {:?}", error);
        Self::OutOfMemory
    }
}