voodoo/
device.rs

1use std::sync::Arc;
2use std::mem;
3use std::ptr;
4use std::marker::PhantomData;
5use std::ffi::CStr;
6use libc::{c_void};
7use smallvec::SmallVec;
8use vks;
9use ::{error, VdResult, Instance, PhysicalDevice, DeviceQueueCreateInfo, CharStrs,
10    PhysicalDeviceFeatures, PRINT, Handle, SubmitInfo, QueueHandle, MemoryAllocateInfo,
11    DeviceMemoryHandle, MemoryMapFlags, SwapchainKhrHandle, SwapchainCreateInfoKhr,
12    ShaderModuleCreateInfo, ShaderModuleHandle, SemaphoreCreateInfo, SemaphoreHandle,
13    SamplerCreateInfo, SamplerHandle, RenderPassCreateInfo, RenderPassHandle, BufferCreateInfo,
14    BufferHandle, BufferViewCreateInfo, BufferViewHandle, ImageViewCreateInfo, ImageViewHandle,
15    ImageCreateInfo, ImageHandle, FramebufferCreateInfo, FramebufferHandle,
16    DescriptorSetLayoutCreateInfo, DescriptorSetLayoutHandle, DescriptorPoolCreateInfo,
17    DescriptorPoolHandle, CommandPoolCreateInfo, CommandPoolHandle, CommandBufferAllocateInfo,
18    CommandBufferHandle, PipelineLayoutCreateInfo, PipelineLayoutHandle, FenceCreateInfo,
19    FenceHandle, EventCreateInfo, EventHandle, PipelineCacheCreateInfo, PipelineCacheHandle,
20    MemoryRequirements, DeviceSize, CommandBufferBeginInfo, GraphicsPipelineCreateInfo,
21    PipelineHandle, ComputePipelineCreateInfo, PipelineStageFlags, DependencyFlags, MemoryBarrier,
22    BufferMemoryBarrier, ImageMemoryBarrier, WriteDescriptorSet, CopyDescriptorSet,
23    BufferImageCopy, ImageLayout, BufferCopy, CommandBufferResetFlags, PipelineBindPoint, Viewport,
24    Rect2d, StencilFaceFlags, DebugMarkerMarkerInfoExt, DescriptorSetHandle, QueryPoolHandle,
25    QueryResultFlags, ShaderStageFlags, RenderPassBeginInfo, SubpassContents, ImageCopy, IndexType,
26    ImageBlit, Filter, ClearColorValue, ImageSubresourceRange, ClearDepthStencilValue,
27    ClearAttachment, ImageResolve, QueryControlFlags, ClearRect, PresentInfoKhr, MappedMemoryRange,
28    SparseImageMemoryRequirements, BindSparseInfo, CallResult, QueryPoolCreateInfo,
29    ImageSubresource, SubresourceLayout, DescriptorSetAllocateInfo, DescriptorPoolResetFlags,
30    Extent2d, CommandPoolResetFlags, CommandPoolTrimFlagsKhr, MemoryGetWin32HandleInfoKhr,
31    ExternalMemoryHandleTypeFlagsKhr, HANDLE, MemoryGetFdInfoKhr,
32    ImportSemaphoreWin32HandleInfoKhr, SemaphoreGetWin32HandleInfoKhr, ImportSemaphoreFdInfoKhr,
33    SemaphoreGetFdInfoKhr, PipelineLayout, BufferMemoryRequirementsInfo2Khr,
34    ImportFenceWin32HandleInfoKhr, FenceGetWin32HandleInfoKhr, ImportFenceFdInfoKhr,
35    FenceGetFdInfoKhr, ImageMemoryRequirementsInfo2Khr, ImageSparseMemoryRequirementsInfo2Khr,
36    DebugMarkerObjectTagInfoExt, DebugMarkerObjectNameInfoExt, DisplayPowerInfoExt,
37    DisplayKhrHandle, DeviceEventInfoExt, DisplayEventInfoExt, HdrMetadataExt,
38    SurfaceCounterFlagsExt, Queue};
39
40// #[cfg(feature = "experimental")]
41// use ::{};
42
43#[cfg(feature = "unimplemented")]
44use ::{SamplerYcbcrConversionCreateInfoKhr, IndirectCommandsLayoutNvxCreateInfo,
45    ObjectTableNvxCreateInfo, ValidationCacheExtCreateInfo, DescriptorUpdateTemplateCreateInfoKhr,
46    DescriptorUpdateTemplateKhrHandle, SamplerYcbcrConversionKhrHandle, IndirectCommandsLayoutNvxHandle,
47    ValidationCacheExtHandle, ObjectTableNvxHandle, SampleLocationsInfoExt, ValidationCacheExt,};
48
49
/// A logical device handle.
///
/// A transparent, copyable wrapper around the raw `vks::VkDevice` pointer.
/// This handle does not own the device; the owning `Device` destroys it when
/// its last clone is dropped (see `Inner::drop` below).
//
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
#[repr(C)]
pub struct DeviceHandle(pub(crate) vks::VkDevice);
55
impl DeviceHandle {
    /// Returns the raw `vks::VkDevice` pointer underlying this handle.
    #[inline(always)]
    pub fn to_raw(&self) -> vks::VkDevice {
        self.0
    }
}
62
// Lets a `DeviceHandle` be used anywhere a generic Vulkan `Handle` is
// accepted; the target of a device handle is itself (handles are `Copy`).
unsafe impl Handle for DeviceHandle {
    type Target = DeviceHandle;

    #[inline(always)]
    fn handle(&self) -> Self::Target {
        *self
    }
}
71
72fn get_device_queue(proc_addr_loader: &vks::DeviceProcAddrLoader, device: DeviceHandle,
73        queue_family_index: u32, queue_index: u32) -> Option<QueueHandle> {
74    let mut handle = ptr::null_mut();
75    unsafe {
76        proc_addr_loader.vk.vkGetDeviceQueue(device.to_raw(),
77            queue_family_index, queue_index, &mut handle);
78    }
79    if !handle.is_null() {
80        Some(QueueHandle(handle))
81    } else {
82        None
83    }
84}
85
86
// Shared state behind a `Device`; reference-counted via `Arc` so that clones
// of `Device` are cheap and the underlying `VkDevice` is destroyed exactly
// once (in `Drop for Inner`).
#[derive(Debug)]
struct Inner {
    // Raw logical device handle; destroyed in `Drop::drop` below.
    handle: DeviceHandle,
    // The physical device this logical device belongs to.
    physical_device: PhysicalDevice,
    // features: vks::VkPhysicalDeviceFeatures,
    // Queues associated with this device (exposed via `queue()`/`queues()`).
    queues: SmallVec<[Queue; 16]>,
    // Parent instance; also used to destroy the device on drop.
    instance: Instance,
    // Device-level function-pointer loader used for all `vk*` calls.
    loader: vks::DeviceProcAddrLoader,
}
96
impl Drop for Inner {
    /// Destroys the underlying `VkDevice` when the last `Device` clone is
    /// dropped.
    fn drop(&mut self) {
        if PRINT { println!("Destroying device..."); }
        unsafe {
            self.instance.destroy_device(self.handle, None);
        }
    }
}
105
106
/// A logical device.
///
/// Cheaply cloneable: all clones share one `Inner` through an `Arc`.
///
/// ### Destruction
///
/// Dropping the last clone of this `Device` will cause
/// `Instance::destroy_device` to be called, automatically releasing any
/// resources associated with it.
///
#[derive(Debug, Clone)]
pub struct Device {
    inner: Arc<Inner>,
}
119
120impl Device {
    /// Returns a new `DeviceBuilder` used to configure and create a `Device`.
    pub fn builder<'db>() -> DeviceBuilder<'db> {
        DeviceBuilder::new()
    }
125
    /// Returns one of this device's associated queues, or `None` if
    /// `device_queue_index` is out of range.
    ///
    /// `device_queue_index` is an index into this device's internal queue
    /// list; it does not correspond to the queue family index or any other
    /// index used when creating this device.
    #[inline]
    pub fn queue(&self, device_queue_index: usize) -> Option<&Queue> {
        self.inner.queues.get(device_queue_index)
    }
134
    /// Returns a slice of all queues associated with this device.
    #[inline]
    pub fn queues(&self) -> &[Queue] {
        &self.inner.queues
    }
140
    /// Returns a reference to the associated `DeviceProcAddrLoader`, which
    /// holds the device-level Vulkan function pointers.
    #[inline]
    pub fn proc_addr_loader(&self) -> &vks::DeviceProcAddrLoader {
        &self.inner.loader
    }
146
    /// Returns the (copyable) handle for this device.
    #[inline]
    pub fn handle(&self) -> DeviceHandle {
        self.inner.handle
    }
152
    /// Returns a reference to the physical device this logical device was
    /// created from.
    #[inline]
    pub fn physical_device(&self) -> &PhysicalDevice {
        &self.inner.physical_device
    }
158
    /// Returns a reference to the parent instance.
    #[inline]
    pub fn instance(&self) -> &Instance {
        &self.inner.instance
    }
164
    /// Waits for this device to become idle.
    ///
    /// Convenience alias for `device_wait_idle`.
    ///
    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkDeviceWaitIdle.html
    //
    #[inline]
    pub fn wait_idle(&self) {
        self.device_wait_idle()
    }
173
174    /// Returns the memory type index on this device matching the provided
175    /// type filter and properties.
176    //
177    pub fn memory_type_index(&self, type_filter: u32, properties: ::MemoryPropertyFlags)
178            -> VdResult<u32> {
179        let mem_props = self.physical_device().memory_properties();
180
181        for i in 0..mem_props.memory_type_count() {
182            if (type_filter & (1 << i)) != 0 &&
183                (mem_props.memory_types()[i as usize].property_flags() & properties) == properties
184            {
185                return Ok(i);
186            }
187        }
188        panic!("failed to find suitable memory type index with: type_filter: '{}', properties: '{:?}'",
189            type_filter, properties);
190    }
191
192
    /// Get a queue handle from a device.
    ///
    /// Thin wrapper over the module-level `get_device_queue` helper; returns
    /// `None` if the driver reports a null queue handle.
    ///
    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkGetDeviceQueue.html
    //
    // *PFN_vkGetDeviceQueue)(VkDevice device, uint32_t queueFamilyIndex,
    // uint32_t queueIndex, VkQueue* pQueue);
    pub fn get_device_queue(&self, queue_family_index: u32, queue_index: u32) -> Option<QueueHandle> {
        get_device_queue(self.proc_addr_loader(), self.inner.handle, queue_family_index, queue_index)
    }
202
203
204    /// Submits a sequence of semaphores or command buffers to a queue.
205    ///
206    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkQueueSubmit.html
207    //
208    // *PFN_vkQueueSubmit)(VkQueue queue, uint32_t submitCount, const
209    // VkSubmitInfo* pSubmits, VkFence fence);
210    //
211    // queue is the queue that the command buffers will be submitted to.
212    //
213    // submitCount is the number of elements in the pSubmits array.
214    //
215    // pSubmits is a pointer to an array of VkSubmitInfo structures, each
216    // specifying a command buffer submission batch.
217    //
218    // fence is an optional handle to a fence to be signaled. If fence is not
219    // VK_NULL_HANDLE, it defines a fence signal operation.
220    pub unsafe fn queue_submit<Q>(&self, queue: Q, submit_info: &[SubmitInfo],
221            fence: Option<FenceHandle>) -> VdResult<()>
222            where Q: Handle<Target=QueueHandle> {
223        let fence_handle_raw = fence.map(|f| f.to_raw()).unwrap_or(0);
224        let result = self.proc_addr_loader().vk.vkQueueSubmit(queue.handle().to_raw(),
225            submit_info.len() as u32, submit_info.as_ptr() as *const vks::VkSubmitInfo,
226            fence_handle_raw);
227        error::check(result, "vkQueueSubmit", ())
228    }
229
    /// Waits for a queue to become idle.
    ///
    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkQueueWaitIdle.html
    //
    // *PFN_vkQueueWaitIdle)(VkQueue queue);
    //
    // NOTE(review): the result code returned by `vkQueueWaitIdle` is
    // discarded here, so errors (e.g. device loss) go unreported — confirm
    // whether callers need it surfaced.
    pub fn queue_wait_idle<Q>(&self, queue: Q)
            where Q: Handle<Target=QueueHandle> {
        unsafe {
            self.proc_addr_loader().vk.vkQueueWaitIdle(queue.handle().to_raw());
        }
    }
241
    /// Waits for this device to become idle.
    ///
    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkDeviceWaitIdle.html
    //
    // *PFN_vkDeviceWaitIdle)(VkDevice device);
    //
    // NOTE(review): the result code returned by `vkDeviceWaitIdle` is
    // discarded here — confirm whether callers need it surfaced.
    pub fn device_wait_idle(&self) {
        unsafe {
            self.proc_addr_loader().vk.vkDeviceWaitIdle(self.handle().to_raw());
        }
    }
252
253    /// Allocates GPU memory.
254    ///
255    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkAllocateMemory.html
256    //
257    // *PFN_vkAllocateMemory)(VkDevice device, const VkMemoryAllocateInfo*
258    // pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory*
259    // pMemory);
260    pub unsafe fn allocate_memory(&self, allocate_info: &MemoryAllocateInfo,
261            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<DeviceMemoryHandle> {
262        let allocator = allocator.unwrap_or(ptr::null());
263        let mut handle = 0;
264        let result = self.proc_addr_loader().vk.vkAllocateMemory(self.handle().0,
265            allocate_info.as_raw(), allocator, &mut handle);
266        error::check(result, "vkAllocateMemory", DeviceMemoryHandle(handle))
267    }
268
269    /// Frees GPU memory.
270    ///
271    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkFreeMemory.html
272    //
273    // *PFN_vkFreeMemory)(VkDevice device, VkDeviceMemory memory, const
274    // VkAllocationCallbacks* pAllocator);
275    pub unsafe fn free_memory(&self, memory: DeviceMemoryHandle,
276            allocator: Option<*const vks::VkAllocationCallbacks>) {
277        let allocator = allocator.unwrap_or(ptr::null());
278        self.proc_addr_loader().vk.vkFreeMemory(self.handle().0,
279            memory.handle().to_raw(), allocator);
280    }
281
282    /// Maps a memory object into application address space.
283    ///
284    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkMapMemory.html
285    //
286    // *PFN_vkMapMemory)(VkDevice device, VkDeviceMemory memory, VkDeviceSize
287    // offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData);
288    pub unsafe fn map_memory<T>(&self, memory: DeviceMemoryHandle, offset_bytes: u64, size_bytes: u64,
289            flags: MemoryMapFlags) -> VdResult<*mut T> {
290        let mut data = ptr::null_mut();
291        let result = self.proc_addr_loader().vk.vkMapMemory(self.handle().to_raw(),
292            memory.to_raw(), offset_bytes, size_bytes, flags.bits(), &mut data);
293        error::check(result, "vkMapMemory", data as *mut T)
294    }
295
296    /// Unmaps a previously mapped memory object.
297    ///
298    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkUnmapMemory.html
299    //
300    // *PFN_vkUnmapMemory)(VkDevice device, VkDeviceMemory memory);
301    pub unsafe fn unmap_memory(&self, memory: DeviceMemoryHandle) {
302        self.proc_addr_loader().vk.vkUnmapMemory(self.handle().0, memory.to_raw());
303    }
304
305    /// Flushes mapped memory ranges.
306    ///
307    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkFlushMappedMemoryRanges.html
308    //
309    // *PFN_vkFlushMappedMemoryRanges)(VkDevice device, uint32_t
310    // memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges);
311    pub unsafe fn flush_mapped_memory_ranges(&self, memory_ranges: &[MappedMemoryRange])
312            -> VdResult<()> {
313        let result = self.proc_addr_loader().vk.vkFlushMappedMemoryRanges(self.handle().to_raw(),
314            memory_ranges.len() as u32, memory_ranges.as_ptr() as *const vks::VkMappedMemoryRange);
315        error::check(result, "vkFlushMappedMemoryRanges", ())
316    }
317
318    /// Invalidates ranges of mapped memory objects.
319    ///
320    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkInvalidateMappedMemoryRanges.html
321    //
322    // *PFN_vkInvalidateMappedMemoryRanges)(VkDevice device, uint32_t
323    // memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges);
324    pub unsafe fn invalidate_mapped_memory_ranges(&self, memory_ranges: &[MappedMemoryRange])
325            -> VdResult<()> {
326        let result = self.proc_addr_loader().vk.vkInvalidateMappedMemoryRanges(self.handle().to_raw(),
327            memory_ranges.len() as u32, memory_ranges.as_ptr() as *const vks::VkMappedMemoryRange);
328        error::check(result, "vkInvalidateMappedMemoryRanges", ())
329    }
330
331    /// Queries the current commitment for a VkDeviceMemory.
332    ///
333    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkGetDeviceMemoryCommitment.html
334    //
335    // *PFN_vkGetDeviceMemoryCommitment)(VkDevice device, VkDeviceMemory
336    // memory, VkDeviceSize* pCommittedMemoryInBytes);
337    pub unsafe fn get_device_memory_commitment<Dm>(&self, memory: Dm)
338            -> DeviceSize
339            where Dm: Handle<Target=DeviceMemoryHandle> {
340        let mut committed_memory_in_bytes = 0;
341        self.proc_addr_loader().vk.vkGetDeviceMemoryCommitment(self.handle().to_raw(),
342            memory.handle().to_raw(), &mut committed_memory_in_bytes);
343        committed_memory_in_bytes
344    }
345
346    /// Binds device memory to a buffer object.
347    ///
348    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkBindBufferMemory.html
349    //
350    // *PFN_vkBindBufferMemory)(VkDevice device, VkBuffer buffer,
351    // VkDeviceMemory memory, VkDeviceSize memoryOffset);
352    pub unsafe fn bind_buffer_memory(&self, buffer: BufferHandle, memory: DeviceMemoryHandle,
353            memory_offset: DeviceSize) -> VdResult<()> {
354        let result = self.proc_addr_loader().vk.vkBindBufferMemory(
355            self.handle().to_raw(), buffer.to_raw(), memory.to_raw(), memory_offset);
356        error::check(result, "vkBindBufferMemory", ())
357    }
358
359    /// Binds device memory to an image object.
360    ///
361    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkBindImageMemory.html
362    //
363    // *PFN_vkBindImageMemory)(VkDevice device, VkImage image, VkDeviceMemory
364    // memory, VkDeviceSize memoryOffset);
365    pub unsafe fn bind_image_memory(&self, image: ImageHandle, memory: DeviceMemoryHandle,
366            memory_offset: DeviceSize) -> VdResult<()> {
367        let result = self.proc_addr_loader().vk.vkBindImageMemory(
368            self.handle().to_raw(), image.to_raw(), memory.to_raw(), memory_offset);
369        error::check(result, "vkBindImageMemory", ())
370    }
371
372    /// Returns the memory requirements for specified Vulkan object.
373    ///
374    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkGetBufferMemoryRequirements.html
375    //
376    // *PFN_vkGetBufferMemoryRequirements)(VkDevice device, VkBuffer buffer,
377    // VkMemoryRequirements* pMemoryRequirements);
378    pub unsafe fn get_buffer_memory_requirements(&self, buffer: BufferHandle) -> MemoryRequirements {
379        let mut memory_requirements: vks::VkMemoryRequirements;
380        memory_requirements = mem::uninitialized();
381        self.proc_addr_loader().vk.vkGetBufferMemoryRequirements(self.handle().to_raw(),
382            buffer.to_raw(), &mut memory_requirements);
383        MemoryRequirements::from_raw(memory_requirements)
384    }
385
386    /// Returns the memory requirements for specified Vulkan object.
387    ///
388    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkGetImageMemoryRequirements.html
389    //
390    // *PFN_vkGetImageMemoryRequirements)(VkDevice device, VkImage image,
391    // VkMemoryRequirements* pMemoryRequirements);
392    pub unsafe fn get_image_memory_requirements<I>(&self, image: I) -> MemoryRequirements
393            where I: Handle<Target=ImageHandle> {
394        let mut memory_requirements: vks::VkMemoryRequirements;
395        memory_requirements = mem::uninitialized();
396        self.proc_addr_loader().vk.vkGetImageMemoryRequirements(self.handle().to_raw(),
397            image.handle().to_raw(), &mut memory_requirements);
398        MemoryRequirements::from_raw(memory_requirements)
399    }
400
    /// Queries the memory requirements for a sparse image.
    ///
    /// Uses the standard two-call Vulkan enumeration pattern: the first call
    /// obtains the element count, the second fills the buffer.
    ///
    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkGetImageSparseMemoryRequirements.html
    //
    // *PFN_vkGetImageSparseMemoryRequirements)(VkDevice device, VkImage
    // image, uint32_t* pSparseMemoryRequirementCount,
    // VkSparseImageMemoryRequirements* pSparseMemoryRequirements);
    pub unsafe fn get_image_sparse_memory_requirements<I>(&self, image: I)
            -> SmallVec<[SparseImageMemoryRequirements; 32]>
            where I: Handle<Target=ImageHandle> {
        // First call: null output pointer, so the driver only writes the
        // number of requirement structs into `sparse_memory_requirement_count`.
        let mut sparse_memory_requirement_count = 0u32;
        let mut sparse_memory_requirements: SmallVec<[SparseImageMemoryRequirements; 32]> = SmallVec::new();
        self.proc_addr_loader().vk.vkGetImageSparseMemoryRequirements(self.handle().to_raw(),
            image.handle().to_raw(), &mut sparse_memory_requirement_count, ptr::null_mut());
        // Size the vector so the second call can fill elements in place.
        // NOTE(review): `set_len` runs before the elements are written, which
        // assumes `SparseImageMemoryRequirements` is plain data that the
        // driver fully overwrites — confirm.
        sparse_memory_requirements.reserve_exact(sparse_memory_requirement_count as usize);
        sparse_memory_requirements.set_len(sparse_memory_requirement_count as usize);
        self.proc_addr_loader().vk.vkGetImageSparseMemoryRequirements(self.handle().to_raw(),
            image.handle().to_raw(), &mut sparse_memory_requirement_count,
            sparse_memory_requirements.as_mut_ptr() as *mut vks::VkSparseImageMemoryRequirements);
        sparse_memory_requirements
    }
422
423    /// Binds device memory to a sparse resource object.
424    ///
425    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkQueueBindSparse.html
426    //
427    // *PFN_vkQueueBindSparse)(VkQueue queue, uint32_t bindInfoCount, const
428    // VkBindSparseInfo* pBindInfo, VkFence fence);
429    pub unsafe fn queue_bind_sparse<Q, F>(&self, queue: Q, bind_info: &[BindSparseInfo], fence: F)
430            -> VdResult<()>
431            where Q: Handle<Target=QueueHandle>, F: Handle<Target=FenceHandle> {
432        let result = self.proc_addr_loader().vk.vkQueueBindSparse(queue.handle().to_raw(),
433            bind_info.len() as u32, bind_info.as_ptr() as *const _ as *const vks::VkBindSparseInfo,
434            fence.handle().to_raw());
435        error::check(result, "vkQueueBindSparse", ())
436    }
437
438    /// Creates a new fence object
439    ///
440    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCreateFence.html
441    //
442    // *PFN_vkCreateFence)(VkDevice device, const VkFenceCreateInfo*
443    // pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence);
444    pub unsafe fn create_fence(&self, create_info: &FenceCreateInfo,
445            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<FenceHandle> {
446        let allocator = allocator.unwrap_or(ptr::null());
447        let mut handle = 0;
448        let result = self.proc_addr_loader().vk.vkCreateFence(self.handle().to_raw(),
449            create_info.as_raw(), allocator, &mut handle);
450        error::check(result, "vkCreateFence", FenceHandle(handle))
451    }
452
453    /// Destroys a fence object.
454    ///
455    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkDestroyFence.html
456    //
457    // *PFN_vkDestroyFence)(VkDevice device, VkFence fence, const
458    // VkAllocationCallbacks* pAllocator);
459    pub unsafe fn destroy_fence(&self, fence: FenceHandle,
460            allocator: Option<*const vks::VkAllocationCallbacks>) {
461        let allocator = allocator.unwrap_or(ptr::null());
462        self.proc_addr_loader().vk.vkDestroyFence(self.handle().to_raw(),
463            fence.to_raw(), allocator);
464    }
465
466    /// Resets one or more fence objects.
467    ///
468    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkResetFences.html
469    //
470    // *PFN_vkResetFences)(VkDevice device, uint32_t fenceCount, const
471    // VkFence* pFences);
472    pub unsafe fn reset_fences(&self, fences: &[FenceHandle]) -> VdResult<()> {
473        let result = self.proc_addr_loader().vk.vkResetFences(self.handle().to_raw(),
474            fences.len() as u32, fences.as_ptr() as *const vks::VkFence);
475        error::check(result, "vkResetFences", ())
476    }
477
478    /// Returns the status of a fence.
479    ///
480    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkGetFenceStatus.html
481    //
482    // *PFN_vkGetFenceStatus)(VkDevice device, VkFence fence);
483    pub unsafe fn get_fence_status<F>(&self, fence: F) -> VdResult<CallResult>
484            where F: Handle<Target=FenceHandle> {
485        let result = self.proc_addr_loader().vk.vkGetFenceStatus(self.handle().to_raw(), fence.handle().to_raw());
486        error::check(result, "vkGetFenceStatus", CallResult::from(result))
487    }
488
489    /// Waits for one or more fences to become signaled.
490    ///
491    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkWaitForFences.html
492    //
493    // *PFN_vkWaitForFences)(VkDevice device, uint32_t fenceCount, const
494    // VkFence* pFences, VkBool32 waitAll, uint64_t timeout);
495    pub unsafe fn wait_for_fences(&self, fences: &[FenceHandle], wait_all: bool, timeout: u64)
496            -> VdResult<()> {
497        let result = self.proc_addr_loader().vk.vkWaitForFences(self.handle().to_raw(),
498            fences.len() as u32, fences.as_ptr() as *const vks::VkFence,
499            wait_all as vks::VkBool32, timeout);
500        error::check(result, "vkWaitForFences", ())
501    }
502
503    /// Creates a new queue semaphore object.
504    ///
505    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCreateSemaphore.html
506    //
507    // *PFN_vkCreateSemaphore)(VkDevice device, const VkSemaphoreCreateInfo*
508    // pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore*
509    // pSemaphore);
510    pub unsafe fn create_semaphore(&self, create_info: &SemaphoreCreateInfo,
511            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<SemaphoreHandle> {
512        let allocator = allocator.unwrap_or(ptr::null());
513        let mut handle = 0;
514        let result = self.proc_addr_loader().vk.vkCreateSemaphore(self.handle().to_raw(),
515            create_info.as_raw(), allocator, &mut handle);
516        error::check(result, "vkCreateSemaphore", SemaphoreHandle(handle))
517    }
518
519    /// Destroys a semaphore object.
520    ///
521    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkDestroySemaphore.html
522    //
523    // *PFN_vkDestroySemaphore)(VkDevice device, VkSemaphore semaphore, const
524    // VkAllocationCallbacks* pAllocator);
525    pub unsafe fn destroy_semaphore(&self, shader_module: SemaphoreHandle,
526            allocator: Option<*const vks::VkAllocationCallbacks>) {
527        let allocator = allocator.unwrap_or(ptr::null());
528        self.proc_addr_loader().vk.vkDestroySemaphore(self.handle().to_raw(),
529            shader_module.to_raw(), allocator);
530    }
531
532    /// Creates a new event object.
533    ///
534    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCreateEvent.html
535    //
536    // *PFN_vkCreateEvent)(VkDevice device, const VkEventCreateInfo*
537    // pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent);
538    pub unsafe fn create_event(&self, create_info: &EventCreateInfo,
539            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<EventHandle> {
540        let allocator = allocator.unwrap_or(ptr::null());
541        let mut handle = 0;
542        let result = self.proc_addr_loader().vk.vkCreateEvent(self.handle().to_raw(),
543            create_info.as_raw(), allocator, &mut handle);
544        error::check(result, "vkCreateEvent", EventHandle(handle))
545    }
546
547    /// Destroys an event object.
548    ///
549    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkDestroyEvent.html
550    //
551    // *PFN_vkDestroyEvent)(VkDevice device, VkEvent event, const
552    // VkAllocationCallbacks* pAllocator);
553    pub unsafe fn destroy_event(&self, event: EventHandle,
554            allocator: Option<*const vks::VkAllocationCallbacks>) {
555        let allocator = allocator.unwrap_or(ptr::null());
556        self.proc_addr_loader().vk.vkDestroyEvent(self.handle().to_raw(),
557            event.to_raw(), allocator);
558    }
559
560    /// Retrieves the status of an event object.
561    ///
562    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkGetEventStatus.html
563    //
564    // *PFN_vkGetEventStatus)(VkDevice device, VkEvent event);
565    pub unsafe fn get_event_status<E>(&self, event: E) -> VdResult<CallResult>
566            where E: Handle<Target=EventHandle> {
567        let result = self.proc_addr_loader().vk.vkGetEventStatus(self.handle().to_raw(),
568            event.handle().to_raw());
569        error::check(result, "vkGetEventStatus", CallResult::from(result))
570    }
571
572    /// Sets an event to signaled state.
573    ///
574    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkSetEvent.html
575    //
576    // *PFN_vkSetEvent)(VkDevice device, VkEvent event);
577    pub unsafe fn set_event<E>(&self, event: E) -> VdResult<()>
578            where E: Handle<Target=EventHandle> {
579        let result = self.proc_addr_loader().vk.vkSetEvent(self.handle().to_raw(),
580            event.handle().to_raw());
581        error::check(result, "vkSetEvent", ())
582    }
583
584    /// Resets an event to non-signaled state.
585    ///
586    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkResetEvent.html
587    //
588    // *PFN_vkResetEvent)(VkDevice device, VkEvent event);
589    pub unsafe fn reset_event<E>(&self, event: E) -> VdResult<()>
590            where E: Handle<Target=EventHandle> {
591        let result = self.proc_addr_loader().vk.vkResetEvent(self.handle().to_raw(),
592            event.handle().to_raw());
593        error::check(result, "vkResetEvent", ())
594    }
595
596    /// Creates a new query pool object.
597    ///
598    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCreateQueryPool.html
599    //
600    // *PFN_vkCreateQueryPool)(VkDevice device, const VkQueryPoolCreateInfo*
601    // pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool*
602    // pQueryPool);
603    pub unsafe fn create_query_pool(&self, create_info: &QueryPoolCreateInfo,
604            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<QueryPoolHandle> {
605        let allocator = allocator.unwrap_or(ptr::null());
606        let mut handle = 0;
607        let result = self.proc_addr_loader().vk.vkCreateQueryPool(self.handle().to_raw(),
608            create_info.as_raw(), allocator, &mut handle);
609        error::check(result, "vkCreateQueryPool", QueryPoolHandle(handle))
610    }
611
612    /// Destroys a query pool object
613    ///
614    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkDestroyQueryPool.html
615    //
616    // *PFN_vkDestroyQueryPool)(VkDevice device, VkQueryPool queryPool, const
617    // VkAllocationCallbacks* pAllocator);
618    pub unsafe fn destroy_query_pool(&self, query_pool: QueryPoolHandle,
619            allocator: Option<*const vks::VkAllocationCallbacks>) {
620        let allocator = allocator.unwrap_or(ptr::null());
621        self.proc_addr_loader().vk.vkDestroyQueryPool(self.handle().to_raw(),
622            query_pool.to_raw(), allocator);
623    }
624
625    /// Copies results of queries in a query pool to a host memory region
626    ///
627    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkGetQueryPoolResults.html
628    //
629    // *PFN_vkGetQueryPoolResults)(VkDevice device, VkQueryPool queryPool,
630    // uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData,
631    // VkDeviceSize stride, VkQueryResultFlags flags);
632    pub unsafe fn get_query_pool_results<Q>(&self, query_pool: Q, first_query: u32, query_count: u32,
633            data_size: usize, data: *mut c_void, stride: DeviceSize, flags: QueryResultFlags)
634            -> VdResult<()>
635            where Q: Handle<Target=QueryPoolHandle> {
636        let result = self.proc_addr_loader().vk.vkGetQueryPoolResults(self.handle().to_raw(),
637            query_pool.handle().to_raw(), first_query, query_count, data_size, data, stride,
638            flags.bits());
639        error::check(result, "vkGetQueryPoolResults", ())
640    }
641
642    /// Creates a new buffer object
643    ///
644    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCreateBuffer.html
645    //
646    // *PFN_vkCreateBuffer)(VkDevice device, const VkBufferCreateInfo*
647    // pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer*
648    // pBuffer);
649    pub unsafe fn create_buffer(&self, create_info: &BufferCreateInfo,
650            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<BufferHandle> {
651        let allocator = allocator.unwrap_or(ptr::null());
652        let mut handle = 0;
653        let result = self.proc_addr_loader().vk.vkCreateBuffer(self.handle().to_raw(),
654            create_info.as_raw(), allocator, &mut handle);
655        error::check(result, "vkCreateBuffer", BufferHandle(handle))
656    }
657
658    /// Destroys a buffer object.
659    ///
660    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkDestroyBuffer.html
661    //
662    // *PFN_vkDestroyBuffer)(VkDevice device, VkBuffer buffer, const
663    // VkAllocationCallbacks* pAllocator);
664    pub unsafe fn destroy_buffer(&self, buffer: BufferHandle,
665            allocator: Option<*const vks::VkAllocationCallbacks>) {
666        let allocator = allocator.unwrap_or(ptr::null());
667        self.proc_addr_loader().vk.vkDestroyBuffer(self.handle().to_raw(),
668            buffer.to_raw(), allocator);
669    }
670
671    /// Creates a new buffer view object.
672    ///
673    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCreateBufferView.html
674    //
675    // *PFN_vkCreateBufferView)(VkDevice device, const VkBufferViewCreateInfo*
676    // pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView*
677    // pView);
678    pub unsafe fn create_buffer_view(&self, create_info: &BufferViewCreateInfo,
679            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<BufferViewHandle> {
680        let allocator = allocator.unwrap_or(ptr::null());
681        let mut handle = 0;
682        let result = self.proc_addr_loader().vk.vkCreateBufferView(self.handle().to_raw(),
683            create_info.as_raw(), allocator, &mut handle);
684        error::check(result, "vkCreateBufferView", BufferViewHandle(handle))
685    }
686
687    /// Destroys a buffer view object.
688    ///
689    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkDestroyBufferView.html
690    //
691    // *PFN_vkDestroyBufferView)(VkDevice device, VkBufferView bufferView,
692    // const VkAllocationCallbacks* pAllocator);
693    pub unsafe fn destroy_buffer_view(&self, buffer_view: BufferViewHandle,
694            allocator: Option<*const vks::VkAllocationCallbacks>) {
695        let allocator = allocator.unwrap_or(ptr::null());
696        self.proc_addr_loader().vk.vkDestroyBufferView(self.handle().to_raw(),
697            buffer_view.to_raw(), allocator);
698    }
699
700    /// Creates a new image object
701    ///
702    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCreateImage.html
703    //
704    // *PFN_vkCreateImage)(VkDevice device, const VkImageCreateInfo*
705    // pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage);
706    pub unsafe fn create_image(&self, create_info: &ImageCreateInfo,
707            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<ImageHandle> {
708        let allocator = allocator.unwrap_or(ptr::null());
709        let mut handle = 0;
710        let result = self.proc_addr_loader().vk.vkCreateImage(self.handle().to_raw(),
711            create_info.as_raw(), allocator, &mut handle);
712        error::check(result, "vkCreateImage", ImageHandle(handle))
713    }
714
715    /// Destroys an image object.
716    ///
717    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkDestroyImage.html
718    //
719    // *PFN_vkDestroyImage)(VkDevice device, VkImage image, const
720    // VkAllocationCallbacks* pAllocator);
721    pub unsafe fn destroy_image(&self, image: ImageHandle,
722            allocator: Option<*const vks::VkAllocationCallbacks>) {
723        let allocator = allocator.unwrap_or(ptr::null());
724        self.proc_addr_loader().vk.vkDestroyImage(self.handle().to_raw(),
725            image.to_raw(), allocator);
726    }
727
728    /// Retrieves information about an image subresource.
729    ///
730    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkGetImageSubresourceLayout.html
731    //
732    // *PFN_vkGetImageSubresourceLayout)(VkDevice device, VkImage image, const
733    // VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout);
734    pub unsafe fn get_image_subresource_layout<I>(&self, image: I, subresource: &ImageSubresource)
735            -> SubresourceLayout
736            where I: Handle<Target=ImageHandle> {
737        let mut layout = mem::uninitialized();
738        self.proc_addr_loader().vk.vkGetImageSubresourceLayout(self.handle().to_raw(),
739            image.handle().to_raw(), subresource.as_raw(),
740            &mut layout as *mut _ as *mut vks::VkSubresourceLayout);
741        layout
742    }
743
744    /// Creates an image view from an existing image.
745    ///
746    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCreateImageView.html
747    //
748    // *PFN_vkCreateImageView)(VkDevice device, const VkImageViewCreateInfo*
749    // pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView*
750    // pView);
751    pub unsafe fn create_image_view(&self, create_info: &ImageViewCreateInfo,
752            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<ImageViewHandle> {
753        let allocator = allocator.unwrap_or(ptr::null());
754        let mut handle = 0;
755        let result = self.proc_addr_loader().vk.vkCreateImageView(self.handle().to_raw(),
756            create_info.as_raw(), allocator, &mut handle);
757        error::check(result, "vkCreateImageView", ImageViewHandle(handle))
758    }
759
760    /// Destroys an image view object.
761    ///
762    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkDestroyImageView.html
763    //
764    // *PFN_vkDestroyImageView)(VkDevice device, VkImageView imageView, const
765    // VkAllocationCallbacks* pAllocator);
766    pub unsafe fn destroy_image_view(&self, image_view: ImageViewHandle,
767            allocator: Option<*const vks::VkAllocationCallbacks>) {
768        let allocator = allocator.unwrap_or(ptr::null());
769        self.proc_addr_loader().vk.vkDestroyImageView(self.handle().to_raw(),
770            image_view.to_raw(), allocator);
771    }
772
773    /// Creates a new shader module object.
774    ///
775    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCreateShaderModule.html
776    //
777    // *PFN_vkCreateShaderModule)(VkDevice device, const
778    // VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks*
779    // pAllocator, VkShaderModule* pShaderModule);
780    pub unsafe fn create_shader_module(&self, create_info: &ShaderModuleCreateInfo,
781            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<ShaderModuleHandle> {
782        let allocator = allocator.unwrap_or(ptr::null());
783        let mut handle = 0;
784        let result = self.proc_addr_loader().vk.vkCreateShaderModule(self.handle().to_raw(),
785            create_info.as_raw(), allocator, &mut handle);
786        error::check(result, "vkCreateShaderModule", ShaderModuleHandle(handle))
787    }
788
789    /// Destroys a shader module module.
790    ///
791    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkDestroyShaderModule.html
792    //
793    // *PFN_vkDestroyShaderModule)(VkDevice device, VkShaderModule
794    // shaderModule, const VkAllocationCallbacks* pAllocator);
795    pub unsafe fn destroy_shader_module(&self, shader_module: ShaderModuleHandle,
796            allocator: Option<*const vks::VkAllocationCallbacks>) {
797        let allocator = allocator.unwrap_or(ptr::null());
798        self.proc_addr_loader().vk.vkDestroyShaderModule(self.handle().to_raw(),
799            shader_module.to_raw(), allocator);
800    }
801
802    /// Creates a new pipeline cache
803    ///
804    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCreatePipelineCache.html
805    //
806    // *PFN_vkCreatePipelineCache)(VkDevice device, const
807    // VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks*
808    // pAllocator, VkPipelineCache* pPipelineCache);
809    pub unsafe fn create_pipeline_cache(&self, create_info: &PipelineCacheCreateInfo,
810            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<PipelineCacheHandle> {
811        let allocator = allocator.unwrap_or(ptr::null());
812        let mut handle = 0;
813        let result = self.proc_addr_loader().vk.vkCreatePipelineCache(self.handle().to_raw(),
814            create_info.as_raw(), allocator, &mut handle);
815        error::check(result, "vkCreatePipelineCache", PipelineCacheHandle(handle))
816    }
817
818    /// Destroys a pipeline cache object.
819    ///
820    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkDestroyPipelineCache.html
821    //
822    // *PFN_vkDestroyPipelineCache)(VkDevice device, VkPipelineCache
823    // pipelineCache, const VkAllocationCallbacks* pAllocator);
824    pub unsafe fn destroy_pipeline_cache(&self, pipeline_cache: PipelineCacheHandle,
825            allocator: Option<*const vks::VkAllocationCallbacks>) {
826        let allocator = allocator.unwrap_or(ptr::null());
827        self.proc_addr_loader().vk.vkDestroyPipelineCache(self.handle().to_raw(),
828            pipeline_cache.to_raw(), allocator);
829    }
830
831    /// Gets the data store from a pipeline cache.
832    ///
833    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkGetPipelineCacheData.html
834    //
835    // *PFN_vkGetPipelineCacheData)(VkDevice device, VkPipelineCache
836    // pipelineCache, size_t* pDataSize, void* pData);
837    pub unsafe fn get_pipeline_cache_data<Pc>(&self, pipeline_cache: Pc, data_size: *mut usize,
838            data: *mut c_void) -> VdResult<()>
839            where Pc: Handle<Target=PipelineCacheHandle> {
840        let result = self.proc_addr_loader().vk.vkGetPipelineCacheData(self.handle().to_raw(),
841            pipeline_cache.handle().to_raw(), data_size, data);
842        error::check(result, "vkGetPipelineCacheData", ())
843    }
844
845    /// Combines the data stores of pipeline caches.
846    ///
847    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkMergePipelineCaches.html
848    //
849    // *PFN_vkMergePipelineCaches)(VkDevice device, VkPipelineCache dstCache,
850    // uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches);
851    pub unsafe fn merge_pipeline_caches<Pc>(&self, dst_cache: Pc, src_caches: &[PipelineCacheHandle])
852            -> VdResult<()>
853            where Pc: Handle<Target=PipelineCacheHandle> {
854        let result = self.proc_addr_loader().vk.vkMergePipelineCaches(self.handle().to_raw(),
855            dst_cache.handle().to_raw(), src_caches.len() as u32,
856            src_caches.as_ptr() as *const vks::VkPipelineCache);
857        error::check(result, "vkMergePipelineCaches", ())
858    }
859
860    /// Creates graphics pipelines.
861    ///
862    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCreateGraphicsPipelines.html
863    //
864    // *PFN_vkCreateGraphicsPipelines)(VkDevice device, VkPipelineCache
865    // pipelineCache, uint32_t createInfoCount, const
866    // VkGraphicsPipelineCreateInfo* pCreateInfos, const
867    // VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
868    pub unsafe fn create_graphics_pipelines(&self, pipeline_cache: Option<PipelineCacheHandle>,
869            create_infos: &[GraphicsPipelineCreateInfo],
870            allocator: Option<*const vks::VkAllocationCallbacks>)
871            -> VdResult<SmallVec<[PipelineHandle; 4]>> {
872        let allocator = allocator.unwrap_or(ptr::null());
873        let pipeline_cache = pipeline_cache.map(|pc| pc.to_raw()).unwrap_or(0);
874        let mut pipelines = SmallVec::<[PipelineHandle; 4]>::new();
875        pipelines.reserve_exact(create_infos.len());
876        pipelines.set_len(create_infos.len());
877        let result = self.proc_addr_loader().vk.vkCreateGraphicsPipelines(self.handle().to_raw(),
878            pipeline_cache, create_infos.len() as u32,
879            create_infos.as_ptr() as *const vks::VkGraphicsPipelineCreateInfo,
880            allocator,
881            pipelines.as_mut_ptr() as *mut vks::VkPipeline);
882        error::check(result, "vkCreateGraphicsPipelines", pipelines)
883    }
884
885    /// Creates a new compute pipeline object.
886    ///
887    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCreateComputePipelines.html
888    //
889    // *PFN_vkCreateComputePipelines)(VkDevice device, VkPipelineCache
890    // pipelineCache, uint32_t createInfoCount, const
891    // VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks*
892    // pAllocator, VkPipeline* pPipelines);
893    pub unsafe fn create_compute_pipelines(&self, pipeline_cache: Option<PipelineCacheHandle>,
894            create_infos: &[ComputePipelineCreateInfo],
895            allocator: Option<*const vks::VkAllocationCallbacks>)
896            -> VdResult<SmallVec<[PipelineHandle; 4]>> {
897        let allocator = allocator.unwrap_or(ptr::null());
898        let pipeline_cache = pipeline_cache.map(|pc| pc.to_raw()).unwrap_or(0);
899        let mut pipelines = SmallVec::<[PipelineHandle; 4]>::new();
900        pipelines.reserve_exact(create_infos.len());
901        pipelines.set_len(create_infos.len());
902        let result = self.proc_addr_loader().vk.vkCreateComputePipelines(self.handle().to_raw(),
903            pipeline_cache, create_infos.len() as u32,
904            create_infos.as_ptr() as *const vks::VkComputePipelineCreateInfo,
905            allocator,
906            pipelines.as_mut_ptr() as *mut vks::VkPipeline);
907        error::check(result, "vkCreateComputePipelines", pipelines)
908    }
909
910    /// Destroys a pipeline object.
911    ///
912    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkDestroyPipeline.html
913    //
914    // *PFN_vkDestroyPipeline)(VkDevice device, VkPipeline pipeline, const
915    // VkAllocationCallbacks* pAllocator);
916    pub unsafe fn destroy_pipeline(&self, pipeline: PipelineHandle,
917            allocator: Option<*const vks::VkAllocationCallbacks>) {
918        let allocator = allocator.unwrap_or(ptr::null());
919        self.proc_addr_loader().vk.vkDestroyPipeline(self.handle().to_raw(),
920            pipeline.to_raw(), allocator);
921    }
922
923    /// Creates a new pipeline layout object.
924    ///
925    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCreatePipelineLayout.html
926    //
927    // *PFN_vkCreatePipelineLayout)(VkDevice device, const
928    // VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks*
929    // pAllocator, VkPipelineLayout* pPipelineLayout);
930    pub unsafe fn create_pipeline_layout(&self, create_info: &PipelineLayoutCreateInfo,
931            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<PipelineLayoutHandle> {
932        let allocator = allocator.unwrap_or(ptr::null());
933        let mut handle = 0;
934        let result = self.proc_addr_loader().vk.vkCreatePipelineLayout(self.handle().to_raw(),
935            create_info.as_raw(), allocator, &mut handle);
936        error::check(result, "vkCreatePipelineLayout", PipelineLayoutHandle(handle))
937    }
938
939    /// Destroys a pipeline layout object.
940    ///
941    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkDestroyPipelineLayout.html
942    //
943    // *PFN_vkDestroyPipelineLayout)(VkDevice device, VkPipelineLayout
944    // pipelineLayout, const VkAllocationCallbacks* pAllocator);
945    pub unsafe fn destroy_pipeline_layout(&self, pipeline_layout: PipelineLayoutHandle,
946            allocator: Option<*const vks::VkAllocationCallbacks>) {
947        let allocator = allocator.unwrap_or(ptr::null());
948        self.proc_addr_loader().vk.vkDestroyPipelineLayout(self.handle().to_raw(),
949            pipeline_layout.to_raw(), allocator);
950    }
951
952    /// Creates a new sampler object.
953    ///
954    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCreateSampler.html
955    //
956    // *PFN_vkCreateSampler)(VkDevice device, const VkSamplerCreateInfo*
957    // pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler*
958    // pSampler);
959    pub unsafe fn create_sampler(&self, create_info: &SamplerCreateInfo,
960            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<SamplerHandle> {
961        let allocator = allocator.unwrap_or(ptr::null());
962        let mut handle = 0;
963        let result = self.proc_addr_loader().vk.vkCreateSampler(self.handle().to_raw(),
964            create_info.as_raw(), allocator, &mut handle);
965        error::check(result, "vkCreateSampler", SamplerHandle(handle))
966    }
967
968    /// Destroys a sampler object.
969    ///
970    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkDestroySampler.html
971    //
972    // *PFN_vkDestroySampler)(VkDevice device, VkSampler sampler, const
973    // VkAllocationCallbacks* pAllocator);
974    pub unsafe fn destroy_sampler(&self, sampler: SamplerHandle,
975            allocator: Option<*const vks::VkAllocationCallbacks>) {
976        let allocator = allocator.unwrap_or(ptr::null());
977        self.proc_addr_loader().vk.vkDestroySampler(self.handle().to_raw(),
978            sampler.to_raw(), allocator);
979    }
980
981    /// Creates a new descriptor set layout.
982    ///
983    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCreateDescriptorSetLayout.html
984    //
985    // *PFN_vkCreateDescriptorSetLayout)(VkDevice device, const
986    // VkDescriptorSetLayoutCreateInfo* pCreateInfo, const
987    // VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout);
988    pub unsafe fn create_descriptor_set_layout(&self, create_info: &DescriptorSetLayoutCreateInfo,
989            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<DescriptorSetLayoutHandle> {
990        let allocator = allocator.unwrap_or(ptr::null());
991        let mut handle = 0;
992        let result = self.proc_addr_loader().vk.vkCreateDescriptorSetLayout(self.handle().to_raw(),
993            create_info.as_raw(), allocator, &mut handle);
994        error::check(result, "vkCreateDescriptorSetLayout", DescriptorSetLayoutHandle(handle))
995    }
996
997    /// Destroys a descriptor set layout object.
998    ///
999    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkDestroyDescriptorSetLayout.html
1000    //
1001    // *PFN_vkDestroyDescriptorSetLayout)(VkDevice device,
1002    // VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks*
1003    // pAllocator);
1004    pub unsafe fn destroy_descriptor_set_layout(&self, descriptor_set_layout: DescriptorSetLayoutHandle,
1005            allocator: Option<*const vks::VkAllocationCallbacks>) {
1006        let allocator = allocator.unwrap_or(ptr::null());
1007        self.proc_addr_loader().vk.vkDestroyDescriptorSetLayout(self.handle().to_raw(),
1008            descriptor_set_layout.to_raw(), allocator);
1009    }
1010
1011    /// Creates a descriptor pool object.
1012    ///
1013    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCreateDescriptorPool.html
1014    //
1015    // *PFN_vkCreateDescriptorPool)(VkDevice device, const
1016    // VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks*
1017    // pAllocator, VkDescriptorPool* pDescriptorPool);
1018    pub unsafe fn create_descriptor_pool(&self, create_info: &DescriptorPoolCreateInfo,
1019            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<DescriptorPoolHandle> {
1020        let allocator = allocator.unwrap_or(ptr::null());
1021        let mut handle = 0;
1022        let result = self.proc_addr_loader().vk.vkCreateDescriptorPool(self.handle().to_raw(),
1023            create_info.as_raw(), allocator, &mut handle);
1024        error::check(result, "vkCreateDescriptorPool", DescriptorPoolHandle(handle))
1025    }
1026
1027    /// Destroys a descriptor pool object.
1028    ///
1029    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkDestroyDescriptorPool.html
1030    //
1031    // *PFN_vkDestroyDescriptorPool)(VkDevice device, VkDescriptorPool
1032    // descriptorPool, const VkAllocationCallbacks* pAllocator);
1033    pub unsafe fn destroy_descriptor_pool(&self, descriptor_pool: DescriptorPoolHandle,
1034            allocator: Option<*const vks::VkAllocationCallbacks>) {
1035        let allocator = allocator.unwrap_or(ptr::null());
1036        self.proc_addr_loader().vk.vkDestroyDescriptorPool(self.handle().to_raw(),
1037            descriptor_pool.to_raw(), allocator);
1038    }
1039
1040    /// Resets a descriptor pool object.
1041    ///
1042    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkResetDescriptorPool.html
1043    //
1044    // *PFN_vkResetDescriptorPool)(VkDevice device, VkDescriptorPool
1045    // descriptorPool, VkDescriptorPoolResetFlags flags);
1046    pub unsafe fn reset_descriptor_pool<Dp>(&self, descriptor_pool: Dp,
1047            flags: DescriptorPoolResetFlags) -> VdResult<()>
1048            where Dp: Handle<Target=DescriptorPoolHandle> {
1049        let result = self.proc_addr_loader().vk.vkResetDescriptorPool(self.handle().to_raw(),
1050            descriptor_pool.handle().to_raw(), flags.bits());
1051        error::check(result, "vkResetDescriptorPool", ())
1052    }
1053
1054    /// Allocates one or more descriptor sets.
1055    ///
1056    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkAllocateDescriptorSets.html
1057    //
1058    // *PFN_vkAllocateDescriptorSets)(VkDevice device, const
1059    // VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet*
1060    // pDescriptorSets);
1061    pub unsafe fn allocate_descriptor_sets(&self, allocate_info: &DescriptorSetAllocateInfo)
1062            -> VdResult<SmallVec<[DescriptorSetHandle; 8]>> {
1063        let mut descriptor_sets = SmallVec::<[DescriptorSetHandle; 8]>::new();
1064        let count = allocate_info.set_layouts().len();
1065        descriptor_sets.reserve_exact(count);
1066        descriptor_sets.set_len(count);
1067        let result = self.proc_addr_loader().vk.vkAllocateDescriptorSets(
1068            self.handle().to_raw(), allocate_info.as_raw(),
1069            descriptor_sets.as_mut_ptr() as *mut vks::VkDescriptorSet);
1070        error::check(result, "vkAllocateDescriptorSets", descriptor_sets)
1071    }
1072
1073    /// Frees one or more descriptor sets.
1074    ///
1075    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkFreeDescriptorSets.html
1076    //
1077    // *PFN_vkFreeDescriptorSets)(VkDevice device, VkDescriptorPool
1078    // descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet*
1079    // pDescriptorSets);
1080    pub unsafe fn free_descriptor_sets<Dp>(&self, descriptor_pool: Dp,
1081            descriptor_sets: &[DescriptorSetHandle]) -> VdResult<()>
1082            where Dp: Handle<Target=DescriptorPoolHandle> {
1083        let result = self.proc_addr_loader().vk.vkFreeDescriptorSets(self.handle().to_raw(),
1084            descriptor_pool.handle().to_raw(), descriptor_sets.len() as u32,
1085            descriptor_sets.as_ptr() as *const vks::VkDescriptorSet);
1086        error::check(result, "vkFreeDescriptorSets", ())
1087    }
1088
1089    /// Updates the contents of a descriptor set object.
1090    ///
1091    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkUpdateDescriptorSets.html
1092    //
1093    // *PFN_vkUpdateDescriptorSets)(VkDevice device, uint32_t
1094    //descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites,
1095    //uint32_t descriptorCopyCount, const VkCopyDescriptorSet*
1096    //pDescriptorCopies); / Updates descriptor sets.
1097    pub fn update_descriptor_sets(&self, descriptor_writes: &[WriteDescriptorSet],
1098            descriptor_copies: &[CopyDescriptorSet]) {
1099        unsafe {
1100            self.proc_addr_loader().vk.vkUpdateDescriptorSets(self.handle().0,
1101                descriptor_writes.len() as u32,
1102                descriptor_writes.as_ptr() as *const vks::VkWriteDescriptorSet,
1103                descriptor_copies.len() as u32,
1104                descriptor_copies.as_ptr() as *const vks::VkCopyDescriptorSet);
1105        }
1106    }
1107
1108    /// Creates a new framebuffer object.
1109    ///
1110    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCreateFramebuffer.html
1111    //
1112    // *PFN_vkCreateFramebuffer)(VkDevice device, const
1113    // VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks*
1114    // pAllocator, VkFramebuffer* pFramebuffer);
1115    pub unsafe fn create_framebuffer(&self, create_info: &FramebufferCreateInfo,
1116            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<FramebufferHandle> {
1117        let allocator = allocator.unwrap_or(ptr::null());
1118        let mut handle = 0;
1119        let result = self.proc_addr_loader().vk.vkCreateFramebuffer(self.handle().to_raw(),
1120            create_info.as_raw(), allocator, &mut handle);
1121        error::check(result, "vkCreateFramebuffer", FramebufferHandle(handle))
1122    }
1123
1124    /// Destroys a framebuffer object.
1125    ///
1126    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkDestroyFramebuffer.html
1127    //
1128    // *PFN_vkDestroyFramebuffer)(VkDevice device, VkFramebuffer framebuffer,
1129    // const VkAllocationCallbacks* pAllocator);
1130    pub unsafe fn destroy_framebuffer(&self, framebuffer: FramebufferHandle,
1131            allocator: Option<*const vks::VkAllocationCallbacks>) {
1132        let allocator = allocator.unwrap_or(ptr::null());
1133        self.proc_addr_loader().vk.vkDestroyFramebuffer(self.handle().to_raw(),
1134            framebuffer.to_raw(), allocator);
1135    }
1136
1137    /// Creates a new render pass object.
1138    ///
1139    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCreateRenderPass.html
1140    //
1141    // *PFN_vkCreateRenderPass)(VkDevice device, const VkRenderPassCreateInfo*
1142    // pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass*
1143    // pRenderPass);
1144    pub unsafe fn create_render_pass(&self, create_info: &RenderPassCreateInfo,
1145            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<RenderPassHandle> {
1146        let allocator = allocator.unwrap_or(ptr::null());
1147        let mut handle = 0;
1148        let result = self.proc_addr_loader().vk.vkCreateRenderPass(self.handle().to_raw(),
1149            create_info.as_raw(), allocator, &mut handle);
1150        error::check(result, "vkCreateRenderPass", RenderPassHandle(handle))
1151    }
1152
1153    /// Destroys a render pass object.
1154    ///
1155    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkDestroyRenderPass.html
1156    //
1157    // *PFN_vkDestroyRenderPass)(VkDevice device, VkRenderPass renderPass,
1158    // const VkAllocationCallbacks* pAllocator);
1159    pub unsafe fn destroy_render_pass(&self, render_pass: RenderPassHandle,
1160            allocator: Option<*const vks::VkAllocationCallbacks>) {
1161        let allocator = allocator.unwrap_or(ptr::null());
1162        self.proc_addr_loader().vk.vkDestroyRenderPass(self.handle().to_raw(),
1163            render_pass.to_raw(), allocator);
1164    }
1165
1166    /// Returns the granularity for optimal render area.
1167    ///
1168    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkGetRenderAreaGranularity.html
1169    //
1170    // *PFN_vkGetRenderAreaGranularity)(VkDevice device, VkRenderPass
1171    // renderPass, VkExtent2D* pGranularity);
1172    pub unsafe fn get_render_area_granularity<Rp>(&self, render_pass: Rp)
1173            -> Extent2d
1174            where Rp: Handle<Target=RenderPassHandle> {
1175        let mut granularity = mem::uninitialized();
1176        self.proc_addr_loader().vk.vkGetRenderAreaGranularity(self.handle().to_raw(),
1177            render_pass.handle().to_raw(), &mut granularity as *mut _ as *mut vks::VkExtent2D);
1178        granularity
1179    }
1180
1181    /// Creates a new command pool object.
1182    ///
1183    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCreateCommandPool.html
1184    //
1185    // *PFN_vkCreateCommandPool)(VkDevice device, const
1186    // VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks*
1187    // pAllocator, VkCommandPool* pCommandPool);
1188    pub unsafe fn create_command_pool(&self, create_info: &CommandPoolCreateInfo,
1189            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<CommandPoolHandle> {
1190        let allocator = allocator.unwrap_or(ptr::null());
1191        let mut handle = 0;
1192        let result = self.proc_addr_loader().vk.vkCreateCommandPool(self.handle().to_raw(),
1193            create_info.as_raw(), allocator, &mut handle);
1194        error::check(result, "vkCreateCommandPool", CommandPoolHandle(handle))
1195    }
1196
1197    /// Destroys a command pool object.
1198    ///
1199    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkDestroyCommandPool.html
1200    //
1201    // *PFN_vkDestroyCommandPool)(VkDevice device, VkCommandPool commandPool,
1202    // const VkAllocationCallbacks* pAllocator);
1203    pub unsafe fn destroy_command_pool(&self, command_pool: CommandPoolHandle,
1204            allocator: Option<*const vks::VkAllocationCallbacks>) {
1205        let allocator = allocator.unwrap_or(ptr::null());
1206        self.proc_addr_loader().vk.vkDestroyCommandPool(self.handle().to_raw(),
1207            command_pool.to_raw(), allocator);
1208    }
1209
1210    /// Resets a command pool.
1211    ///
1212    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkResetCommandPool.html
1213    //
1214    // *PFN_vkResetCommandPool)(VkDevice device, VkCommandPool commandPool,
1215    // VkCommandPoolResetFlags flags);
1216    pub unsafe fn reset_command_pool<Cp>(&self, command_pool: Cp, flags: CommandPoolResetFlags)
1217            -> VdResult<()>
1218            where Cp: Handle<Target=CommandPoolHandle> {
1219        let result = self.proc_addr_loader().vk.vkResetCommandPool(self.handle().to_raw(),
1220            command_pool.handle().to_raw(), flags.bits());
1221        error::check(result, "vkResetCommandPool", ())
1222    }
1223
    /// Allocates command buffers from an existing command pool.
    ///
    /// Returns `allocate_info.command_buffer_count()` handles on success.
    ///
    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkAllocateCommandBuffers.html
    //
    // *PFN_vkAllocateCommandBuffers)(VkDevice device, const
    // VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer*
    // pCommandBuffers);
    pub unsafe fn allocate_command_buffers(&self, allocate_info: &CommandBufferAllocateInfo)
            -> VdResult<SmallVec<[CommandBufferHandle; 16]>> {
        // NOTE(review): `set_len` exposes uninitialized elements until
        // `vkAllocateCommandBuffers` fills them below; this relies on the
        // values never being read beforehand. Consider zero-initializing
        // (as done for other handle vectors) — confirm the inner raw type
        // first, since VkCommandBuffer is a dispatchable (pointer) handle.
        let mut command_buffers: SmallVec<[CommandBufferHandle; 16]> = SmallVec::new();
        command_buffers.reserve_exact(allocate_info.command_buffer_count() as usize);
        command_buffers.set_len(allocate_info.command_buffer_count() as usize);
        let result = self.proc_addr_loader().vk.vkAllocateCommandBuffers(
            self.handle().to_raw(), allocate_info.as_raw(),
            command_buffers.as_mut_ptr() as *mut vks::VkCommandBuffer);
        error::check(result, "vkAllocateCommandBuffers", command_buffers)
    }
1241
1242    /// Frees command buffers.
1243    ///
1244    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkFreeCommandBuffers.html
1245    //
1246    // *PFN_vkFreeCommandBuffers)(VkDevice device, VkCommandPool commandPool,
1247    // uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers);
1248    pub unsafe fn free_command_buffers<Cp>(&self, command_pool: Cp, command_buffers: &[CommandBufferHandle])
1249            where Cp: Handle<Target=CommandPoolHandle> {
1250        self.proc_addr_loader().vk.vkFreeCommandBuffers(self.handle().to_raw(),
1251            command_pool.handle().to_raw(), command_buffers.len() as u32,
1252            command_buffers.as_ptr() as *const vks::VkCommandBuffer);
1253    }
1254
1255    /// Starts recording a command buffer.
1256    ///
1257    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkBeginCommandBuffer.html
1258    //
1259    // *PFN_vkBeginCommandBuffer)(VkCommandBuffer commandBuffer, const
1260    // VkCommandBufferBeginInfo* pBeginInfo);
1261    pub unsafe fn begin_command_buffer(&self, command_buffer: CommandBufferHandle,
1262            begin_info: &CommandBufferBeginInfo) -> VdResult<()> {
1263        let result = self.proc_addr_loader().vk.vkBeginCommandBuffer(command_buffer.to_raw(), begin_info.as_raw());
1264        error::check(result, "vkBeginCommandBuffer", ())
1265    }
1266
1267    /// Finishes recording a command buffer.
1268    ///
1269    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkEndCommandBuffer.html
1270    //
1271    // *PFN_vkEndCommandBuffer)(VkCommandBuffer commandBuffer);
1272    pub unsafe fn end_command_buffer(&self, command_buffer: CommandBufferHandle) -> VdResult<()> {
1273        let result = self.proc_addr_loader().vk.vkEndCommandBuffer(command_buffer.to_raw());
1274        error::check(result, "vkEndCommandBuffer", ())
1275    }
1276
1277    /// Resets a command buffer to the initial state.
1278    ///
1279    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkResetCommandBuffer.html
1280    //
1281    // *PFN_vkResetCommandBuffer)(VkCommandBuffer commandBuffer,
1282    // VkCommandBufferResetFlags flags);
1283    pub unsafe fn cmd_reset_command_buffer(&self, command_buffer: CommandBufferHandle,
1284            flags: CommandBufferResetFlags) -> VdResult<()> {
1285        let result = self.proc_addr_loader().vk.vkResetCommandBuffer(command_buffer.to_raw(), flags.bits());
1286        error::check(result, "vkResetCommandBuffer", ())
1287    }
1288
1289    /// Binds a pipeline object to a command buffer.
1290    ///
1291    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdBindPipeline.html
1292    //
1293    // *PFN_vkCmdBindPipeline)(VkCommandBuffer commandBuffer,
1294    // VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline);
1295    pub unsafe fn cmd_bind_pipeline(&self, command_buffer: CommandBufferHandle,
1296            pipeline_bind_point: PipelineBindPoint, pipeline: PipelineHandle) {
1297        self.proc_addr_loader().vk.vkCmdBindPipeline(command_buffer.to_raw(),
1298            pipeline_bind_point.into(), pipeline.handle().to_raw());
1299    }
1300
1301    /// Sets the viewport on a command buffer.
1302    ///
1303    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdSetViewport.html
1304    //
1305    // *PFN_vkCmdSetViewport)(VkCommandBuffer commandBuffer, uint32_t
1306    // firstViewport, uint32_t viewportCount, const VkViewport* pViewports);
1307    pub unsafe fn cmd_set_viewport(&self, command_buffer: CommandBufferHandle,
1308            first_viewport: u32, viewports: &[Viewport]) {
1309        self.proc_addr_loader().vk.vkCmdSetViewport(command_buffer.to_raw(),
1310            first_viewport, viewports.len() as u32, viewports.as_ptr() as *const vks::VkViewport);
1311    }
1312
1313    /// Sets the dynamic scissor rectangles on a command buffer.
1314    ///
1315    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdSetScissor.html
1316    //
1317    // *PFN_vkCmdSetScissor)(VkCommandBuffer commandBuffer, uint32_t
1318    // firstScissor, uint32_t scissorCount, const VkRect2D* pScissors);
1319    pub unsafe fn cmd_set_scissor(&self, command_buffer: CommandBufferHandle, first_scissor: u32,
1320            scissors: &[Rect2d]) {
1321        self.proc_addr_loader().vk.vkCmdSetScissor(command_buffer.to_raw(),
1322            first_scissor, scissors.len() as u32, scissors.as_ptr() as *const vks::VkRect2D);
1323    }
1324
1325    /// Sets the dynamic line width state.
1326    ///
1327    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdSetLineWidth.html
1328    //
1329    // *PFN_vkCmdSetLineWidth)(VkCommandBuffer commandBuffer, float
1330    // lineWidth);
1331    pub unsafe fn cmd_set_line_width(&self, command_buffer: CommandBufferHandle, line_width: f32) {
1332        self.proc_addr_loader().vk.vkCmdSetLineWidth(command_buffer.to_raw(), line_width);
1333    }
1334
1335
1336    /// Sets the depth bias dynamic state.
1337    ///
1338    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdSetDepthBias.html
1339    //
1340    // *PFN_vkCmdSetDepthBias)(VkCommandBuffer commandBuffer, float
1341    // depthBiasConstantFactor, float depthBiasClamp, float
1342    // depthBiasSlopeFactor);
1343    pub unsafe fn cmd_set_depth_bias(&self, command_buffer: CommandBufferHandle,
1344            depth_bias_constant_factor: f32, depth_bias_clamp: f32, depth_bias_slope_factor: f32) {
1345        self.proc_addr_loader().vk.vkCmdSetDepthBias(command_buffer.to_raw(),
1346            depth_bias_constant_factor, depth_bias_clamp, depth_bias_slope_factor);
1347    }
1348
1349    /// Sets the values of blend constants.
1350    ///
1351    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdSetBlendConstants.html
1352    //
1353    // *PFN_vkCmdSetBlendConstants)(VkCommandBuffer commandBuffer, const float
1354    // blendConstants[4]);
1355    pub unsafe fn cmd_set_blend_constants(&self, command_buffer: CommandBufferHandle,
1356            blend_constants: [f32; 4]) {
1357        self.proc_addr_loader().vk.vkCmdSetBlendConstants(command_buffer.to_raw(),
1358            blend_constants.as_ptr());
1359    }
1360
1361    /// Sets the depth bounds test values for a command buffer.
1362    ///
1363    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdSetDepthBounds.html
1364    //
1365    // *PFN_vkCmdSetDepthBounds)(VkCommandBuffer commandBuffer, float
1366    // minDepthBounds, float maxDepthBounds);
1367    pub unsafe fn cmd_set_depth_bounds(&self, command_buffer: CommandBufferHandle,
1368            min_depth_bounds: f32, max_depth_bounds: f32) {
1369        self.proc_addr_loader().vk.vkCmdSetDepthBounds(command_buffer.to_raw(),
1370            min_depth_bounds, max_depth_bounds);
1371    }
1372
1373    /// Sets the stencil compare mask dynamic state.
1374    ///
1375    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdSetStencilCompareMask.html
1376    //
1377    // *PFN_vkCmdSetStencilCompareMask)(VkCommandBuffer commandBuffer,
1378    // VkStencilFaceFlags faceMask, uint32_t compareMask);
1379    pub unsafe fn cmd_set_stencil_compare_mask(&self, command_buffer: CommandBufferHandle,
1380            face_mask: StencilFaceFlags, compare_mask: u32) {
1381        self.proc_addr_loader().vk.vkCmdSetStencilCompareMask(command_buffer.to_raw(),
1382            face_mask.bits(), compare_mask);
1383    }
1384
1385    /// Sets the stencil write mask dynamic state
1386    ///
1387    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdSetStencilWriteMask.html
1388    //
1389    // *PFN_vkCmdSetStencilWriteMask)(VkCommandBuffer commandBuffer,
1390    // VkStencilFaceFlags faceMask, uint32_t writeMask);
1391    pub unsafe fn cmd_set_stencil_write_mask(&self, command_buffer: CommandBufferHandle,
1392            face_mask: StencilFaceFlags, write_mask: u32) {
1393        self.proc_addr_loader().vk.vkCmdSetStencilWriteMask(command_buffer.to_raw(),
1394            face_mask.bits(), write_mask);
1395    }
1396
1397    /// Sets the stencil reference dynamic state.
1398    ///
1399    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdSetStencilReference.html
1400    //
1401    // *PFN_vkCmdSetStencilReference)(VkCommandBuffer commandBuffer,
1402    // VkStencilFaceFlags faceMask, uint32_t reference);
1403    pub unsafe fn cmd_set_stencil_reference(&self, command_buffer: CommandBufferHandle,
1404            face_mask: StencilFaceFlags, reference: u32) {
1405        self.proc_addr_loader().vk.vkCmdSetStencilReference(command_buffer.to_raw(),
1406            face_mask.bits(), reference);
1407    }
1408
1409    /// Binds descriptor sets to a command buffer.
1410    ///
1411    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdBindDescriptorSets.html
1412    //
1413    // *PFN_vkCmdBindDescriptorSets)(VkCommandBuffer commandBuffer,
1414    // VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
1415    // uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet*
1416    // pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t*
1417    // pDynamicOffsets);
1418    pub unsafe fn cmd_bind_descriptor_sets(&self, command_buffer: CommandBufferHandle,
1419            pipeline_bind_point: PipelineBindPoint, layout: PipelineLayoutHandle,
1420            first_set: u32, descriptor_sets: &[DescriptorSetHandle],
1421            dynamic_offsets: &[u32]) {
1422        self.proc_addr_loader().vk.vkCmdBindDescriptorSets(command_buffer.to_raw(), pipeline_bind_point.into(),
1423            layout.handle().to_raw(), first_set, descriptor_sets.len() as u32,
1424            descriptor_sets.as_ptr() as *const vks::VkDescriptorSet,
1425            dynamic_offsets.len() as u32, dynamic_offsets.as_ptr());
1426    }
1427
1428    /// Binds an index buffer to a command buffer.
1429    ///
1430    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdBindIndexBuffer.html
1431    //
1432    // *PFN_vkCmdBindIndexBuffer)(VkCommandBuffer commandBuffer, VkBuffer
1433    // buffer, VkDeviceSize offset, VkIndexType indexType);
1434    pub unsafe fn cmd_bind_index_buffer(&self, command_buffer: CommandBufferHandle, buffer: BufferHandle,
1435            offset: u64, index_type: IndexType) {
1436            self.proc_addr_loader().vk.vkCmdBindIndexBuffer(command_buffer.to_raw(),
1437                buffer.handle().to_raw(), offset, index_type.into());
1438    }
1439
1440    /// Binds vertex buffers to a command buffer.
1441    ///
1442    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdBindVertexBuffers.html
1443    //
1444    // *PFN_vkCmdBindVertexBuffers)(VkCommandBuffer commandBuffer, uint32_t
1445    // firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const
1446    // VkDeviceSize* pOffsets);
1447    pub unsafe fn cmd_bind_vertex_buffers(&self, command_buffer: CommandBufferHandle, first_binding: u32,
1448            buffers: &[BufferHandle], offsets: &[u64]) {
1449        self.proc_addr_loader().vk.vkCmdBindVertexBuffers(command_buffer.to_raw(),
1450            first_binding, buffers.len() as u32, buffers.as_ptr() as *const vks::VkBuffer,
1451            offsets.as_ptr());
1452    }
1453
1454    /// Draws primitives.
1455    ///
1456    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdDraw.html
1457    //
1458    // *PFN_vkCmdDraw)(VkCommandBuffer commandBuffer, uint32_t vertexCount,
1459    // uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance);
1460    pub unsafe fn cmd_draw(&self, command_buffer: CommandBufferHandle, vertex_count: u32, instance_count: u32,
1461            first_vertex: u32, first_instance: u32) {
1462        self.proc_addr_loader().vk.vkCmdDraw(command_buffer.to_raw(), vertex_count, instance_count,
1463            first_vertex, first_instance);
1464    }
1465
1466    /// Issues an indexed draw into a command buffer.
1467    ///
1468    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdDrawIndexed.html
1469    //
1470    // *PFN_vkCmdDrawIndexed)(VkCommandBuffer commandBuffer, uint32_t
1471    // indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t
1472    // vertexOffset, uint32_t firstInstance);
1473    pub unsafe fn cmd_draw_indexed(&self, command_buffer: CommandBufferHandle, index_count: u32,
1474            instance_count: u32, first_index: u32, vertex_offset: i32, first_instance: u32) {
1475        self.proc_addr_loader().vk.vkCmdDrawIndexed(command_buffer.to_raw(), index_count,
1476            instance_count, first_index, vertex_offset, first_instance);
1477    }
1478
1479    /// Issues an indirect draw into a command buffer.
1480    ///
1481    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdDrawIndirect.html
1482    //
1483    // *PFN_vkCmdDrawIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer,
1484    // VkDeviceSize offset, uint32_t drawCount, uint32_t stride);
1485    pub unsafe fn cmd_draw_indirect(&self, command_buffer: CommandBufferHandle, buffer: BufferHandle,
1486            offset: u64, draw_count: u32, stride: u32) {
1487        self.proc_addr_loader().vk.vkCmdDrawIndirect(command_buffer.to_raw(),
1488            buffer.handle().to_raw(), offset, draw_count, stride);
1489    }
1490
1491    /// Performs an indexed indirect draw.
1492    ///
1493    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdDrawIndexedIndirect.html
1494    //
1495    // *PFN_vkCmdDrawIndexedIndirect)(VkCommandBuffer commandBuffer, VkBuffer
1496    // buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride);
1497    pub unsafe fn cmd_draw_indexed_indirect(&self, command_buffer: CommandBufferHandle, buffer: BufferHandle,
1498            offset: u64, draw_count: u32, stride: u32) {
1499        self.proc_addr_loader().vk.vkCmdDrawIndexedIndirect(command_buffer.to_raw(),
1500            buffer.handle().to_raw(), offset, draw_count, stride);
1501    }
1502
1503    /// Dispatches compute work items.
1504    ///
1505    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdDispatch.html
1506    //
1507    // *PFN_vkCmdDispatch)(VkCommandBuffer commandBuffer, uint32_t
1508    // groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
1509    pub unsafe fn cmd_dispatch(&self, command_buffer: CommandBufferHandle, group_count_x: u32,
1510            group_count_y: u32, group_count_z: u32) {
1511        self.proc_addr_loader().vk.vkCmdDispatch(command_buffer.to_raw(), group_count_x,
1512            group_count_y, group_count_z);
1513    }
1514
1515    /// Dispatches compute work items using indirect parameters.
1516    ///
1517    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdDispatchIndirect.html
1518    //
1519    // *PFN_vkCmdDispatchIndirect)(VkCommandBuffer commandBuffer, VkBuffer
1520    // buffer, VkDeviceSize offset);
1521    pub unsafe fn cmd_dispatch_indirect(&self, command_buffer: CommandBufferHandle, buffer: BufferHandle,
1522            offset: u64) {
1523        self.proc_addr_loader().vk.vkCmdDispatchIndirect(command_buffer.to_raw(),
1524            buffer.handle().to_raw(), offset);
1525    }
1526
1527    /// Copies data between buffer regions.
1528    ///
1529    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdCopyBuffer.html
1530    //
1531    // *PFN_vkCmdCopyBuffer)(VkCommandBuffer commandBuffer, VkBuffer
1532    // srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const
1533    // VkBufferCopy* pRegions);
1534    pub unsafe fn cmd_copy_buffer(&self, command_buffer: CommandBufferHandle, src_buffer: BufferHandle,
1535            dst_buffer: BufferHandle, regions: &[BufferCopy]) {
1536        self.proc_addr_loader().vk.vkCmdCopyBuffer(
1537            command_buffer.to_raw(),
1538            src_buffer.to_raw(),
1539            dst_buffer.to_raw(),
1540            regions.len() as u32,
1541            regions.as_ptr() as *const vks::VkBufferCopy,
1542        );
1543    }
1544
1545    /// Copies data between images.
1546    ///
1547    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdCopyImage.html
1548    //
1549    // *PFN_vkCmdCopyImage)(VkCommandBuffer commandBuffer, VkImage srcImage,
1550    // VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout
1551    // dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions);
1552    pub unsafe fn cmd_copy_image(&self, command_buffer: CommandBufferHandle, src_image: ImageHandle,
1553            src_image_layout: ImageLayout, dst_image: ImageHandle, dst_image_layout: ImageLayout,
1554            regions: &[ImageCopy]) {
1555        self.proc_addr_loader().vk.vkCmdCopyImage(command_buffer.to_raw(),
1556        src_image.to_raw(), src_image_layout.into(), dst_image.to_raw(), dst_image_layout.into(),
1557        regions.len() as u32, regions.as_ptr() as *const vks::VkImageCopy);
1558    }
1559
1560    /// Copies regions of an image, potentially performing format conversion.
1561    ///
1562    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdBlitImage.html
1563    //
1564    // *PFN_vkCmdBlitImage)(VkCommandBuffer commandBuffer, VkImage srcImage,
1565    // VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout
1566    // dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions,
1567    // VkFilter filter);
1568    pub unsafe fn cmd_blit_image(&self, command_buffer: CommandBufferHandle, src_image: ImageHandle,
1569            src_image_layout: ImageLayout, dst_image: ImageHandle, dst_image_layout: ImageLayout,
1570            regions: &[ImageBlit], filter: Filter) {
1571        self.proc_addr_loader().vk.vkCmdBlitImage(command_buffer.to_raw(),
1572            src_image.to_raw(), src_image_layout.into(), dst_image.to_raw(),
1573            dst_image_layout.into(), regions.len() as u32,
1574            regions.as_ptr() as *const vks::VkImageBlit, filter.into());
1575    }
1576
1577    /// Copies data from a buffer into an image.
1578    ///
1579    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdCopyBufferToImage.html
1580    //
1581    // *PFN_vkCmdCopyBufferToImage)(VkCommandBuffer commandBuffer, VkBuffer
1582    // srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t
1583    // regionCount, const VkBufferImageCopy* pRegions);
1584    pub unsafe fn cmd_copy_buffer_to_image(&self, command_buffer: CommandBufferHandle,
1585            src_buffer: BufferHandle, dst_image: ImageHandle, dst_image_layout: ImageLayout,
1586            regions: &[BufferImageCopy]) {
1587        self.proc_addr_loader().vk.vkCmdCopyBufferToImage(
1588            command_buffer.to_raw(),
1589            src_buffer.to_raw(),
1590            dst_image.to_raw(),
1591            dst_image_layout as u32,
1592            regions.len() as u32,
1593            regions.as_ptr() as *const vks::VkBufferImageCopy,
1594        );
1595    }
1596
1597    /// Copies image data into a buffer.
1598    ///
1599    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdCopyImageToBuffer.html
1600    //
1601    // *PFN_vkCmdCopyImageToBuffer)(VkCommandBuffer commandBuffer, VkImage
1602    // srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t
1603    // regionCount, const VkBufferImageCopy* pRegions);
1604    pub unsafe fn cmd_copy_image_to_buffer(&self, command_buffer: CommandBufferHandle,
1605            src_image: ImageHandle, src_image_layout: ImageLayout, dst_buffer: BufferHandle,
1606            regions: &[BufferImageCopy]) {
1607        self.proc_addr_loader().vk.vkCmdCopyImageToBuffer(command_buffer.to_raw(),
1608            src_image.to_raw(), src_image_layout.into(), dst_buffer.to_raw(), regions.len() as u32,
1609            regions.as_ptr() as *const vks::VkBufferImageCopy);
1610    }
1611
1612    /// Updates a buffer's contents from host memory.
1613    ///
1614    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdUpdateBuffer.html
1615    //
1616    // *PFN_vkCmdUpdateBuffer)(VkCommandBuffer commandBuffer, VkBuffer
1617    // dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void*
1618    // pData);
1619    pub unsafe fn cmd_update_buffer(&self, command_buffer: CommandBufferHandle, dst_buffer: BufferHandle,
1620            dst_offset: u64, data: &[u8]) {
1621        self.proc_addr_loader().vk.vkCmdUpdateBuffer(command_buffer.to_raw(),
1622            dst_buffer.to_raw(), dst_offset, data.len() as u64, data.as_ptr() as *const _);
1623    }
1624
1625    /// Fills a region of a buffer with a fixed value.
1626    ///
1627    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdFillBuffer.html
1628    //
1629    // *PFN_vkCmdFillBuffer)(VkCommandBuffer commandBuffer, VkBuffer
1630    // dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data);
1631    pub unsafe fn cmd_fill_buffer(&self,command_buffer: CommandBufferHandle,  dst_buffer: BufferHandle,
1632            dst_offset: u64, size: Option<DeviceSize>, data: u32) {
1633        self.proc_addr_loader().vk.vkCmdFillBuffer(command_buffer.to_raw(),
1634            dst_buffer.to_raw(), dst_offset, size.unwrap_or(0), data);
1635    }
1636
    /// Clears regions of a color image.
    ///
    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdClearColorImage.html
    //
    // *PFN_vkCmdClearColorImage)(VkCommandBuffer commandBuffer, VkImage
    // image, VkImageLayout imageLayout, const VkClearColorValue* pColor,
    // uint32_t rangeCount, const VkImageSubresourceRange* pRanges);
    pub unsafe fn cmd_clear_color_image(&self, command_buffer: CommandBufferHandle, image: ImageHandle,
            image_layout: ImageLayout, color: &ClearColorValue, ranges: &[ImageSubresourceRange]) {
        // NOTE(review): `color` is passed as-is where the FFI expects
        // `*const VkClearColorValue` — unlike `cmd_clear_depth_stencil_image`,
        // which calls `.as_raw()` on its clear value. Presumably `ClearColorValue`
        // is (an alias of) the vks type so the reference coerces — verify.
        self.proc_addr_loader().vk.vkCmdClearColorImage(command_buffer.to_raw(),
            image.to_raw(), image_layout.into(), color, ranges.len() as u32,
            ranges.as_ptr() as *const vks::VkImageSubresourceRange);
    }
1650
1651    /// Fills regions of a combined depth/stencil image.
1652    ///
1653    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdClearDepthStencilImage.html
1654    //
1655    // *PFN_vkCmdClearDepthStencilImage)(VkCommandBuffer commandBuffer,
1656    // VkImage image, VkImageLayout imageLayout, const
1657    // VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const
1658    // VkImageSubresourceRange* pRanges);
1659    pub unsafe fn cmd_clear_depth_stencil_image(&self, command_buffer: CommandBufferHandle,
1660            image: ImageHandle, image_layout: ImageLayout, depth_stencil: &ClearDepthStencilValue,
1661            ranges: &[ImageSubresourceRange]) {
1662        self.proc_addr_loader().vk.vkCmdClearDepthStencilImage(command_buffer.to_raw(),
1663            image.to_raw(), image_layout.into(), depth_stencil.as_raw(), ranges.len() as u32,
1664            ranges.as_ptr() as *const vks::VkImageSubresourceRange);
1665    }
1666
1667    /// Clears regions within currently bound framebuffer attachments.
1668    ///
1669    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdClearAttachments.html
1670    //
1671    // *PFN_vkCmdClearAttachments)(VkCommandBuffer commandBuffer, uint32_t
1672    // attachmentCount, const VkClearAttachment* pAttachments, uint32_t
1673    // rectCount, const VkClearRect* pRects);
1674    pub unsafe fn cmd_clear_attachments(&self, command_buffer: CommandBufferHandle,
1675            attachments: &[ClearAttachment], rects: &[ClearRect]) {
1676        self.proc_addr_loader().vk.vkCmdClearAttachments(command_buffer.to_raw(),
1677            attachments.len() as u32, attachments.as_ptr() as *const vks::VkClearAttachment,
1678            rects.len() as u32, rects.as_ptr() as *const vks::VkClearRect);
1679    }
1680
1681    /// Resolves regions of an image.
1682    ///
1683    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdResolveImage.html
1684    //
1685    // *PFN_vkCmdResolveImage)(VkCommandBuffer commandBuffer, VkImage
1686    // srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout
1687    // dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions);
1688    pub unsafe fn cmd_resolve_image(&self, command_buffer: CommandBufferHandle,
1689            src_image: ImageHandle, src_image_layout: ImageLayout, dst_image: ImageHandle,
1690            dst_image_layout: ImageLayout, regions: &[ImageResolve]) {
1691        self.proc_addr_loader().vk.vkCmdResolveImage(command_buffer.to_raw(),
1692            src_image.to_raw(), src_image_layout.into(), dst_image.to_raw(),
1693            dst_image_layout.into(), regions.len() as u32,
1694            regions.as_ptr() as *const vks::VkImageResolve);
1695    }
1696
1697    /// Sets an event object to signaled state.
1698    ///
1699    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdSetEvent.html
1700    //
1701    // *PFN_vkCmdSetEvent)(VkCommandBuffer commandBuffer, VkEvent event,
1702    // VkPipelineStageFlags stageMask);
1703    pub unsafe fn cmd_set_event(&self, command_buffer: CommandBufferHandle, event: EventHandle,
1704            stage_mask: PipelineStageFlags) {
1705        self.proc_addr_loader().vk.vkCmdSetEvent(command_buffer.to_raw(),
1706            event.to_raw(), stage_mask.bits());
1707    }
1708
1709    /// Resets an event object to non-signaled state.
1710    ///
1711    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdResetEvent.html
1712    //
1713    // *PFN_vkCmdResetEvent)(VkCommandBuffer commandBuffer, VkEvent event,
1714    // VkPipelineStageFlags stageMask);
1715    pub unsafe fn cmd_reset_event(&self, command_buffer: CommandBufferHandle, event: EventHandle,
1716            stage_mask: PipelineStageFlags) {
1717        self.proc_addr_loader().vk.vkCmdResetEvent(command_buffer.to_raw(),
1718            event.to_raw(), stage_mask.bits());
1719    }
1720
1721    /// Waits for one or more events and insert a set of memory.
1722    ///
1723    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdWaitEvents.html
1724    //
1725    // *PFN_vkCmdWaitEvents)(VkCommandBuffer commandBuffer, uint32_t
1726    // eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask,
1727    // VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const
1728    // VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
1729    // const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t
1730    // imageMemoryBarrierCount, const VkImageMemoryBarrier*
1731    // pImageMemoryBarriers);
1732    pub unsafe fn cmd_wait_events(&self, command_buffer: CommandBufferHandle,
1733            events: &[EventHandle],
1734            src_stage_mask: PipelineStageFlags, dst_stage_mask: PipelineStageFlags,
1735            memory_barriers: &[MemoryBarrier],
1736            buffer_memory_barriers: &[BufferMemoryBarrier],
1737            image_memory_barriers: &[ImageMemoryBarrier]) {
1738        self.proc_addr_loader().vk.vkCmdWaitEvents(command_buffer.to_raw(),
1739            events.len() as u32, events.as_ptr() as *const vks::VkEvent,
1740            src_stage_mask.bits(), dst_stage_mask.bits(),
1741            memory_barriers.len() as u32, memory_barriers.as_ptr() as *const vks::VkMemoryBarrier,
1742            buffer_memory_barriers.len() as u32,
1743            buffer_memory_barriers.as_ptr() as *const vks::VkBufferMemoryBarrier,
1744            image_memory_barriers.len() as u32,
1745            image_memory_barriers.as_ptr() as *const vks::VkImageMemoryBarrier,
1746        );
1747    }
1748
1749    /// Inserts a memory dependency.
1750    ///
1751    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdPipelineBarrier.html
1752    //
1753    // *PFN_vkCmdPipelineBarrier)(VkCommandBuffer commandBuffer,
1754    // VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
1755    // VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const
1756    // VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
1757    // const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t
1758    // imageMemoryBarrierCount, const VkImageMemoryBarrier*
1759    // pImageMemoryBarriers);
1760    pub unsafe fn cmd_pipeline_barrier(&self, command_buffer: CommandBufferHandle,
1761            src_stage_mask: PipelineStageFlags, dst_stage_mask: PipelineStageFlags,
1762            dependency_flags: DependencyFlags, memory_barriers: &[MemoryBarrier],
1763            buffer_memory_barriers: &[BufferMemoryBarrier],
1764            image_memory_barriers: &[ImageMemoryBarrier]) {
1765        self.proc_addr_loader().vk.vkCmdPipelineBarrier(command_buffer.to_raw(),
1766            src_stage_mask.bits(), dst_stage_mask.bits(), dependency_flags.bits(),
1767            memory_barriers.len() as u32, memory_barriers.as_ptr() as *const vks::VkMemoryBarrier,
1768            buffer_memory_barriers.len() as u32,
1769            buffer_memory_barriers.as_ptr() as *const vks::VkBufferMemoryBarrier,
1770            image_memory_barriers.len() as u32,
1771            image_memory_barriers.as_ptr() as *const vks::VkImageMemoryBarrier,
1772        );
1773    }
1774
1775    /// Begins a query.
1776    ///
1777    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdBeginQuery.html
1778    //
1779    // *PFN_vkCmdBeginQuery)(VkCommandBuffer commandBuffer, VkQueryPool
1780    // queryPool, uint32_t query, VkQueryControlFlags flags);
1781    pub unsafe fn cmd_begin_query(&self, command_buffer: CommandBufferHandle,
1782            query_pool: QueryPoolHandle, query: u32, flags: QueryControlFlags) {
1783        self.proc_addr_loader().vk.vkCmdBeginQuery(command_buffer.to_raw(),
1784            query_pool.to_raw(), query, flags.bits());
1785    }
1786
1787    /// Ends a query.
1788    ///
1789    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdEndQuery.html
1790    //
1791    // *PFN_vkCmdEndQuery)(VkCommandBuffer commandBuffer, VkQueryPool
1792    // queryPool, uint32_t query);
1793    pub unsafe fn cmd_end_query(&self, command_buffer: CommandBufferHandle,
1794            query_pool: QueryPoolHandle, query: u32) {
1795        self.proc_addr_loader().vk.vkCmdEndQuery(command_buffer.to_raw(),
1796            query_pool.to_raw(), query);
1797    }
1798
1799    /// Resets queries in a query pool.
1800    ///
1801    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdResetQueryPool.html
1802    //
1803    // *PFN_vkCmdResetQueryPool)(VkCommandBuffer commandBuffer, VkQueryPool
1804    // queryPool, uint32_t firstQuery, uint32_t queryCount);
1805    pub unsafe fn cmd_reset_query_pool(&self, command_buffer: CommandBufferHandle,
1806            query_pool: QueryPoolHandle, first_query: u32, query_count: u32) {
1807        self.proc_addr_loader().vk.vkCmdResetQueryPool(command_buffer.to_raw(),
1808            query_pool.to_raw(), first_query, query_count);
1809    }
1810
1811    /// Writes a device timestamp into a query object.
1812    ///
1813    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdWriteTimestamp.html
1814    //
1815    // *PFN_vkCmdWriteTimestamp)(VkCommandBuffer commandBuffer,
1816    // VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t
1817    // query);
1818    pub unsafe fn cmd_write_timestamp(&self, command_buffer: CommandBufferHandle,
1819        pipeline_stage: PipelineStageFlags, query_pool: QueryPoolHandle, query: u32) {
1820        self.proc_addr_loader().vk.vkCmdWriteTimestamp(command_buffer.to_raw(),
1821            pipeline_stage.bits(), query_pool.to_raw(), query);
1822    }
1823
1824    /// Copies the results of queries in a query pool to a buffer object.
1825    ///
1826    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdCopyQueryPoolResults.html
1827    //
1828    // *PFN_vkCmdCopyQueryPoolResults)(VkCommandBuffer commandBuffer,
1829    // VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount,
1830    // VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride,
1831    // VkQueryResultFlags flags);
1832    pub unsafe fn cmd_copy_query_pool_results(&self, command_buffer: CommandBufferHandle,
1833            query_pool: QueryPoolHandle, first_query: u32, query_count: u32,
1834            dst_buffer: BufferHandle, dst_offset: u64, stride: u64, flags: QueryResultFlags) {
1835        self.proc_addr_loader().vk.vkCmdCopyQueryPoolResults(command_buffer.to_raw(),
1836            query_pool.to_raw(), first_query, query_count, dst_buffer.to_raw(), dst_offset, stride,
1837            flags.bits());
1838    }
1839
1840    /// Updates the values of push constants.
1841    ///
1842    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdPushConstants.html
1843    //
1844    // *PFN_vkCmdPushConstants)(VkCommandBuffer commandBuffer,
1845    // VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t
1846    // offset, uint32_t size, const void* pValues);
1847    pub unsafe fn cmd_push_constants(&self, command_buffer: CommandBufferHandle,
1848            layout: PipelineLayoutHandle, stage_flags: ShaderStageFlags, offset: u32,
1849            values: &[u8]) {
1850        self.proc_addr_loader().vk.vkCmdPushConstants(command_buffer.to_raw(),
1851            layout.to_raw(),
1852            stage_flags.bits(), offset, values.len() as u32, values.as_ptr() as *const c_void);
1853    }
1854
1855    /// Begins a new render pass.
1856    ///
1857    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdBeginRenderPass.html
1858    //
1859    // *PFN_vkCmdBeginRenderPass)(VkCommandBuffer commandBuffer, const
1860    // VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents);
1861    pub unsafe fn cmd_begin_render_pass(&self, command_buffer: CommandBufferHandle,
1862            render_pass_begin: &RenderPassBeginInfo, contents: SubpassContents) {
1863        self.proc_addr_loader().vk.vkCmdBeginRenderPass(command_buffer.to_raw(),
1864            render_pass_begin.as_raw(), contents.into());
1865    }
1866
1867    /// Transitions to the next subpass of a render pass.
1868    ///
1869    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdNextSubpass.html
1870    //
1871    // *PFN_vkCmdNextSubpass)(VkCommandBuffer commandBuffer, VkSubpassContents
1872    // contents);
1873    pub unsafe fn cmd_next_subpass(&self, command_buffer: CommandBufferHandle,
1874            contents: SubpassContents) {
1875        self.proc_addr_loader().vk.vkCmdNextSubpass(command_buffer.to_raw(), contents.into());
1876    }
1877
1878    /// Ends the current render pass.
1879    ///
1880    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdEndRenderPass.html
1881    //
1882    // *PFN_vkCmdEndRenderPass)(VkCommandBuffer commandBuffer);
1883    pub unsafe fn cmd_end_render_pass(&self, command_buffer: CommandBufferHandle, ) {
1884        self.proc_addr_loader().vk.vkCmdEndRenderPass(command_buffer.to_raw());
1885    }
1886
1887    /// Executes a secondary command buffer from a primary command buffer.
1888    ///
1889    /// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/vkCmdExecuteCommands.html
1890    //
1891    // *PFN_vkCmdExecuteCommands)(VkCommandBuffer commandBuffer, uint32_t
1892    // commandBufferCount, const VkCommandBuffer* pCommandBuffers);
1893    pub unsafe fn cmd_execute_commands(&self, command_buffer: CommandBufferHandle,
1894            command_buffers: &[CommandBufferHandle]) {
1895        self.proc_addr_loader().vk.vkCmdExecuteCommands(command_buffer.to_raw(),
1896            command_buffers.len() as u32, command_buffers.as_ptr() as *const vks::VkCommandBuffer);
1897    }
1898
1899    /// Creates a swapchain.
1900    ///
1901    /// https://manned.org/vkCreateSwapchainKHR.3
1902    //
1903    // *PFN_vkCreateSwapchainKHR)(VkDevice device, const
1904    // VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks*
1905    // pAllocator, VkSwapchainKHR* pSwapchain);
1906    pub unsafe fn create_swapchain_khr(&self, create_info: &SwapchainCreateInfoKhr,
1907            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<SwapchainKhrHandle> {
1908        let allocator = allocator.unwrap_or(ptr::null());
1909        let mut handle = 0;
1910        let result = self.proc_addr_loader().khr_swapchain.vkCreateSwapchainKHR(self.handle().to_raw(),
1911            create_info.as_raw(), allocator, &mut handle);
1912        error::check(result, "vkCreateSwapchainKHR", SwapchainKhrHandle(handle))
1913    }
1914
1915    /// Destroys a swapchain object.
1916    ///
1917    /// https://manned.org/vkDestroySwapchainKHR.3
1918    //
1919    // *PFN_vkDestroySwapchainKHR)(VkDevice device, VkSwapchainKHR swapchain,
1920    // const VkAllocationCallbacks* pAllocator);
1921    pub unsafe fn destroy_swapchain_khr(&mut self, swapchain: SwapchainKhrHandle,
1922            allocator: Option<*const vks::VkAllocationCallbacks>) {
1923        let _allocator = allocator.unwrap_or(ptr::null());
1924        self.proc_addr_loader().khr_swapchain.vkDestroySwapchainKHR(self.handle().to_raw(),
1925            swapchain.to_raw(), ptr::null());
1926    }
1927
1928    /// Obtains the array of presentable images associated with a swapchain.
1929    ///
1930    /// https://manned.org/vkGetSwapchainImagesKHR.3
1931    //
1932    // *PFN_vkGetSwapchainImagesKHR)(VkDevice device, VkSwapchainKHR
1933    // swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages);
1934    pub unsafe fn get_swapchain_images_khr(&self, swapchain: SwapchainKhrHandle)
1935            -> VdResult<SmallVec<[ImageHandle; 4]>> {
1936        let mut image_count = 0;
1937        let mut image_handles = SmallVec::<[ImageHandle; 4]>::new();
1938        let result = self.proc_addr_loader().khr_swapchain.vkGetSwapchainImagesKHR(self.handle().to_raw(),
1939            swapchain.to_raw(), &mut image_count, ptr::null_mut());
1940        error::check(result, "vkGetSwapchainImagesKHR", ())?;
1941        image_handles.reserve_exact(image_count as usize);
1942        image_handles.set_len(image_count as usize);
1943        loop {
1944            let result = self.proc_addr_loader().khr_swapchain.vkGetSwapchainImagesKHR(self.handle().to_raw(),
1945                swapchain.to_raw(), &mut image_count, image_handles.as_mut_ptr() as *mut vks::VkImage);
1946            if result != CallResult::Incomplete as i32 {
1947                return error::check(result, "vkGetSwapchainImagesKHR", image_handles);
1948            }
1949        }
1950    }
1951
1952    /// Retrieves the index of the next available presentable image.
1953    ///
1954    /// https://manned.org/vkAcquireNextImageKHR.3
1955    //
1956    // *PFN_vkAcquireNextImageKHR)(VkDevice device, VkSwapchainKHR swapchain,
1957    // uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t*
1958    // pImageIndex);
1959    pub unsafe fn acquire_next_image_khr(&self, swapchain: SwapchainKhrHandle, timeout: u64,
1960            semaphore: Option<SemaphoreHandle>, fence: Option<FenceHandle>) -> VdResult<u32> {
1961        let mut image_index = 0;
1962        let result = self.proc_addr_loader().khr_swapchain.vkAcquireNextImageKHR(
1963                self.handle().to_raw(), swapchain.to_raw(), timeout,
1964                semaphore.map(|s| s.to_raw()).unwrap_or(0),
1965                fence.map(|f| f.to_raw()).unwrap_or(0), &mut image_index);
1966        error::check(result, "vkAcquireNextImageKHR", image_index)
1967    }
1968
1969    /// Queues an image for presentation.
1970    ///
1971    /// https://manned.org/vkQueuePresentKHR.3
1972    //
1973    // *PFN_vkQueuePresentKHR)(VkQueue queue, const VkPresentInfoKHR* pPresentInfo);
1974    pub unsafe fn queue_present_khr<Q>(&self, queue: Q, present_info: &PresentInfoKhr)
1975            -> VdResult<()>
1976            where Q: Handle<Target=QueueHandle> {
1977        let result = self.proc_addr_loader().khr_swapchain.vkQueuePresentKHR(
1978            queue.handle().to_raw(), present_info.as_raw());
1979        error::check(result, "vkQueuePresentKHR", ())
1980    }
1981
1982    /// Creates multiple swapchains that share presentable images.
1983    ///
1984    /// https://manned.org/vkCreateSharedSwapchainsKHR.3
1985    //
1986    // *PFN_vkCreateSharedSwapchainsKHR)(VkDevice device, uint32_t
1987    // swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const
1988    // VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains);
1989    pub unsafe fn create_shared_swapchains_khr(&self, create_infos: &[SwapchainCreateInfoKhr],
1990            allocator: Option<*const vks::VkAllocationCallbacks>)
1991            -> VdResult<SmallVec<[SwapchainKhrHandle; 4]>> {
1992        let allocator = allocator.unwrap_or(ptr::null());
1993        let mut swapchains = SmallVec::<[SwapchainKhrHandle; 4]>::new();
1994        swapchains.reserve_exact(create_infos.len());
1995        swapchains.set_len(create_infos.len());
1996        let result = self.proc_addr_loader().khr_display_swapchain.vkCreateSharedSwapchainsKHR(self.handle().to_raw(),
1997            create_infos.len() as u32, create_infos as *const _ as *const vks::VkSwapchainCreateInfoKHR,
1998            allocator, swapchains.as_mut_ptr() as *mut vks::VkSwapchainKHR);
1999        error::check(result, "vkCreateSharedSwapchainsKHR", swapchains)
2000    }
2001
2002    ///
2003    ///
2004    ///
2005    //
2006    // *PFN_vkTrimCommandPoolKHR)(VkDevice device, VkCommandPool commandPool,
2007    // VkCommandPoolTrimFlagsKHR flags);
    /// Intended to wrap `vkTrimCommandPoolKHR` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn trim_command_pool_khr<P>(&self, _command_pool: P, _flags: CommandPoolTrimFlagsKhr)
             -> VdResult<()>
            where P: Handle<Target=CommandPoolHandle> {
        // self.proc_addr_loader().
        //     vkTrimCommandPoolKHR)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlagsKHR flags);
        unimplemented!();
    }
2015
2016    ///
2017    ///
2018    ///
2019    //
2020    // *PFN_vkGetMemoryWin32HandleKHR)(VkDevice device, const
2021    // VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle);
    /// Intended to wrap `vkGetMemoryWin32HandleKHR` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn get_memory_win32_handle_khr(&self,
            _get_win32_handle_info: &MemoryGetWin32HandleInfoKhr)
             -> VdResult<()> {
        // self.proc_addr_loader().
        //     vkGetMemoryWin32HandleKHR)(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle);
        unimplemented!();
    }
2029
2030    ///
2031    ///
2032    ///
2033    //
2034    // *PFN_vkGetMemoryWin32HandlePropertiesKHR)(VkDevice device,
2035    // VkExternalMemoryHandleTypeFlagBitsKHR handleType, HANDLE handle,
2036    // VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties);
    /// Intended to wrap `vkGetMemoryWin32HandlePropertiesKHR` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn get_memory_win32_handle_properties_khr(&self,
            _handle_type: ExternalMemoryHandleTypeFlagsKhr, _handle: HANDLE) -> VdResult<()> {
        // self.proc_addr_loader().
        //     vkGetMemoryWin32HandlePropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBitsKHR handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties);
        unimplemented!();
    }
2043
2044    ///
2045    ///
2046    ///
2047    //
2048    // *PFN_vkGetMemoryFdKHR)(VkDevice device, const VkMemoryGetFdInfoKHR*
2049    // pGetFdInfo, int* pFd);
    /// Intended to wrap `vkGetMemoryFdKHR` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn get_memory_fd_khr(&self, _get_fd_info: &MemoryGetFdInfoKhr, _fd: &mut i32)
            -> VdResult<()> {
        // self.proc_addr_loader().
        //     vkGetMemoryFdKHR)(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd);
        unimplemented!();
    }
2056
2057    ///
2058    ///
2059    ///
2060    //
2061    // *PFN_vkGetMemoryFdPropertiesKHR)(VkDevice device,
2062    // VkExternalMemoryHandleTypeFlagBitsKHR handleType, int fd,
2063    // VkMemoryFdPropertiesKHR* pMemoryFdProperties);
    /// Intended to wrap `vkGetMemoryFdPropertiesKHR` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn get_memory_fd_properties_khr(&self, _handle_type: ExternalMemoryHandleTypeFlagsKhr,
            _fd: i32) -> VdResult<()> {
        // self.proc_addr_loader().
        //     vkGetMemoryFdPropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBitsKHR handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties);
        unimplemented!();
    }
2070
2071    ///
2072    ///
2073    ///
2074    //
2075    // *PFN_vkImportSemaphoreWin32HandleKHR)(VkDevice device, const
2076    // VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo);
    /// Intended to wrap `vkImportSemaphoreWin32HandleKHR` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn import_semaphore_win32_handle_khr(&self,
            _import_semaphore_win32_handle_info: &ImportSemaphoreWin32HandleInfoKhr) -> VdResult<()> {
        // self.proc_addr_loader().
        //     vkImportSemaphoreWin32HandleKHR)(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo);
        unimplemented!();
    }
2083
2084    ///
2085    ///
2086    ///
2087    //
2088    // *PFN_vkGetSemaphoreWin32HandleKHR)(VkDevice device, const
2089    // VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE*
2090    // pHandle);
    /// Intended to wrap `vkGetSemaphoreWin32HandleKHR` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn get_semaphore_win32_handle_khr(&self,
            _get_win32_handle_info: &SemaphoreGetWin32HandleInfoKhr) -> VdResult<()> {
        // self.proc_addr_loader().
        //     vkGetSemaphoreWin32HandleKHR)(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle);
        unimplemented!();
    }
2097
2098    ///
2099    ///
2100    ///
2101    //
2102    // *PFN_vkImportSemaphoreFdKHR)(VkDevice device, const
2103    // VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo);
    /// Intended to wrap `vkImportSemaphoreFdKHR` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn import_semaphore_fd_khr(&self,
            _import_semaphore_fd_info: &ImportSemaphoreFdInfoKhr) -> VdResult<()> {
        // self.proc_addr_loader().
        //     vkImportSemaphoreFdKHR)(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo);
        unimplemented!();
    }
2110
2111    ///
2112    ///
2113    ///
2114    //
2115    // *PFN_vkGetSemaphoreFdKHR)(VkDevice device, const
2116    // VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd);
    /// Intended to wrap `vkGetSemaphoreFdKHR` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn get_semaphore_fd_khr(&self, _get_fd_info: &SemaphoreGetFdInfoKhr)
            -> VdResult<()> {
        // self.proc_addr_loader().
        //     vkGetSemaphoreFdKHR)(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd);
        unimplemented!();
    }
2123
2124    ///
2125    ///
2126    ///
2127    //
2128    // *PFN_vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer,
2129    // VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
2130    // uint32_t set, uint32_t descriptorWriteCount, const
2131    // VkWriteDescriptorSet* pDescriptorWrites);
    /// Intended to wrap `vkCmdPushDescriptorSetKHR` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn cmd_push_descriptor_set_khr<Cb>(&self, _command_buffer: Cb,
            _pipeline_bind_point: PipelineBindPoint, _layout: PipelineLayout, _set: u32,
            _descriptor_writes: &[WriteDescriptorSet]) -> VdResult<()>
            where Cb: Handle<Target=CommandBufferHandle> {
        // self.proc_addr_loader().
        //     vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites);
        unimplemented!();
    }
2140
2141    ///
2142    ///
2143    ///
2144    //
2145    // *PFN_vkCreateDescriptorUpdateTemplateKHR)(VkDevice device, const
2146    // VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo, const
2147    // VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplateKHR*
2148    // pDescriptorUpdateTemplate);
    /// Creates a descriptor update template via `vkCreateDescriptorUpdateTemplateKHR`.
    ///
    /// Compiled only when the `unimplemented` cargo feature is enabled; the
    /// loader symbol and handle/info types used here are presumably provided
    /// under that feature — TODO confirm.
    #[cfg(feature = "unimplemented")]
    pub unsafe fn create_descriptor_update_template_khr(&self,
            create_info: &DescriptorUpdateTemplateKhrCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>)
            -> VdResult<DescriptorUpdateTemplateKhrHandle> {
        // Null allocator means "use the default allocation callbacks".
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreateDescriptorUpdateTemplateKhr(self.handle().to_raw(),
            create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateDescriptorUpdateTemplateKhr",
            DescriptorUpdateTemplateKhrHandle(handle))
    }
2161
2162    ///
2163    ///
2164    ///
2165    //
2166    // *PFN_vkDestroyDescriptorUpdateTemplateKHR)(VkDevice device,
2167    // VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const
2168    // VkAllocationCallbacks* pAllocator);
    /// Destroys a descriptor update template via `vkDestroyDescriptorUpdateTemplateKHR`.
    ///
    /// Compiled only when the `unimplemented` cargo feature is enabled.
    #[cfg(feature = "unimplemented")]
    pub unsafe fn destroy_descriptor_update_template_khr(&self,
            descriptor_update_template_khr: DescriptorUpdateTemplateKhrHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroyDescriptorUpdateTemplateKhr(self.handle().to_raw(),
            descriptor_update_template_khr.to_raw(), allocator);
    }
2177
2178    ///
2179    ///
2180    ///
2181    //
2182    // *PFN_vkUpdateDescriptorSetWithTemplateKHR)(VkDevice device,
2183    // VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplateKHR
2184    // descriptorUpdateTemplate, const void* pData);
    /// Intended to wrap `vkUpdateDescriptorSetWithTemplateKHR` (prototype above).
    ///
    /// Compiled only under the `unimplemented` cargo feature, and even then
    /// **unimplemented**: always panics via `unimplemented!()`.
    #[cfg(feature = "unimplemented")]
    pub unsafe fn update_descriptor_set_with_template_khr<Ds>(&self, descriptor_set: Ds,
            descriptor_update_template: DescriptorUpdateTemplateKhrHandle, data: *const c_void)
            where Ds: Handle<Target=DescriptorSetHandle> {
        // self.proc_addr_loader().
        //     vkUpdateDescriptorSetWithTemplateKHR)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void* pData);
        unimplemented!();
    }
2193
2194    ///
2195    ///
2196    ///
2197    //
2198    // *PFN_vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer
2199    // commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
2200    // VkPipelineLayout layout, uint32_t set, const void* pData);
    /// Intended to wrap `vkCmdPushDescriptorSetWithTemplateKHR` (prototype above).
    ///
    /// Compiled only under the `unimplemented` cargo feature, and even then
    /// **unimplemented**: always panics via `unimplemented!()`.
    #[cfg(feature = "unimplemented")]
    pub unsafe fn cmd_push_descriptor_set_with_template_khr<Cb, Pl>(&self, command_buffer: Cb,
            descriptor_update_template: DescriptorUpdateTemplateKhr, layout: Pl, set: u32,
            data: *const c_void) -> VdResult<()>
            where Cb: Handle<Target=CommandBufferHandle>, Pl: Handle<Target=PipelineLayoutHandle> {
        // self.proc_addr_loader().
        //     vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData);
        unimplemented!();
    }
2210
2211    ///
2212    ///
2213    ///
2214    //
2215    // *PFN_vkGetSwapchainStatusKHR)(VkDevice device, VkSwapchainKHR
2216    // swapchain);
    /// Intended to wrap `vkGetSwapchainStatusKHR` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn get_swapchain_status_khr<Sk>(&self, _swapchain: Sk) -> VdResult<()>
            where Sk: Handle<Target=SwapchainKhrHandle> {
        // self.proc_addr_loader().
        //     vkGetSwapchainStatusKHR)(VkDevice device, VkSwapchainKHR swapchain);
        unimplemented!();
    }
2223
2224    ///
2225    ///
2226    ///
2227    //
2228    // *PFN_vkImportFenceWin32HandleKHR)(VkDevice device, const
2229    // VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo);
    /// Intended to wrap `vkImportFenceWin32HandleKHR` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn import_fence_win32_handle_khr(&self,
            _import_fence_win32_handle_info: &ImportFenceWin32HandleInfoKhr) -> VdResult<()> {
        // self.proc_addr_loader().
        //     vkImportFenceWin32HandleKHR)(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo);
        unimplemented!();
    }
2236
2237    ///
2238    ///
2239    ///
2240    //
2241    // *PFN_vkGetFenceWin32HandleKHR)(VkDevice device, const
2242    // VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle);
    /// Intended to wrap `vkGetFenceWin32HandleKHR` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn get_fence_win32_handle_khr(&self,
            _get_win32_handle_info: &FenceGetWin32HandleInfoKhr) -> VdResult<()> {
        // self.proc_addr_loader().
        //     vkGetFenceWin32HandleKHR)(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle);
        unimplemented!();
    }
2249
2250    ///
2251    ///
2252    ///
2253    //
2254    // *PFN_vkImportFenceFdKHR)(VkDevice device, const VkImportFenceFdInfoKHR*
2255    // pImportFenceFdInfo);
    /// Intended to wrap `vkImportFenceFdKHR` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn import_fence_fd_khr(&self, _import_fence_fd_info: &ImportFenceFdInfoKhr)
            -> VdResult<()> {
        // self.proc_addr_loader().
        //     vkImportFenceFdKHR)(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo);
        unimplemented!();
    }
2262
2263    ///
2264    ///
2265    ///
2266    //
2267    // *PFN_vkGetFenceFdKHR)(VkDevice device, const VkFenceGetFdInfoKHR*
2268    // pGetFdInfo, int* pFd);
    /// Intended to wrap `vkGetFenceFdKHR` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn get_fence_fd_khr(&self, _get_fd_info: &FenceGetFdInfoKhr) -> VdResult<()> {
        // self.proc_addr_loader().
        //     vkGetFenceFdKHR)(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd);
        unimplemented!();
    }
2274
2275    ///
2276    ///
2277    ///
2278    //
2279    // *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const
2280    // VkImageMemoryRequirementsInfo2KHR* pInfo, VkMemoryRequirements2KHR*
2281    // pMemoryRequirements);
    /// Intended to wrap `vkGetImageMemoryRequirements2KHR` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn get_image_memory_requirements_2_khr(&self,
            _info: &ImageMemoryRequirementsInfo2Khr) -> VdResult<()> {
        unimplemented!();
    }
2286
2287    ///
2288    ///
2289    ///
2290    //
2291    // *PFN_vkGetBufferMemoryRequirements2KHR)(VkDevice device, const
2292    // VkBufferMemoryRequirementsInfo2KHR* pInfo, VkMemoryRequirements2KHR*
2293    // pMemoryRequirements);
    /// Intended to wrap `vkGetBufferMemoryRequirements2KHR` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    //
    // NOTE(review): unlike its sibling wrappers this is declared without
    // `unsafe` — probably an oversight, but adding it would break callers;
    // confirm before changing.
    pub fn get_buffer_memory_requirements_2_khr(&self, _info: &BufferMemoryRequirementsInfo2Khr)
            -> VdResult<()> {
        unimplemented!();
    }
2298
2299    ///
2300    ///
2301    ///
2302    //
2303    // *PFN_vkGetImageSparseMemoryRequirements2KHR)(VkDevice device, const
2304    // VkImageSparseMemoryRequirementsInfo2KHR* pInfo, uint32_t*
2305    // pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2KHR*
2306    // pSparseMemoryRequirements);
    /// Intended to wrap `vkGetImageSparseMemoryRequirements2KHR` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn get_image_sparse_memory_requirements_2_khr(&self,
            _info: &ImageSparseMemoryRequirementsInfo2Khr) -> VdResult<()> {
        unimplemented!();
    }
2311
2312    ///
2313    ///
2314    ///
2315    //
2316    // *PFN_vkCreateSamplerYcbcrConversionKHR)(VkDevice device, const
2317    // VkSamplerYcbcrConversionCreateInfoKHR* pCreateInfo, const
2318    // VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversionKHR*
2319    // pYcbcrConversion);
    /// Creates a sampler Y'CbCr conversion via `vkCreateSamplerYcbcrConversionKHR`.
    ///
    /// Compiled only when the `unimplemented` cargo feature is enabled; the
    /// loader symbol and types used here are presumably provided under that
    /// feature — TODO confirm.
    #[cfg(feature = "unimplemented")]
    pub unsafe fn create_sampler_ycbcr_conversion_khr(&self,
            create_info: &SamplerYcbcrConversionKhrCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>)
            -> VdResult<SamplerYcbcrConversionKhrHandle> {
        // Null allocator means "use the default allocation callbacks".
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreateSamplerYcbcrConversionKhr(
            self.handle().to_raw(), create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateSamplerYcbcrConversionKhr",
            SamplerYcbcrConversionKhrHandle(handle))
    }
2332
2333    ///
2334    ///
2335    ///
2336    //
2337    // *PFN_vkDestroySamplerYcbcrConversionKHR)(VkDevice device,
2338    // VkSamplerYcbcrConversionKHR ycbcrConversion, const
2339    // VkAllocationCallbacks* pAllocator);
    /// Destroys a sampler Y'CbCr conversion via `vkDestroySamplerYcbcrConversionKHR`.
    ///
    /// Compiled only when the `unimplemented` cargo feature is enabled.
    #[cfg(feature = "unimplemented")]
    pub unsafe fn destroy_sampler_ycbcr_conversion_khr(&self,
            sampler_ycbcr_conversion_khr: SamplerYcbcrConversionKhrHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroySamplerYcbcrConversionKhr(self.handle().to_raw(),
            sampler_ycbcr_conversion_khr.to_raw(), allocator);
    }
2348
2349    ///
2350    ///
2351    ///
2352    //
2353    // *PFN_vkBindBufferMemory2KHR)(VkDevice device, uint32_t bindInfoCount,
2354    // const VkBindBufferMemoryInfoKHR* pBindInfos);
    /// Intended to wrap `vkBindBufferMemory2KHR` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn bind_buffer_memory_2_khr(&self) {
        unimplemented!();
    }
2358
2359    ///
2360    ///
2361    ///
2362    //
2363    // *PFN_vkBindImageMemory2KHR)(VkDevice device, uint32_t bindInfoCount,
2364    // const VkBindImageMemoryInfoKHR* pBindInfos);
    /// Intended to wrap `vkBindImageMemory2KHR` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn bind_image_memory_2_khr(&self) {
        unimplemented!();
    }
2368
2369    ///
2370    ///
2371    ///
2372    //
2373    // *PFN_vkDebugMarkerSetObjectTagEXT)(VkDevice device, const
2374    // VkDebugMarkerObjectTagInfoEXT* pTagInfo);
    /// Intended to wrap `vkDebugMarkerSetObjectTagEXT` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn debug_marker_set_object_tag_ext(&self, _tag_info: &DebugMarkerObjectTagInfoExt)
            -> VdResult<()> {
        unimplemented!();
    }
2379
2380    ///
2381    ///
2382    ///
2383    //
2384    // *PFN_vkDebugMarkerSetObjectNameEXT)(VkDevice device, const
2385    // VkDebugMarkerObjectNameInfoEXT* pNameInfo);
    /// Intended to wrap `vkDebugMarkerSetObjectNameEXT` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn debug_marker_set_object_name_ext(&self, _name_info: &DebugMarkerObjectNameInfoExt)
            -> VdResult<()> {
        unimplemented!();
    }
2390
2391    ///
2392    ///
2393    ///
2394    //
2395    // *PFN_vkCmdDebugMarkerBeginEXT)(VkCommandBuffer commandBuffer, const
2396    // VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
2397    pub unsafe fn cmd_debug_marker_begin_ext(&self, command_buffer: CommandBufferHandle,
2398            marker_info: &DebugMarkerMarkerInfoExt) {
2399        self.proc_addr_loader().ext_debug_marker.vkCmdDebugMarkerBeginEXT(command_buffer.to_raw(),
2400            marker_info.as_raw());
2401    }
2402
2403    ///
2404    ///
2405    ///
2406    //
2407    // *PFN_vkCmdDebugMarkerEndEXT)(VkCommandBuffer commandBuffer);
2408    pub unsafe fn cmd_debug_marker_end_ext(&self, command_buffer: CommandBufferHandle) {
2409        self.proc_addr_loader().ext_debug_marker.vkCmdDebugMarkerEndEXT(command_buffer.to_raw());
2410    }
2411
2412    ///
2413    ///
2414    ///
2415    //
2416    // *PFN_vkCmdDebugMarkerInsertEXT)(VkCommandBuffer commandBuffer, const
2417    // VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
2418    pub unsafe fn cmd_debug_marker_insert_ext(&self, command_buffer: CommandBufferHandle,
2419            marker_info: &DebugMarkerMarkerInfoExt) {
2420        self.proc_addr_loader().ext_debug_marker.vkCmdDebugMarkerInsertEXT(command_buffer.to_raw(),
2421            marker_info.as_raw());
2422    }
2423
2424    ///
2425    ///
2426    ///
2427    //
2428    // *PFN_vkCmdDrawIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer
2429    // buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize
2430    // countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
    /// Intended to wrap `vkCmdDrawIndirectCountAMD` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn cmd_draw_indirect_count_amd(&self) {
        unimplemented!();
    }
2434
2435    ///
2436    ///
2437    ///
2438    //
2439    // *PFN_vkCmdDrawIndexedIndirectCountAMD)(VkCommandBuffer commandBuffer,
2440    // VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer,
2441    // VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t
2442    // stride);
    /// Intended to wrap `vkCmdDrawIndexedIndirectCountAMD` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn cmd_draw_indexed_indirect_count_amd(&self) {
        unimplemented!();
    }
2446
2447    ///
2448    ///
2449    ///
2450    //
2451    // *PFN_vkGetMemoryWin32HandleNV)(VkDevice device, VkDeviceMemory memory,
2452    // VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle);
    /// Intended to wrap `vkGetMemoryWin32HandleNV` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn get_memory_win32_handle_nv(&self) {
        unimplemented!();
    }
2456
2457    ///
2458    ///
2459    ///
2460    //
2461    // *PFN_vkGetDeviceGroupPeerMemoryFeaturesKHX)(VkDevice device, uint32_t
2462    // heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex,
2463    // VkPeerMemoryFeatureFlagsKHX* pPeerMemoryFeatures);
    /// Intended to wrap `vkGetDeviceGroupPeerMemoryFeaturesKHX` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn get_device_group_peer_memory_features_khx(&self) {
        unimplemented!();
    }
2467
2468    ///
2469    ///
2470    ///
2471    //
2472    // *PFN_vkCmdSetDeviceMaskKHX)(VkCommandBuffer commandBuffer, uint32_t
2473    // deviceMask);
    /// Intended to wrap `vkCmdSetDeviceMaskKHX` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn cmd_set_device_mask_khx(&self) {
        unimplemented!();
    }
2477
2478    ///
2479    ///
2480    ///
2481    //
2482    // *PFN_vkCmdDispatchBaseKHX)(VkCommandBuffer commandBuffer, uint32_t
2483    // baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t
2484    // groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
    /// Intended to wrap `vkCmdDispatchBaseKHX` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn cmd_dispatch_base_khx(&self) {
        unimplemented!();
    }
2488
2489    ///
2490    ///
2491    ///
2492    //
2493    // *PFN_vkGetDeviceGroupPresentCapabilitiesKHX)(VkDevice device,
2494    // VkDeviceGroupPresentCapabilitiesKHX* pDeviceGroupPresentCapabilities);
    /// Intended to wrap `vkGetDeviceGroupPresentCapabilitiesKHX` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn get_device_group_present_capabilities_khx(&self) {
        unimplemented!();
    }
2498
2499    ///
2500    ///
2501    ///
2502    //
2503    // *PFN_vkGetDeviceGroupSurfacePresentModesKHX)(VkDevice device,
2504    // VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHX* pModes);
    /// Intended to wrap `vkGetDeviceGroupSurfacePresentModesKHX` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn get_device_group_surface_present_modes_khx(&self) {
        unimplemented!();
    }
2508
2509    ///
2510    ///
2511    ///
2512    //
2513    // *PFN_vkAcquireNextImage2KHX)(VkDevice device, const
2514    // VkAcquireNextImageInfoKHX* pAcquireInfo, uint32_t* pImageIndex);
    /// Intended to wrap `vkAcquireNextImage2KHX` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn acquire_next_image2_khx(&self) {
        unimplemented!();
    }
2518
2519    ///
2520    ///
2521    ///
2522    //
2523    // *PFN_vkCmdProcessCommandsNVX)(VkCommandBuffer commandBuffer, const
2524    // VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo);
    /// Intended to wrap `vkCmdProcessCommandsNVX` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn cmd_process_commands_nvx(&self) {
        unimplemented!();
    }
2528
2529    ///
2530    ///
2531    ///
2532    //
2533    // *PFN_vkCmdReserveSpaceForCommandsNVX)(VkCommandBuffer commandBuffer,
2534    // const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo);
    /// Intended to wrap `vkCmdReserveSpaceForCommandsNVX` (prototype above).
    ///
    /// **Unimplemented**: always panics via `unimplemented!()`.
    pub unsafe fn cmd_reserve_space_for_commands_nvx(&self) {
        unimplemented!();
    }
2538
2539    ///
2540    ///
2541    ///
2542    //
2543    // *PFN_vkCreateIndirectCommandsLayoutNVX)(VkDevice device, const
2544    // VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const
2545    // VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX*
2546    // pIndirectCommandsLayout);
2547    #[cfg(feature = "unimplemented")]
2548    pub unsafe fn create_indirect_commands_layout_nvx(&self,
2549            create_info: &IndirectCommandsLayoutNvxCreateInfo,
2550            allocator: Option<*const vks::VkAllocationCallbacks>)
2551            -> VdResult<IndirectCommandsLayoutNvxHandle> {
2552        let allocator = allocator.unwrap_or(ptr::null());
2553        let mut handle = 0;
2554        let result = self.proc_addr_loader().vk.vkCreateIndirectCommandsLayoutNvx(
2555            self.handle().to_raw(), create_info.as_raw(), allocator, &mut handle);
2556        error::check(result, "vkCreateIndirectCommandsLayoutNvx",
2557            IndirectCommandsLayoutNvxHandle(handle))
2558    }
2559
2560    ///
2561    ///
2562    ///
2563    //
2564    // *PFN_vkDestroyIndirectCommandsLayoutNVX)(VkDevice device,
2565    // VkIndirectCommandsLayoutNVX indirectCommandsLayout, const
2566    // VkAllocationCallbacks* pAllocator);
    /// Destroys an indirect commands layout via `vkDestroyIndirectCommandsLayoutNVX`.
    ///
    /// Compiled only when the `unimplemented` cargo feature is enabled.
    #[cfg(feature = "unimplemented")]
    pub unsafe fn destroy_indirect_commands_layout_nvx(&self,
            indirect_commands_layout_nvx: IndirectCommandsLayoutNvxHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroyIndirectCommandsLayoutNvx(self.handle().to_raw(),
            indirect_commands_layout_nvx.to_raw(), allocator);
    }
2575
2576    ///
2577    ///
2578    ///
2579    //
2580    // *PFN_vkCreateObjectTableNVX)(VkDevice device, const
2581    // VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks*
2582    // pAllocator, VkObjectTableNVX* pObjectTable);
    /// Creates an object table via `vkCreateObjectTableNVX`.
    ///
    /// Compiled only when the `unimplemented` cargo feature is enabled; the
    /// loader symbol and types used here are presumably provided under that
    /// feature — TODO confirm.
    #[cfg(feature = "unimplemented")]
    pub unsafe fn create_object_table_nvx(&self, create_info: &ObjectTableNvxCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>)
            -> VdResult<ObjectTableNvxHandle> {
        // Null allocator means "use the default allocation callbacks".
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreateObjectTableNvx(self.handle().to_raw(),
            create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateObjectTableNvx", ObjectTableNvxHandle(handle))
    }
2593
2594    ///
2595    ///
2596    ///
2597    //
2598    // *PFN_vkDestroyObjectTableNVX)(VkDevice device, VkObjectTableNVX
2599    // objectTable, const VkAllocationCallbacks* pAllocator);
2600    #[cfg(feature = "unimplemented")]
2601    pub unsafe fn destroy_object_table_nvx(&self, object_table_nvx: ObjectTableNvxHandle,
2602            allocator: Option<*const vks::VkAllocationCallbacks>) {
2603        let allocator = allocator.unwrap_or(ptr::null());
2604        self.proc_addr_loader().vk.vkDestroyObjectTableNvx(self.handle().to_raw(),
2605            object_table_nvx.to_raw(), allocator);
2606    }
2607
    /// Registers objects in an object table.
    ///
    /// Corresponds to `vkRegisterObjectsNVX`.
    ///
    /// Not yet implemented: calling this always panics via `unimplemented!()`.
    //
    // *PFN_vkRegisterObjectsNVX)(VkDevice device, VkObjectTableNVX
    // objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const*
    // ppObjectTableEntries, const uint32_t* pObjectIndices);
    pub unsafe fn register_objects_nvx(&self) {
        unimplemented!();
    }
2618
    /// Unregisters objects from an object table.
    ///
    /// Corresponds to `vkUnregisterObjectsNVX`.
    ///
    /// Not yet implemented: calling this always panics via `unimplemented!()`.
    //
    // *PFN_vkUnregisterObjectsNVX)(VkDevice device, VkObjectTableNVX
    // objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX*
    // pObjectEntryTypes, const uint32_t* pObjectIndices);
    pub unsafe fn unregister_objects_nvx(&self) {
        unimplemented!();
    }
2629
    /// Sets viewport W scaling factors on a command buffer.
    ///
    /// Corresponds to `vkCmdSetViewportWScalingNV`.
    ///
    /// Not yet implemented: calling this always panics via `unimplemented!()`.
    //
    // *PFN_vkCmdSetViewportWScalingNV)(VkCommandBuffer commandBuffer,
    // uint32_t firstViewport, uint32_t viewportCount, const
    // VkViewportWScalingNV* pViewportWScalings);
    pub unsafe fn cmd_set_viewport_w_scaling_nv(&self) {
        unimplemented!();
    }
2640
    /// Sets the power state of a display.
    ///
    /// Corresponds to `vkDisplayPowerControlEXT`.
    ///
    /// Not yet implemented: calling this always panics via `unimplemented!()`.
    //
    // *PFN_vkDisplayPowerControlEXT)(VkDevice device, VkDisplayKHR display,
    // const VkDisplayPowerInfoEXT* pDisplayPowerInfo);
    pub unsafe fn display_power_control_ext<Dk>(&self, _display: Dk,
            _display_power_info: &DisplayPowerInfoExt)
            where Dk: Handle<Target=DisplayKhrHandle> {
        unimplemented!();
    }
2652
    /// Registers a device event, returning a fence signaled when it occurs.
    ///
    /// Corresponds to `vkRegisterDeviceEventEXT`.
    ///
    /// Not yet implemented: calling this always panics via `unimplemented!()`.
    //
    // *PFN_vkRegisterDeviceEventEXT)(VkDevice device, const
    // VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks*
    // pAllocator, VkFence* pFence);
    pub unsafe fn register_device_event_ext(&self, _device_event_info: &DeviceEventInfoExt,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<()> {
        // Normalized but unused until the call is implemented.
        let _allocator = allocator.unwrap_or(ptr::null());
        unimplemented!();
    }
2665
    /// Registers a display event, returning a fence signaled when it occurs.
    ///
    /// Corresponds to `vkRegisterDisplayEventEXT`.
    ///
    /// Not yet implemented: calling this always panics via `unimplemented!()`.
    //
    // *PFN_vkRegisterDisplayEventEXT)(VkDevice device, VkDisplayKHR display,
    // const VkDisplayEventInfoEXT* pDisplayEventInfo, const
    // VkAllocationCallbacks* pAllocator, VkFence* pFence);
    pub unsafe fn register_display_event_ext<Dk>(&self, _display: Dk,
            _display_event_info: &DisplayEventInfoExt,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<()>
            where Dk: Handle<Target=DisplayKhrHandle> {
        // Normalized but unused until the call is implemented.
        let _allocator = allocator.unwrap_or(ptr::null());
        unimplemented!();
    }
2680
    /// Queries the current value of a surface counter for a swapchain.
    ///
    /// Corresponds to `vkGetSwapchainCounterEXT`.
    ///
    /// Not yet implemented: calling this always panics via `unimplemented!()`.
    //
    // *PFN_vkGetSwapchainCounterEXT)(VkDevice device, VkSwapchainKHR
    // swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t*
    // pCounterValue);
    pub unsafe fn get_swapchain_counter_ext<Sk>(&self, _swapchain: Sk,
            _counter: SurfaceCounterFlagsExt) -> VdResult<u64>
            where Sk: Handle<Target=SwapchainKhrHandle> {
        unimplemented!();
    }
2693
    /// Queries the refresh cycle duration of a swapchain's display.
    ///
    /// Corresponds to `vkGetRefreshCycleDurationGOOGLE`.
    ///
    /// Not yet implemented: calling this always panics via `unimplemented!()`.
    //
    // *PFN_vkGetRefreshCycleDurationGOOGLE)(VkDevice device, VkSwapchainKHR
    // swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties);
    pub unsafe fn get_refresh_cycle_duration_google(&self) {
        unimplemented!();
    }
2703
    /// Queries past presentation timing information for a swapchain.
    ///
    /// Corresponds to `vkGetPastPresentationTimingGOOGLE`.
    ///
    /// Not yet implemented: calling this always panics via `unimplemented!()`.
    //
    // *PFN_vkGetPastPresentationTimingGOOGLE)(VkDevice device, VkSwapchainKHR
    // swapchain, uint32_t* pPresentationTimingCount,
    // VkPastPresentationTimingGOOGLE* pPresentationTimings);
    pub unsafe fn get_past_presentation_timing_google(&self) {
        unimplemented!();
    }
2714
    /// Sets discard rectangles on a command buffer.
    ///
    /// Corresponds to `vkCmdSetDiscardRectangleEXT`.
    ///
    /// Not yet implemented: calling this always panics via `unimplemented!()`.
    //
    // *PFN_vkCmdSetDiscardRectangleEXT)(VkCommandBuffer commandBuffer,
    // uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const
    // VkRect2D* pDiscardRectangles);
    pub unsafe fn cmd_set_discard_rectangle_ext<Cb>(&self, _command_buffer: Cb,
            _first_discard_rectangle: u32, _discard_rectangle_count: u32, _discard_rectangles: &Rect2d)
            -> VdResult<()>
            where Cb: Handle<Target=CommandBufferHandle> {
        unimplemented!();
    }
2728
    /// Sets HDR metadata for the given swapchains.
    ///
    /// Corresponds to `vkSetHdrMetadataEXT`.
    ///
    /// Not yet implemented: calling this always panics via `unimplemented!()`.
    //
    // *PFN_vkSetHdrMetadataEXT)(VkDevice device, uint32_t swapchainCount,
    // const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata);
    pub unsafe fn set_hdr_metadata_ext(&self, _swapchains: &[SwapchainKhrHandle],
            _metadata: &HdrMetadataExt) -> VdResult<()> {
        unimplemented!();
    }
2739
    /// Sets sample locations state on a command buffer.
    ///
    /// Corresponds to `vkCmdSetSampleLocationsEXT`.
    ///
    /// Not yet implemented: calling this always panics via `unimplemented!()`.
    //
    // *PFN_vkCmdSetSampleLocationsEXT)(VkCommandBuffer commandBuffer, const
    // VkSampleLocationsInfoEXT* pSampleLocationsInfo);
    #[cfg(feature = "unimplemented")]
    pub unsafe fn cmd_set_sample_locations_ext<Cb>(&self, command_buffer: Cb,
            sample_locations_info: &SampleLocationsInfoExt) -> VdResult<()>
            where Cb: Handle<Target=CommandBufferHandle> {
        unimplemented!();
    }
2752
2753    ///
2754    ///
2755    ///
2756    //
2757    // *PFN_vkCreateValidationCacheEXT)(VkDevice device, const
2758    // VkValidationCacheCreateInfoEXT* pCreateInfo, const
2759    // VkAllocationCallbacks* pAllocator, VkValidationCacheEXT*
2760    // pValidationCache);
2761    #[cfg(feature = "unimplemented")]
2762    pub unsafe fn create_validation_cache_ext(&self,
2763            create_info: &ValidationCacheExtCreateInfo,
2764            allocator: Option<*const vks::VkAllocationCallbacks>)
2765            -> VdResult<ValidationCacheExtHandle> {
2766        let allocator = allocator.unwrap_or(ptr::null());
2767        let mut handle = 0;
2768        let result = self.proc_addr_loader().vk.vkCreateValidationCacheExt(self.handle().to_raw(),
2769            create_info.as_raw(), allocator, &mut handle);
2770        // Ok(ValidationCacheExtHandle(handle))
2771        error::check(result, "vkCreateValidationCacheExt", ValidationCacheExtHandle(handle))
2772    }
2773
2774    ///
2775    ///
2776    ///
2777    //
2778    // *PFN_vkDestroyValidationCacheEXT)(VkDevice device, VkValidationCacheEXT
2779    // validationCache, const VkAllocationCallbacks* pAllocator);
2780    #[cfg(feature = "unimplemented")]
2781    pub unsafe fn destroy_validation_cache_ext(&self,
2782            validation_cache_ext: ValidationCacheExtHandle,
2783            allocator: Option<*const vks::VkAllocationCallbacks>) {
2784        let allocator = allocator.unwrap_or(ptr::null());
2785        self.proc_addr_loader().vk.vkDestroyValidationCacheExt(self.handle().to_raw(),
2786            validation_cache_ext.to_raw(), allocator);
2787    }
2788
    /// Merges the data of `src_caches` into `dst_cache`.
    ///
    /// Corresponds to `vkMergeValidationCachesEXT`.
    ///
    /// Not yet implemented: calling this always panics via `unimplemented!()`.
    //
    // *PFN_vkMergeValidationCachesEXT)(VkDevice device, VkValidationCacheEXT
    // dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT*
    // pSrcCaches);
    #[cfg(feature = "unimplemented")]
    pub unsafe fn merge_validation_caches_ext(&self, dst_cache: ValidationCacheExt,
            src_caches: &[ValidationCacheExt]) -> VdResult<()> {
        unimplemented!();
    }
2801
    /// Retrieves the data store of a validation cache.
    ///
    /// Corresponds to `vkGetValidationCacheDataEXT`.
    ///
    /// Not yet implemented: calling this always panics via `unimplemented!()`.
    //
    // *PFN_vkGetValidationCacheDataEXT)(VkDevice device, VkValidationCacheEXT
    // validationCache, size_t* pDataSize, void* pData);
    #[cfg(feature = "unimplemented")]
    pub unsafe fn get_validation_cache_data_ext(&self, validation_cache: ValidationCacheExt,
            data_size: *mut usize, data: *mut c_void) -> VdResult<()> {
        unimplemented!();
    }
2813}
2814
/// Allows a borrowed `Device` to be used anywhere a source of the raw
/// `DeviceHandle` is accepted.
unsafe impl<'h> Handle for &'h Device {
    type Target = DeviceHandle;

    #[inline(always)]
    fn handle(&self) -> Self::Target {
        // Copies the raw handle out of the shared inner state.
        self.inner.handle
    }
}
2823
// SAFETY: NOTE(review): these impls assert that `Device`'s shared `Inner`
// (raw handle, physical device, queues, instance, loader) is safe to move
// and share across threads — confirm that `Inner` gains no un-synchronized
// interior mutability, particularly given the post-construction `queues`
// back-fill performed in `DeviceBuilder::build`.
unsafe impl Send for Device {}
unsafe impl Sync for Device {}
2826
2827
/// A builder for `Device`.
///
/// Collects queue create infos, extension/layer name lists and enabled
/// features, then constructs the logical device via `build`.
#[derive(Debug, Clone)]
pub struct DeviceBuilder<'db> {
    // Raw create info; stores pointers into the two name lists below.
    create_info: ::DeviceCreateInfo<'db>,
    // Kept alive here so the pointer slices handed to `create_info`
    // (via `as_ptr_slice`) remain valid.
    enabled_layer_names: Option<CharStrs<'db>>,
    enabled_extension_names: Option<CharStrs<'db>>,
    _p: PhantomData<&'db ()>,
}
2836
impl<'db> DeviceBuilder<'db> {
    /// Returns a new device builder with default (empty) create info.
    pub fn new() -> DeviceBuilder<'db> {
        DeviceBuilder {
            create_info: ::DeviceCreateInfo::default(),
            enabled_layer_names: None,
            enabled_extension_names: None,
            _p: PhantomData,
        }
    }

    /// Specifies the list of VkDeviceQueueCreateInfo structures describing
    /// the queues that are requested to be created along with the logical
    /// device.
    pub fn queue_create_infos<'s, 'ci>(&'s mut self,
            queue_create_infos: &'ci [DeviceQueueCreateInfo])
            -> &'s mut DeviceBuilder<'db>
            where 'ci: 'db {
        // The wrapper slice is stored where the raw Vulkan structs are
        // expected, so the wrapper's layout must match the raw struct's.
        debug_assert_eq!(mem::align_of::<DeviceQueueCreateInfo>(),
            mem::align_of::<vks::VkDeviceQueueCreateInfo>());
        debug_assert_eq!(mem::size_of::<DeviceQueueCreateInfo>(),
            mem::size_of::<vks::VkDeviceQueueCreateInfo>());
        self.create_info.set_queue_create_infos(queue_create_infos);
        self
    }

    /// Specifies the layer names to enable.
    ///
    /// Ignored by the Vulkan API (device layers are deprecated); retained
    /// for backward compatibility only.
    #[deprecated(note = "ignored by Vulkan API")]
    pub fn enabled_layer_names<'s, 'cs, Cs>(&'s mut self, enabled_layer_names: Cs)
            -> &'s mut DeviceBuilder<'db>
            where 'cs: 'db, Cs: 'cs + Into<CharStrs<'cs>> {
        // Store the names on `self` first so the raw pointer slice passed to
        // `create_info` stays backed by live storage.
        self.enabled_layer_names = Some(enabled_layer_names.into());
        if let Some(ref elns) = self.enabled_layer_names {
            self.create_info.set_enabled_layer_names(elns.as_ptr_slice());
        }
        self
    }

    /// Specifies the list of names of extensions to enable for the created
    /// device.
    ///
    /// These names are also consulted by `build` to decide which extension
    /// function pointers to load.
    pub fn enabled_extension_names<'s, 'cs, Cs>(&'s mut self, enabled_extension_names: Cs)
            -> &'s mut DeviceBuilder<'db>
            where 'cs: 'db, Cs: 'cs + Into<CharStrs<'cs>> {
        // As with layer names, keep the backing storage alive on `self`.
        self.enabled_extension_names = Some(enabled_extension_names.into());
        if let Some(ref eens) = self.enabled_extension_names {
            self.create_info.set_enabled_extension_names(eens.as_ptr_slice());
        }
        self
    }

    /// Specifies the structure that contains boolean indicators of all the
    /// features to be enabled.
    pub fn enabled_features<'s, 'f>(&'s mut self, enabled_features: &'f PhysicalDeviceFeatures)
            -> &'s mut DeviceBuilder<'db>
            where 'f: 'db {
        self.create_info.set_enabled_features(enabled_features);
        self
    }

    /// Builds and returns a new `Device`.
    ///
    /// Creates the logical device, loads its core function pointers, loads
    /// extension function pointers for each recognized enabled extension,
    /// then retrieves and caches the queues requested via the queue create
    /// infos. Panics if a requested queue cannot be retrieved.
    pub fn build(&self, physical_device: PhysicalDevice) -> VdResult<Device> {
        let handle = unsafe {
            physical_device.instance().create_device(physical_device.handle(), &self.create_info, None)?
        };

        let mut loader = vks::DeviceProcAddrLoader::from_get_device_proc_addr(
            physical_device.instance().proc_addr_loader().vk.pfn_vkGetDeviceProcAddr);

        unsafe {
            loader.load_vk(handle.to_raw());
        }

        // Load per-extension function pointers for the extensions this
        // wrapper recognizes; unrecognized names fall through to the no-op
        // arm at the bottom of the match.
        unsafe {
            if let Some(extension_name_char_strs) = self.enabled_extension_names.as_ref() {
                let extension_names = extension_name_char_strs.as_ptr_slice();
                for &extension_name in extension_names {
                    match CStr::from_ptr(extension_name).to_str().expect("invalid extension name") {
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_16bit_storage" => loader.load_khr_16bit_storage(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_android_surface" => loader.load_khr_android_surface(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_bind_memory2" => loader.load_khr_bind_memory2(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_dedicated_allocation" => loader.load_khr_dedicated_allocation(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_descriptor_update_template" => loader.load_khr_descriptor_update_template(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_display" => loader.load_khr_display(handle.to_raw()),
                        "VK_KHR_display_swapchain" => loader.load_khr_display_swapchain(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_external_fence" => loader.load_khr_external_fence(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_external_fence_capabilities" => loader.load_khr_external_fence_capabilities(handle.to_raw()),
                        "VK_KHR_external_fence_fd" => loader.load_khr_external_fence_fd(handle.to_raw()),
                        "VK_KHR_external_fence_win32" => loader.load_khr_external_fence_win32(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_external_memory" => loader.load_khr_external_memory(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_external_memory_capabilities" => loader.load_khr_external_memory_capabilities(handle.to_raw()),
                        "VK_KHR_external_memory_fd" => loader.load_khr_external_memory_fd(handle.to_raw()),
                        "VK_KHR_external_memory_win32" => loader.load_khr_external_memory_win32(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_external_semaphore" => loader.load_khr_external_semaphore(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_external_semaphore_capabilities" => loader.load_khr_external_semaphore_capabilities(handle.to_raw()),
                        "VK_KHR_external_semaphore_fd" => loader.load_khr_external_semaphore_fd(handle.to_raw()),
                        "VK_KHR_external_semaphore_win32" => loader.load_khr_external_semaphore_win32(handle.to_raw()),
                        "VK_KHR_get_memory_requirements2" => loader.load_khr_get_memory_requirements2(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_get_physical_device_properties2" => loader.load_khr_get_physical_device_properties2(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_get_surface_capabilities2" => loader.load_khr_get_surface_capabilities2(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_image_format_list" => loader.load_khr_image_format_list(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_incremental_present" => loader.load_khr_incremental_present(handle.to_raw()),
                        "VK_KHR_maintenance1" => loader.load_khr_maintenance1(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_maintenance2" => loader.load_khr_maintenance2(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_mir_surface" => loader.load_khr_mir_surface(handle.to_raw()),
                        "VK_KHR_push_descriptor" => loader.load_khr_push_descriptor(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_relaxed_block_layout" => loader.load_khr_relaxed_block_layout(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_sampler_mirror_clamp_to_edge" => loader.load_khr_sampler_mirror_clamp_to_edge(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_sampler_ycbcr_conversion" => loader.load_khr_sampler_ycbcr_conversion(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_shader_draw_parameters" => loader.load_khr_shader_draw_parameters(handle.to_raw()),
                        "VK_KHR_shared_presentable_image" => loader.load_khr_shared_presentable_image(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_storage_buffer_storage_class" => loader.load_khr_storage_buffer_storage_class(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_surface" => loader.load_khr_surface(handle.to_raw()),
                        "VK_KHR_swapchain" => loader.load_khr_swapchain(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_variable_pointers" => loader.load_khr_variable_pointers(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_wayland_surface" => loader.load_khr_wayland_surface(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_win32_keyed_mutex" => loader.load_khr_win32_keyed_mutex(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_win32_surface" => loader.load_khr_win32_surface(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_xcb_surface" => loader.load_khr_xcb_surface(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_xlib_surface" => loader.load_khr_xlib_surface(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_acquire_xlib_display" => loader.load_ext_acquire_xlib_display(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_blend_operation_advanced" => loader.load_ext_blend_operation_advanced(handle.to_raw()),
                        "VK_EXT_debug_marker" => loader.load_ext_debug_marker(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_debug_report" => loader.load_ext_debug_report(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_depth_range_unrestricted" => loader.load_ext_depth_range_unrestricted(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_direct_mode_display" => loader.load_ext_direct_mode_display(handle.to_raw()),
                        "VK_EXT_discard_rectangles" => loader.load_ext_discard_rectangles(handle.to_raw()),
                        "VK_EXT_display_control" => loader.load_ext_display_control(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_display_surface_counter" => loader.load_ext_display_surface_counter(handle.to_raw()),
                        "VK_EXT_hdr_metadata" => loader.load_ext_hdr_metadata(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_post_depth_coverage" => loader.load_ext_post_depth_coverage(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_sample_locations" => loader.load_ext_sample_locations(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_sampler_filter_minmax" => loader.load_ext_sampler_filter_minmax(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_shader_stencil_export" => loader.load_ext_shader_stencil_export(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_shader_subgroup_ballot" => loader.load_ext_shader_subgroup_ballot(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_shader_subgroup_vote" => loader.load_ext_shader_subgroup_vote(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_shader_viewport_index_layer" => loader.load_ext_shader_viewport_index_layer(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_swapchain_colorspace" => loader.load_ext_swapchain_colorspace(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_validation_cache" => loader.load_ext_validation_cache(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_validation_flags" => loader.load_ext_validation_flags(handle.to_raw()),
                        "VK_AMD_draw_indirect_count" => loader.load_amd_draw_indirect_count(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_gcn_shader" => loader.load_amd_gcn_shader(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_gpu_shader_half_float" => loader.load_amd_gpu_shader_half_float(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_gpu_shader_int16" => loader.load_amd_gpu_shader_int16(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_mixed_attachment_samples" => loader.load_amd_mixed_attachment_samples(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_negative_viewport_height" => loader.load_amd_negative_viewport_height(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_rasterization_order" => loader.load_amd_rasterization_order(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_shader_ballot" => loader.load_amd_shader_ballot(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_shader_explicit_vertex_parameter" => loader.load_amd_shader_explicit_vertex_parameter(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_shader_fragment_mask" => loader.load_amd_shader_fragment_mask(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_shader_image_load_store_lod" => loader.load_amd_shader_image_load_store_lod(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_shader_trinary_minmax" => loader.load_amd_shader_trinary_minmax(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_texture_gather_bias_lod" => loader.load_amd_texture_gather_bias_lod(handle.to_raw()),
                        "VK_GOOGLE_display_timing" => loader.load_google_display_timing(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_IMG_filter_cubic" => loader.load_img_filter_cubic(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_IMG_format_pvrtc" => loader.load_img_format_pvrtc(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHX_device_group" => loader.load_khx_device_group(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHX_device_group_creation" => loader.load_khx_device_group_creation(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHX_multiview" => loader.load_khx_multiview(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_MVK_ios_surface" => loader.load_mvk_ios_surface(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_MVK_macos_surface" => loader.load_mvk_macos_surface(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NN_vi_surface" => loader.load_nn_vi_surface(handle.to_raw()),
                        "VK_NV_clip_space_w_scaling" => loader.load_nv_clip_space_w_scaling(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_dedicated_allocation" => loader.load_nv_dedicated_allocation(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_external_memory" => loader.load_nv_external_memory(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_external_memory_capabilities" => loader.load_nv_external_memory_capabilities(handle.to_raw()),
                        "VK_NV_external_memory_win32" => loader.load_nv_external_memory_win32(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_fill_rectangle" => loader.load_nv_fill_rectangle(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_fragment_coverage_to_color" => loader.load_nv_fragment_coverage_to_color(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_framebuffer_mixed_samples" => loader.load_nv_framebuffer_mixed_samples(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_geometry_shader_passthrough" => loader.load_nv_geometry_shader_passthrough(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_glsl_shader" => loader.load_nv_glsl_shader(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_sample_mask_override_coverage" => loader.load_nv_sample_mask_override_coverage(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_viewport_array2" => loader.load_nv_viewport_array2(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_viewport_swizzle" => loader.load_nv_viewport_swizzle(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_win32_keyed_mutex" => loader.load_nv_win32_keyed_mutex(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NVX_device_generated_commands" => loader.load_nvx_device_generated_commands(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NVX_multiview_per_view_attributes" => loader.load_nvx_multiview_per_view_attributes(handle.to_raw()),
                        // Unrecognized extension names are simply not loaded.
                        &_ => (),
                    }
                }
            }
        }

        let instance = physical_device.instance().clone();

        // `queues` is left empty here and back-filled below, because each
        // `Queue` needs a clone of the finished `Device`.
        let device = Device {
            inner: Arc::new(Inner {
                handle,
                physical_device,
                queues: SmallVec::new(),
                instance,
                loader,
            }),
        };

        let mut queues: SmallVec<[Queue; 16]> = SmallVec::new();

        // One queue is retrieved per priority entry of each queue create info.
        for qci in self.create_info.queue_create_infos() {
            for q_idx in 0..qci.queue_priorities().len() as u32 {
                match get_device_queue(&device.inner.loader, device.inner.handle,
                        qci.queue_family_index(), q_idx) {
                    Some(q_handle) => unsafe {
                        queues.push(Queue::from_parts(q_handle, device.clone(),
                            qci.queue_family_index(), q_idx))
                    },
                    None => {
                        panic!("unable to get device queue (family_index: {}, index: {})",
                            qci.queue_family_index(), q_idx);
                    },
                }
            }
        }

        unsafe {
            // NOTE(review): back-fills `queues` by casting a shared `&Inner`
            // (behind an `Arc` already cloned into each `Queue` above) to
            // `*mut Inner`. Mutating through this pointer looks unsound
            // under Rust's aliasing rules — confirm, or restructure via
            // interior mutability / two-phase initialization.
            let inner_ptr = &(*device.inner) as *const Inner as *mut Inner;
            (*inner_ptr).queues = queues;
        }

        Ok(device)
    }
}