1use std::sync::Arc;
2use std::mem;
3use std::ptr;
4use std::marker::PhantomData;
5use std::ffi::CStr;
6use libc::{c_void};
7use smallvec::SmallVec;
8use vks;
9use ::{error, VdResult, Instance, PhysicalDevice, DeviceQueueCreateInfo, CharStrs,
10 PhysicalDeviceFeatures, PRINT, Handle, SubmitInfo, QueueHandle, MemoryAllocateInfo,
11 DeviceMemoryHandle, MemoryMapFlags, SwapchainKhrHandle, SwapchainCreateInfoKhr,
12 ShaderModuleCreateInfo, ShaderModuleHandle, SemaphoreCreateInfo, SemaphoreHandle,
13 SamplerCreateInfo, SamplerHandle, RenderPassCreateInfo, RenderPassHandle, BufferCreateInfo,
14 BufferHandle, BufferViewCreateInfo, BufferViewHandle, ImageViewCreateInfo, ImageViewHandle,
15 ImageCreateInfo, ImageHandle, FramebufferCreateInfo, FramebufferHandle,
16 DescriptorSetLayoutCreateInfo, DescriptorSetLayoutHandle, DescriptorPoolCreateInfo,
17 DescriptorPoolHandle, CommandPoolCreateInfo, CommandPoolHandle, CommandBufferAllocateInfo,
18 CommandBufferHandle, PipelineLayoutCreateInfo, PipelineLayoutHandle, FenceCreateInfo,
19 FenceHandle, EventCreateInfo, EventHandle, PipelineCacheCreateInfo, PipelineCacheHandle,
20 MemoryRequirements, DeviceSize, CommandBufferBeginInfo, GraphicsPipelineCreateInfo,
21 PipelineHandle, ComputePipelineCreateInfo, PipelineStageFlags, DependencyFlags, MemoryBarrier,
22 BufferMemoryBarrier, ImageMemoryBarrier, WriteDescriptorSet, CopyDescriptorSet,
23 BufferImageCopy, ImageLayout, BufferCopy, CommandBufferResetFlags, PipelineBindPoint, Viewport,
24 Rect2d, StencilFaceFlags, DebugMarkerMarkerInfoExt, DescriptorSetHandle, QueryPoolHandle,
25 QueryResultFlags, ShaderStageFlags, RenderPassBeginInfo, SubpassContents, ImageCopy, IndexType,
26 ImageBlit, Filter, ClearColorValue, ImageSubresourceRange, ClearDepthStencilValue,
27 ClearAttachment, ImageResolve, QueryControlFlags, ClearRect, PresentInfoKhr, MappedMemoryRange,
28 SparseImageMemoryRequirements, BindSparseInfo, CallResult, QueryPoolCreateInfo,
29 ImageSubresource, SubresourceLayout, DescriptorSetAllocateInfo, DescriptorPoolResetFlags,
30 Extent2d, CommandPoolResetFlags, CommandPoolTrimFlagsKhr, MemoryGetWin32HandleInfoKhr,
31 ExternalMemoryHandleTypeFlagsKhr, HANDLE, MemoryGetFdInfoKhr,
32 ImportSemaphoreWin32HandleInfoKhr, SemaphoreGetWin32HandleInfoKhr, ImportSemaphoreFdInfoKhr,
33 SemaphoreGetFdInfoKhr, PipelineLayout, BufferMemoryRequirementsInfo2Khr,
34 ImportFenceWin32HandleInfoKhr, FenceGetWin32HandleInfoKhr, ImportFenceFdInfoKhr,
35 FenceGetFdInfoKhr, ImageMemoryRequirementsInfo2Khr, ImageSparseMemoryRequirementsInfo2Khr,
36 DebugMarkerObjectTagInfoExt, DebugMarkerObjectNameInfoExt, DisplayPowerInfoExt,
37 DisplayKhrHandle, DeviceEventInfoExt, DisplayEventInfoExt, HdrMetadataExt,
38 SurfaceCounterFlagsExt, Queue};
39
40#[cfg(feature = "unimplemented")]
44use ::{SamplerYcbcrConversionCreateInfoKhr, IndirectCommandsLayoutNvxCreateInfo,
45 ObjectTableNvxCreateInfo, ValidationCacheExtCreateInfo, DescriptorUpdateTemplateCreateInfoKhr,
46 DescriptorUpdateTemplateKhrHandle, SamplerYcbcrConversionKhrHandle, IndirectCommandsLayoutNvxHandle,
47 ValidationCacheExtHandle, ObjectTableNvxHandle, SampleLocationsInfoExt, ValidationCacheExt,};
48
49
/// A raw Vulkan logical-device handle (`VkDevice`), passed by value.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
#[repr(C)]
pub struct DeviceHandle(pub(crate) vks::VkDevice);
55
impl DeviceHandle {
    /// Returns the underlying raw `VkDevice` handle.
    #[inline(always)]
    pub fn to_raw(&self) -> vks::VkDevice {
        self.0
    }
}
62
// A `DeviceHandle` is its own `Handle` target: `handle()` is a plain copy.
unsafe impl Handle for DeviceHandle {
    type Target = DeviceHandle;

    #[inline(always)]
    fn handle(&self) -> Self::Target {
        *self
    }
}
71
72fn get_device_queue(proc_addr_loader: &vks::DeviceProcAddrLoader, device: DeviceHandle,
73 queue_family_index: u32, queue_index: u32) -> Option<QueueHandle> {
74 let mut handle = ptr::null_mut();
75 unsafe {
76 proc_addr_loader.vk.vkGetDeviceQueue(device.to_raw(),
77 queue_family_index, queue_index, &mut handle);
78 }
79 if !handle.is_null() {
80 Some(QueueHandle(handle))
81 } else {
82 None
83 }
84}
85
86
/// Shared state behind a `Device`: the raw handle, the objects it was
/// created from (kept alive here), its queues, and the per-device
/// function-pointer loader.
#[derive(Debug)]
struct Inner {
    handle: DeviceHandle,
    // Keeps the parent physical device (and transitively the instance) alive.
    physical_device: PhysicalDevice,
    queues: SmallVec<[Queue; 16]>,
    instance: Instance,
    loader: vks::DeviceProcAddrLoader,
}
96
impl Drop for Inner {
    /// Destroys the logical device when the last `Device` clone is dropped.
    fn drop(&mut self) {
        if PRINT { println!("Destroying device..."); }
        unsafe {
            self.instance.destroy_device(self.handle, None);
        }
    }
}
105
106
/// A logical device. Cheap to clone; all clones share one `Inner` and the
/// underlying `VkDevice` is destroyed when the last clone is dropped.
#[derive(Debug, Clone)]
pub struct Device {
    inner: Arc<Inner>,
}
119
120impl Device {
    /// Returns a new `DeviceBuilder` used to configure and create a `Device`.
    pub fn builder<'db>() -> DeviceBuilder<'db> {
        DeviceBuilder::new()
    }
125
    /// Returns the queue at `device_queue_index` within this device's queue
    /// list, or `None` if the index is out of range.
    #[inline]
    pub fn queue(&self, device_queue_index: usize) -> Option<&Queue> {
        self.inner.queues.get(device_queue_index)
    }
134
    /// Returns all queues created with this device, as a slice.
    #[inline]
    pub fn queues(&self) -> &[Queue] {
        &self.inner.queues
    }
140
    /// Returns the device-level function-pointer loader.
    #[inline]
    pub fn proc_addr_loader(&self) -> &vks::DeviceProcAddrLoader {
        &self.inner.loader
    }
146
    /// Returns this device's raw handle wrapper.
    #[inline]
    pub fn handle(&self) -> DeviceHandle {
        self.inner.handle
    }
152
    /// Returns the physical device this logical device was created from.
    #[inline]
    pub fn physical_device(&self) -> &PhysicalDevice {
        &self.inner.physical_device
    }
158
    /// Returns the instance this device belongs to.
    #[inline]
    pub fn instance(&self) -> &Instance {
        &self.inner.instance
    }
164
    /// Blocks until the device becomes idle; alias for `device_wait_idle`.
    #[inline]
    pub fn wait_idle(&self) {
        self.device_wait_idle()
    }
173
174 pub fn memory_type_index(&self, type_filter: u32, properties: ::MemoryPropertyFlags)
178 -> VdResult<u32> {
179 let mem_props = self.physical_device().memory_properties();
180
181 for i in 0..mem_props.memory_type_count() {
182 if (type_filter & (1 << i)) != 0 &&
183 (mem_props.memory_types()[i as usize].property_flags() & properties) == properties
184 {
185 return Ok(i);
186 }
187 }
188 panic!("failed to find suitable memory type index with: type_filter: '{}', properties: '{:?}'",
189 type_filter, properties);
190 }
191
192
    /// Retrieves the handle of the queue at the given family and index, or
    /// `None` if the driver returns a null handle.
    pub fn get_device_queue(&self, queue_family_index: u32, queue_index: u32) -> Option<QueueHandle> {
        get_device_queue(self.proc_addr_loader(), self.inner.handle, queue_family_index, queue_index)
    }
202
203
    /// Submits the given batches to `queue` via `vkQueueSubmit`, optionally
    /// signalling `fence` on completion (`None` passes a null fence).
    ///
    /// ## Safety
    /// All handles referenced by `submit_info` must be valid for the
    /// duration of the submission.
    pub unsafe fn queue_submit<Q>(&self, queue: Q, submit_info: &[SubmitInfo],
            fence: Option<FenceHandle>) -> VdResult<()>
            where Q: Handle<Target=QueueHandle> {
        // A raw value of 0 is the Vulkan null handle for the fence.
        let fence_handle_raw = fence.map(|f| f.to_raw()).unwrap_or(0);
        let result = self.proc_addr_loader().vk.vkQueueSubmit(queue.handle().to_raw(),
            submit_info.len() as u32, submit_info.as_ptr() as *const vks::VkSubmitInfo,
            fence_handle_raw);
        error::check(result, "vkQueueSubmit", ())
    }
229
    /// Blocks until `queue` becomes idle via `vkQueueWaitIdle`.
    ///
    // NOTE(review): the call's result code is discarded here — confirm
    // whether device-loss errors should be surfaced to the caller.
    pub fn queue_wait_idle<Q>(&self, queue: Q)
            where Q: Handle<Target=QueueHandle> {
        unsafe {
            self.proc_addr_loader().vk.vkQueueWaitIdle(queue.handle().to_raw());
        }
    }
    /// Blocks until the whole device becomes idle via `vkDeviceWaitIdle`.
    ///
    // NOTE(review): the result code is discarded — confirm this is intended.
    pub fn device_wait_idle(&self) {
        unsafe {
            self.proc_addr_loader().vk.vkDeviceWaitIdle(self.handle().to_raw());
        }
    }
252
    /// Allocates device memory via `vkAllocateMemory` and returns its handle.
    ///
    /// A `None` allocator selects the implementation-default callbacks.
    ///
    /// ## Safety
    /// `allocate_info` (and `allocator`, if any) must be valid for the call.
    pub unsafe fn allocate_memory(&self, allocate_info: &MemoryAllocateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<DeviceMemoryHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkAllocateMemory(self.handle().0,
            allocate_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkAllocateMemory", DeviceMemoryHandle(handle))
    }
268
    /// Frees device memory via `vkFreeMemory`.
    ///
    /// ## Safety
    /// `memory` must be a valid allocation that is not in use and is not
    /// freed more than once.
    pub unsafe fn free_memory(&self, memory: DeviceMemoryHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkFreeMemory(self.handle().0,
            memory.handle().to_raw(), allocator);
    }
281
    /// Maps a region of `memory` into host address space via `vkMapMemory`
    /// and returns the pointer cast to `*mut T`.
    ///
    /// ## Safety
    /// The range must lie within a host-visible, currently unmapped
    /// allocation, and the caller chooses `T` — the cast is unchecked.
    pub unsafe fn map_memory<T>(&self, memory: DeviceMemoryHandle, offset_bytes: u64, size_bytes: u64,
            flags: MemoryMapFlags) -> VdResult<*mut T> {
        let mut data = ptr::null_mut();
        let result = self.proc_addr_loader().vk.vkMapMemory(self.handle().to_raw(),
            memory.to_raw(), offset_bytes, size_bytes, flags.bits(), &mut data);
        error::check(result, "vkMapMemory", data as *mut T)
    }
295
    /// Unmaps previously mapped device memory via `vkUnmapMemory`.
    ///
    /// ## Safety
    /// `memory` must currently be mapped; pointers obtained from
    /// `map_memory` become invalid after this call.
    pub unsafe fn unmap_memory(&self, memory: DeviceMemoryHandle) {
        self.proc_addr_loader().vk.vkUnmapMemory(self.handle().0, memory.to_raw());
    }
304
    /// Flushes host writes in the given mapped ranges to the device via
    /// `vkFlushMappedMemoryRanges`.
    ///
    /// ## Safety
    /// Every range must refer to currently mapped memory.
    pub unsafe fn flush_mapped_memory_ranges(&self, memory_ranges: &[MappedMemoryRange])
            -> VdResult<()> {
        let result = self.proc_addr_loader().vk.vkFlushMappedMemoryRanges(self.handle().to_raw(),
            memory_ranges.len() as u32, memory_ranges.as_ptr() as *const vks::VkMappedMemoryRange);
        error::check(result, "vkFlushMappedMemoryRanges", ())
    }
317
    /// Invalidates the given mapped ranges so device writes become visible
    /// to the host, via `vkInvalidateMappedMemoryRanges`.
    ///
    /// ## Safety
    /// Every range must refer to currently mapped memory.
    pub unsafe fn invalidate_mapped_memory_ranges(&self, memory_ranges: &[MappedMemoryRange])
            -> VdResult<()> {
        let result = self.proc_addr_loader().vk.vkInvalidateMappedMemoryRanges(self.handle().to_raw(),
            memory_ranges.len() as u32, memory_ranges.as_ptr() as *const vks::VkMappedMemoryRange);
        error::check(result, "vkInvalidateMappedMemoryRanges", ())
    }
330
    /// Queries the current commitment for a lazily allocated memory object
    /// via `vkGetDeviceMemoryCommitment`, in bytes.
    ///
    /// ## Safety
    /// `memory` must be a valid allocation belonging to this device.
    pub unsafe fn get_device_memory_commitment<Dm>(&self, memory: Dm)
            -> DeviceSize
            where Dm: Handle<Target=DeviceMemoryHandle> {
        let mut committed_memory_in_bytes = 0;
        self.proc_addr_loader().vk.vkGetDeviceMemoryCommitment(self.handle().to_raw(),
            memory.handle().to_raw(), &mut committed_memory_in_bytes);
        committed_memory_in_bytes
    }
345
    /// Binds `memory` (at `memory_offset`) to `buffer` via
    /// `vkBindBufferMemory`.
    ///
    /// ## Safety
    /// Both handles must be valid and the buffer must not already be bound.
    pub unsafe fn bind_buffer_memory(&self, buffer: BufferHandle, memory: DeviceMemoryHandle,
            memory_offset: DeviceSize) -> VdResult<()> {
        let result = self.proc_addr_loader().vk.vkBindBufferMemory(
            self.handle().to_raw(), buffer.to_raw(), memory.to_raw(), memory_offset);
        error::check(result, "vkBindBufferMemory", ())
    }
358
    /// Binds `memory` (at `memory_offset`) to `image` via
    /// `vkBindImageMemory`.
    ///
    /// ## Safety
    /// Both handles must be valid and the image must not already be bound.
    pub unsafe fn bind_image_memory(&self, image: ImageHandle, memory: DeviceMemoryHandle,
            memory_offset: DeviceSize) -> VdResult<()> {
        let result = self.proc_addr_loader().vk.vkBindImageMemory(
            self.handle().to_raw(), image.to_raw(), memory.to_raw(), memory_offset);
        error::check(result, "vkBindImageMemory", ())
    }
371
372 pub unsafe fn get_buffer_memory_requirements(&self, buffer: BufferHandle) -> MemoryRequirements {
379 let mut memory_requirements: vks::VkMemoryRequirements;
380 memory_requirements = mem::uninitialized();
381 self.proc_addr_loader().vk.vkGetBufferMemoryRequirements(self.handle().to_raw(),
382 buffer.to_raw(), &mut memory_requirements);
383 MemoryRequirements::from_raw(memory_requirements)
384 }
385
386 pub unsafe fn get_image_memory_requirements<I>(&self, image: I) -> MemoryRequirements
393 where I: Handle<Target=ImageHandle> {
394 let mut memory_requirements: vks::VkMemoryRequirements;
395 memory_requirements = mem::uninitialized();
396 self.proc_addr_loader().vk.vkGetImageMemoryRequirements(self.handle().to_raw(),
397 image.handle().to_raw(), &mut memory_requirements);
398 MemoryRequirements::from_raw(memory_requirements)
399 }
400
    /// Queries sparse memory requirements for `image` using the standard
    /// two-call pattern: first fetch the count, then fill the vector.
    ///
    /// ## Safety
    /// `image` must be a valid image belonging to this device.
    pub unsafe fn get_image_sparse_memory_requirements<I>(&self, image: I)
            -> SmallVec<[SparseImageMemoryRequirements; 32]>
            where I: Handle<Target=ImageHandle> {
        let mut sparse_memory_requirement_count = 0u32;
        let mut sparse_memory_requirements: SmallVec<[SparseImageMemoryRequirements; 32]> = SmallVec::new();
        // First call with a null pointer only writes the element count.
        self.proc_addr_loader().vk.vkGetImageSparseMemoryRequirements(self.handle().to_raw(),
            image.handle().to_raw(), &mut sparse_memory_requirement_count, ptr::null_mut());
        sparse_memory_requirements.reserve_exact(sparse_memory_requirement_count as usize);
        // NOTE(review): `set_len` exposes uninitialized elements until the
        // second call below fills them — relies on the driver writing all
        // `count` entries.
        sparse_memory_requirements.set_len(sparse_memory_requirement_count as usize);
        self.proc_addr_loader().vk.vkGetImageSparseMemoryRequirements(self.handle().to_raw(),
            image.handle().to_raw(), &mut sparse_memory_requirement_count,
            sparse_memory_requirements.as_mut_ptr() as *mut vks::VkSparseImageMemoryRequirements);
        sparse_memory_requirements
    }
422
    /// Submits sparse binding operations to `queue` via `vkQueueBindSparse`,
    /// signalling `fence` on completion.
    ///
    /// ## Safety
    /// All handles referenced by `bind_info` must remain valid for the
    /// duration of the operation.
    pub unsafe fn queue_bind_sparse<Q, F>(&self, queue: Q, bind_info: &[BindSparseInfo], fence: F)
            -> VdResult<()>
            where Q: Handle<Target=QueueHandle>, F: Handle<Target=FenceHandle> {
        let result = self.proc_addr_loader().vk.vkQueueBindSparse(queue.handle().to_raw(),
            bind_info.len() as u32, bind_info.as_ptr() as *const _ as *const vks::VkBindSparseInfo,
            fence.handle().to_raw());
        error::check(result, "vkQueueBindSparse", ())
    }
437
    /// Creates a fence via `vkCreateFence`. A `None` allocator selects the
    /// implementation-default callbacks.
    ///
    /// ## Safety
    /// `create_info` (and `allocator`, if any) must be valid for the call.
    pub unsafe fn create_fence(&self, create_info: &FenceCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<FenceHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreateFence(self.handle().to_raw(),
            create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateFence", FenceHandle(handle))
    }
452
    /// Destroys a fence via `vkDestroyFence`.
    ///
    /// ## Safety
    /// `fence` must be valid, not in use, and not destroyed twice.
    pub unsafe fn destroy_fence(&self, fence: FenceHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroyFence(self.handle().to_raw(),
            fence.to_raw(), allocator);
    }
465
    /// Resets all given fences to the unsignaled state via `vkResetFences`.
    ///
    /// ## Safety
    /// Every handle must be a valid fence that is not currently pending.
    pub unsafe fn reset_fences(&self, fences: &[FenceHandle]) -> VdResult<()> {
        let result = self.proc_addr_loader().vk.vkResetFences(self.handle().to_raw(),
            fences.len() as u32, fences.as_ptr() as *const vks::VkFence);
        error::check(result, "vkResetFences", ())
    }
477
    /// Queries the signaled/unsignaled status of `fence` via
    /// `vkGetFenceStatus`, returning the raw result as a `CallResult`.
    ///
    /// ## Safety
    /// `fence` must be a valid fence belonging to this device.
    pub unsafe fn get_fence_status<F>(&self, fence: F) -> VdResult<CallResult>
            where F: Handle<Target=FenceHandle> {
        let result = self.proc_addr_loader().vk.vkGetFenceStatus(self.handle().to_raw(), fence.handle().to_raw());
        error::check(result, "vkGetFenceStatus", CallResult::from(result))
    }
488
    /// Waits for one (`wait_all == false`) or all (`wait_all == true`) of
    /// the given fences, up to `timeout` nanoseconds, via `vkWaitForFences`.
    ///
    /// ## Safety
    /// Every handle must be a valid fence belonging to this device.
    pub unsafe fn wait_for_fences(&self, fences: &[FenceHandle], wait_all: bool, timeout: u64)
            -> VdResult<()> {
        let result = self.proc_addr_loader().vk.vkWaitForFences(self.handle().to_raw(),
            fences.len() as u32, fences.as_ptr() as *const vks::VkFence,
            wait_all as vks::VkBool32, timeout);
        error::check(result, "vkWaitForFences", ())
    }
502
    /// Creates a semaphore via `vkCreateSemaphore`. A `None` allocator
    /// selects the implementation-default callbacks.
    ///
    /// ## Safety
    /// `create_info` (and `allocator`, if any) must be valid for the call.
    pub unsafe fn create_semaphore(&self, create_info: &SemaphoreCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<SemaphoreHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreateSemaphore(self.handle().to_raw(),
            create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateSemaphore", SemaphoreHandle(handle))
    }
518
519 pub unsafe fn destroy_semaphore(&self, shader_module: SemaphoreHandle,
526 allocator: Option<*const vks::VkAllocationCallbacks>) {
527 let allocator = allocator.unwrap_or(ptr::null());
528 self.proc_addr_loader().vk.vkDestroySemaphore(self.handle().to_raw(),
529 shader_module.to_raw(), allocator);
530 }
531
    /// Creates an event via `vkCreateEvent`. A `None` allocator selects the
    /// implementation-default callbacks.
    ///
    /// ## Safety
    /// `create_info` (and `allocator`, if any) must be valid for the call.
    pub unsafe fn create_event(&self, create_info: &EventCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<EventHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreateEvent(self.handle().to_raw(),
            create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateEvent", EventHandle(handle))
    }
546
    /// Destroys an event via `vkDestroyEvent`.
    ///
    /// ## Safety
    /// `event` must be valid, not in use, and not destroyed twice.
    pub unsafe fn destroy_event(&self, event: EventHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroyEvent(self.handle().to_raw(),
            event.to_raw(), allocator);
    }
559
    /// Queries the set/reset status of `event` via `vkGetEventStatus`.
    ///
    /// ## Safety
    /// `event` must be a valid event belonging to this device.
    pub unsafe fn get_event_status<E>(&self, event: E) -> VdResult<CallResult>
            where E: Handle<Target=EventHandle> {
        let result = self.proc_addr_loader().vk.vkGetEventStatus(self.handle().to_raw(),
            event.handle().to_raw());
        error::check(result, "vkGetEventStatus", CallResult::from(result))
    }
571
    /// Sets `event` to the signaled state via `vkSetEvent`.
    ///
    /// ## Safety
    /// `event` must be a valid event belonging to this device.
    pub unsafe fn set_event<E>(&self, event: E) -> VdResult<()>
            where E: Handle<Target=EventHandle> {
        let result = self.proc_addr_loader().vk.vkSetEvent(self.handle().to_raw(),
            event.handle().to_raw());
        error::check(result, "vkSetEvent", ())
    }
583
    /// Resets `event` to the unsignaled state via `vkResetEvent`.
    ///
    /// ## Safety
    /// `event` must be a valid event belonging to this device.
    pub unsafe fn reset_event<E>(&self, event: E) -> VdResult<()>
            where E: Handle<Target=EventHandle> {
        let result = self.proc_addr_loader().vk.vkResetEvent(self.handle().to_raw(),
            event.handle().to_raw());
        error::check(result, "vkResetEvent", ())
    }
595
    /// Creates a query pool via `vkCreateQueryPool`. A `None` allocator
    /// selects the implementation-default callbacks.
    ///
    /// ## Safety
    /// `create_info` (and `allocator`, if any) must be valid for the call.
    pub unsafe fn create_query_pool(&self, create_info: &QueryPoolCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<QueryPoolHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreateQueryPool(self.handle().to_raw(),
            create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateQueryPool", QueryPoolHandle(handle))
    }
611
    /// Destroys a query pool via `vkDestroyQueryPool`.
    ///
    /// ## Safety
    /// `query_pool` must be valid, not in use, and not destroyed twice.
    pub unsafe fn destroy_query_pool(&self, query_pool: QueryPoolHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroyQueryPool(self.handle().to_raw(),
            query_pool.to_raw(), allocator);
    }
624
    /// Copies results for queries `[first_query, first_query + query_count)`
    /// into `data` via `vkGetQueryPoolResults`.
    ///
    /// ## Safety
    /// `data` must point to at least `data_size` writable bytes, laid out
    /// consistently with `stride` and `flags`.
    pub unsafe fn get_query_pool_results<Q>(&self, query_pool: Q, first_query: u32, query_count: u32,
            data_size: usize, data: *mut c_void, stride: DeviceSize, flags: QueryResultFlags)
            -> VdResult<()>
            where Q: Handle<Target=QueryPoolHandle> {
        let result = self.proc_addr_loader().vk.vkGetQueryPoolResults(self.handle().to_raw(),
            query_pool.handle().to_raw(), first_query, query_count, data_size, data, stride,
            flags.bits());
        error::check(result, "vkGetQueryPoolResults", ())
    }
641
    /// Creates a buffer via `vkCreateBuffer`. A `None` allocator selects
    /// the implementation-default callbacks.
    ///
    /// ## Safety
    /// `create_info` (and `allocator`, if any) must be valid for the call.
    pub unsafe fn create_buffer(&self, create_info: &BufferCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<BufferHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreateBuffer(self.handle().to_raw(),
            create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateBuffer", BufferHandle(handle))
    }
657
    /// Destroys a buffer via `vkDestroyBuffer`.
    ///
    /// ## Safety
    /// `buffer` must be valid, not in use, and not destroyed twice.
    pub unsafe fn destroy_buffer(&self, buffer: BufferHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroyBuffer(self.handle().to_raw(),
            buffer.to_raw(), allocator);
    }
670
    /// Creates a buffer view via `vkCreateBufferView`. A `None` allocator
    /// selects the implementation-default callbacks.
    ///
    /// ## Safety
    /// `create_info` (and `allocator`, if any) must be valid for the call.
    pub unsafe fn create_buffer_view(&self, create_info: &BufferViewCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<BufferViewHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreateBufferView(self.handle().to_raw(),
            create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateBufferView", BufferViewHandle(handle))
    }
686
    /// Destroys a buffer view via `vkDestroyBufferView`.
    ///
    /// ## Safety
    /// `buffer_view` must be valid, not in use, and not destroyed twice.
    pub unsafe fn destroy_buffer_view(&self, buffer_view: BufferViewHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroyBufferView(self.handle().to_raw(),
            buffer_view.to_raw(), allocator);
    }
699
    /// Creates an image via `vkCreateImage`. A `None` allocator selects the
    /// implementation-default callbacks.
    ///
    /// ## Safety
    /// `create_info` (and `allocator`, if any) must be valid for the call.
    pub unsafe fn create_image(&self, create_info: &ImageCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<ImageHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreateImage(self.handle().to_raw(),
            create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateImage", ImageHandle(handle))
    }
714
    /// Destroys an image via `vkDestroyImage`.
    ///
    /// ## Safety
    /// `image` must be valid, not in use, and not destroyed twice.
    pub unsafe fn destroy_image(&self, image: ImageHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroyImage(self.handle().to_raw(),
            image.to_raw(), allocator);
    }
727
728 pub unsafe fn get_image_subresource_layout<I>(&self, image: I, subresource: &ImageSubresource)
735 -> SubresourceLayout
736 where I: Handle<Target=ImageHandle> {
737 let mut layout = mem::uninitialized();
738 self.proc_addr_loader().vk.vkGetImageSubresourceLayout(self.handle().to_raw(),
739 image.handle().to_raw(), subresource.as_raw(),
740 &mut layout as *mut _ as *mut vks::VkSubresourceLayout);
741 layout
742 }
743
    /// Creates an image view via `vkCreateImageView`. A `None` allocator
    /// selects the implementation-default callbacks.
    ///
    /// ## Safety
    /// `create_info` (and `allocator`, if any) must be valid for the call.
    pub unsafe fn create_image_view(&self, create_info: &ImageViewCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<ImageViewHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreateImageView(self.handle().to_raw(),
            create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateImageView", ImageViewHandle(handle))
    }
759
    /// Destroys an image view via `vkDestroyImageView`.
    ///
    /// ## Safety
    /// `image_view` must be valid, not in use, and not destroyed twice.
    pub unsafe fn destroy_image_view(&self, image_view: ImageViewHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroyImageView(self.handle().to_raw(),
            image_view.to_raw(), allocator);
    }
772
    /// Creates a shader module via `vkCreateShaderModule`. A `None`
    /// allocator selects the implementation-default callbacks.
    ///
    /// ## Safety
    /// `create_info` (and `allocator`, if any) must be valid for the call.
    pub unsafe fn create_shader_module(&self, create_info: &ShaderModuleCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<ShaderModuleHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreateShaderModule(self.handle().to_raw(),
            create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateShaderModule", ShaderModuleHandle(handle))
    }
788
    /// Destroys a shader module via `vkDestroyShaderModule`.
    ///
    /// ## Safety
    /// `shader_module` must be valid, not in use, and not destroyed twice.
    pub unsafe fn destroy_shader_module(&self, shader_module: ShaderModuleHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroyShaderModule(self.handle().to_raw(),
            shader_module.to_raw(), allocator);
    }
801
    /// Creates a pipeline cache via `vkCreatePipelineCache`. A `None`
    /// allocator selects the implementation-default callbacks.
    ///
    /// ## Safety
    /// `create_info` (and `allocator`, if any) must be valid for the call.
    pub unsafe fn create_pipeline_cache(&self, create_info: &PipelineCacheCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<PipelineCacheHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreatePipelineCache(self.handle().to_raw(),
            create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreatePipelineCache", PipelineCacheHandle(handle))
    }
817
    /// Destroys a pipeline cache via `vkDestroyPipelineCache`.
    ///
    /// ## Safety
    /// `pipeline_cache` must be valid, not in use, and not destroyed twice.
    pub unsafe fn destroy_pipeline_cache(&self, pipeline_cache: PipelineCacheHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroyPipelineCache(self.handle().to_raw(),
            pipeline_cache.to_raw(), allocator);
    }
830
    /// Retrieves pipeline cache contents via `vkGetPipelineCacheData`.
    ///
    /// ## Safety
    /// `data_size` must point to the capacity of `data` (or `data` may be
    /// null to query the required size, per the raw call's contract).
    pub unsafe fn get_pipeline_cache_data<Pc>(&self, pipeline_cache: Pc, data_size: *mut usize,
            data: *mut c_void) -> VdResult<()>
            where Pc: Handle<Target=PipelineCacheHandle> {
        let result = self.proc_addr_loader().vk.vkGetPipelineCacheData(self.handle().to_raw(),
            pipeline_cache.handle().to_raw(), data_size, data);
        error::check(result, "vkGetPipelineCacheData", ())
    }
844
    /// Merges `src_caches` into `dst_cache` via `vkMergePipelineCaches`.
    ///
    /// ## Safety
    /// All cache handles must be valid and `dst_cache` must not appear in
    /// `src_caches`.
    pub unsafe fn merge_pipeline_caches<Pc>(&self, dst_cache: Pc, src_caches: &[PipelineCacheHandle])
            -> VdResult<()>
            where Pc: Handle<Target=PipelineCacheHandle> {
        let result = self.proc_addr_loader().vk.vkMergePipelineCaches(self.handle().to_raw(),
            dst_cache.handle().to_raw(), src_caches.len() as u32,
            src_caches.as_ptr() as *const vks::VkPipelineCache);
        error::check(result, "vkMergePipelineCaches", ())
    }
859
    /// Creates one graphics pipeline per entry of `create_infos` via
    /// `vkCreateGraphicsPipelines`, optionally using `pipeline_cache`
    /// (`None` passes a null cache).
    ///
    /// ## Safety
    /// Every handle referenced by `create_infos` must be valid for the call.
    pub unsafe fn create_graphics_pipelines(&self, pipeline_cache: Option<PipelineCacheHandle>,
            create_infos: &[GraphicsPipelineCreateInfo],
            allocator: Option<*const vks::VkAllocationCallbacks>)
            -> VdResult<SmallVec<[PipelineHandle; 4]>> {
        let allocator = allocator.unwrap_or(ptr::null());
        // 0 is the Vulkan null handle for the cache.
        let pipeline_cache = pipeline_cache.map(|pc| pc.to_raw()).unwrap_or(0);
        let mut pipelines = SmallVec::<[PipelineHandle; 4]>::new();
        pipelines.reserve_exact(create_infos.len());
        // NOTE(review): `set_len` exposes uninitialized handles until the
        // driver fills them in the call below.
        pipelines.set_len(create_infos.len());
        let result = self.proc_addr_loader().vk.vkCreateGraphicsPipelines(self.handle().to_raw(),
            pipeline_cache, create_infos.len() as u32,
            create_infos.as_ptr() as *const vks::VkGraphicsPipelineCreateInfo,
            allocator,
            pipelines.as_mut_ptr() as *mut vks::VkPipeline);
        error::check(result, "vkCreateGraphicsPipelines", pipelines)
    }
884
    /// Creates one compute pipeline per entry of `create_infos` via
    /// `vkCreateComputePipelines`, optionally using `pipeline_cache`
    /// (`None` passes a null cache).
    ///
    /// ## Safety
    /// Every handle referenced by `create_infos` must be valid for the call.
    pub unsafe fn create_compute_pipelines(&self, pipeline_cache: Option<PipelineCacheHandle>,
            create_infos: &[ComputePipelineCreateInfo],
            allocator: Option<*const vks::VkAllocationCallbacks>)
            -> VdResult<SmallVec<[PipelineHandle; 4]>> {
        let allocator = allocator.unwrap_or(ptr::null());
        // 0 is the Vulkan null handle for the cache.
        let pipeline_cache = pipeline_cache.map(|pc| pc.to_raw()).unwrap_or(0);
        let mut pipelines = SmallVec::<[PipelineHandle; 4]>::new();
        pipelines.reserve_exact(create_infos.len());
        // NOTE(review): `set_len` exposes uninitialized handles until the
        // driver fills them in the call below.
        pipelines.set_len(create_infos.len());
        let result = self.proc_addr_loader().vk.vkCreateComputePipelines(self.handle().to_raw(),
            pipeline_cache, create_infos.len() as u32,
            create_infos.as_ptr() as *const vks::VkComputePipelineCreateInfo,
            allocator,
            pipelines.as_mut_ptr() as *mut vks::VkPipeline);
        error::check(result, "vkCreateComputePipelines", pipelines)
    }
909
    /// Destroys a pipeline via `vkDestroyPipeline`.
    ///
    /// ## Safety
    /// `pipeline` must be valid, not in use, and not destroyed twice.
    pub unsafe fn destroy_pipeline(&self, pipeline: PipelineHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroyPipeline(self.handle().to_raw(),
            pipeline.to_raw(), allocator);
    }
922
    /// Creates a pipeline layout via `vkCreatePipelineLayout`. A `None`
    /// allocator selects the implementation-default callbacks.
    ///
    /// ## Safety
    /// `create_info` (and `allocator`, if any) must be valid for the call.
    pub unsafe fn create_pipeline_layout(&self, create_info: &PipelineLayoutCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<PipelineLayoutHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreatePipelineLayout(self.handle().to_raw(),
            create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreatePipelineLayout", PipelineLayoutHandle(handle))
    }
938
    /// Destroys a pipeline layout via `vkDestroyPipelineLayout`.
    ///
    /// ## Safety
    /// `pipeline_layout` must be valid, not in use, and not destroyed twice.
    pub unsafe fn destroy_pipeline_layout(&self, pipeline_layout: PipelineLayoutHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroyPipelineLayout(self.handle().to_raw(),
            pipeline_layout.to_raw(), allocator);
    }
951
    /// Creates a sampler via `vkCreateSampler`. A `None` allocator selects
    /// the implementation-default callbacks.
    ///
    /// ## Safety
    /// `create_info` (and `allocator`, if any) must be valid for the call.
    pub unsafe fn create_sampler(&self, create_info: &SamplerCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<SamplerHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreateSampler(self.handle().to_raw(),
            create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateSampler", SamplerHandle(handle))
    }
967
    /// Destroys a sampler via `vkDestroySampler`.
    ///
    /// ## Safety
    /// `sampler` must be valid, not in use, and not destroyed twice.
    pub unsafe fn destroy_sampler(&self, sampler: SamplerHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroySampler(self.handle().to_raw(),
            sampler.to_raw(), allocator);
    }
980
    /// Creates a descriptor set layout via `vkCreateDescriptorSetLayout`. A
    /// `None` allocator selects the implementation-default callbacks.
    ///
    /// ## Safety
    /// `create_info` (and `allocator`, if any) must be valid for the call.
    pub unsafe fn create_descriptor_set_layout(&self, create_info: &DescriptorSetLayoutCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<DescriptorSetLayoutHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreateDescriptorSetLayout(self.handle().to_raw(),
            create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateDescriptorSetLayout", DescriptorSetLayoutHandle(handle))
    }
996
    /// Destroys a descriptor set layout via `vkDestroyDescriptorSetLayout`.
    ///
    /// ## Safety
    /// The handle must be valid, not in use, and not destroyed twice.
    pub unsafe fn destroy_descriptor_set_layout(&self, descriptor_set_layout: DescriptorSetLayoutHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroyDescriptorSetLayout(self.handle().to_raw(),
            descriptor_set_layout.to_raw(), allocator);
    }
1010
    /// Creates a descriptor pool via `vkCreateDescriptorPool`. A `None`
    /// allocator selects the implementation-default callbacks.
    ///
    /// ## Safety
    /// `create_info` (and `allocator`, if any) must be valid for the call.
    pub unsafe fn create_descriptor_pool(&self, create_info: &DescriptorPoolCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<DescriptorPoolHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreateDescriptorPool(self.handle().to_raw(),
            create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateDescriptorPool", DescriptorPoolHandle(handle))
    }
1026
    /// Destroys a descriptor pool via `vkDestroyDescriptorPool`.
    ///
    /// ## Safety
    /// `descriptor_pool` must be valid, not in use, and not destroyed twice;
    /// sets allocated from it become invalid.
    pub unsafe fn destroy_descriptor_pool(&self, descriptor_pool: DescriptorPoolHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroyDescriptorPool(self.handle().to_raw(),
            descriptor_pool.to_raw(), allocator);
    }
1039
    /// Returns all descriptor sets in `descriptor_pool` to the pool via
    /// `vkResetDescriptorPool`.
    ///
    /// ## Safety
    /// No set allocated from the pool may be in use by the device.
    pub unsafe fn reset_descriptor_pool<Dp>(&self, descriptor_pool: Dp,
            flags: DescriptorPoolResetFlags) -> VdResult<()>
            where Dp: Handle<Target=DescriptorPoolHandle> {
        let result = self.proc_addr_loader().vk.vkResetDescriptorPool(self.handle().to_raw(),
            descriptor_pool.handle().to_raw(), flags.bits());
        error::check(result, "vkResetDescriptorPool", ())
    }
1053
    /// Allocates one descriptor set per layout listed in `allocate_info`
    /// via `vkAllocateDescriptorSets`.
    ///
    /// ## Safety
    /// All handles referenced by `allocate_info` must be valid for the call.
    pub unsafe fn allocate_descriptor_sets(&self, allocate_info: &DescriptorSetAllocateInfo)
            -> VdResult<SmallVec<[DescriptorSetHandle; 8]>> {
        let mut descriptor_sets = SmallVec::<[DescriptorSetHandle; 8]>::new();
        // One output handle is produced per requested set layout.
        let count = allocate_info.set_layouts().len();
        descriptor_sets.reserve_exact(count);
        // NOTE(review): `set_len` exposes uninitialized handles until the
        // driver fills them in the call below.
        descriptor_sets.set_len(count);
        let result = self.proc_addr_loader().vk.vkAllocateDescriptorSets(
            self.handle().to_raw(), allocate_info.as_raw(),
            descriptor_sets.as_mut_ptr() as *mut vks::VkDescriptorSet);
        error::check(result, "vkAllocateDescriptorSets", descriptor_sets)
    }
1072
    /// Returns the given descriptor sets to `descriptor_pool` via
    /// `vkFreeDescriptorSets`.
    ///
    /// ## Safety
    /// Every set must have been allocated from `descriptor_pool` and must
    /// not be in use by the device.
    pub unsafe fn free_descriptor_sets<Dp>(&self, descriptor_pool: Dp,
            descriptor_sets: &[DescriptorSetHandle]) -> VdResult<()>
            where Dp: Handle<Target=DescriptorPoolHandle> {
        let result = self.proc_addr_loader().vk.vkFreeDescriptorSets(self.handle().to_raw(),
            descriptor_pool.handle().to_raw(), descriptor_sets.len() as u32,
            descriptor_sets.as_ptr() as *const vks::VkDescriptorSet);
        error::check(result, "vkFreeDescriptorSets", ())
    }
1088
    /// Applies the given descriptor writes and copies via
    /// `vkUpdateDescriptorSets`. Either slice may be empty.
    pub fn update_descriptor_sets(&self, descriptor_writes: &[WriteDescriptorSet],
            descriptor_copies: &[CopyDescriptorSet]) {
        unsafe {
            self.proc_addr_loader().vk.vkUpdateDescriptorSets(self.handle().0,
                descriptor_writes.len() as u32,
                descriptor_writes.as_ptr() as *const vks::VkWriteDescriptorSet,
                descriptor_copies.len() as u32,
                descriptor_copies.as_ptr() as *const vks::VkCopyDescriptorSet);
        }
    }
1107
    /// Creates a framebuffer via `vkCreateFramebuffer`. A `None` allocator
    /// selects the implementation-default callbacks.
    ///
    /// ## Safety
    /// `create_info` (and `allocator`, if any) must be valid for the call.
    pub unsafe fn create_framebuffer(&self, create_info: &FramebufferCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<FramebufferHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreateFramebuffer(self.handle().to_raw(),
            create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateFramebuffer", FramebufferHandle(handle))
    }
1123
    /// Destroys a framebuffer via `vkDestroyFramebuffer`.
    ///
    /// ## Safety
    /// `framebuffer` must be valid, not in use, and not destroyed twice.
    pub unsafe fn destroy_framebuffer(&self, framebuffer: FramebufferHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroyFramebuffer(self.handle().to_raw(),
            framebuffer.to_raw(), allocator);
    }
1136
    /// Creates a render pass via `vkCreateRenderPass`. A `None` allocator
    /// selects the implementation-default callbacks.
    ///
    /// ## Safety
    /// `create_info` (and `allocator`, if any) must be valid for the call.
    pub unsafe fn create_render_pass(&self, create_info: &RenderPassCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<RenderPassHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreateRenderPass(self.handle().to_raw(),
            create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateRenderPass", RenderPassHandle(handle))
    }
1152
    /// Destroys a render pass object (vkDestroyRenderPass).
    ///
    /// `allocator` must match the callbacks used at creation time, or be `None`
    /// if none were supplied then.
    pub unsafe fn destroy_render_pass(&self, render_pass: RenderPassHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroyRenderPass(self.handle().to_raw(),
            render_pass.to_raw(), allocator);
    }
1165
1166 pub unsafe fn get_render_area_granularity<Rp>(&self, render_pass: Rp)
1173 -> Extent2d
1174 where Rp: Handle<Target=RenderPassHandle> {
1175 let mut granularity = mem::uninitialized();
1176 self.proc_addr_loader().vk.vkGetRenderAreaGranularity(self.handle().to_raw(),
1177 render_pass.handle().to_raw(), &mut granularity as *mut _ as *mut vks::VkExtent2D);
1178 granularity
1179 }
1180
1181 pub unsafe fn create_command_pool(&self, create_info: &CommandPoolCreateInfo,
1189 allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<CommandPoolHandle> {
1190 let allocator = allocator.unwrap_or(ptr::null());
1191 let mut handle = 0;
1192 let result = self.proc_addr_loader().vk.vkCreateCommandPool(self.handle().to_raw(),
1193 create_info.as_raw(), allocator, &mut handle);
1194 error::check(result, "vkCreateCommandPool", CommandPoolHandle(handle))
1195 }
1196
    /// Destroys a command pool object (vkDestroyCommandPool).
    ///
    /// `allocator` must match the callbacks used at creation time, or be `None`
    /// if none were supplied then.
    pub unsafe fn destroy_command_pool(&self, command_pool: CommandPoolHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroyCommandPool(self.handle().to_raw(),
            command_pool.to_raw(), allocator);
    }
1209
    /// Resets a command pool, recycling all of its command buffers
    /// (vkResetCommandPool).
    pub unsafe fn reset_command_pool<Cp>(&self, command_pool: Cp, flags: CommandPoolResetFlags)
            -> VdResult<()>
            where Cp: Handle<Target=CommandPoolHandle> {
        let result = self.proc_addr_loader().vk.vkResetCommandPool(self.handle().to_raw(),
            command_pool.handle().to_raw(), flags.bits());
        error::check(result, "vkResetCommandPool", ())
    }
1223
    /// Allocates command buffers from a pool (vkAllocateCommandBuffers).
    ///
    /// The output vector is sized to `command_buffer_count` up front so the
    /// driver can write the handles directly into it.
    // NOTE(review): `set_len` exposes uninitialized handle slots before the FFI
    // call fills them; sound only because the driver writes every element on
    // success and the vector is discarded on error — confirm handles are Copy/POD.
    pub unsafe fn allocate_command_buffers(&self, allocate_info: &CommandBufferAllocateInfo)
            -> VdResult<SmallVec<[CommandBufferHandle; 16]>> {
        let mut command_buffers: SmallVec<[CommandBufferHandle; 16]> = SmallVec::new();
        command_buffers.reserve_exact(allocate_info.command_buffer_count() as usize);
        command_buffers.set_len(allocate_info.command_buffer_count() as usize);
        let result = self.proc_addr_loader().vk.vkAllocateCommandBuffers(
            self.handle().to_raw(), allocate_info.as_raw(),
            command_buffers.as_mut_ptr() as *mut vks::VkCommandBuffer);
        error::check(result, "vkAllocateCommandBuffers", command_buffers)
    }
1241
    /// Frees command buffers back to the pool they were allocated from
    /// (vkFreeCommandBuffers).
    pub unsafe fn free_command_buffers<Cp>(&self, command_pool: Cp, command_buffers: &[CommandBufferHandle])
            where Cp: Handle<Target=CommandPoolHandle> {
        self.proc_addr_loader().vk.vkFreeCommandBuffers(self.handle().to_raw(),
            command_pool.handle().to_raw(), command_buffers.len() as u32,
            command_buffers.as_ptr() as *const vks::VkCommandBuffer);
    }
1254
    /// Starts recording into a command buffer (vkBeginCommandBuffer).
    pub unsafe fn begin_command_buffer(&self, command_buffer: CommandBufferHandle,
            begin_info: &CommandBufferBeginInfo) -> VdResult<()> {
        let result = self.proc_addr_loader().vk.vkBeginCommandBuffer(command_buffer.to_raw(), begin_info.as_raw());
        error::check(result, "vkBeginCommandBuffer", ())
    }
1266
    /// Finishes recording a command buffer (vkEndCommandBuffer).
    pub unsafe fn end_command_buffer(&self, command_buffer: CommandBufferHandle) -> VdResult<()> {
        let result = self.proc_addr_loader().vk.vkEndCommandBuffer(command_buffer.to_raw());
        error::check(result, "vkEndCommandBuffer", ())
    }
1276
    /// Resets a command buffer to its initial state (vkResetCommandBuffer).
    // NOTE(review): named `cmd_*` although this wraps vkResetCommandBuffer,
    // which is not a recorded command — renaming would break callers, so
    // the misnomer is kept.
    pub unsafe fn cmd_reset_command_buffer(&self, command_buffer: CommandBufferHandle,
            flags: CommandBufferResetFlags) -> VdResult<()> {
        let result = self.proc_addr_loader().vk.vkResetCommandBuffer(command_buffer.to_raw(), flags.bits());
        error::check(result, "vkResetCommandBuffer", ())
    }
1288
    /// Records a pipeline bind into a command buffer (vkCmdBindPipeline).
    pub unsafe fn cmd_bind_pipeline(&self, command_buffer: CommandBufferHandle,
            pipeline_bind_point: PipelineBindPoint, pipeline: PipelineHandle) {
        self.proc_addr_loader().vk.vkCmdBindPipeline(command_buffer.to_raw(),
            pipeline_bind_point.into(), pipeline.handle().to_raw());
    }
1300
    /// Records a dynamic viewport update (vkCmdSetViewport).
    pub unsafe fn cmd_set_viewport(&self, command_buffer: CommandBufferHandle,
            first_viewport: u32, viewports: &[Viewport]) {
        self.proc_addr_loader().vk.vkCmdSetViewport(command_buffer.to_raw(),
            first_viewport, viewports.len() as u32, viewports.as_ptr() as *const vks::VkViewport);
    }
1312
    /// Records a dynamic scissor rectangle update (vkCmdSetScissor).
    pub unsafe fn cmd_set_scissor(&self, command_buffer: CommandBufferHandle, first_scissor: u32,
            scissors: &[Rect2d]) {
        self.proc_addr_loader().vk.vkCmdSetScissor(command_buffer.to_raw(),
            first_scissor, scissors.len() as u32, scissors.as_ptr() as *const vks::VkRect2D);
    }
1324
    /// Records a dynamic line width update (vkCmdSetLineWidth).
    pub unsafe fn cmd_set_line_width(&self, command_buffer: CommandBufferHandle, line_width: f32) {
        self.proc_addr_loader().vk.vkCmdSetLineWidth(command_buffer.to_raw(), line_width);
    }
1334
1335
    /// Records a dynamic depth bias update (vkCmdSetDepthBias).
    pub unsafe fn cmd_set_depth_bias(&self, command_buffer: CommandBufferHandle,
            depth_bias_constant_factor: f32, depth_bias_clamp: f32, depth_bias_slope_factor: f32) {
        self.proc_addr_loader().vk.vkCmdSetDepthBias(command_buffer.to_raw(),
            depth_bias_constant_factor, depth_bias_clamp, depth_bias_slope_factor);
    }
1348
    /// Records a dynamic blend-constants update (vkCmdSetBlendConstants).
    pub unsafe fn cmd_set_blend_constants(&self, command_buffer: CommandBufferHandle,
            blend_constants: [f32; 4]) {
        self.proc_addr_loader().vk.vkCmdSetBlendConstants(command_buffer.to_raw(),
            blend_constants.as_ptr());
    }
1360
    /// Records a dynamic depth-bounds update (vkCmdSetDepthBounds).
    pub unsafe fn cmd_set_depth_bounds(&self, command_buffer: CommandBufferHandle,
            min_depth_bounds: f32, max_depth_bounds: f32) {
        self.proc_addr_loader().vk.vkCmdSetDepthBounds(command_buffer.to_raw(),
            min_depth_bounds, max_depth_bounds);
    }
1372
    /// Records a dynamic stencil compare-mask update (vkCmdSetStencilCompareMask).
    pub unsafe fn cmd_set_stencil_compare_mask(&self, command_buffer: CommandBufferHandle,
            face_mask: StencilFaceFlags, compare_mask: u32) {
        self.proc_addr_loader().vk.vkCmdSetStencilCompareMask(command_buffer.to_raw(),
            face_mask.bits(), compare_mask);
    }
1384
    /// Records a dynamic stencil write-mask update (vkCmdSetStencilWriteMask).
    pub unsafe fn cmd_set_stencil_write_mask(&self, command_buffer: CommandBufferHandle,
            face_mask: StencilFaceFlags, write_mask: u32) {
        self.proc_addr_loader().vk.vkCmdSetStencilWriteMask(command_buffer.to_raw(),
            face_mask.bits(), write_mask);
    }
1396
    /// Records a dynamic stencil reference-value update (vkCmdSetStencilReference).
    pub unsafe fn cmd_set_stencil_reference(&self, command_buffer: CommandBufferHandle,
            face_mask: StencilFaceFlags, reference: u32) {
        self.proc_addr_loader().vk.vkCmdSetStencilReference(command_buffer.to_raw(),
            face_mask.bits(), reference);
    }
1408
    /// Records a descriptor-set bind into a command buffer (vkCmdBindDescriptorSets).
    ///
    /// `dynamic_offsets` supplies one offset per dynamic descriptor in the
    /// bound sets, in order.
    pub unsafe fn cmd_bind_descriptor_sets(&self, command_buffer: CommandBufferHandle,
            pipeline_bind_point: PipelineBindPoint, layout: PipelineLayoutHandle,
            first_set: u32, descriptor_sets: &[DescriptorSetHandle],
            dynamic_offsets: &[u32]) {
        self.proc_addr_loader().vk.vkCmdBindDescriptorSets(command_buffer.to_raw(), pipeline_bind_point.into(),
            layout.handle().to_raw(), first_set, descriptor_sets.len() as u32,
            descriptor_sets.as_ptr() as *const vks::VkDescriptorSet,
            dynamic_offsets.len() as u32, dynamic_offsets.as_ptr());
    }
1427
    /// Records an index-buffer bind into a command buffer (vkCmdBindIndexBuffer).
    pub unsafe fn cmd_bind_index_buffer(&self, command_buffer: CommandBufferHandle, buffer: BufferHandle,
            offset: u64, index_type: IndexType) {
        self.proc_addr_loader().vk.vkCmdBindIndexBuffer(command_buffer.to_raw(),
            buffer.handle().to_raw(), offset, index_type.into());
    }
1439
    /// Records vertex-buffer binds into a command buffer (vkCmdBindVertexBuffers).
    ///
    /// The binding count passed to the driver is `buffers.len()`, so `offsets`
    /// must contain at least one element per buffer or the call reads out of
    /// bounds.
    pub unsafe fn cmd_bind_vertex_buffers(&self, command_buffer: CommandBufferHandle, first_binding: u32,
            buffers: &[BufferHandle], offsets: &[u64]) {
        self.proc_addr_loader().vk.vkCmdBindVertexBuffers(command_buffer.to_raw(),
            first_binding, buffers.len() as u32, buffers.as_ptr() as *const vks::VkBuffer,
            offsets.as_ptr());
    }
1453
    /// Records a non-indexed draw (vkCmdDraw).
    pub unsafe fn cmd_draw(&self, command_buffer: CommandBufferHandle, vertex_count: u32, instance_count: u32,
            first_vertex: u32, first_instance: u32) {
        self.proc_addr_loader().vk.vkCmdDraw(command_buffer.to_raw(), vertex_count, instance_count,
            first_vertex, first_instance);
    }
1465
    /// Records an indexed draw (vkCmdDrawIndexed).
    pub unsafe fn cmd_draw_indexed(&self, command_buffer: CommandBufferHandle, index_count: u32,
            instance_count: u32, first_index: u32, vertex_offset: i32, first_instance: u32) {
        self.proc_addr_loader().vk.vkCmdDrawIndexed(command_buffer.to_raw(), index_count,
            instance_count, first_index, vertex_offset, first_instance);
    }
1478
    /// Records an indirect draw whose parameters are read from `buffer`
    /// (vkCmdDrawIndirect).
    pub unsafe fn cmd_draw_indirect(&self, command_buffer: CommandBufferHandle, buffer: BufferHandle,
            offset: u64, draw_count: u32, stride: u32) {
        self.proc_addr_loader().vk.vkCmdDrawIndirect(command_buffer.to_raw(),
            buffer.handle().to_raw(), offset, draw_count, stride);
    }
1490
    /// Records an indexed indirect draw whose parameters are read from `buffer`
    /// (vkCmdDrawIndexedIndirect).
    pub unsafe fn cmd_draw_indexed_indirect(&self, command_buffer: CommandBufferHandle, buffer: BufferHandle,
            offset: u64, draw_count: u32, stride: u32) {
        self.proc_addr_loader().vk.vkCmdDrawIndexedIndirect(command_buffer.to_raw(),
            buffer.handle().to_raw(), offset, draw_count, stride);
    }
1502
    /// Records a compute dispatch (vkCmdDispatch).
    pub unsafe fn cmd_dispatch(&self, command_buffer: CommandBufferHandle, group_count_x: u32,
            group_count_y: u32, group_count_z: u32) {
        self.proc_addr_loader().vk.vkCmdDispatch(command_buffer.to_raw(), group_count_x,
            group_count_y, group_count_z);
    }
1514
    /// Records an indirect compute dispatch whose group counts are read from
    /// `buffer` (vkCmdDispatchIndirect).
    pub unsafe fn cmd_dispatch_indirect(&self, command_buffer: CommandBufferHandle, buffer: BufferHandle,
            offset: u64) {
        self.proc_addr_loader().vk.vkCmdDispatchIndirect(command_buffer.to_raw(),
            buffer.handle().to_raw(), offset);
    }
1526
    /// Records a buffer-to-buffer copy (vkCmdCopyBuffer).
    pub unsafe fn cmd_copy_buffer(&self, command_buffer: CommandBufferHandle, src_buffer: BufferHandle,
            dst_buffer: BufferHandle, regions: &[BufferCopy]) {
        self.proc_addr_loader().vk.vkCmdCopyBuffer(
            command_buffer.to_raw(),
            src_buffer.to_raw(),
            dst_buffer.to_raw(),
            regions.len() as u32,
            regions.as_ptr() as *const vks::VkBufferCopy,
        );
    }
1544
    /// Records an image-to-image copy (vkCmdCopyImage).
    pub unsafe fn cmd_copy_image(&self, command_buffer: CommandBufferHandle, src_image: ImageHandle,
            src_image_layout: ImageLayout, dst_image: ImageHandle, dst_image_layout: ImageLayout,
            regions: &[ImageCopy]) {
        self.proc_addr_loader().vk.vkCmdCopyImage(command_buffer.to_raw(),
            src_image.to_raw(), src_image_layout.into(), dst_image.to_raw(), dst_image_layout.into(),
            regions.len() as u32, regions.as_ptr() as *const vks::VkImageCopy);
    }
1559
    /// Records a scaled/filtered image blit (vkCmdBlitImage).
    pub unsafe fn cmd_blit_image(&self, command_buffer: CommandBufferHandle, src_image: ImageHandle,
            src_image_layout: ImageLayout, dst_image: ImageHandle, dst_image_layout: ImageLayout,
            regions: &[ImageBlit], filter: Filter) {
        self.proc_addr_loader().vk.vkCmdBlitImage(command_buffer.to_raw(),
            src_image.to_raw(), src_image_layout.into(), dst_image.to_raw(),
            dst_image_layout.into(), regions.len() as u32,
            regions.as_ptr() as *const vks::VkImageBlit, filter.into());
    }
1576
1577 pub unsafe fn cmd_copy_buffer_to_image(&self, command_buffer: CommandBufferHandle,
1585 src_buffer: BufferHandle, dst_image: ImageHandle, dst_image_layout: ImageLayout,
1586 regions: &[BufferImageCopy]) {
1587 self.proc_addr_loader().vk.vkCmdCopyBufferToImage(
1588 command_buffer.to_raw(),
1589 src_buffer.to_raw(),
1590 dst_image.to_raw(),
1591 dst_image_layout as u32,
1592 regions.len() as u32,
1593 regions.as_ptr() as *const vks::VkBufferImageCopy,
1594 );
1595 }
1596
    /// Records an image-to-buffer copy (vkCmdCopyImageToBuffer).
    pub unsafe fn cmd_copy_image_to_buffer(&self, command_buffer: CommandBufferHandle,
            src_image: ImageHandle, src_image_layout: ImageLayout, dst_buffer: BufferHandle,
            regions: &[BufferImageCopy]) {
        self.proc_addr_loader().vk.vkCmdCopyImageToBuffer(command_buffer.to_raw(),
            src_image.to_raw(), src_image_layout.into(), dst_buffer.to_raw(), regions.len() as u32,
            regions.as_ptr() as *const vks::VkBufferImageCopy);
    }
1611
    /// Records an inline buffer update from host data (vkCmdUpdateBuffer).
    pub unsafe fn cmd_update_buffer(&self, command_buffer: CommandBufferHandle, dst_buffer: BufferHandle,
            dst_offset: u64, data: &[u8]) {
        self.proc_addr_loader().vk.vkCmdUpdateBuffer(command_buffer.to_raw(),
            dst_buffer.to_raw(), dst_offset, data.len() as u64, data.as_ptr() as *const _);
    }
1624
1625 pub unsafe fn cmd_fill_buffer(&self,command_buffer: CommandBufferHandle, dst_buffer: BufferHandle,
1632 dst_offset: u64, size: Option<DeviceSize>, data: u32) {
1633 self.proc_addr_loader().vk.vkCmdFillBuffer(command_buffer.to_raw(),
1634 dst_buffer.to_raw(), dst_offset, size.unwrap_or(0), data);
1635 }
1636
    /// Records a color-image clear (vkCmdClearColorImage).
    // NOTE(review): `color` is passed as a bare reference where the sibling
    // `cmd_clear_depth_stencil_image` calls `.as_raw()` — presumably
    // `ClearColorValue` is layout-identical to `vks::VkClearColorValue` so the
    // `&T -> *const T` coercion applies; confirm and unify the style.
    pub unsafe fn cmd_clear_color_image(&self, command_buffer: CommandBufferHandle, image: ImageHandle,
            image_layout: ImageLayout, color: &ClearColorValue, ranges: &[ImageSubresourceRange]) {
        self.proc_addr_loader().vk.vkCmdClearColorImage(command_buffer.to_raw(),
            image.to_raw(), image_layout.into(), color, ranges.len() as u32,
            ranges.as_ptr() as *const vks::VkImageSubresourceRange);
    }
1650
    /// Records a depth/stencil-image clear (vkCmdClearDepthStencilImage).
    pub unsafe fn cmd_clear_depth_stencil_image(&self, command_buffer: CommandBufferHandle,
            image: ImageHandle, image_layout: ImageLayout, depth_stencil: &ClearDepthStencilValue,
            ranges: &[ImageSubresourceRange]) {
        self.proc_addr_loader().vk.vkCmdClearDepthStencilImage(command_buffer.to_raw(),
            image.to_raw(), image_layout.into(), depth_stencil.as_raw(), ranges.len() as u32,
            ranges.as_ptr() as *const vks::VkImageSubresourceRange);
    }
1666
    /// Records clears of regions within bound framebuffer attachments
    /// (vkCmdClearAttachments).
    pub unsafe fn cmd_clear_attachments(&self, command_buffer: CommandBufferHandle,
            attachments: &[ClearAttachment], rects: &[ClearRect]) {
        self.proc_addr_loader().vk.vkCmdClearAttachments(command_buffer.to_raw(),
            attachments.len() as u32, attachments.as_ptr() as *const vks::VkClearAttachment,
            rects.len() as u32, rects.as_ptr() as *const vks::VkClearRect);
    }
1680
    /// Records a multisample resolve from `src_image` into `dst_image`
    /// (vkCmdResolveImage).
    pub unsafe fn cmd_resolve_image(&self, command_buffer: CommandBufferHandle,
            src_image: ImageHandle, src_image_layout: ImageLayout, dst_image: ImageHandle,
            dst_image_layout: ImageLayout, regions: &[ImageResolve]) {
        self.proc_addr_loader().vk.vkCmdResolveImage(command_buffer.to_raw(),
            src_image.to_raw(), src_image_layout.into(), dst_image.to_raw(),
            dst_image_layout.into(), regions.len() as u32,
            regions.as_ptr() as *const vks::VkImageResolve);
    }
1696
    /// Records an event signal (vkCmdSetEvent).
    pub unsafe fn cmd_set_event(&self, command_buffer: CommandBufferHandle, event: EventHandle,
            stage_mask: PipelineStageFlags) {
        self.proc_addr_loader().vk.vkCmdSetEvent(command_buffer.to_raw(),
            event.to_raw(), stage_mask.bits());
    }
1708
    /// Records an event reset (vkCmdResetEvent).
    pub unsafe fn cmd_reset_event(&self, command_buffer: CommandBufferHandle, event: EventHandle,
            stage_mask: PipelineStageFlags) {
        self.proc_addr_loader().vk.vkCmdResetEvent(command_buffer.to_raw(),
            event.to_raw(), stage_mask.bits());
    }
1720
    /// Records a wait on one or more events, with associated memory barriers
    /// (vkCmdWaitEvents).
    pub unsafe fn cmd_wait_events(&self, command_buffer: CommandBufferHandle,
            events: &[EventHandle],
            src_stage_mask: PipelineStageFlags, dst_stage_mask: PipelineStageFlags,
            memory_barriers: &[MemoryBarrier],
            buffer_memory_barriers: &[BufferMemoryBarrier],
            image_memory_barriers: &[ImageMemoryBarrier]) {
        self.proc_addr_loader().vk.vkCmdWaitEvents(command_buffer.to_raw(),
            events.len() as u32, events.as_ptr() as *const vks::VkEvent,
            src_stage_mask.bits(), dst_stage_mask.bits(),
            memory_barriers.len() as u32, memory_barriers.as_ptr() as *const vks::VkMemoryBarrier,
            buffer_memory_barriers.len() as u32,
            buffer_memory_barriers.as_ptr() as *const vks::VkBufferMemoryBarrier,
            image_memory_barriers.len() as u32,
            image_memory_barriers.as_ptr() as *const vks::VkImageMemoryBarrier,
        );
    }
1748
    /// Records a pipeline barrier with the given memory, buffer and image
    /// barriers (vkCmdPipelineBarrier).
    pub unsafe fn cmd_pipeline_barrier(&self, command_buffer: CommandBufferHandle,
            src_stage_mask: PipelineStageFlags, dst_stage_mask: PipelineStageFlags,
            dependency_flags: DependencyFlags, memory_barriers: &[MemoryBarrier],
            buffer_memory_barriers: &[BufferMemoryBarrier],
            image_memory_barriers: &[ImageMemoryBarrier]) {
        self.proc_addr_loader().vk.vkCmdPipelineBarrier(command_buffer.to_raw(),
            src_stage_mask.bits(), dst_stage_mask.bits(), dependency_flags.bits(),
            memory_barriers.len() as u32, memory_barriers.as_ptr() as *const vks::VkMemoryBarrier,
            buffer_memory_barriers.len() as u32,
            buffer_memory_barriers.as_ptr() as *const vks::VkBufferMemoryBarrier,
            image_memory_barriers.len() as u32,
            image_memory_barriers.as_ptr() as *const vks::VkImageMemoryBarrier,
        );
    }
1774
    /// Records the start of a query (vkCmdBeginQuery).
    pub unsafe fn cmd_begin_query(&self, command_buffer: CommandBufferHandle,
            query_pool: QueryPoolHandle, query: u32, flags: QueryControlFlags) {
        self.proc_addr_loader().vk.vkCmdBeginQuery(command_buffer.to_raw(),
            query_pool.to_raw(), query, flags.bits());
    }
1786
    /// Records the end of a query (vkCmdEndQuery).
    pub unsafe fn cmd_end_query(&self, command_buffer: CommandBufferHandle,
            query_pool: QueryPoolHandle, query: u32) {
        self.proc_addr_loader().vk.vkCmdEndQuery(command_buffer.to_raw(),
            query_pool.to_raw(), query);
    }
1798
    /// Records a reset of a range of queries in a pool (vkCmdResetQueryPool).
    pub unsafe fn cmd_reset_query_pool(&self, command_buffer: CommandBufferHandle,
            query_pool: QueryPoolHandle, first_query: u32, query_count: u32) {
        self.proc_addr_loader().vk.vkCmdResetQueryPool(command_buffer.to_raw(),
            query_pool.to_raw(), first_query, query_count);
    }
1810
    /// Records a timestamp write into a query pool slot (vkCmdWriteTimestamp).
    pub unsafe fn cmd_write_timestamp(&self, command_buffer: CommandBufferHandle,
            pipeline_stage: PipelineStageFlags, query_pool: QueryPoolHandle, query: u32) {
        self.proc_addr_loader().vk.vkCmdWriteTimestamp(command_buffer.to_raw(),
            pipeline_stage.bits(), query_pool.to_raw(), query);
    }
1823
    /// Records a copy of query results into a buffer (vkCmdCopyQueryPoolResults).
    pub unsafe fn cmd_copy_query_pool_results(&self, command_buffer: CommandBufferHandle,
            query_pool: QueryPoolHandle, first_query: u32, query_count: u32,
            dst_buffer: BufferHandle, dst_offset: u64, stride: u64, flags: QueryResultFlags) {
        self.proc_addr_loader().vk.vkCmdCopyQueryPoolResults(command_buffer.to_raw(),
            query_pool.to_raw(), first_query, query_count, dst_buffer.to_raw(), dst_offset, stride,
            flags.bits());
    }
1839
    /// Records a push-constant update (vkCmdPushConstants).
    ///
    /// `values` is raw bytes; its length is passed as the push-constant size.
    pub unsafe fn cmd_push_constants(&self, command_buffer: CommandBufferHandle,
            layout: PipelineLayoutHandle, stage_flags: ShaderStageFlags, offset: u32,
            values: &[u8]) {
        self.proc_addr_loader().vk.vkCmdPushConstants(command_buffer.to_raw(),
            layout.to_raw(),
            stage_flags.bits(), offset, values.len() as u32, values.as_ptr() as *const c_void);
    }
1854
    /// Records the start of a render pass instance (vkCmdBeginRenderPass).
    pub unsafe fn cmd_begin_render_pass(&self, command_buffer: CommandBufferHandle,
            render_pass_begin: &RenderPassBeginInfo, contents: SubpassContents) {
        self.proc_addr_loader().vk.vkCmdBeginRenderPass(command_buffer.to_raw(),
            render_pass_begin.as_raw(), contents.into());
    }
1866
    /// Records a transition to the next subpass (vkCmdNextSubpass).
    pub unsafe fn cmd_next_subpass(&self, command_buffer: CommandBufferHandle,
            contents: SubpassContents) {
        self.proc_addr_loader().vk.vkCmdNextSubpass(command_buffer.to_raw(), contents.into());
    }
1877
    /// Records the end of the current render pass instance (vkCmdEndRenderPass).
    pub unsafe fn cmd_end_render_pass(&self, command_buffer: CommandBufferHandle, ) {
        self.proc_addr_loader().vk.vkCmdEndRenderPass(command_buffer.to_raw());
    }
1886
    /// Records execution of secondary command buffers from within a primary one
    /// (vkCmdExecuteCommands).
    pub unsafe fn cmd_execute_commands(&self, command_buffer: CommandBufferHandle,
            command_buffers: &[CommandBufferHandle]) {
        self.proc_addr_loader().vk.vkCmdExecuteCommands(command_buffer.to_raw(),
            command_buffers.len() as u32, command_buffers.as_ptr() as *const vks::VkCommandBuffer);
    }
1898
    /// Creates a swapchain (vkCreateSwapchainKHR).
    ///
    /// When `allocator` is `None`, the implementation's default allocator is used.
    pub unsafe fn create_swapchain_khr(&self, create_info: &SwapchainCreateInfoKhr,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<SwapchainKhrHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().khr_swapchain.vkCreateSwapchainKHR(self.handle().to_raw(),
            create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateSwapchainKHR", SwapchainKhrHandle(handle))
    }
1914
1915 pub unsafe fn destroy_swapchain_khr(&mut self, swapchain: SwapchainKhrHandle,
1922 allocator: Option<*const vks::VkAllocationCallbacks>) {
1923 let _allocator = allocator.unwrap_or(ptr::null());
1924 self.proc_addr_loader().khr_swapchain.vkDestroySwapchainKHR(self.handle().to_raw(),
1925 swapchain.to_raw(), ptr::null());
1926 }
1927
    /// Retrieves the presentable images of a swapchain (vkGetSwapchainImagesKHR).
    ///
    /// Uses the standard two-call pattern: first query the count, then fetch the
    /// handles, retrying while the driver reports `VK_INCOMPLETE` (e.g. if the
    /// image count changed between the two calls).
    // NOTE(review): `set_len` exposes uninitialized handle slots before the FFI
    // call fills them; confirm the handle type is POD and fully written on success.
    pub unsafe fn get_swapchain_images_khr(&self, swapchain: SwapchainKhrHandle)
            -> VdResult<SmallVec<[ImageHandle; 4]>> {
        let mut image_count = 0;
        let mut image_handles = SmallVec::<[ImageHandle; 4]>::new();
        let result = self.proc_addr_loader().khr_swapchain.vkGetSwapchainImagesKHR(self.handle().to_raw(),
            swapchain.to_raw(), &mut image_count, ptr::null_mut());
        error::check(result, "vkGetSwapchainImagesKHR", ())?;
        image_handles.reserve_exact(image_count as usize);
        image_handles.set_len(image_count as usize);
        loop {
            let result = self.proc_addr_loader().khr_swapchain.vkGetSwapchainImagesKHR(self.handle().to_raw(),
                swapchain.to_raw(), &mut image_count, image_handles.as_mut_ptr() as *mut vks::VkImage);
            if result != CallResult::Incomplete as i32 {
                return error::check(result, "vkGetSwapchainImagesKHR", image_handles);
            }
        }
    }
1951
    /// Acquires the next available presentable image (vkAcquireNextImageKHR),
    /// returning its index within the swapchain.
    ///
    /// `None` for `semaphore`/`fence` maps to the Vulkan null handle (0).
    pub unsafe fn acquire_next_image_khr(&self, swapchain: SwapchainKhrHandle, timeout: u64,
            semaphore: Option<SemaphoreHandle>, fence: Option<FenceHandle>) -> VdResult<u32> {
        let mut image_index = 0;
        let result = self.proc_addr_loader().khr_swapchain.vkAcquireNextImageKHR(
            self.handle().to_raw(), swapchain.to_raw(), timeout,
            semaphore.map(|s| s.to_raw()).unwrap_or(0),
            fence.map(|f| f.to_raw()).unwrap_or(0), &mut image_index);
        error::check(result, "vkAcquireNextImageKHR", image_index)
    }
1968
    /// Queues images for presentation (vkQueuePresentKHR).
    pub unsafe fn queue_present_khr<Q>(&self, queue: Q, present_info: &PresentInfoKhr)
            -> VdResult<()>
            where Q: Handle<Target=QueueHandle> {
        let result = self.proc_addr_loader().khr_swapchain.vkQueuePresentKHR(
            queue.handle().to_raw(), present_info.as_raw());
        error::check(result, "vkQueuePresentKHR", ())
    }
1981
1982 pub unsafe fn create_shared_swapchains_khr(&self, create_infos: &[SwapchainCreateInfoKhr],
1990 allocator: Option<*const vks::VkAllocationCallbacks>)
1991 -> VdResult<SmallVec<[SwapchainKhrHandle; 4]>> {
1992 let allocator = allocator.unwrap_or(ptr::null());
1993 let mut swapchains = SmallVec::<[SwapchainKhrHandle; 4]>::new();
1994 swapchains.reserve_exact(create_infos.len());
1995 swapchains.set_len(create_infos.len());
1996 let result = self.proc_addr_loader().khr_display_swapchain.vkCreateSharedSwapchainsKHR(self.handle().to_raw(),
1997 create_infos.len() as u32, create_infos as *const _ as *const vks::VkSwapchainCreateInfoKHR,
1998 allocator, swapchains.as_mut_ptr() as *mut vks::VkSwapchainKHR);
1999 error::check(result, "vkCreateSharedSwapchainsKHR", swapchains)
2000 }
2001
    /// vkTrimCommandPoolKHR: unimplemented stub; always panics.
    pub unsafe fn trim_command_pool_khr<P>(&self, _command_pool: P, _flags: CommandPoolTrimFlagsKhr)
            -> VdResult<()>
            where P: Handle<Target=CommandPoolHandle> {
        unimplemented!();
    }
2015
    /// vkGetMemoryWin32HandleKHR: unimplemented stub; always panics.
    pub unsafe fn get_memory_win32_handle_khr(&self,
            _get_win32_handle_info: &MemoryGetWin32HandleInfoKhr)
            -> VdResult<()> {
        unimplemented!();
    }
2029
    /// vkGetMemoryWin32HandlePropertiesKHR: unimplemented stub; always panics.
    pub unsafe fn get_memory_win32_handle_properties_khr(&self,
            _handle_type: ExternalMemoryHandleTypeFlagsKhr, _handle: HANDLE) -> VdResult<()> {
        unimplemented!();
    }
2043
    /// vkGetMemoryFdKHR: unimplemented stub; always panics.
    pub unsafe fn get_memory_fd_khr(&self, _get_fd_info: &MemoryGetFdInfoKhr, _fd: &mut i32)
            -> VdResult<()> {
        unimplemented!();
    }
2056
    /// vkGetMemoryFdPropertiesKHR: unimplemented stub; always panics.
    pub unsafe fn get_memory_fd_properties_khr(&self, _handle_type: ExternalMemoryHandleTypeFlagsKhr,
            _fd: i32) -> VdResult<()> {
        unimplemented!();
    }
2070
    /// vkImportSemaphoreWin32HandleKHR: unimplemented stub; always panics.
    pub unsafe fn import_semaphore_win32_handle_khr(&self,
            _import_semaphore_win32_handle_info: &ImportSemaphoreWin32HandleInfoKhr) -> VdResult<()> {
        unimplemented!();
    }
2083
    /// vkGetSemaphoreWin32HandleKHR: unimplemented stub; always panics.
    pub unsafe fn get_semaphore_win32_handle_khr(&self,
            _get_win32_handle_info: &SemaphoreGetWin32HandleInfoKhr) -> VdResult<()> {
        unimplemented!();
    }
2097
    /// vkImportSemaphoreFdKHR: unimplemented stub; always panics.
    pub unsafe fn import_semaphore_fd_khr(&self,
            _import_semaphore_fd_info: &ImportSemaphoreFdInfoKhr) -> VdResult<()> {
        unimplemented!();
    }
2110
    /// vkGetSemaphoreFdKHR: unimplemented stub; always panics.
    pub unsafe fn get_semaphore_fd_khr(&self, _get_fd_info: &SemaphoreGetFdInfoKhr)
            -> VdResult<()> {
        unimplemented!();
    }
2123
    /// vkCmdPushDescriptorSetKHR: unimplemented stub; always panics.
    pub unsafe fn cmd_push_descriptor_set_khr<Cb>(&self, _command_buffer: Cb,
            _pipeline_bind_point: PipelineBindPoint, _layout: PipelineLayout, _set: u32,
            _descriptor_writes: &[WriteDescriptorSet]) -> VdResult<()>
            where Cb: Handle<Target=CommandBufferHandle> {
        unimplemented!();
    }
2140
    /// Creates a descriptor update template (vkCreateDescriptorUpdateTemplateKHR).
    /// Compiled only with the `unimplemented` feature.
    #[cfg(feature = "unimplemented")]
    pub unsafe fn create_descriptor_update_template_khr(&self,
            create_info: &DescriptorUpdateTemplateKhrCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>)
            -> VdResult<DescriptorUpdateTemplateKhrHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreateDescriptorUpdateTemplateKhr(self.handle().to_raw(),
            create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateDescriptorUpdateTemplateKhr",
            DescriptorUpdateTemplateKhrHandle(handle))
    }
2161
    /// Destroys a descriptor update template (vkDestroyDescriptorUpdateTemplateKHR).
    /// Compiled only with the `unimplemented` feature.
    #[cfg(feature = "unimplemented")]
    pub unsafe fn destroy_descriptor_update_template_khr(&self,
            descriptor_update_template_khr: DescriptorUpdateTemplateKhrHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroyDescriptorUpdateTemplateKhr(self.handle().to_raw(),
            descriptor_update_template_khr.to_raw(), allocator);
    }
2177
    /// vkUpdateDescriptorSetWithTemplateKHR: unimplemented stub (panics);
    /// compiled only with the `unimplemented` feature.
    #[cfg(feature = "unimplemented")]
    pub unsafe fn update_descriptor_set_with_template_khr<Ds>(&self, descriptor_set: Ds,
            descriptor_update_template: DescriptorUpdateTemplateKhrHandle, data: *const c_void)
            where Ds: Handle<Target=DescriptorSetHandle> {
        unimplemented!();
    }
2193
    /// vkCmdPushDescriptorSetWithTemplateKHR: unimplemented stub (panics);
    /// compiled only with the `unimplemented` feature.
    #[cfg(feature = "unimplemented")]
    pub unsafe fn cmd_push_descriptor_set_with_template_khr<Cb, Pl>(&self, command_buffer: Cb,
            descriptor_update_template: DescriptorUpdateTemplateKhr, layout: Pl, set: u32,
            data: *const c_void) -> VdResult<()>
            where Cb: Handle<Target=CommandBufferHandle>, Pl: Handle<Target=PipelineLayoutHandle> {
        unimplemented!();
    }
2210
    /// vkGetSwapchainStatusKHR: unimplemented stub; always panics.
    pub unsafe fn get_swapchain_status_khr<Sk>(&self, _swapchain: Sk) -> VdResult<()>
            where Sk: Handle<Target=SwapchainKhrHandle> {
        unimplemented!();
    }
2223
    /// vkImportFenceWin32HandleKHR: unimplemented stub; always panics.
    pub unsafe fn import_fence_win32_handle_khr(&self,
            _import_fence_win32_handle_info: &ImportFenceWin32HandleInfoKhr) -> VdResult<()> {
        unimplemented!();
    }
2236
    /// vkGetFenceWin32HandleKHR: unimplemented stub; always panics.
    pub unsafe fn get_fence_win32_handle_khr(&self,
            _get_win32_handle_info: &FenceGetWin32HandleInfoKhr) -> VdResult<()> {
        unimplemented!();
    }
2249
    /// vkImportFenceFdKHR: unimplemented stub; always panics.
    pub unsafe fn import_fence_fd_khr(&self, _import_fence_fd_info: &ImportFenceFdInfoKhr)
            -> VdResult<()> {
        unimplemented!();
    }
2262
    /// vkGetFenceFdKHR: unimplemented stub; always panics.
    pub unsafe fn get_fence_fd_khr(&self, _get_fd_info: &FenceGetFdInfoKhr) -> VdResult<()> {
        unimplemented!();
    }
2274
    /// vkGetImageMemoryRequirements2KHR: unimplemented stub; always panics.
    pub unsafe fn get_image_memory_requirements_2_khr(&self,
            _info: &ImageMemoryRequirementsInfo2Khr) -> VdResult<()> {
        unimplemented!();
    }
2286
    /// vkGetBufferMemoryRequirements2KHR: unimplemented stub; always panics.
    // NOTE(review): declared safe (`pub fn`) unlike its `unsafe` siblings;
    // adding `unsafe` now would break callers, so left as-is.
    pub fn get_buffer_memory_requirements_2_khr(&self, _info: &BufferMemoryRequirementsInfo2Khr)
            -> VdResult<()> {
        unimplemented!();
    }
2298
    /// vkGetImageSparseMemoryRequirements2KHR: unimplemented stub; always panics.
    pub unsafe fn get_image_sparse_memory_requirements_2_khr(&self,
            _info: &ImageSparseMemoryRequirementsInfo2Khr) -> VdResult<()> {
        unimplemented!();
    }
2311
    /// Creates a sampler Y'CbCr conversion (vkCreateSamplerYcbcrConversionKHR).
    /// Compiled only with the `unimplemented` feature.
    #[cfg(feature = "unimplemented")]
    pub unsafe fn create_sampler_ycbcr_conversion_khr(&self,
            create_info: &SamplerYcbcrConversionKhrCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>)
            -> VdResult<SamplerYcbcrConversionKhrHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreateSamplerYcbcrConversionKhr(
            self.handle().to_raw(), create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateSamplerYcbcrConversionKhr",
            SamplerYcbcrConversionKhrHandle(handle))
    }
2332
    /// Destroys a sampler Y'CbCr conversion (vkDestroySamplerYcbcrConversionKHR).
    /// Compiled only with the `unimplemented` feature.
    #[cfg(feature = "unimplemented")]
    pub unsafe fn destroy_sampler_ycbcr_conversion_khr(&self,
            sampler_ycbcr_conversion_khr: SamplerYcbcrConversionKhrHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroySamplerYcbcrConversionKhr(self.handle().to_raw(),
            sampler_ycbcr_conversion_khr.to_raw(), allocator);
    }
2348
    /// vkBindBufferMemory2KHR: unimplemented stub; always panics.
    pub unsafe fn bind_buffer_memory_2_khr(&self) {
        unimplemented!();
    }
2358
    /// vkBindImageMemory2KHR: unimplemented stub; always panics.
    pub unsafe fn bind_image_memory_2_khr(&self) {
        unimplemented!();
    }
2368
    /// vkDebugMarkerSetObjectTagEXT: unimplemented stub; always panics.
    pub unsafe fn debug_marker_set_object_tag_ext(&self, _tag_info: &DebugMarkerObjectTagInfoExt)
            -> VdResult<()> {
        unimplemented!();
    }
2379
    /// vkDebugMarkerSetObjectNameEXT: unimplemented stub; always panics.
    pub unsafe fn debug_marker_set_object_name_ext(&self, _name_info: &DebugMarkerObjectNameInfoExt)
            -> VdResult<()> {
        unimplemented!();
    }
2390
    /// Records the start of a debug marker region (vkCmdDebugMarkerBeginEXT).
    pub unsafe fn cmd_debug_marker_begin_ext(&self, command_buffer: CommandBufferHandle,
            marker_info: &DebugMarkerMarkerInfoExt) {
        self.proc_addr_loader().ext_debug_marker.vkCmdDebugMarkerBeginEXT(command_buffer.to_raw(),
            marker_info.as_raw());
    }
2402
    /// Records the end of a debug marker region (vkCmdDebugMarkerEndEXT).
    pub unsafe fn cmd_debug_marker_end_ext(&self, command_buffer: CommandBufferHandle) {
        self.proc_addr_loader().ext_debug_marker.vkCmdDebugMarkerEndEXT(command_buffer.to_raw());
    }
2411
    /// Records a single debug marker label (vkCmdDebugMarkerInsertEXT).
    pub unsafe fn cmd_debug_marker_insert_ext(&self, command_buffer: CommandBufferHandle,
            marker_info: &DebugMarkerMarkerInfoExt) {
        self.proc_addr_loader().ext_debug_marker.vkCmdDebugMarkerInsertEXT(command_buffer.to_raw(),
            marker_info.as_raw());
    }
2423
    /// vkCmdDrawIndirectCountAMD: unimplemented stub; always panics.
    pub unsafe fn cmd_draw_indirect_count_amd(&self) {
        unimplemented!();
    }
2434
    /// vkCmdDrawIndexedIndirectCountAMD: unimplemented stub; always panics.
    pub unsafe fn cmd_draw_indexed_indirect_count_amd(&self) {
        unimplemented!();
    }
2446
    /// vkGetMemoryWin32HandleNV: unimplemented stub; always panics.
    pub unsafe fn get_memory_win32_handle_nv(&self) {
        unimplemented!();
    }
2456
    /// vkGetDeviceGroupPeerMemoryFeaturesKHX: unimplemented stub; always panics.
    pub unsafe fn get_device_group_peer_memory_features_khx(&self) {
        unimplemented!();
    }
2467
    /// vkCmdSetDeviceMaskKHX: unimplemented stub; always panics.
    pub unsafe fn cmd_set_device_mask_khx(&self) {
        unimplemented!();
    }
2477
    /// vkCmdDispatchBaseKHX: unimplemented stub; always panics.
    pub unsafe fn cmd_dispatch_base_khx(&self) {
        unimplemented!();
    }
2488
    /// vkGetDeviceGroupPresentCapabilitiesKHX: unimplemented stub; always panics.
    pub unsafe fn get_device_group_present_capabilities_khx(&self) {
        unimplemented!();
    }
2498
    /// vkGetDeviceGroupSurfacePresentModesKHX: unimplemented stub; always panics.
    pub unsafe fn get_device_group_surface_present_modes_khx(&self) {
        unimplemented!();
    }
2508
    /// vkAcquireNextImage2KHX: unimplemented stub; always panics.
    pub unsafe fn acquire_next_image2_khx(&self) {
        unimplemented!();
    }
2518
    /// vkCmdProcessCommandsNVX: unimplemented stub; always panics.
    pub unsafe fn cmd_process_commands_nvx(&self) {
        unimplemented!();
    }
2528
    /// vkCmdReserveSpaceForCommandsNVX: unimplemented stub; always panics.
    pub unsafe fn cmd_reserve_space_for_commands_nvx(&self) {
        unimplemented!();
    }
2538
    /// Creates an indirect commands layout (vkCreateIndirectCommandsLayoutNVX).
    /// Compiled only with the `unimplemented` feature.
    #[cfg(feature = "unimplemented")]
    pub unsafe fn create_indirect_commands_layout_nvx(&self,
            create_info: &IndirectCommandsLayoutNvxCreateInfo,
            allocator: Option<*const vks::VkAllocationCallbacks>)
            -> VdResult<IndirectCommandsLayoutNvxHandle> {
        let allocator = allocator.unwrap_or(ptr::null());
        let mut handle = 0;
        let result = self.proc_addr_loader().vk.vkCreateIndirectCommandsLayoutNvx(
            self.handle().to_raw(), create_info.as_raw(), allocator, &mut handle);
        error::check(result, "vkCreateIndirectCommandsLayoutNvx",
            IndirectCommandsLayoutNvxHandle(handle))
    }
2559
    /// Destroys an indirect commands layout (vkDestroyIndirectCommandsLayoutNVX).
    /// Compiled only with the `unimplemented` feature.
    #[cfg(feature = "unimplemented")]
    pub unsafe fn destroy_indirect_commands_layout_nvx(&self,
            indirect_commands_layout_nvx: IndirectCommandsLayoutNvxHandle,
            allocator: Option<*const vks::VkAllocationCallbacks>) {
        let allocator = allocator.unwrap_or(ptr::null());
        self.proc_addr_loader().vk.vkDestroyIndirectCommandsLayoutNvx(self.handle().to_raw(),
            indirect_commands_layout_nvx.to_raw(), allocator);
    }
2575
2576 #[cfg(feature = "unimplemented")]
2584 pub unsafe fn create_object_table_nvx(&self, create_info: &ObjectTableNvxCreateInfo,
2585 allocator: Option<*const vks::VkAllocationCallbacks>)
2586 -> VdResult<ObjectTableNvxHandle> {
2587 let allocator = allocator.unwrap_or(ptr::null());
2588 let mut handle = 0;
2589 let result = self.proc_addr_loader().vk.vkCreateObjectTableNvx(self.handle().to_raw(),
2590 create_info.as_raw(), allocator, &mut handle);
2591 error::check(result, "vkCreateObjectTableNvx", ObjectTableNvxHandle(handle))
2592 }
2593
2594 #[cfg(feature = "unimplemented")]
2601 pub unsafe fn destroy_object_table_nvx(&self, object_table_nvx: ObjectTableNvxHandle,
2602 allocator: Option<*const vks::VkAllocationCallbacks>) {
2603 let allocator = allocator.unwrap_or(ptr::null());
2604 self.proc_addr_loader().vk.vkDestroyObjectTableNvx(self.handle().to_raw(),
2605 object_table_nvx.to_raw(), allocator);
2606 }
2607
    /// Placeholder for `vkRegisterObjectsNVX`; not yet implemented.
    ///
    /// Always panics.
    pub unsafe fn register_objects_nvx(&self) {
        unimplemented!();
    }
2618
    /// Placeholder for `vkUnregisterObjectsNVX`; not yet implemented.
    ///
    /// Always panics.
    pub unsafe fn unregister_objects_nvx(&self) {
        unimplemented!();
    }
2629
    /// Placeholder for `vkCmdSetViewportWScalingNV`; not yet implemented.
    ///
    /// Always panics.
    pub unsafe fn cmd_set_viewport_w_scaling_nv(&self) {
        unimplemented!();
    }
2640
    /// Placeholder for `vkDisplayPowerControlEXT`; not yet implemented.
    ///
    /// Always panics.
    pub unsafe fn display_power_control_ext<Dk>(&self, _display: Dk,
            _display_power_info: &DisplayPowerInfoExt)
            where Dk: Handle<Target=DisplayKhrHandle> {
        unimplemented!();
    }
2652
    /// Placeholder for `vkRegisterDeviceEventEXT`; not yet implemented.
    ///
    /// Always panics. A `None` allocator resolves to the default (null)
    /// allocation callbacks, ready for the eventual implementation.
    pub unsafe fn register_device_event_ext(&self, _device_event_info: &DeviceEventInfoExt,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<()> {
        let _allocator = allocator.unwrap_or(ptr::null());
        unimplemented!();
    }
2665
    /// Placeholder for `vkRegisterDisplayEventEXT`; not yet implemented.
    ///
    /// Always panics. A `None` allocator resolves to the default (null)
    /// allocation callbacks, ready for the eventual implementation.
    pub unsafe fn register_display_event_ext<Dk>(&self, _display: Dk,
            _display_event_info: &DisplayEventInfoExt,
            allocator: Option<*const vks::VkAllocationCallbacks>) -> VdResult<()>
            where Dk: Handle<Target=DisplayKhrHandle> {
        let _allocator = allocator.unwrap_or(ptr::null());
        unimplemented!();
    }
2680
    /// Placeholder for `vkGetSwapchainCounterEXT`; not yet implemented.
    ///
    /// Always panics.
    pub unsafe fn get_swapchain_counter_ext<Sk>(&self, _swapchain: Sk,
            _counter: SurfaceCounterFlagsExt) -> VdResult<u64>
            where Sk: Handle<Target=SwapchainKhrHandle> {
        unimplemented!();
    }
2693
    /// Placeholder for `vkGetRefreshCycleDurationGOOGLE`; not yet implemented.
    ///
    /// Always panics.
    pub unsafe fn get_refresh_cycle_duration_google(&self) {
        unimplemented!();
    }
2703
    /// Placeholder for `vkGetPastPresentationTimingGOOGLE`; not yet implemented.
    ///
    /// Always panics.
    pub unsafe fn get_past_presentation_timing_google(&self) {
        unimplemented!();
    }
2714
    /// Placeholder for `vkCmdSetDiscardRectangleEXT`; not yet implemented.
    ///
    /// Always panics.
    ///
    // NOTE(review): `_discard_rectangles` is a single `&Rect2d` even though a
    // count parameter is present — probably should become a slice when this
    // is implemented; confirm against the raw API signature.
    pub unsafe fn cmd_set_discard_rectangle_ext<Cb>(&self, _command_buffer: Cb,
            _first_discard_rectangle: u32, _discard_rectangle_count: u32, _discard_rectangles: &Rect2d)
            -> VdResult<()>
            where Cb: Handle<Target=CommandBufferHandle> {
        unimplemented!();
    }
2728
    /// Placeholder for `vkSetHdrMetadataEXT`; not yet implemented.
    ///
    /// Always panics.
    ///
    // NOTE(review): `_metadata` is a single `&HdrMetadataExt` while
    // `_swapchains` is a slice — presumably one metadata entry per swapchain
    // is intended; confirm when implementing.
    pub unsafe fn set_hdr_metadata_ext(&self, _swapchains: &[SwapchainKhrHandle],
            _metadata: &HdrMetadataExt) -> VdResult<()> {
        unimplemented!();
    }
2739
2740 #[cfg(feature = "unimplemented")]
2747 pub unsafe fn cmd_set_sample_locations_ext<Cb>(&self, command_buffer: Cb,
2748 sample_locations_info: &SampleLocationsInfoExt) -> VdResult<()>
2749 where Cb: Handle<Target=CommandBufferHandle> {
2750 unimplemented!();
2751 }
2752
2753 #[cfg(feature = "unimplemented")]
2762 pub unsafe fn create_validation_cache_ext(&self,
2763 create_info: &ValidationCacheExtCreateInfo,
2764 allocator: Option<*const vks::VkAllocationCallbacks>)
2765 -> VdResult<ValidationCacheExtHandle> {
2766 let allocator = allocator.unwrap_or(ptr::null());
2767 let mut handle = 0;
2768 let result = self.proc_addr_loader().vk.vkCreateValidationCacheExt(self.handle().to_raw(),
2769 create_info.as_raw(), allocator, &mut handle);
2770 error::check(result, "vkCreateValidationCacheExt", ValidationCacheExtHandle(handle))
2772 }
2773
2774 #[cfg(feature = "unimplemented")]
2781 pub unsafe fn destroy_validation_cache_ext(&self,
2782 validation_cache_ext: ValidationCacheExtHandle,
2783 allocator: Option<*const vks::VkAllocationCallbacks>) {
2784 let allocator = allocator.unwrap_or(ptr::null());
2785 self.proc_addr_loader().vk.vkDestroyValidationCacheExt(self.handle().to_raw(),
2786 validation_cache_ext.to_raw(), allocator);
2787 }
2788
2789 #[cfg(feature = "unimplemented")]
2797 pub unsafe fn merge_validation_caches_ext(&self, dst_cache: ValidationCacheExt,
2798 src_caches: &[ValidationCacheExt]) -> VdResult<()> {
2799 unimplemented!();
2800 }
2801
2802 #[cfg(feature = "unimplemented")]
2809 pub unsafe fn get_validation_cache_data_ext(&self, validation_cache: ValidationCacheExt,
2810 data_size: *mut usize, data: *mut c_void) -> VdResult<()> {
2811 unimplemented!();
2812 }
2813}
2814
// Lets a borrowed `Device` act as a source of the raw device handle.
unsafe impl<'h> Handle for &'h Device {
    type Target = DeviceHandle;

    /// Returns a copy of the raw Vulkan device handle held by the shared
    /// `Inner`.
    #[inline(always)]
    fn handle(&self) -> Self::Target {
        self.inner.handle
    }
}
2823
// SAFETY: NOTE(review) — these impls assert that everything reachable through
// `Device`'s `Arc<Inner>` (handle, physical device, queues, instance, loader)
// is safe to move and share across threads. The compiler is not checking this
// here; confirm against the field types of `Inner`.
unsafe impl Send for Device {}
unsafe impl Sync for Device {}
2826
2827
/// A builder used to configure and create a `Device`.
#[derive(Debug, Clone)]
pub struct DeviceBuilder<'db> {
    // Accumulated creation parameters, forwarded to `create_device` in `build`.
    create_info: ::DeviceCreateInfo<'db>,
    // Owned backing storage for the layer-name pointers set on `create_info`.
    enabled_layer_names: Option<CharStrs<'db>>,
    // Owned backing storage for the extension-name pointers; also read by
    // `build` to decide which extension function tables to load.
    enabled_extension_names: Option<CharStrs<'db>>,
    // Ties borrowed name/feature data to the `'db` lifetime.
    _p: PhantomData<&'db ()>,
}
2836
2837impl<'db> DeviceBuilder<'db> {
    /// Returns a new builder with default creation parameters and no layer or
    /// extension names set.
    pub fn new() -> DeviceBuilder<'db> {
        DeviceBuilder {
            create_info: ::DeviceCreateInfo::default(),
            enabled_layer_names: None,
            enabled_extension_names: None,
            _p: PhantomData,
        }
    }
2847
2848 pub fn queue_create_infos<'s, 'ci>(&'s mut self,
2852 queue_create_infos: &'ci [DeviceQueueCreateInfo])
2853 -> &'s mut DeviceBuilder<'db>
2854 where 'ci: 'db {
2855 debug_assert_eq!(mem::align_of::<DeviceQueueCreateInfo>(),
2856 mem::align_of::<vks::VkDeviceQueueCreateInfo>());
2857 debug_assert_eq!(mem::size_of::<DeviceQueueCreateInfo>(),
2858 mem::size_of::<vks::VkDeviceQueueCreateInfo>());
2859 self.create_info.set_queue_create_infos(queue_create_infos);
2860 self
2861 }
2862
2863 #[deprecated(note = "ignored by Vulkan API")]
2867 pub fn enabled_layer_names<'s, 'cs, Cs>(&'s mut self, enabled_layer_names: Cs)
2868 -> &'s mut DeviceBuilder<'db>
2869 where 'cs: 'db, Cs: 'cs + Into<CharStrs<'cs>> {
2870 self.enabled_layer_names = Some(enabled_layer_names.into());
2871 if let Some(ref elns) = self.enabled_layer_names {
2872 self.create_info.set_enabled_layer_names(elns.as_ptr_slice());
2873 }
2874 self
2875 }
2876
2877 pub fn enabled_extension_names<'s, 'cs, Cs>(&'s mut self, enabled_extension_names: Cs)
2880 -> &'s mut DeviceBuilder<'db>
2881 where 'cs: 'db, Cs: 'cs + Into<CharStrs<'cs>> {
2882 self.enabled_extension_names = Some(enabled_extension_names.into());
2883 if let Some(ref eens) = self.enabled_extension_names {
2884 self.create_info.set_enabled_extension_names(eens.as_ptr_slice());
2885 }
2886 self
2887 }
2888
    /// Sets the physical device features to be enabled on the created device.
    pub fn enabled_features<'s, 'f>(&'s mut self, enabled_features: &'f PhysicalDeviceFeatures)
            -> &'s mut DeviceBuilder<'db>
            where 'f: 'db {
        self.create_info.set_enabled_features(enabled_features);
        self
    }
2897
    /// Builds and returns a new `Device` from the accumulated parameters.
    ///
    /// Creates the raw device, loads the core device-level function pointers,
    /// loads the function table for each enabled extension recognized below,
    /// then retrieves every queue described by the queue create infos.
    ///
    /// # Panics
    /// Panics if an enabled extension name is not valid UTF-8, or if any
    /// requested device queue cannot be retrieved.
    pub fn build(&self, physical_device: PhysicalDevice) -> VdResult<Device> {
        let handle = unsafe {
            physical_device.instance().create_device(physical_device.handle(), &self.create_info, None)?
        };

        let mut loader = vks::DeviceProcAddrLoader::from_get_device_proc_addr(
            physical_device.instance().proc_addr_loader().vk.pfn_vkGetDeviceProcAddr);

        unsafe {
            loader.load_vk(handle.to_raw());
        }

        // Load the per-extension function tables for each enabled extension.
        // Names not matched below are silently ignored.
        unsafe {
            if let Some(extension_name_char_strs) = self.enabled_extension_names.as_ref() {
                let extension_names = extension_name_char_strs.as_ptr_slice();
                for &extension_name in extension_names {
                    match CStr::from_ptr(extension_name).to_str().expect("invalid extension name") {
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_16bit_storage" => loader.load_khr_16bit_storage(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_android_surface" => loader.load_khr_android_surface(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_bind_memory2" => loader.load_khr_bind_memory2(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_dedicated_allocation" => loader.load_khr_dedicated_allocation(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_descriptor_update_template" => loader.load_khr_descriptor_update_template(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_display" => loader.load_khr_display(handle.to_raw()),
                        "VK_KHR_display_swapchain" => loader.load_khr_display_swapchain(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_external_fence" => loader.load_khr_external_fence(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_external_fence_capabilities" => loader.load_khr_external_fence_capabilities(handle.to_raw()),
                        "VK_KHR_external_fence_fd" => loader.load_khr_external_fence_fd(handle.to_raw()),
                        "VK_KHR_external_fence_win32" => loader.load_khr_external_fence_win32(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_external_memory" => loader.load_khr_external_memory(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_external_memory_capabilities" => loader.load_khr_external_memory_capabilities(handle.to_raw()),
                        "VK_KHR_external_memory_fd" => loader.load_khr_external_memory_fd(handle.to_raw()),
                        "VK_KHR_external_memory_win32" => loader.load_khr_external_memory_win32(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_external_semaphore" => loader.load_khr_external_semaphore(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_external_semaphore_capabilities" => loader.load_khr_external_semaphore_capabilities(handle.to_raw()),
                        "VK_KHR_external_semaphore_fd" => loader.load_khr_external_semaphore_fd(handle.to_raw()),
                        "VK_KHR_external_semaphore_win32" => loader.load_khr_external_semaphore_win32(handle.to_raw()),
                        "VK_KHR_get_memory_requirements2" => loader.load_khr_get_memory_requirements2(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_get_physical_device_properties2" => loader.load_khr_get_physical_device_properties2(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_get_surface_capabilities2" => loader.load_khr_get_surface_capabilities2(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_image_format_list" => loader.load_khr_image_format_list(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_incremental_present" => loader.load_khr_incremental_present(handle.to_raw()),
                        "VK_KHR_maintenance1" => loader.load_khr_maintenance1(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_maintenance2" => loader.load_khr_maintenance2(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_mir_surface" => loader.load_khr_mir_surface(handle.to_raw()),
                        "VK_KHR_push_descriptor" => loader.load_khr_push_descriptor(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_relaxed_block_layout" => loader.load_khr_relaxed_block_layout(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_sampler_mirror_clamp_to_edge" => loader.load_khr_sampler_mirror_clamp_to_edge(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_sampler_ycbcr_conversion" => loader.load_khr_sampler_ycbcr_conversion(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_shader_draw_parameters" => loader.load_khr_shader_draw_parameters(handle.to_raw()),
                        "VK_KHR_shared_presentable_image" => loader.load_khr_shared_presentable_image(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_storage_buffer_storage_class" => loader.load_khr_storage_buffer_storage_class(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_surface" => loader.load_khr_surface(handle.to_raw()),
                        "VK_KHR_swapchain" => loader.load_khr_swapchain(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_variable_pointers" => loader.load_khr_variable_pointers(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_wayland_surface" => loader.load_khr_wayland_surface(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_win32_keyed_mutex" => loader.load_khr_win32_keyed_mutex(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_win32_surface" => loader.load_khr_win32_surface(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_xcb_surface" => loader.load_khr_xcb_surface(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHR_xlib_surface" => loader.load_khr_xlib_surface(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_acquire_xlib_display" => loader.load_ext_acquire_xlib_display(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_blend_operation_advanced" => loader.load_ext_blend_operation_advanced(handle.to_raw()),
                        "VK_EXT_debug_marker" => loader.load_ext_debug_marker(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_debug_report" => loader.load_ext_debug_report(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_depth_range_unrestricted" => loader.load_ext_depth_range_unrestricted(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_direct_mode_display" => loader.load_ext_direct_mode_display(handle.to_raw()),
                        "VK_EXT_discard_rectangles" => loader.load_ext_discard_rectangles(handle.to_raw()),
                        "VK_EXT_display_control" => loader.load_ext_display_control(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_display_surface_counter" => loader.load_ext_display_surface_counter(handle.to_raw()),
                        "VK_EXT_hdr_metadata" => loader.load_ext_hdr_metadata(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_post_depth_coverage" => loader.load_ext_post_depth_coverage(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_sample_locations" => loader.load_ext_sample_locations(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_sampler_filter_minmax" => loader.load_ext_sampler_filter_minmax(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_shader_stencil_export" => loader.load_ext_shader_stencil_export(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_shader_subgroup_ballot" => loader.load_ext_shader_subgroup_ballot(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_shader_subgroup_vote" => loader.load_ext_shader_subgroup_vote(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_shader_viewport_index_layer" => loader.load_ext_shader_viewport_index_layer(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_swapchain_colorspace" => loader.load_ext_swapchain_colorspace(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_validation_cache" => loader.load_ext_validation_cache(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_EXT_validation_flags" => loader.load_ext_validation_flags(handle.to_raw()),
                        "VK_AMD_draw_indirect_count" => loader.load_amd_draw_indirect_count(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_gcn_shader" => loader.load_amd_gcn_shader(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_gpu_shader_half_float" => loader.load_amd_gpu_shader_half_float(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_gpu_shader_int16" => loader.load_amd_gpu_shader_int16(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_mixed_attachment_samples" => loader.load_amd_mixed_attachment_samples(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_negative_viewport_height" => loader.load_amd_negative_viewport_height(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_rasterization_order" => loader.load_amd_rasterization_order(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_shader_ballot" => loader.load_amd_shader_ballot(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_shader_explicit_vertex_parameter" => loader.load_amd_shader_explicit_vertex_parameter(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_shader_fragment_mask" => loader.load_amd_shader_fragment_mask(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_shader_image_load_store_lod" => loader.load_amd_shader_image_load_store_lod(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_shader_trinary_minmax" => loader.load_amd_shader_trinary_minmax(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_AMD_texture_gather_bias_lod" => loader.load_amd_texture_gather_bias_lod(handle.to_raw()),
                        "VK_GOOGLE_display_timing" => loader.load_google_display_timing(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_IMG_filter_cubic" => loader.load_img_filter_cubic(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_IMG_format_pvrtc" => loader.load_img_format_pvrtc(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHX_device_group" => loader.load_khx_device_group(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHX_device_group_creation" => loader.load_khx_device_group_creation(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_KHX_multiview" => loader.load_khx_multiview(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_MVK_ios_surface" => loader.load_mvk_ios_surface(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_MVK_macos_surface" => loader.load_mvk_macos_surface(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NN_vi_surface" => loader.load_nn_vi_surface(handle.to_raw()),
                        "VK_NV_clip_space_w_scaling" => loader.load_nv_clip_space_w_scaling(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_dedicated_allocation" => loader.load_nv_dedicated_allocation(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_external_memory" => loader.load_nv_external_memory(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_external_memory_capabilities" => loader.load_nv_external_memory_capabilities(handle.to_raw()),
                        "VK_NV_external_memory_win32" => loader.load_nv_external_memory_win32(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_fill_rectangle" => loader.load_nv_fill_rectangle(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_fragment_coverage_to_color" => loader.load_nv_fragment_coverage_to_color(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_framebuffer_mixed_samples" => loader.load_nv_framebuffer_mixed_samples(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_geometry_shader_passthrough" => loader.load_nv_geometry_shader_passthrough(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_glsl_shader" => loader.load_nv_glsl_shader(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_sample_mask_override_coverage" => loader.load_nv_sample_mask_override_coverage(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_viewport_array2" => loader.load_nv_viewport_array2(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_viewport_swizzle" => loader.load_nv_viewport_swizzle(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NV_win32_keyed_mutex" => loader.load_nv_win32_keyed_mutex(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NVX_device_generated_commands" => loader.load_nvx_device_generated_commands(handle.to_raw()),
                        #[cfg(feature = "unimplemented")]
                        "VK_NVX_multiview_per_view_attributes" => loader.load_nvx_multiview_per_view_attributes(handle.to_raw()),
                        &_ => (),
                    }
                }
            }
        }

        let instance = physical_device.instance().clone();

        // The device is created with an empty queue list first; `Queue`
        // construction below needs a `Device` clone, so the queues are
        // patched into `Inner` afterwards.
        let device = Device {
            inner: Arc::new(Inner {
                handle,
                physical_device,
                queues: SmallVec::new(),
                instance,
                loader,
            }),
        };

        let mut queues: SmallVec<[Queue; 16]> = SmallVec::new();

        // One queue per priority entry of each queue create info.
        for qci in self.create_info.queue_create_infos() {
            for q_idx in 0..qci.queue_priorities().len() as u32 {
                match get_device_queue(&device.inner.loader, device.inner.handle,
                        qci.queue_family_index(), q_idx) {
                    Some(q_handle) => unsafe {
                        queues.push(Queue::from_parts(q_handle, device.clone(),
                            qci.queue_family_index(), q_idx))
                    },
                    None => {
                        panic!("unable to get device queue (family_index: {}, index: {})",
                            qci.queue_family_index(), q_idx);
                    },
                }
            }
        }

        // NOTE(review): this mutates `Inner` through a `*const` -> `*mut`
        // cast while clones of the `Arc` already exist inside each `Queue`
        // created above — that is aliasing-unsafe by Rust's rules even though
        // no other thread can observe the write yet. Consider interior
        // mutability (e.g. `OnceLock`) for `Inner::queues` instead.
        unsafe {
            let inner_ptr = &(*device.inner) as *const Inner as *mut Inner;
            (*inner_ptr).queues = queues;
        }

        Ok(device)
    }
3139}