#![allow(
    non_camel_case_types,
    non_snake_case,
    clippy::bad_bit_mask,
    clippy::let_unit_value,
    clippy::missing_safety_doc,
    clippy::missing_transmute_annotations,
    clippy::needless_lifetimes,
    clippy::too_many_arguments,
    clippy::type_complexity,
    clippy::unnecessary_cast,
    clippy::upper_case_acronyms,
    clippy::useless_transmute
)]

use alloc::vec::Vec;
use core::borrow::Borrow;
use core::ffi::c_void;
use core::mem::MaybeUninit;
use core::ptr;

use super::*;

pub trait EntryV1_0 {
    fn commands(&self) -> &EntryCommands;

    #[inline]
    unsafe fn create_instance(
        &self,
        create_info: &InstanceCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<Instance> {
        let mut instance = MaybeUninit::<Instance>::uninit();

        let __result = (self.commands().create_instance)(
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            instance.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(instance.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn enumerate_instance_extension_properties(
        &self,
        layer_name: Option<&[u8]>,
    ) -> crate::VkResult<Vec<ExtensionProperties>> {
        let mut property_count = 0;

        (self.commands().enumerate_instance_extension_properties)(
            layer_name.map_or(ptr::null(), |v| v.as_ptr().cast()),
            &mut property_count,
            ptr::null_mut(),
        );

        let mut properties = Vec::with_capacity(property_count as usize);

        let __result = (self.commands().enumerate_instance_extension_properties)(
            layer_name.map_or(ptr::null(), |v| v.as_ptr().cast()),
            &mut property_count,
            properties.as_mut_ptr(),
        );

        debug_assert!(properties.capacity() >= property_count as usize);
        properties.set_len(property_count as usize);

        if __result == Result::SUCCESS {
            Ok(properties)
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn enumerate_instance_layer_properties(&self) -> crate::VkResult<Vec<LayerProperties>> {
        let mut property_count = 0;

        (self.commands().enumerate_instance_layer_properties)(&mut property_count, ptr::null_mut());

        let mut properties = Vec::with_capacity(property_count as usize);

        let __result = (self.commands().enumerate_instance_layer_properties)(
            &mut property_count,
            properties.as_mut_ptr(),
        );

        debug_assert!(properties.capacity() >= property_count as usize);
        properties.set_len(property_count as usize);

        if __result == Result::SUCCESS {
            Ok(properties)
        } else {
            Err(__result.into())
        }
    }
}

impl EntryV1_0 for crate::Entry {
    #[inline]
    fn commands(&self) -> &EntryCommands {
        &self.commands
    }
}

impl<C: Borrow<EntryCommands>> EntryV1_0 for C {
    #[inline]
    fn commands(&self) -> &EntryCommands {
        self.borrow()
    }
}

pub trait InstanceV1_0 {
    fn commands(&self) -> &InstanceCommands;

    fn handle(&self) -> Instance;

    #[inline]
    unsafe fn create_device(
        &self,
        physical_device: PhysicalDevice,
        create_info: &DeviceCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<Device> {
        let mut device = MaybeUninit::<Device>::uninit();

        let __result = (self.commands().create_device)(
            physical_device,
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            device.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(device.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn destroy_instance(&self, allocator: Option<&AllocationCallbacks>) {
        let __result =
            (self.commands().destroy_instance)(self.handle(), allocator.map_or(ptr::null(), |v| v));
    }

    #[inline]
    unsafe fn enumerate_device_extension_properties(
        &self,
        physical_device: PhysicalDevice,
        layer_name: Option<&[u8]>,
    ) -> crate::VkResult<Vec<ExtensionProperties>> {
        let mut property_count = 0;

        (self.commands().enumerate_device_extension_properties)(
            physical_device,
            layer_name.map_or(ptr::null(), |v| v.as_ptr().cast()),
            &mut property_count,
            ptr::null_mut(),
        );

        let mut properties = Vec::with_capacity(property_count as usize);

        let __result = (self.commands().enumerate_device_extension_properties)(
            physical_device,
            layer_name.map_or(ptr::null(), |v| v.as_ptr().cast()),
            &mut property_count,
            properties.as_mut_ptr(),
        );

        debug_assert!(properties.capacity() >= property_count as usize);
        properties.set_len(property_count as usize);

        if __result == Result::SUCCESS {
            Ok(properties)
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn enumerate_device_layer_properties(
        &self,
        physical_device: PhysicalDevice,
    ) -> crate::VkResult<Vec<LayerProperties>> {
        let mut property_count = 0;

        (self.commands().enumerate_device_layer_properties)(
            physical_device,
            &mut property_count,
            ptr::null_mut(),
        );

        let mut properties = Vec::with_capacity(property_count as usize);

        let __result = (self.commands().enumerate_device_layer_properties)(
            physical_device,
            &mut property_count,
            properties.as_mut_ptr(),
        );

        debug_assert!(properties.capacity() >= property_count as usize);
        properties.set_len(property_count as usize);

        if __result == Result::SUCCESS {
            Ok(properties)
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn enumerate_physical_devices(&self) -> crate::VkResult<Vec<PhysicalDevice>> {
        let mut physical_device_count = 0;

        (self.commands().enumerate_physical_devices)(
            self.handle(),
            &mut physical_device_count,
            ptr::null_mut(),
        );

        let mut physical_devices = Vec::with_capacity(physical_device_count as usize);

        let __result = (self.commands().enumerate_physical_devices)(
            self.handle(),
            &mut physical_device_count,
            physical_devices.as_mut_ptr(),
        );

        debug_assert!(physical_devices.capacity() >= physical_device_count as usize);
        physical_devices.set_len(physical_device_count as usize);

        if __result == Result::SUCCESS {
            Ok(physical_devices)
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn get_physical_device_features(
        &self,
        physical_device: PhysicalDevice,
    ) -> PhysicalDeviceFeatures {
        let mut features = MaybeUninit::<PhysicalDeviceFeatures>::uninit();

        let __result =
            (self.commands().get_physical_device_features)(physical_device, features.as_mut_ptr());

        features.assume_init()
    }

    #[inline]
    unsafe fn get_physical_device_format_properties(
        &self,
        physical_device: PhysicalDevice,
        format: Format,
    ) -> FormatProperties {
        let mut format_properties = MaybeUninit::<FormatProperties>::uninit();

        let __result = (self.commands().get_physical_device_format_properties)(
            physical_device,
            format,
            format_properties.as_mut_ptr(),
        );

        format_properties.assume_init()
    }

    #[inline]
    unsafe fn get_physical_device_image_format_properties(
        &self,
        physical_device: PhysicalDevice,
        format: Format,
        type_: ImageType,
        tiling: ImageTiling,
        usage: ImageUsageFlags,
        flags: ImageCreateFlags,
    ) -> crate::VkResult<ImageFormatProperties> {
        let mut image_format_properties = MaybeUninit::<ImageFormatProperties>::uninit();

        let __result = (self.commands().get_physical_device_image_format_properties)(
            physical_device,
            format,
            type_,
            tiling,
            usage,
            flags,
            image_format_properties.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(image_format_properties.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn get_physical_device_memory_properties(
        &self,
        physical_device: PhysicalDevice,
    ) -> PhysicalDeviceMemoryProperties {
        let mut memory_properties = MaybeUninit::<PhysicalDeviceMemoryProperties>::uninit();

        let __result = (self.commands().get_physical_device_memory_properties)(
            physical_device,
            memory_properties.as_mut_ptr(),
        );

        memory_properties.assume_init()
    }

    #[inline]
    unsafe fn get_physical_device_properties(
        &self,
        physical_device: PhysicalDevice,
    ) -> PhysicalDeviceProperties {
        let mut properties = MaybeUninit::<PhysicalDeviceProperties>::uninit();

        let __result = (self.commands().get_physical_device_properties)(
            physical_device,
            properties.as_mut_ptr(),
        );

        properties.assume_init()
    }

    #[inline]
    unsafe fn get_physical_device_queue_family_properties(
        &self,
        physical_device: PhysicalDevice,
    ) -> Vec<QueueFamilyProperties> {
        let mut queue_family_property_count = 0;

        (self.commands().get_physical_device_queue_family_properties)(
            physical_device,
            &mut queue_family_property_count,
            ptr::null_mut(),
        );

        let mut queue_family_properties = Vec::with_capacity(queue_family_property_count as usize);

        let __result = (self.commands().get_physical_device_queue_family_properties)(
            physical_device,
            &mut queue_family_property_count,
            queue_family_properties.as_mut_ptr(),
        );

        debug_assert!(queue_family_properties.capacity() >= queue_family_property_count as usize);
        queue_family_properties.set_len(queue_family_property_count as usize);

        queue_family_properties
    }

    #[inline]
    unsafe fn get_physical_device_sparse_image_format_properties(
        &self,
        physical_device: PhysicalDevice,
        format: Format,
        type_: ImageType,
        samples: SampleCountFlags,
        usage: ImageUsageFlags,
        tiling: ImageTiling,
    ) -> Vec<SparseImageFormatProperties> {
        let mut property_count = 0;

        (self
            .commands()
            .get_physical_device_sparse_image_format_properties)(
            physical_device,
            format,
            type_,
            samples,
            usage,
            tiling,
            &mut property_count,
            ptr::null_mut(),
        );

        let mut properties = Vec::with_capacity(property_count as usize);

        let __result = (self
            .commands()
            .get_physical_device_sparse_image_format_properties)(
            physical_device,
            format,
            type_,
            samples,
            usage,
            tiling,
            &mut property_count,
            properties.as_mut_ptr(),
        );

        debug_assert!(properties.capacity() >= property_count as usize);
        properties.set_len(property_count as usize);

        properties
    }
}

impl InstanceV1_0 for crate::Instance {
    #[inline]
    fn commands(&self) -> &InstanceCommands {
        &self.commands
    }

    #[inline]
    fn handle(&self) -> Instance {
        self.handle
    }
}

impl<C: Borrow<InstanceCommands>> InstanceV1_0 for (C, Instance) {
    #[inline]
    fn commands(&self) -> &InstanceCommands {
        self.0.borrow()
    }

    #[inline]
    fn handle(&self) -> Instance {
        self.1
    }
}
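
// Usage sketch (hedged): picking a physical device and creating a logical
// device; `instance` implements `InstanceV1_0` and `device_info` is assumed
// to be a populated `DeviceCreateInfo`. Error handling is elided.
//
//     let physical_devices = unsafe { instance.enumerate_physical_devices()? };
//     let properties = unsafe {
//         instance.get_physical_device_properties(physical_devices[0])
//     };
//     let device = unsafe {
//         instance.create_device(physical_devices[0], &device_info, None)?
//     };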

pub trait DeviceV1_0 {
    fn commands(&self) -> &DeviceCommands;

    fn handle(&self) -> Device;

    #[inline]
    unsafe fn allocate_command_buffers(
        &self,
        allocate_info: &CommandBufferAllocateInfo,
    ) -> crate::VkResult<Vec<CommandBuffer>> {
        let mut command_buffers =
            Vec::with_capacity(allocate_info.as_ref().command_buffer_count as usize);

        let __result = (self.commands().allocate_command_buffers)(
            self.handle(),
            allocate_info,
            command_buffers.as_mut_ptr(),
        );

        command_buffers.set_len(allocate_info.as_ref().command_buffer_count as usize);

        if __result == Result::SUCCESS {
            Ok(command_buffers)
        } else {
            Err(__result.into())
        }
    }
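
    // Sketch (hedged): allocating two primary command buffers; `pool` is a
    // `CommandPool` created elsewhere, and the `Default` impl is assumed to
    // fill `s_type` and the remaining fields.
    //
    //     let info = CommandBufferAllocateInfo {
    //         command_pool: pool,
    //         level: CommandBufferLevel::PRIMARY,
    //         command_buffer_count: 2,
    //         ..Default::default()
    //     };
    //     let buffers = unsafe { device.allocate_command_buffers(&info)? };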

    #[inline]
    unsafe fn allocate_descriptor_sets(
        &self,
        allocate_info: &DescriptorSetAllocateInfo,
    ) -> crate::VkResult<Vec<DescriptorSet>> {
        let mut descriptor_sets =
            Vec::with_capacity(allocate_info.as_ref().descriptor_set_count as usize);

        let __result = (self.commands().allocate_descriptor_sets)(
            self.handle(),
            allocate_info,
            descriptor_sets.as_mut_ptr(),
        );

        descriptor_sets.set_len(allocate_info.as_ref().descriptor_set_count as usize);

        if __result == Result::SUCCESS {
            Ok(descriptor_sets)
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn allocate_memory(
        &self,
        allocate_info: &MemoryAllocateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<DeviceMemory> {
        let mut memory = MaybeUninit::<DeviceMemory>::uninit();

        let __result = (self.commands().allocate_memory)(
            self.handle(),
            allocate_info,
            allocator.map_or(ptr::null(), |v| v),
            memory.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(memory.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn begin_command_buffer(
        &self,
        command_buffer: CommandBuffer,
        begin_info: &CommandBufferBeginInfo,
    ) -> crate::VkResult<()> {
        let __result = (self.commands().begin_command_buffer)(command_buffer, begin_info);

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn bind_buffer_memory(
        &self,
        buffer: Buffer,
        memory: DeviceMemory,
        memory_offset: DeviceSize,
    ) -> crate::VkResult<()> {
        let __result =
            (self.commands().bind_buffer_memory)(self.handle(), buffer, memory, memory_offset);

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn bind_image_memory(
        &self,
        image: Image,
        memory: DeviceMemory,
        memory_offset: DeviceSize,
    ) -> crate::VkResult<()> {
        let __result =
            (self.commands().bind_image_memory)(self.handle(), image, memory, memory_offset);

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }
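
    // Sketch (hedged) of the usual create/query/allocate/bind flow for a
    // buffer; selecting a memory type from
    // `get_physical_device_memory_properties` when filling `alloc_info` is
    // elided.
    //
    //     let buffer = unsafe { device.create_buffer(&buffer_info, None)? };
    //     let requirements = unsafe { device.get_buffer_memory_requirements(buffer) };
    //     let memory = unsafe { device.allocate_memory(&alloc_info, None)? };
    //     unsafe { device.bind_buffer_memory(buffer, memory, 0)? };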

    #[inline]
    unsafe fn cmd_begin_query(
        &self,
        command_buffer: CommandBuffer,
        query_pool: QueryPool,
        query: u32,
        flags: QueryControlFlags,
    ) {
        let __result = (self.commands().cmd_begin_query)(command_buffer, query_pool, query, flags);
    }

    #[inline]
    unsafe fn cmd_begin_render_pass(
        &self,
        command_buffer: CommandBuffer,
        render_pass_begin: &RenderPassBeginInfo,
        contents: SubpassContents,
    ) {
        let __result =
            (self.commands().cmd_begin_render_pass)(command_buffer, render_pass_begin, contents);
    }

    #[inline]
    unsafe fn cmd_bind_descriptor_sets(
        &self,
        command_buffer: CommandBuffer,
        pipeline_bind_point: PipelineBindPoint,
        layout: PipelineLayout,
        first_set: u32,
        descriptor_sets: &[DescriptorSet],
        dynamic_offsets: &[u32],
    ) {
        let __result = (self.commands().cmd_bind_descriptor_sets)(
            command_buffer,
            pipeline_bind_point,
            layout,
            first_set,
            descriptor_sets.len() as u32,
            descriptor_sets.as_ptr(),
            dynamic_offsets.len() as u32,
            dynamic_offsets.as_ptr(),
        );
    }

    #[inline]
    unsafe fn cmd_bind_index_buffer(
        &self,
        command_buffer: CommandBuffer,
        buffer: Buffer,
        offset: DeviceSize,
        index_type: IndexType,
    ) {
        let __result =
            (self.commands().cmd_bind_index_buffer)(command_buffer, buffer, offset, index_type);
    }

    #[inline]
    unsafe fn cmd_bind_pipeline(
        &self,
        command_buffer: CommandBuffer,
        pipeline_bind_point: PipelineBindPoint,
        pipeline: Pipeline,
    ) {
        let __result =
            (self.commands().cmd_bind_pipeline)(command_buffer, pipeline_bind_point, pipeline);
    }

    #[inline]
    unsafe fn cmd_bind_vertex_buffers(
        &self,
        command_buffer: CommandBuffer,
        first_binding: u32,
        buffers: &[Buffer],
        offsets: &[DeviceSize],
    ) {
        let __result = (self.commands().cmd_bind_vertex_buffers)(
            command_buffer,
            first_binding,
            buffers.len() as u32,
            buffers.as_ptr(),
            offsets.as_ptr(),
        );
    }

    #[inline]
    unsafe fn cmd_blit_image(
        &self,
        command_buffer: CommandBuffer,
        src_image: Image,
        src_image_layout: ImageLayout,
        dst_image: Image,
        dst_image_layout: ImageLayout,
        regions: &[impl Cast<Target = ImageBlit>],
        filter: Filter,
    ) {
        let __result = (self.commands().cmd_blit_image)(
            command_buffer,
            src_image,
            src_image_layout,
            dst_image,
            dst_image_layout,
            regions.len() as u32,
            regions.as_ptr().cast(),
            filter,
        );
    }

    #[inline]
    unsafe fn cmd_clear_attachments(
        &self,
        command_buffer: CommandBuffer,
        attachments: &[impl Cast<Target = ClearAttachment>],
        rects: &[impl Cast<Target = ClearRect>],
    ) {
        let __result = (self.commands().cmd_clear_attachments)(
            command_buffer,
            attachments.len() as u32,
            attachments.as_ptr().cast(),
            rects.len() as u32,
            rects.as_ptr().cast(),
        );
    }

    #[inline]
    unsafe fn cmd_clear_color_image(
        &self,
        command_buffer: CommandBuffer,
        image: Image,
        image_layout: ImageLayout,
        color: &ClearColorValue,
        ranges: &[impl Cast<Target = ImageSubresourceRange>],
    ) {
        let __result = (self.commands().cmd_clear_color_image)(
            command_buffer,
            image,
            image_layout,
            color,
            ranges.len() as u32,
            ranges.as_ptr().cast(),
        );
    }

    #[inline]
    unsafe fn cmd_clear_depth_stencil_image(
        &self,
        command_buffer: CommandBuffer,
        image: Image,
        image_layout: ImageLayout,
        depth_stencil: &ClearDepthStencilValue,
        ranges: &[impl Cast<Target = ImageSubresourceRange>],
    ) {
        let __result = (self.commands().cmd_clear_depth_stencil_image)(
            command_buffer,
            image,
            image_layout,
            depth_stencil,
            ranges.len() as u32,
            ranges.as_ptr().cast(),
        );
    }

    #[inline]
    unsafe fn cmd_copy_buffer(
        &self,
        command_buffer: CommandBuffer,
        src_buffer: Buffer,
        dst_buffer: Buffer,
        regions: &[impl Cast<Target = BufferCopy>],
    ) {
        let __result = (self.commands().cmd_copy_buffer)(
            command_buffer,
            src_buffer,
            dst_buffer,
            regions.len() as u32,
            regions.as_ptr().cast(),
        );
    }

    #[inline]
    unsafe fn cmd_copy_buffer_to_image(
        &self,
        command_buffer: CommandBuffer,
        src_buffer: Buffer,
        dst_image: Image,
        dst_image_layout: ImageLayout,
        regions: &[impl Cast<Target = BufferImageCopy>],
    ) {
        let __result = (self.commands().cmd_copy_buffer_to_image)(
            command_buffer,
            src_buffer,
            dst_image,
            dst_image_layout,
            regions.len() as u32,
            regions.as_ptr().cast(),
        );
    }

    #[inline]
    unsafe fn cmd_copy_image(
        &self,
        command_buffer: CommandBuffer,
        src_image: Image,
        src_image_layout: ImageLayout,
        dst_image: Image,
        dst_image_layout: ImageLayout,
        regions: &[impl Cast<Target = ImageCopy>],
    ) {
        let __result = (self.commands().cmd_copy_image)(
            command_buffer,
            src_image,
            src_image_layout,
            dst_image,
            dst_image_layout,
            regions.len() as u32,
            regions.as_ptr().cast(),
        );
    }

    #[inline]
    unsafe fn cmd_copy_image_to_buffer(
        &self,
        command_buffer: CommandBuffer,
        src_image: Image,
        src_image_layout: ImageLayout,
        dst_buffer: Buffer,
        regions: &[impl Cast<Target = BufferImageCopy>],
    ) {
        let __result = (self.commands().cmd_copy_image_to_buffer)(
            command_buffer,
            src_image,
            src_image_layout,
            dst_buffer,
            regions.len() as u32,
            regions.as_ptr().cast(),
        );
    }

    #[inline]
    unsafe fn cmd_copy_query_pool_results(
        &self,
        command_buffer: CommandBuffer,
        query_pool: QueryPool,
        first_query: u32,
        query_count: u32,
        dst_buffer: Buffer,
        dst_offset: DeviceSize,
        stride: DeviceSize,
        flags: QueryResultFlags,
    ) {
        let __result = (self.commands().cmd_copy_query_pool_results)(
            command_buffer,
            query_pool,
            first_query,
            query_count,
            dst_buffer,
            dst_offset,
            stride,
            flags,
        );
    }

    #[inline]
    unsafe fn cmd_dispatch(
        &self,
        command_buffer: CommandBuffer,
        group_count_x: u32,
        group_count_y: u32,
        group_count_z: u32,
    ) {
        let __result = (self.commands().cmd_dispatch)(
            command_buffer,
            group_count_x,
            group_count_y,
            group_count_z,
        );
    }

    #[inline]
    unsafe fn cmd_dispatch_indirect(
        &self,
        command_buffer: CommandBuffer,
        buffer: Buffer,
        offset: DeviceSize,
    ) {
        let __result = (self.commands().cmd_dispatch_indirect)(command_buffer, buffer, offset);
    }

    #[inline]
    unsafe fn cmd_draw(
        &self,
        command_buffer: CommandBuffer,
        vertex_count: u32,
        instance_count: u32,
        first_vertex: u32,
        first_instance: u32,
    ) {
        let __result = (self.commands().cmd_draw)(
            command_buffer,
            vertex_count,
            instance_count,
            first_vertex,
            first_instance,
        );
    }

    #[inline]
    unsafe fn cmd_draw_indexed(
        &self,
        command_buffer: CommandBuffer,
        index_count: u32,
        instance_count: u32,
        first_index: u32,
        vertex_offset: i32,
        first_instance: u32,
    ) {
        let __result = (self.commands().cmd_draw_indexed)(
            command_buffer,
            index_count,
            instance_count,
            first_index,
            vertex_offset,
            first_instance,
        );
    }

    #[inline]
    unsafe fn cmd_draw_indexed_indirect(
        &self,
        command_buffer: CommandBuffer,
        buffer: Buffer,
        offset: DeviceSize,
        draw_count: u32,
        stride: u32,
    ) {
        let __result = (self.commands().cmd_draw_indexed_indirect)(
            command_buffer,
            buffer,
            offset,
            draw_count,
            stride,
        );
    }

    #[inline]
    unsafe fn cmd_draw_indirect(
        &self,
        command_buffer: CommandBuffer,
        buffer: Buffer,
        offset: DeviceSize,
        draw_count: u32,
        stride: u32,
    ) {
        let __result =
            (self.commands().cmd_draw_indirect)(command_buffer, buffer, offset, draw_count, stride);
    }

    #[inline]
    unsafe fn cmd_end_query(
        &self,
        command_buffer: CommandBuffer,
        query_pool: QueryPool,
        query: u32,
    ) {
        let __result = (self.commands().cmd_end_query)(command_buffer, query_pool, query);
    }

    #[inline]
    unsafe fn cmd_end_render_pass(&self, command_buffer: CommandBuffer) {
        let __result = (self.commands().cmd_end_render_pass)(command_buffer);
    }
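
    // Sketch (hedged): a minimal recording sequence with the commands above,
    // inside one `unsafe` block; `begin_info` and `pass_info` are assumed to
    // be populated elsewhere.
    //
    //     device.begin_command_buffer(cb, &begin_info)?;
    //     device.cmd_begin_render_pass(cb, &pass_info, SubpassContents::INLINE);
    //     device.cmd_bind_pipeline(cb, PipelineBindPoint::GRAPHICS, pipeline);
    //     device.cmd_draw(cb, 3, 1, 0, 0);
    //     device.cmd_end_render_pass(cb);
    //     device.end_command_buffer(cb)?;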

    #[inline]
    unsafe fn cmd_execute_commands(
        &self,
        command_buffer: CommandBuffer,
        command_buffers: &[CommandBuffer],
    ) {
        let __result = (self.commands().cmd_execute_commands)(
            command_buffer,
            command_buffers.len() as u32,
            command_buffers.as_ptr(),
        );
    }

    #[inline]
    unsafe fn cmd_fill_buffer(
        &self,
        command_buffer: CommandBuffer,
        dst_buffer: Buffer,
        dst_offset: DeviceSize,
        size: DeviceSize,
        data: u32,
    ) {
        let __result =
            (self.commands().cmd_fill_buffer)(command_buffer, dst_buffer, dst_offset, size, data);
    }

    #[inline]
    unsafe fn cmd_next_subpass(&self, command_buffer: CommandBuffer, contents: SubpassContents) {
        let __result = (self.commands().cmd_next_subpass)(command_buffer, contents);
    }

    #[inline]
    unsafe fn cmd_pipeline_barrier(
        &self,
        command_buffer: CommandBuffer,
        src_stage_mask: PipelineStageFlags,
        dst_stage_mask: PipelineStageFlags,
        dependency_flags: DependencyFlags,
        memory_barriers: &[impl Cast<Target = MemoryBarrier>],
        buffer_memory_barriers: &[impl Cast<Target = BufferMemoryBarrier>],
        image_memory_barriers: &[impl Cast<Target = ImageMemoryBarrier>],
    ) {
        let __result = (self.commands().cmd_pipeline_barrier)(
            command_buffer,
            src_stage_mask,
            dst_stage_mask,
            dependency_flags,
            memory_barriers.len() as u32,
            memory_barriers.as_ptr().cast(),
            buffer_memory_barriers.len() as u32,
            buffer_memory_barriers.as_ptr().cast(),
            image_memory_barriers.len() as u32,
            image_memory_barriers.as_ptr().cast(),
        );
    }
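
    // Sketch (hedged): an image layout transition expressed as a pipeline
    // barrier; the `ImageMemoryBarrier` field values are illustrative, and
    // the empty-slice casts assume each raw struct implements `Cast` to
    // itself.
    //
    //     let barrier = ImageMemoryBarrier {
    //         old_layout: ImageLayout::UNDEFINED,
    //         new_layout: ImageLayout::TRANSFER_DST_OPTIMAL,
    //         image,
    //         ..Default::default()
    //     };
    //     device.cmd_pipeline_barrier(
    //         cb,
    //         PipelineStageFlags::TOP_OF_PIPE,
    //         PipelineStageFlags::TRANSFER,
    //         DependencyFlags::empty(),
    //         &[] as &[MemoryBarrier],
    //         &[] as &[BufferMemoryBarrier],
    //         &[barrier],
    //     );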

    #[inline]
    unsafe fn cmd_push_constants(
        &self,
        command_buffer: CommandBuffer,
        layout: PipelineLayout,
        stage_flags: ShaderStageFlags,
        offset: u32,
        values: &[u8],
    ) {
        let __result = (self.commands().cmd_push_constants)(
            command_buffer,
            layout,
            stage_flags,
            offset,
            values.len() as u32,
            values.as_ptr() as *const c_void,
        );
    }

    #[inline]
    unsafe fn cmd_reset_event(
        &self,
        command_buffer: CommandBuffer,
        event: Event,
        stage_mask: PipelineStageFlags,
    ) {
        let __result = (self.commands().cmd_reset_event)(command_buffer, event, stage_mask);
    }

    #[inline]
    unsafe fn cmd_reset_query_pool(
        &self,
        command_buffer: CommandBuffer,
        query_pool: QueryPool,
        first_query: u32,
        query_count: u32,
    ) {
        let __result = (self.commands().cmd_reset_query_pool)(
            command_buffer,
            query_pool,
            first_query,
            query_count,
        );
    }

    #[inline]
    unsafe fn cmd_resolve_image(
        &self,
        command_buffer: CommandBuffer,
        src_image: Image,
        src_image_layout: ImageLayout,
        dst_image: Image,
        dst_image_layout: ImageLayout,
        regions: &[impl Cast<Target = ImageResolve>],
    ) {
        let __result = (self.commands().cmd_resolve_image)(
            command_buffer,
            src_image,
            src_image_layout,
            dst_image,
            dst_image_layout,
            regions.len() as u32,
            regions.as_ptr().cast(),
        );
    }

    #[inline]
    unsafe fn cmd_set_blend_constants(
        &self,
        command_buffer: CommandBuffer,
        blend_constants: [f32; 4],
    ) {
        let __result =
            (self.commands().cmd_set_blend_constants)(command_buffer, blend_constants.as_ptr());
    }

    #[inline]
    unsafe fn cmd_set_depth_bias(
        &self,
        command_buffer: CommandBuffer,
        depth_bias_constant_factor: f32,
        depth_bias_clamp: f32,
        depth_bias_slope_factor: f32,
    ) {
        let __result = (self.commands().cmd_set_depth_bias)(
            command_buffer,
            depth_bias_constant_factor,
            depth_bias_clamp,
            depth_bias_slope_factor,
        );
    }

    #[inline]
    unsafe fn cmd_set_depth_bounds(
        &self,
        command_buffer: CommandBuffer,
        min_depth_bounds: f32,
        max_depth_bounds: f32,
    ) {
        let __result = (self.commands().cmd_set_depth_bounds)(
            command_buffer,
            min_depth_bounds,
            max_depth_bounds,
        );
    }

    #[inline]
    unsafe fn cmd_set_event(
        &self,
        command_buffer: CommandBuffer,
        event: Event,
        stage_mask: PipelineStageFlags,
    ) {
        let __result = (self.commands().cmd_set_event)(command_buffer, event, stage_mask);
    }

    #[inline]
    unsafe fn cmd_set_line_width(&self, command_buffer: CommandBuffer, line_width: f32) {
        let __result = (self.commands().cmd_set_line_width)(command_buffer, line_width);
    }

    #[inline]
    unsafe fn cmd_set_scissor(
        &self,
        command_buffer: CommandBuffer,
        first_scissor: u32,
        scissors: &[impl Cast<Target = Rect2D>],
    ) {
        let __result = (self.commands().cmd_set_scissor)(
            command_buffer,
            first_scissor,
            scissors.len() as u32,
            scissors.as_ptr().cast(),
        );
    }

    #[inline]
    unsafe fn cmd_set_stencil_compare_mask(
        &self,
        command_buffer: CommandBuffer,
        face_mask: StencilFaceFlags,
        compare_mask: u32,
    ) {
        let __result =
            (self.commands().cmd_set_stencil_compare_mask)(command_buffer, face_mask, compare_mask);
    }

    #[inline]
    unsafe fn cmd_set_stencil_reference(
        &self,
        command_buffer: CommandBuffer,
        face_mask: StencilFaceFlags,
        reference: u32,
    ) {
        let __result =
            (self.commands().cmd_set_stencil_reference)(command_buffer, face_mask, reference);
    }

    #[inline]
    unsafe fn cmd_set_stencil_write_mask(
        &self,
        command_buffer: CommandBuffer,
        face_mask: StencilFaceFlags,
        write_mask: u32,
    ) {
        let __result =
            (self.commands().cmd_set_stencil_write_mask)(command_buffer, face_mask, write_mask);
    }

    #[inline]
    unsafe fn cmd_set_viewport(
        &self,
        command_buffer: CommandBuffer,
        first_viewport: u32,
        viewports: &[impl Cast<Target = Viewport>],
    ) {
        let __result = (self.commands().cmd_set_viewport)(
            command_buffer,
            first_viewport,
            viewports.len() as u32,
            viewports.as_ptr().cast(),
        );
    }

    #[inline]
    unsafe fn cmd_update_buffer(
        &self,
        command_buffer: CommandBuffer,
        dst_buffer: Buffer,
        dst_offset: DeviceSize,
        data: &[u8],
    ) {
        let __result = (self.commands().cmd_update_buffer)(
            command_buffer,
            dst_buffer,
            dst_offset,
            data.len() as DeviceSize,
            data.as_ptr() as *const c_void,
        );
    }

    #[inline]
    unsafe fn cmd_wait_events(
        &self,
        command_buffer: CommandBuffer,
        events: &[Event],
        src_stage_mask: PipelineStageFlags,
        dst_stage_mask: PipelineStageFlags,
        memory_barriers: &[impl Cast<Target = MemoryBarrier>],
        buffer_memory_barriers: &[impl Cast<Target = BufferMemoryBarrier>],
        image_memory_barriers: &[impl Cast<Target = ImageMemoryBarrier>],
    ) {
        let __result = (self.commands().cmd_wait_events)(
            command_buffer,
            events.len() as u32,
            events.as_ptr(),
            src_stage_mask,
            dst_stage_mask,
            memory_barriers.len() as u32,
            memory_barriers.as_ptr().cast(),
            buffer_memory_barriers.len() as u32,
            buffer_memory_barriers.as_ptr().cast(),
            image_memory_barriers.len() as u32,
            image_memory_barriers.as_ptr().cast(),
        );
    }

    #[inline]
    unsafe fn cmd_write_timestamp(
        &self,
        command_buffer: CommandBuffer,
        pipeline_stage: PipelineStageFlags,
        query_pool: QueryPool,
        query: u32,
    ) {
        let __result = (self.commands().cmd_write_timestamp)(
            command_buffer,
            pipeline_stage,
            query_pool,
            query,
        );
    }

    #[inline]
    unsafe fn create_buffer(
        &self,
        create_info: &BufferCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<Buffer> {
        let mut buffer = MaybeUninit::<Buffer>::uninit();

        let __result = (self.commands().create_buffer)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            buffer.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(buffer.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn create_buffer_view(
        &self,
        create_info: &BufferViewCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<BufferView> {
        let mut view = MaybeUninit::<BufferView>::uninit();

        let __result = (self.commands().create_buffer_view)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            view.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(view.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn create_command_pool(
        &self,
        create_info: &CommandPoolCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<CommandPool> {
        let mut command_pool = MaybeUninit::<CommandPool>::uninit();

        let __result = (self.commands().create_command_pool)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            command_pool.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(command_pool.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn create_compute_pipelines(
        &self,
        pipeline_cache: PipelineCache,
        create_infos: &[impl Cast<Target = ComputePipelineCreateInfo>],
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkSuccessResult<Vec<Pipeline>> {
        let mut pipelines = Vec::with_capacity(create_infos.len() as usize);

        let __result = (self.commands().create_compute_pipelines)(
            self.handle(),
            pipeline_cache,
            create_infos.len() as u32,
            create_infos.as_ptr().cast(),
            allocator.map_or(ptr::null(), |v| v),
            pipelines.as_mut_ptr(),
        );

        pipelines.set_len(create_infos.len() as usize);

        if __result >= Result::SUCCESS {
            Ok((pipelines, __result.into()))
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn create_descriptor_pool(
        &self,
        create_info: &DescriptorPoolCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<DescriptorPool> {
        let mut descriptor_pool = MaybeUninit::<DescriptorPool>::uninit();

        let __result = (self.commands().create_descriptor_pool)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            descriptor_pool.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(descriptor_pool.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn create_descriptor_set_layout(
        &self,
        create_info: &DescriptorSetLayoutCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<DescriptorSetLayout> {
        let mut set_layout = MaybeUninit::<DescriptorSetLayout>::uninit();

        let __result = (self.commands().create_descriptor_set_layout)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            set_layout.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(set_layout.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn create_event(
        &self,
        create_info: &EventCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<Event> {
        let mut event = MaybeUninit::<Event>::uninit();

        let __result = (self.commands().create_event)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            event.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(event.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn create_fence(
        &self,
        create_info: &FenceCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<Fence> {
        let mut fence = MaybeUninit::<Fence>::uninit();

        let __result = (self.commands().create_fence)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            fence.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(fence.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn create_framebuffer(
        &self,
        create_info: &FramebufferCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<Framebuffer> {
        let mut framebuffer = MaybeUninit::<Framebuffer>::uninit();

        let __result = (self.commands().create_framebuffer)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            framebuffer.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(framebuffer.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn create_graphics_pipelines(
        &self,
        pipeline_cache: PipelineCache,
        create_infos: &[impl Cast<Target = GraphicsPipelineCreateInfo>],
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkSuccessResult<Vec<Pipeline>> {
        let mut pipelines = Vec::with_capacity(create_infos.len() as usize);

        let __result = (self.commands().create_graphics_pipelines)(
            self.handle(),
            pipeline_cache,
            create_infos.len() as u32,
            create_infos.as_ptr().cast(),
            allocator.map_or(ptr::null(), |v| v),
            pipelines.as_mut_ptr(),
        );

        pipelines.set_len(create_infos.len() as usize);

        if __result >= Result::SUCCESS {
            Ok((pipelines, __result.into()))
        } else {
            Err(__result.into())
        }
    }
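
    // Note: unlike the other `create_*` wrappers, the pipeline builders
    // return `VkSuccessResult`, pairing the pipelines with the success code
    // so callers can observe codes other than `SUCCESS`:
    //
    //     let (pipelines, _code) = unsafe {
    //         device.create_graphics_pipelines(cache, &[create_info], None)?
    //     };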

    #[inline]
    unsafe fn create_image(
        &self,
        create_info: &ImageCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<Image> {
        let mut image = MaybeUninit::<Image>::uninit();

        let __result = (self.commands().create_image)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            image.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(image.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn create_image_view(
        &self,
        create_info: &ImageViewCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<ImageView> {
        let mut view = MaybeUninit::<ImageView>::uninit();

        let __result = (self.commands().create_image_view)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            view.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(view.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn create_pipeline_cache(
        &self,
        create_info: &PipelineCacheCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<PipelineCache> {
        let mut pipeline_cache = MaybeUninit::<PipelineCache>::uninit();

        let __result = (self.commands().create_pipeline_cache)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            pipeline_cache.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(pipeline_cache.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn create_pipeline_layout(
        &self,
        create_info: &PipelineLayoutCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<PipelineLayout> {
        let mut pipeline_layout = MaybeUninit::<PipelineLayout>::uninit();

        let __result = (self.commands().create_pipeline_layout)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            pipeline_layout.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(pipeline_layout.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn create_query_pool(
        &self,
        create_info: &QueryPoolCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<QueryPool> {
        let mut query_pool = MaybeUninit::<QueryPool>::uninit();

        let __result = (self.commands().create_query_pool)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            query_pool.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(query_pool.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn create_render_pass(
        &self,
        create_info: &RenderPassCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<RenderPass> {
        let mut render_pass = MaybeUninit::<RenderPass>::uninit();

        let __result = (self.commands().create_render_pass)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            render_pass.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(render_pass.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn create_sampler(
        &self,
        create_info: &SamplerCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<Sampler> {
        let mut sampler = MaybeUninit::<Sampler>::uninit();

        let __result = (self.commands().create_sampler)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            sampler.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(sampler.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn create_semaphore(
        &self,
        create_info: &SemaphoreCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<Semaphore> {
        let mut semaphore = MaybeUninit::<Semaphore>::uninit();

        let __result = (self.commands().create_semaphore)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            semaphore.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(semaphore.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn create_shader_module(
        &self,
        create_info: &ShaderModuleCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<ShaderModule> {
        let mut shader_module = MaybeUninit::<ShaderModule>::uninit();

        let __result = (self.commands().create_shader_module)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            shader_module.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(shader_module.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn destroy_buffer(&self, buffer: Buffer, allocator: Option<&AllocationCallbacks>) {
        let __result = (self.commands().destroy_buffer)(
            self.handle(),
            buffer,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn destroy_buffer_view(
        &self,
        buffer_view: BufferView,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_buffer_view)(
            self.handle(),
            buffer_view,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn destroy_command_pool(
        &self,
        command_pool: CommandPool,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_command_pool)(
            self.handle(),
            command_pool,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn destroy_descriptor_pool(
        &self,
        descriptor_pool: DescriptorPool,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_descriptor_pool)(
            self.handle(),
            descriptor_pool,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn destroy_descriptor_set_layout(
        &self,
        descriptor_set_layout: DescriptorSetLayout,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_descriptor_set_layout)(
            self.handle(),
            descriptor_set_layout,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn destroy_device(&self, allocator: Option<&AllocationCallbacks>) {
        let __result =
            (self.commands().destroy_device)(self.handle(), allocator.map_or(ptr::null(), |v| v));
    }

    #[inline]
    unsafe fn destroy_event(&self, event: Event, allocator: Option<&AllocationCallbacks>) {
        let __result = (self.commands().destroy_event)(
            self.handle(),
            event,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn destroy_fence(&self, fence: Fence, allocator: Option<&AllocationCallbacks>) {
        let __result = (self.commands().destroy_fence)(
            self.handle(),
            fence,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn destroy_framebuffer(
        &self,
        framebuffer: Framebuffer,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_framebuffer)(
            self.handle(),
            framebuffer,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn destroy_image(&self, image: Image, allocator: Option<&AllocationCallbacks>) {
        let __result = (self.commands().destroy_image)(
            self.handle(),
            image,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn destroy_image_view(
        &self,
        image_view: ImageView,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_image_view)(
            self.handle(),
            image_view,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn destroy_pipeline(&self, pipeline: Pipeline, allocator: Option<&AllocationCallbacks>) {
        let __result = (self.commands().destroy_pipeline)(
            self.handle(),
            pipeline,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn destroy_pipeline_cache(
        &self,
        pipeline_cache: PipelineCache,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_pipeline_cache)(
            self.handle(),
            pipeline_cache,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn destroy_pipeline_layout(
        &self,
        pipeline_layout: PipelineLayout,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_pipeline_layout)(
            self.handle(),
            pipeline_layout,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn destroy_query_pool(
        &self,
        query_pool: QueryPool,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_query_pool)(
            self.handle(),
            query_pool,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn destroy_render_pass(
        &self,
        render_pass: RenderPass,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_render_pass)(
            self.handle(),
            render_pass,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn destroy_sampler(&self, sampler: Sampler, allocator: Option<&AllocationCallbacks>) {
        let __result = (self.commands().destroy_sampler)(
            self.handle(),
            sampler,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn destroy_semaphore(
        &self,
        semaphore: Semaphore,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_semaphore)(
            self.handle(),
            semaphore,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn destroy_shader_module(
        &self,
        shader_module: ShaderModule,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_shader_module)(
            self.handle(),
            shader_module,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn device_wait_idle(&self) -> crate::VkResult<()> {
        let __result = (self.commands().device_wait_idle)(self.handle());

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn end_command_buffer(&self, command_buffer: CommandBuffer) -> crate::VkResult<()> {
        let __result = (self.commands().end_command_buffer)(command_buffer);

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn flush_mapped_memory_ranges(
        &self,
        memory_ranges: &[impl Cast<Target = MappedMemoryRange>],
    ) -> crate::VkResult<()> {
        let __result = (self.commands().flush_mapped_memory_ranges)(
            self.handle(),
            memory_ranges.len() as u32,
            memory_ranges.as_ptr().cast(),
        );

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn free_command_buffers(
        &self,
        command_pool: CommandPool,
        command_buffers: &[CommandBuffer],
    ) {
        let __result = (self.commands().free_command_buffers)(
            self.handle(),
            command_pool,
            command_buffers.len() as u32,
            command_buffers.as_ptr(),
        );
    }

    #[inline]
    unsafe fn free_descriptor_sets(
        &self,
        descriptor_pool: DescriptorPool,
        descriptor_sets: &[DescriptorSet],
    ) -> crate::VkResult<()> {
        let __result = (self.commands().free_descriptor_sets)(
            self.handle(),
            descriptor_pool,
            descriptor_sets.len() as u32,
            descriptor_sets.as_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn free_memory(&self, memory: DeviceMemory, allocator: Option<&AllocationCallbacks>) {
        let __result = (self.commands().free_memory)(
            self.handle(),
            memory,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn get_buffer_memory_requirements(&self, buffer: Buffer) -> MemoryRequirements {
        let mut memory_requirements = MaybeUninit::<MemoryRequirements>::uninit();

        let __result = (self.commands().get_buffer_memory_requirements)(
            self.handle(),
            buffer,
            memory_requirements.as_mut_ptr(),
        );

        memory_requirements.assume_init()
    }

    #[inline]
    unsafe fn get_device_memory_commitment(&self, memory: DeviceMemory) -> DeviceSize {
        let mut committed_memory_in_bytes = MaybeUninit::<DeviceSize>::uninit();

        let __result = (self.commands().get_device_memory_commitment)(
            self.handle(),
            memory,
            committed_memory_in_bytes.as_mut_ptr(),
        );

        committed_memory_in_bytes.assume_init()
    }

    #[inline]
    unsafe fn get_device_queue(&self, queue_family_index: u32, queue_index: u32) -> Queue {
        let mut queue = MaybeUninit::<Queue>::uninit();

        let __result = (self.commands().get_device_queue)(
            self.handle(),
            queue_family_index,
            queue_index,
            queue.as_mut_ptr(),
        );

        queue.assume_init()
    }

    #[inline]
    unsafe fn get_event_status(&self, event: Event) -> crate::VkResult<SuccessCode> {
        let __result = (self.commands().get_event_status)(self.handle(), event);

        if __result >= Result::SUCCESS {
            Ok(__result.into())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn get_fence_status(&self, fence: Fence) -> crate::VkResult<SuccessCode> {
        let __result = (self.commands().get_fence_status)(self.handle(), fence);

        if __result >= Result::SUCCESS {
            Ok(__result.into())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn get_image_memory_requirements(&self, image: Image) -> MemoryRequirements {
        let mut memory_requirements = MaybeUninit::<MemoryRequirements>::uninit();

        let __result = (self.commands().get_image_memory_requirements)(
            self.handle(),
            image,
            memory_requirements.as_mut_ptr(),
        );

        memory_requirements.assume_init()
    }

    #[inline]
    unsafe fn get_image_sparse_memory_requirements(
        &self,
        image: Image,
    ) -> Vec<SparseImageMemoryRequirements> {
        let mut sparse_memory_requirement_count = 0;

        (self.commands().get_image_sparse_memory_requirements)(
            self.handle(),
            image,
            &mut sparse_memory_requirement_count,
            ptr::null_mut(),
        );

        let mut sparse_memory_requirements =
            Vec::with_capacity(sparse_memory_requirement_count as usize);

        let __result = (self.commands().get_image_sparse_memory_requirements)(
            self.handle(),
            image,
            &mut sparse_memory_requirement_count,
            sparse_memory_requirements.as_mut_ptr(),
        );

        debug_assert!(
            sparse_memory_requirements.capacity() >= sparse_memory_requirement_count as usize
        );
        sparse_memory_requirements.set_len(sparse_memory_requirement_count as usize);

        sparse_memory_requirements
    }

    #[inline]
    unsafe fn get_image_subresource_layout(
        &self,
        image: Image,
        subresource: &ImageSubresource,
    ) -> SubresourceLayout {
        let mut layout = MaybeUninit::<SubresourceLayout>::uninit();

        let __result = (self.commands().get_image_subresource_layout)(
            self.handle(),
            image,
            subresource,
            layout.as_mut_ptr(),
        );

        layout.assume_init()
    }

    #[inline]
    unsafe fn get_pipeline_cache_data(
        &self,
        pipeline_cache: PipelineCache,
    ) -> crate::VkResult<Vec<u8>> {
        let mut data_size = 0;

        (self.commands().get_pipeline_cache_data)(
            self.handle(),
            pipeline_cache,
            &mut data_size,
            ptr::null_mut(),
        );

        let mut data = Vec::with_capacity(data_size as usize);

        let __result = (self.commands().get_pipeline_cache_data)(
            self.handle(),
            pipeline_cache,
            &mut data_size,
            data.as_mut_ptr() as *mut c_void,
        );

        debug_assert!(data.capacity() >= data_size as usize);
        data.set_len(data_size as usize);

        if __result == Result::SUCCESS {
            Ok(data)
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn get_query_pool_results(
        &self,
        query_pool: QueryPool,
        first_query: u32,
        query_count: u32,
        data: &mut [u8],
        stride: DeviceSize,
        flags: QueryResultFlags,
    ) -> crate::VkResult<SuccessCode> {
        let __result = (self.commands().get_query_pool_results)(
            self.handle(),
            query_pool,
            first_query,
            query_count,
            data.len() as usize,
            data.as_mut_ptr() as *mut c_void,
            stride,
            flags,
        );

        if __result >= Result::SUCCESS {
            Ok(__result.into())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn get_render_area_granularity(&self, render_pass: RenderPass) -> Extent2D {
        let mut granularity = MaybeUninit::<Extent2D>::uninit();

        let __result = (self.commands().get_render_area_granularity)(
            self.handle(),
            render_pass,
            granularity.as_mut_ptr(),
        );

        granularity.assume_init()
    }

    #[inline]
    unsafe fn invalidate_mapped_memory_ranges(
        &self,
        memory_ranges: &[impl Cast<Target = MappedMemoryRange>],
    ) -> crate::VkResult<()> {
        let __result = (self.commands().invalidate_mapped_memory_ranges)(
            self.handle(),
            memory_ranges.len() as u32,
            memory_ranges.as_ptr().cast(),
        );

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn map_memory(
        &self,
        memory: DeviceMemory,
        offset: DeviceSize,
        size: DeviceSize,
        flags: MemoryMapFlags,
    ) -> crate::VkResult<*mut c_void> {
        let mut data = MaybeUninit::<*mut c_void>::uninit();

        let __result = (self.commands().map_memory)(
            self.handle(),
            memory,
            offset,
            size,
            flags,
            data.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(data.assume_init())
        } else {
            Err(__result.into())
        }
    }
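
    // Sketch (hedged): uploading bytes through a mapped range; `memory` must
    // be host-visible, and a `flush_mapped_memory_ranges` call is only needed
    // when it is not host-coherent.
    //
    //     let dst = device.map_memory(memory, 0, size, MemoryMapFlags::empty())?;
    //     ptr::copy_nonoverlapping(bytes.as_ptr(), dst.cast(), bytes.len());
    //     device.unmap_memory(memory);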

    #[inline]
    unsafe fn merge_pipeline_caches(
        &self,
        dst_cache: PipelineCache,
        src_caches: &[PipelineCache],
    ) -> crate::VkResult<()> {
        let __result = (self.commands().merge_pipeline_caches)(
            self.handle(),
            dst_cache,
            src_caches.len() as u32,
            src_caches.as_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn queue_bind_sparse(
        &self,
        queue: Queue,
        bind_info: &[impl Cast<Target = BindSparseInfo>],
        fence: Fence,
    ) -> crate::VkResult<()> {
        let __result = (self.commands().queue_bind_sparse)(
            queue,
            bind_info.len() as u32,
            bind_info.as_ptr().cast(),
            fence,
        );

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn queue_submit(
        &self,
        queue: Queue,
        submits: &[impl Cast<Target = SubmitInfo>],
        fence: Fence,
    ) -> crate::VkResult<()> {
        let __result = (self.commands().queue_submit)(
            queue,
            submits.len() as u32,
            submits.as_ptr().cast(),
            fence,
        );

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn queue_wait_idle(&self, queue: Queue) -> crate::VkResult<()> {
        let __result = (self.commands().queue_wait_idle)(queue);

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn reset_command_buffer(
        &self,
        command_buffer: CommandBuffer,
        flags: CommandBufferResetFlags,
    ) -> crate::VkResult<()> {
        let __result = (self.commands().reset_command_buffer)(command_buffer, flags);

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn reset_command_pool(
        &self,
        command_pool: CommandPool,
        flags: CommandPoolResetFlags,
    ) -> crate::VkResult<()> {
        let __result = (self.commands().reset_command_pool)(self.handle(), command_pool, flags);

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn reset_descriptor_pool(
        &self,
        descriptor_pool: DescriptorPool,
        flags: DescriptorPoolResetFlags,
    ) -> crate::VkResult<()> {
        let __result =
            (self.commands().reset_descriptor_pool)(self.handle(), descriptor_pool, flags);

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn reset_event(&self, event: Event) -> crate::VkResult<()> {
        let __result = (self.commands().reset_event)(self.handle(), event);

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn reset_fences(&self, fences: &[Fence]) -> crate::VkResult<()> {
        let __result =
            (self.commands().reset_fences)(self.handle(), fences.len() as u32, fences.as_ptr());

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn set_event(&self, event: Event) -> crate::VkResult<()> {
        let __result = (self.commands().set_event)(self.handle(), event);

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn unmap_memory(&self, memory: DeviceMemory) {
        let __result = (self.commands().unmap_memory)(self.handle(), memory);
    }

    #[inline]
    unsafe fn update_descriptor_sets(
        &self,
        descriptor_writes: &[impl Cast<Target = WriteDescriptorSet>],
        descriptor_copies: &[impl Cast<Target = CopyDescriptorSet>],
    ) {
        let __result = (self.commands().update_descriptor_sets)(
            self.handle(),
            descriptor_writes.len() as u32,
            descriptor_writes.as_ptr().cast(),
            descriptor_copies.len() as u32,
            descriptor_copies.as_ptr().cast(),
        );
    }
2509
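    /// Waits until all of `fences` (or any one of them when `wait_all` is
    /// `false`) are signaled, or until `timeout` nanoseconds elapse. On
    /// success the returned code distinguishes a completed wait from a
    /// timeout, e.g. (a sketch with a hypothetical fence):
    ///
    /// ```no_run
    /// # use vulkanalia::prelude::v1_0::*;
    /// # unsafe fn sketch(device: &Device, fence: vk::Fence) -> vulkanalia::VkResult<()> {
    /// let code = device.wait_for_fences(&[fence], true, 1_000_000)?;
    /// if code == vk::SuccessCode::TIMEOUT {
    ///     // Not signaled within 1ms.
    /// }
    /// # Ok(())
    /// # }
    /// ```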
    #[inline]
    unsafe fn wait_for_fences(
        &self,
        fences: &[Fence],
        wait_all: bool,
        timeout: u64,
    ) -> crate::VkResult<SuccessCode> {
        let __result = (self.commands().wait_for_fences)(
            self.handle(),
            fences.len() as u32,
            fences.as_ptr(),
            wait_all as Bool32,
            timeout,
        );

        if __result >= Result::SUCCESS {
            Ok(__result.into())
        } else {
            Err(__result.into())
        }
    }
}

impl DeviceV1_0 for crate::Device {
    #[inline]
    fn commands(&self) -> &DeviceCommands {
        &self.commands
    }

    #[inline]
    fn handle(&self) -> Device {
        self.handle
    }
}

impl<C: Borrow<DeviceCommands>> DeviceV1_0 for (C, Device) {
    #[inline]
    fn commands(&self) -> &DeviceCommands {
        self.0.borrow()
    }

    #[inline]
    fn handle(&self) -> Device {
        self.1
    }
}

pub trait EntryV1_1: EntryV1_0 {
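    /// Returns the highest instance-level Vulkan API version supported by
    /// the loader, packed in the standard Vulkan version encoding (major
    /// in the high bits, then minor, then patch). A decoding sketch using
    /// the spec's bit layout:
    ///
    /// ```no_run
    /// # use vulkanalia::prelude::v1_1::*;
    /// # unsafe fn sketch(entry: &Entry) -> vulkanalia::VkResult<()> {
    /// let version = entry.enumerate_instance_version()?;
    /// let (major, minor) = ((version >> 22) & 0x7f, (version >> 12) & 0x3ff);
    /// # Ok(())
    /// # }
    /// ```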
    #[inline]
    unsafe fn enumerate_instance_version(&self) -> crate::VkResult<u32> {
        let mut api_version = MaybeUninit::<u32>::uninit();

        let __result = (self.commands().enumerate_instance_version)(api_version.as_mut_ptr());

        if __result == Result::SUCCESS {
            Ok(api_version.assume_init())
        } else {
            Err(__result.into())
        }
    }
}

impl<C: EntryV1_0 + ?Sized> EntryV1_1 for C {}

pub trait InstanceV1_1: InstanceV1_0 {
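    /// Enumerates device groups, i.e. sets of physical devices that can
    /// back a single logical device. Like the other enumeration wrappers
    /// in this module, this performs the usual Vulkan two-call pattern
    /// internally: one call to query the count, then a second call to
    /// fill the returned `Vec`.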
    #[inline]
    unsafe fn enumerate_physical_device_groups(
        &self,
    ) -> crate::VkResult<Vec<PhysicalDeviceGroupProperties>> {
        let mut physical_device_group_count = 0;

        (self.commands().enumerate_physical_device_groups)(
            self.handle(),
            &mut physical_device_group_count,
            ptr::null_mut(),
        );

        let mut physical_device_group_properties =
            Vec::with_capacity(physical_device_group_count as usize);

        let __result = (self.commands().enumerate_physical_device_groups)(
            self.handle(),
            &mut physical_device_group_count,
            physical_device_group_properties.as_mut_ptr(),
        );

        debug_assert!(
            physical_device_group_properties.capacity() >= physical_device_group_count as usize
        );
        physical_device_group_properties.set_len(physical_device_group_count as usize);

        if __result == Result::SUCCESS {
            Ok(physical_device_group_properties)
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn get_physical_device_external_buffer_properties(
        &self,
        physical_device: PhysicalDevice,
        external_buffer_info: &PhysicalDeviceExternalBufferInfo,
        external_buffer_properties: &mut ExternalBufferProperties,
    ) {
        let __result = (self
            .commands()
            .get_physical_device_external_buffer_properties)(
            physical_device,
            external_buffer_info,
            external_buffer_properties,
        );
    }

    #[inline]
    unsafe fn get_physical_device_external_fence_properties(
        &self,
        physical_device: PhysicalDevice,
        external_fence_info: &PhysicalDeviceExternalFenceInfo,
        external_fence_properties: &mut ExternalFenceProperties,
    ) {
        let __result = (self
            .commands()
            .get_physical_device_external_fence_properties)(
            physical_device,
            external_fence_info,
            external_fence_properties,
        );
    }

    #[inline]
    unsafe fn get_physical_device_external_semaphore_properties(
        &self,
        physical_device: PhysicalDevice,
        external_semaphore_info: &PhysicalDeviceExternalSemaphoreInfo,
        external_semaphore_properties: &mut ExternalSemaphoreProperties,
    ) {
        let __result = (self
            .commands()
            .get_physical_device_external_semaphore_properties)(
            physical_device,
            external_semaphore_info,
            external_semaphore_properties,
        );
    }

    #[inline]
    unsafe fn get_physical_device_features2(
        &self,
        physical_device: PhysicalDevice,
        features: &mut PhysicalDeviceFeatures2,
    ) {
        let __result = (self.commands().get_physical_device_features2)(physical_device, features);
    }

    #[inline]
    unsafe fn get_physical_device_format_properties2(
        &self,
        physical_device: PhysicalDevice,
        format: Format,
        format_properties: &mut FormatProperties2,
    ) {
        let __result = (self.commands().get_physical_device_format_properties2)(
            physical_device,
            format,
            format_properties,
        );
    }

    #[inline]
    unsafe fn get_physical_device_image_format_properties2(
        &self,
        physical_device: PhysicalDevice,
        image_format_info: &PhysicalDeviceImageFormatInfo2,
        image_format_properties: &mut ImageFormatProperties2,
    ) -> crate::VkResult<()> {
        let __result = (self.commands().get_physical_device_image_format_properties2)(
            physical_device,
            image_format_info,
            image_format_properties,
        );

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn get_physical_device_memory_properties2(
        &self,
        physical_device: PhysicalDevice,
        memory_properties: &mut PhysicalDeviceMemoryProperties2,
    ) {
        let __result = (self.commands().get_physical_device_memory_properties2)(
            physical_device,
            memory_properties,
        );
    }

    #[inline]
    unsafe fn get_physical_device_properties2(
        &self,
        physical_device: PhysicalDevice,
        properties: &mut PhysicalDeviceProperties2,
    ) {
        let __result =
            (self.commands().get_physical_device_properties2)(physical_device, properties);
    }

    #[inline]
    unsafe fn get_physical_device_queue_family_properties2(
        &self,
        physical_device: PhysicalDevice,
    ) -> Vec<QueueFamilyProperties2> {
        let mut queue_family_property_count = 0;

        (self.commands().get_physical_device_queue_family_properties2)(
            physical_device,
            &mut queue_family_property_count,
            ptr::null_mut(),
        );

        let mut queue_family_properties = Vec::with_capacity(queue_family_property_count as usize);

        let __result = (self.commands().get_physical_device_queue_family_properties2)(
            physical_device,
            &mut queue_family_property_count,
            queue_family_properties.as_mut_ptr(),
        );

        debug_assert!(queue_family_properties.capacity() >= queue_family_property_count as usize);
        queue_family_properties.set_len(queue_family_property_count as usize);

        queue_family_properties
    }

    #[inline]
    unsafe fn get_physical_device_sparse_image_format_properties2(
        &self,
        physical_device: PhysicalDevice,
        format_info: &PhysicalDeviceSparseImageFormatInfo2,
    ) -> Vec<SparseImageFormatProperties2> {
        let mut property_count = 0;

        (self
            .commands()
            .get_physical_device_sparse_image_format_properties2)(
            physical_device,
            format_info,
            &mut property_count,
            ptr::null_mut(),
        );

        let mut properties = Vec::with_capacity(property_count as usize);

        let __result = (self
            .commands()
            .get_physical_device_sparse_image_format_properties2)(
            physical_device,
            format_info,
            &mut property_count,
            properties.as_mut_ptr(),
        );

        debug_assert!(properties.capacity() >= property_count as usize);
        properties.set_len(property_count as usize);

        properties
    }
}

impl<C: InstanceV1_0 + ?Sized> InstanceV1_1 for C {}

pub trait DeviceV1_1: DeviceV1_0 {
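    /// Binds device memory to one or more buffers in a single call; each
    /// element of `bind_infos` names a buffer, an allocation, and an
    /// offset. A minimal sketch with hypothetical handles:
    ///
    /// ```no_run
    /// # use vulkanalia::prelude::v1_1::*;
    /// # unsafe fn sketch(device: &Device, buffer: vk::Buffer, memory: vk::DeviceMemory) -> vulkanalia::VkResult<()> {
    /// let info = vk::BindBufferMemoryInfo::builder()
    ///     .buffer(buffer)
    ///     .memory(memory)
    ///     .memory_offset(0);
    /// device.bind_buffer_memory2(&[info])?;
    /// # Ok(())
    /// # }
    /// ```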
    #[inline]
    unsafe fn bind_buffer_memory2(
        &self,
        bind_infos: &[impl Cast<Target = BindBufferMemoryInfo>],
    ) -> crate::VkResult<()> {
        let __result = (self.commands().bind_buffer_memory2)(
            self.handle(),
            bind_infos.len() as u32,
            bind_infos.as_ptr().cast(),
        );

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn bind_image_memory2(
        &self,
        bind_infos: &[impl Cast<Target = BindImageMemoryInfo>],
    ) -> crate::VkResult<()> {
        let __result = (self.commands().bind_image_memory2)(
            self.handle(),
            bind_infos.len() as u32,
            bind_infos.as_ptr().cast(),
        );

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn cmd_dispatch_base(
        &self,
        command_buffer: CommandBuffer,
        base_group_x: u32,
        base_group_y: u32,
        base_group_z: u32,
        group_count_x: u32,
        group_count_y: u32,
        group_count_z: u32,
    ) {
        let __result = (self.commands().cmd_dispatch_base)(
            command_buffer,
            base_group_x,
            base_group_y,
            base_group_z,
            group_count_x,
            group_count_y,
            group_count_z,
        );
    }

    #[inline]
    unsafe fn cmd_set_device_mask(&self, command_buffer: CommandBuffer, device_mask: u32) {
        let __result = (self.commands().cmd_set_device_mask)(command_buffer, device_mask);
    }

    #[inline]
    unsafe fn create_descriptor_update_template(
        &self,
        create_info: &DescriptorUpdateTemplateCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<DescriptorUpdateTemplate> {
        let mut descriptor_update_template = MaybeUninit::<DescriptorUpdateTemplate>::uninit();

        let __result = (self.commands().create_descriptor_update_template)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            descriptor_update_template.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(descriptor_update_template.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn create_sampler_ycbcr_conversion(
        &self,
        create_info: &SamplerYcbcrConversionCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<SamplerYcbcrConversion> {
        let mut ycbcr_conversion = MaybeUninit::<SamplerYcbcrConversion>::uninit();

        let __result = (self.commands().create_sampler_ycbcr_conversion)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            ycbcr_conversion.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(ycbcr_conversion.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn destroy_descriptor_update_template(
        &self,
        descriptor_update_template: DescriptorUpdateTemplate,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_descriptor_update_template)(
            self.handle(),
            descriptor_update_template,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn destroy_sampler_ycbcr_conversion(
        &self,
        ycbcr_conversion: SamplerYcbcrConversion,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_sampler_ycbcr_conversion)(
            self.handle(),
            ycbcr_conversion,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn get_buffer_memory_requirements2(
        &self,
        info: &BufferMemoryRequirementsInfo2,
        memory_requirements: &mut MemoryRequirements2,
    ) {
        let __result = (self.commands().get_buffer_memory_requirements2)(
            self.handle(),
            info,
            memory_requirements,
        );
    }

    #[inline]
    unsafe fn get_descriptor_set_layout_support(
        &self,
        create_info: &DescriptorSetLayoutCreateInfo,
        support: &mut DescriptorSetLayoutSupport,
    ) {
        let __result = (self.commands().get_descriptor_set_layout_support)(
            self.handle(),
            create_info,
            support,
        );
    }

    #[inline]
    unsafe fn get_device_group_peer_memory_features(
        &self,
        heap_index: u32,
        local_device_index: u32,
        remote_device_index: u32,
    ) -> PeerMemoryFeatureFlags {
        let mut peer_memory_features = MaybeUninit::<PeerMemoryFeatureFlags>::uninit();

        let __result = (self.commands().get_device_group_peer_memory_features)(
            self.handle(),
            heap_index,
            local_device_index,
            remote_device_index,
            peer_memory_features.as_mut_ptr(),
        );

        peer_memory_features.assume_init()
    }

    #[inline]
    unsafe fn get_device_queue2(&self, queue_info: &DeviceQueueInfo2) -> Queue {
        let mut queue = MaybeUninit::<Queue>::uninit();

        let __result =
            (self.commands().get_device_queue2)(self.handle(), queue_info, queue.as_mut_ptr());

        queue.assume_init()
    }

    #[inline]
    unsafe fn get_image_memory_requirements2(
        &self,
        info: &ImageMemoryRequirementsInfo2,
        memory_requirements: &mut MemoryRequirements2,
    ) {
        let __result = (self.commands().get_image_memory_requirements2)(
            self.handle(),
            info,
            memory_requirements,
        );
    }

    #[inline]
    unsafe fn get_image_sparse_memory_requirements2(
        &self,
        info: &ImageSparseMemoryRequirementsInfo2,
    ) -> Vec<SparseImageMemoryRequirements2> {
        let mut sparse_memory_requirement_count = 0;

        (self.commands().get_image_sparse_memory_requirements2)(
            self.handle(),
            info,
            &mut sparse_memory_requirement_count,
            ptr::null_mut(),
        );

        let mut sparse_memory_requirements =
            Vec::with_capacity(sparse_memory_requirement_count as usize);

        let __result = (self.commands().get_image_sparse_memory_requirements2)(
            self.handle(),
            info,
            &mut sparse_memory_requirement_count,
            sparse_memory_requirements.as_mut_ptr(),
        );

        debug_assert!(
            sparse_memory_requirements.capacity() >= sparse_memory_requirement_count as usize
        );
        sparse_memory_requirements.set_len(sparse_memory_requirement_count as usize);

        sparse_memory_requirements
    }

    #[inline]
    unsafe fn trim_command_pool(&self, command_pool: CommandPool, flags: CommandPoolTrimFlags) {
        let __result = (self.commands().trim_command_pool)(self.handle(), command_pool, flags);
    }

    #[inline]
    unsafe fn update_descriptor_set_with_template(
        &self,
        descriptor_set: DescriptorSet,
        descriptor_update_template: DescriptorUpdateTemplate,
        data: *const c_void,
    ) {
        let __result = (self.commands().update_descriptor_set_with_template)(
            self.handle(),
            descriptor_set,
            descriptor_update_template,
            data,
        );
    }
}

impl<C: DeviceV1_0 + ?Sized> DeviceV1_1 for C {}

pub trait EntryV1_2: EntryV1_1 {}

impl<C: EntryV1_0 + ?Sized> EntryV1_2 for C {}

pub trait InstanceV1_2: InstanceV1_1 {}

impl<C: InstanceV1_0 + ?Sized> InstanceV1_2 for C {}

pub trait DeviceV1_2: DeviceV1_1 {
    #[inline]
    unsafe fn cmd_begin_render_pass2(
        &self,
        command_buffer: CommandBuffer,
        render_pass_begin: &RenderPassBeginInfo,
        subpass_begin_info: &SubpassBeginInfo,
    ) {
        let __result = (self.commands().cmd_begin_render_pass2)(
            command_buffer,
            render_pass_begin,
            subpass_begin_info,
        );
    }

    #[inline]
    unsafe fn cmd_draw_indexed_indirect_count(
        &self,
        command_buffer: CommandBuffer,
        buffer: Buffer,
        offset: DeviceSize,
        count_buffer: Buffer,
        count_buffer_offset: DeviceSize,
        max_draw_count: u32,
        stride: u32,
    ) {
        let __result = (self.commands().cmd_draw_indexed_indirect_count)(
            command_buffer,
            buffer,
            offset,
            count_buffer,
            count_buffer_offset,
            max_draw_count,
            stride,
        );
    }

    #[inline]
    unsafe fn cmd_draw_indirect_count(
        &self,
        command_buffer: CommandBuffer,
        buffer: Buffer,
        offset: DeviceSize,
        count_buffer: Buffer,
        count_buffer_offset: DeviceSize,
        max_draw_count: u32,
        stride: u32,
    ) {
        let __result = (self.commands().cmd_draw_indirect_count)(
            command_buffer,
            buffer,
            offset,
            count_buffer,
            count_buffer_offset,
            max_draw_count,
            stride,
        );
    }

    #[inline]
    unsafe fn cmd_end_render_pass2(
        &self,
        command_buffer: CommandBuffer,
        subpass_end_info: &SubpassEndInfo,
    ) {
        let __result = (self.commands().cmd_end_render_pass2)(command_buffer, subpass_end_info);
    }

    #[inline]
    unsafe fn cmd_next_subpass2(
        &self,
        command_buffer: CommandBuffer,
        subpass_begin_info: &SubpassBeginInfo,
        subpass_end_info: &SubpassEndInfo,
    ) {
        let __result = (self.commands().cmd_next_subpass2)(
            command_buffer,
            subpass_begin_info,
            subpass_end_info,
        );
    }

    #[inline]
    unsafe fn create_render_pass2(
        &self,
        create_info: &RenderPassCreateInfo2,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<RenderPass> {
        let mut render_pass = MaybeUninit::<RenderPass>::uninit();

        let __result = (self.commands().create_render_pass2)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            render_pass.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(render_pass.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn get_buffer_device_address(&self, info: &BufferDeviceAddressInfo) -> DeviceAddress {
        let __result = (self.commands().get_buffer_device_address)(self.handle(), info);

        __result
    }

    #[inline]
    unsafe fn get_buffer_opaque_capture_address(&self, info: &BufferDeviceAddressInfo) -> u64 {
        let __result = (self.commands().get_buffer_opaque_capture_address)(self.handle(), info);

        __result
    }

    #[inline]
    unsafe fn get_device_memory_opaque_capture_address(
        &self,
        info: &DeviceMemoryOpaqueCaptureAddressInfo,
    ) -> u64 {
        let __result =
            (self.commands().get_device_memory_opaque_capture_address)(self.handle(), info);

        __result
    }

    #[inline]
    unsafe fn get_semaphore_counter_value(&self, semaphore: Semaphore) -> crate::VkResult<u64> {
        let mut value = MaybeUninit::<u64>::uninit();

        let __result = (self.commands().get_semaphore_counter_value)(
            self.handle(),
            semaphore,
            value.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(value.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn reset_query_pool(&self, query_pool: QueryPool, first_query: u32, query_count: u32) {
        let __result =
            (self.commands().reset_query_pool)(self.handle(), query_pool, first_query, query_count);
    }

    #[inline]
    unsafe fn signal_semaphore(&self, signal_info: &SemaphoreSignalInfo) -> crate::VkResult<()> {
        let __result = (self.commands().signal_semaphore)(self.handle(), signal_info);

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

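    /// Waits until the timeline semaphores named in `wait_info` reach the
    /// paired counter values, or until `timeout` nanoseconds elapse. A
    /// minimal sketch waiting for one semaphore to reach the value 1
    /// (hypothetical handles, using the crate's builder types):
    ///
    /// ```no_run
    /// # use vulkanalia::prelude::v1_2::*;
    /// # unsafe fn sketch(device: &Device, semaphore: vk::Semaphore) -> vulkanalia::VkResult<()> {
    /// let semaphores = &[semaphore];
    /// let values = &[1u64];
    /// let wait_info = vk::SemaphoreWaitInfo::builder()
    ///     .semaphores(semaphores)
    ///     .values(values);
    /// device.wait_semaphores(&wait_info, u64::MAX)?;
    /// # Ok(())
    /// # }
    /// ```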
    #[inline]
    unsafe fn wait_semaphores(
        &self,
        wait_info: &SemaphoreWaitInfo,
        timeout: u64,
    ) -> crate::VkResult<SuccessCode> {
        let __result = (self.commands().wait_semaphores)(self.handle(), wait_info, timeout);

        if __result >= Result::SUCCESS {
            Ok(__result.into())
        } else {
            Err(__result.into())
        }
    }
}

impl<C: DeviceV1_0 + ?Sized> DeviceV1_2 for C {}

pub trait EntryV1_3: EntryV1_2 {}

impl<C: EntryV1_0 + ?Sized> EntryV1_3 for C {}

pub trait InstanceV1_3: InstanceV1_2 {
    #[inline]
    unsafe fn get_physical_device_tool_properties(
        &self,
        physical_device: PhysicalDevice,
    ) -> crate::VkResult<Vec<PhysicalDeviceToolProperties>> {
        let mut tool_count = 0;

        (self.commands().get_physical_device_tool_properties)(
            physical_device,
            &mut tool_count,
            ptr::null_mut(),
        );

        let mut tool_properties = Vec::with_capacity(tool_count as usize);

        let __result = (self.commands().get_physical_device_tool_properties)(
            physical_device,
            &mut tool_count,
            tool_properties.as_mut_ptr(),
        );

        debug_assert!(tool_properties.capacity() >= tool_count as usize);
        tool_properties.set_len(tool_count as usize);

        if __result == Result::SUCCESS {
            Ok(tool_properties)
        } else {
            Err(__result.into())
        }
    }
}

impl<C: InstanceV1_0 + ?Sized> InstanceV1_3 for C {}

pub trait DeviceV1_3: DeviceV1_2 {
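    /// Begins dynamic rendering (rendering without render pass and
    /// framebuffer objects); `rendering_info` names the attachments and
    /// render area directly, and the pass is ended with
    /// `cmd_end_rendering`. A sketch with one color attachment
    /// (hypothetical handles, assuming the crate's usual builder API):
    ///
    /// ```no_run
    /// # use vulkanalia::prelude::v1_3::*;
    /// # unsafe fn sketch(device: &Device, cb: vk::CommandBuffer, view: vk::ImageView, area: vk::Rect2D) {
    /// let color_attachment = vk::RenderingAttachmentInfo::builder()
    ///     .image_view(view)
    ///     .image_layout(vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL)
    ///     .load_op(vk::AttachmentLoadOp::CLEAR)
    ///     .store_op(vk::AttachmentStoreOp::STORE);
    /// let color_attachments = &[color_attachment];
    /// let rendering_info = vk::RenderingInfo::builder()
    ///     .render_area(area)
    ///     .layer_count(1)
    ///     .color_attachments(color_attachments);
    /// device.cmd_begin_rendering(cb, &rendering_info);
    /// // ... record draws ...
    /// device.cmd_end_rendering(cb);
    /// # }
    /// ```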
    #[inline]
    unsafe fn cmd_begin_rendering(
        &self,
        command_buffer: CommandBuffer,
        rendering_info: &RenderingInfo,
    ) {
        let __result = (self.commands().cmd_begin_rendering)(command_buffer, rendering_info);
    }

    #[inline]
    unsafe fn cmd_bind_vertex_buffers2(
        &self,
        command_buffer: CommandBuffer,
        first_binding: u32,
        buffers: &[Buffer],
        offsets: &[DeviceSize],
        sizes: &[DeviceSize],
        strides: &[DeviceSize],
    ) {
        let __result = (self.commands().cmd_bind_vertex_buffers2)(
            command_buffer,
            first_binding,
            buffers.len() as u32,
            buffers.as_ptr(),
            offsets.as_ptr(),
            sizes.as_ptr(),
            strides.as_ptr(),
        );
    }

    #[inline]
    unsafe fn cmd_blit_image2(
        &self,
        command_buffer: CommandBuffer,
        blit_image_info: &BlitImageInfo2,
    ) {
        let __result = (self.commands().cmd_blit_image2)(command_buffer, blit_image_info);
    }

    #[inline]
    unsafe fn cmd_copy_buffer2(
        &self,
        command_buffer: CommandBuffer,
        copy_buffer_info: &CopyBufferInfo2,
    ) {
        let __result = (self.commands().cmd_copy_buffer2)(command_buffer, copy_buffer_info);
    }

    #[inline]
    unsafe fn cmd_copy_buffer_to_image2(
        &self,
        command_buffer: CommandBuffer,
        copy_buffer_to_image_info: &CopyBufferToImageInfo2,
    ) {
        let __result =
            (self.commands().cmd_copy_buffer_to_image2)(command_buffer, copy_buffer_to_image_info);
    }

    #[inline]
    unsafe fn cmd_copy_image2(
        &self,
        command_buffer: CommandBuffer,
        copy_image_info: &CopyImageInfo2,
    ) {
        let __result = (self.commands().cmd_copy_image2)(command_buffer, copy_image_info);
    }

    #[inline]
    unsafe fn cmd_copy_image_to_buffer2(
        &self,
        command_buffer: CommandBuffer,
        copy_image_to_buffer_info: &CopyImageToBufferInfo2,
    ) {
        let __result =
            (self.commands().cmd_copy_image_to_buffer2)(command_buffer, copy_image_to_buffer_info);
    }

    #[inline]
    unsafe fn cmd_end_rendering(&self, command_buffer: CommandBuffer) {
        let __result = (self.commands().cmd_end_rendering)(command_buffer);
    }

    #[inline]
    unsafe fn cmd_pipeline_barrier2(
        &self,
        command_buffer: CommandBuffer,
        dependency_info: &DependencyInfo,
    ) {
        let __result = (self.commands().cmd_pipeline_barrier2)(command_buffer, dependency_info);
    }

    #[inline]
    unsafe fn cmd_reset_event2(
        &self,
        command_buffer: CommandBuffer,
        event: Event,
        stage_mask: PipelineStageFlags2,
    ) {
        let __result = (self.commands().cmd_reset_event2)(command_buffer, event, stage_mask);
    }

    #[inline]
    unsafe fn cmd_resolve_image2(
        &self,
        command_buffer: CommandBuffer,
        resolve_image_info: &ResolveImageInfo2,
    ) {
        let __result = (self.commands().cmd_resolve_image2)(command_buffer, resolve_image_info);
    }

    #[inline]
    unsafe fn cmd_set_cull_mode(&self, command_buffer: CommandBuffer, cull_mode: CullModeFlags) {
        let __result = (self.commands().cmd_set_cull_mode)(command_buffer, cull_mode);
    }

    #[inline]
    unsafe fn cmd_set_depth_bias_enable(
        &self,
        command_buffer: CommandBuffer,
        depth_bias_enable: bool,
    ) {
        let __result = (self.commands().cmd_set_depth_bias_enable)(
            command_buffer,
            depth_bias_enable as Bool32,
        );
    }

    #[inline]
    unsafe fn cmd_set_depth_bounds_test_enable(
        &self,
        command_buffer: CommandBuffer,
        depth_bounds_test_enable: bool,
    ) {
        let __result = (self.commands().cmd_set_depth_bounds_test_enable)(
            command_buffer,
            depth_bounds_test_enable as Bool32,
        );
    }

    #[inline]
    unsafe fn cmd_set_depth_compare_op(
        &self,
        command_buffer: CommandBuffer,
        depth_compare_op: CompareOp,
    ) {
        let __result = (self.commands().cmd_set_depth_compare_op)(command_buffer, depth_compare_op);
    }

    #[inline]
    unsafe fn cmd_set_depth_test_enable(
        &self,
        command_buffer: CommandBuffer,
        depth_test_enable: bool,
    ) {
        let __result = (self.commands().cmd_set_depth_test_enable)(
            command_buffer,
            depth_test_enable as Bool32,
        );
    }

    #[inline]
    unsafe fn cmd_set_depth_write_enable(
        &self,
        command_buffer: CommandBuffer,
        depth_write_enable: bool,
    ) {
        let __result = (self.commands().cmd_set_depth_write_enable)(
            command_buffer,
            depth_write_enable as Bool32,
        );
    }

    #[inline]
    unsafe fn cmd_set_event2(
        &self,
        command_buffer: CommandBuffer,
        event: Event,
        dependency_info: &DependencyInfo,
    ) {
        let __result = (self.commands().cmd_set_event2)(command_buffer, event, dependency_info);
    }

    #[inline]
    unsafe fn cmd_set_front_face(&self, command_buffer: CommandBuffer, front_face: FrontFace) {
        let __result = (self.commands().cmd_set_front_face)(command_buffer, front_face);
    }

    #[inline]
    unsafe fn cmd_set_primitive_restart_enable(
        &self,
        command_buffer: CommandBuffer,
        primitive_restart_enable: bool,
    ) {
        let __result = (self.commands().cmd_set_primitive_restart_enable)(
            command_buffer,
            primitive_restart_enable as Bool32,
        );
    }

    #[inline]
    unsafe fn cmd_set_primitive_topology(
        &self,
        command_buffer: CommandBuffer,
        primitive_topology: PrimitiveTopology,
    ) {
        let __result =
            (self.commands().cmd_set_primitive_topology)(command_buffer, primitive_topology);
    }

    #[inline]
    unsafe fn cmd_set_rasterizer_discard_enable(
        &self,
        command_buffer: CommandBuffer,
        rasterizer_discard_enable: bool,
    ) {
        let __result = (self.commands().cmd_set_rasterizer_discard_enable)(
            command_buffer,
            rasterizer_discard_enable as Bool32,
        );
    }

    #[inline]
    unsafe fn cmd_set_scissor_with_count(
        &self,
        command_buffer: CommandBuffer,
        scissors: &[impl Cast<Target = Rect2D>],
    ) {
        let __result = (self.commands().cmd_set_scissor_with_count)(
            command_buffer,
            scissors.len() as u32,
            scissors.as_ptr().cast(),
        );
    }

    #[inline]
    unsafe fn cmd_set_stencil_op(
        &self,
        command_buffer: CommandBuffer,
        face_mask: StencilFaceFlags,
        fail_op: StencilOp,
        pass_op: StencilOp,
        depth_fail_op: StencilOp,
        compare_op: CompareOp,
    ) {
        let __result = (self.commands().cmd_set_stencil_op)(
            command_buffer,
            face_mask,
            fail_op,
            pass_op,
            depth_fail_op,
            compare_op,
        );
    }

    #[inline]
    unsafe fn cmd_set_stencil_test_enable(
        &self,
        command_buffer: CommandBuffer,
        stencil_test_enable: bool,
    ) {
        let __result = (self.commands().cmd_set_stencil_test_enable)(
            command_buffer,
            stencil_test_enable as Bool32,
        );
    }

    #[inline]
    unsafe fn cmd_set_viewport_with_count(
        &self,
        command_buffer: CommandBuffer,
        viewports: &[impl Cast<Target = Viewport>],
    ) {
        let __result = (self.commands().cmd_set_viewport_with_count)(
            command_buffer,
            viewports.len() as u32,
            viewports.as_ptr().cast(),
        );
    }

    #[inline]
    unsafe fn cmd_wait_events2(
        &self,
        command_buffer: CommandBuffer,
        events: &[Event],
        dependency_infos: &[impl Cast<Target = DependencyInfo>],
    ) {
        let __result = (self.commands().cmd_wait_events2)(
            command_buffer,
            events.len() as u32,
            events.as_ptr(),
            dependency_infos.as_ptr().cast(),
        );
    }

    #[inline]
    unsafe fn cmd_write_timestamp2(
        &self,
        command_buffer: CommandBuffer,
        stage: PipelineStageFlags2,
        query_pool: QueryPool,
        query: u32,
    ) {
        let __result =
            (self.commands().cmd_write_timestamp2)(command_buffer, stage, query_pool, query);
    }

    #[inline]
    unsafe fn create_private_data_slot(
        &self,
        create_info: &PrivateDataSlotCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<PrivateDataSlot> {
        let mut private_data_slot = MaybeUninit::<PrivateDataSlot>::uninit();

        let __result = (self.commands().create_private_data_slot)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            private_data_slot.as_mut_ptr(),
        );

        if __result == Result::SUCCESS {
            Ok(private_data_slot.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn destroy_private_data_slot(
        &self,
        private_data_slot: PrivateDataSlot,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_private_data_slot)(
            self.handle(),
            private_data_slot,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    #[inline]
    unsafe fn get_device_buffer_memory_requirements(
        &self,
        info: &DeviceBufferMemoryRequirements,
        memory_requirements: &mut MemoryRequirements2,
    ) {
        let __result = (self.commands().get_device_buffer_memory_requirements)(
            self.handle(),
            info,
            memory_requirements,
        );
    }

    #[inline]
    unsafe fn get_device_image_memory_requirements(
        &self,
        info: &DeviceImageMemoryRequirements,
        memory_requirements: &mut MemoryRequirements2,
    ) {
        let __result = (self.commands().get_device_image_memory_requirements)(
            self.handle(),
            info,
            memory_requirements,
        );
    }

    #[inline]
    unsafe fn get_device_image_sparse_memory_requirements(
        &self,
        info: &DeviceImageMemoryRequirements,
    ) -> Vec<SparseImageMemoryRequirements2> {
        let mut sparse_memory_requirement_count = 0;

        (self.commands().get_device_image_sparse_memory_requirements)(
            self.handle(),
            info,
            &mut sparse_memory_requirement_count,
            ptr::null_mut(),
        );

        let mut sparse_memory_requirements =
            Vec::with_capacity(sparse_memory_requirement_count as usize);

        let __result = (self.commands().get_device_image_sparse_memory_requirements)(
            self.handle(),
            info,
            &mut sparse_memory_requirement_count,
            sparse_memory_requirements.as_mut_ptr(),
        );

        debug_assert!(
            sparse_memory_requirements.capacity() >= sparse_memory_requirement_count as usize
        );
        sparse_memory_requirements.set_len(sparse_memory_requirement_count as usize);

        sparse_memory_requirements
    }

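    /// Reads the `u64` previously stored for `object_handle` in
    /// `private_data_slot` with `set_private_data`, or `0` if nothing has
    /// been stored. `object_handle` is the raw handle value of the object
    /// identified by `object_type`, converted to `u64`.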
    #[inline]
    unsafe fn get_private_data(
        &self,
        object_type: ObjectType,
        object_handle: u64,
        private_data_slot: PrivateDataSlot,
    ) -> u64 {
        let mut data = MaybeUninit::<u64>::uninit();

        let __result = (self.commands().get_private_data)(
            self.handle(),
            object_type,
            object_handle,
            private_data_slot,
            data.as_mut_ptr(),
        );

        data.assume_init()
    }

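    /// Submits command buffers using the `synchronization2` submission
    /// API, where every command buffer and semaphore is wrapped in its
    /// own info struct. A minimal sketch with hypothetical handles:
    ///
    /// ```no_run
    /// # use vulkanalia::prelude::v1_3::*;
    /// # unsafe fn sketch(device: &Device, queue: vk::Queue, cb: vk::CommandBuffer) -> vulkanalia::VkResult<()> {
    /// let cb_info = vk::CommandBufferSubmitInfo::builder().command_buffer(cb);
    /// let cb_infos = &[cb_info];
    /// let submit = vk::SubmitInfo2::builder().command_buffer_infos(cb_infos);
    /// device.queue_submit2(queue, &[submit], vk::Fence::null())?;
    /// # Ok(())
    /// # }
    /// ```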
    #[inline]
    unsafe fn queue_submit2(
        &self,
        queue: Queue,
        submits: &[impl Cast<Target = SubmitInfo2>],
        fence: Fence,
    ) -> crate::VkResult<()> {
        let __result = (self.commands().queue_submit2)(
            queue,
            submits.len() as u32,
            submits.as_ptr().cast(),
            fence,
        );

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn set_private_data(
        &self,
        object_type: ObjectType,
        object_handle: u64,
        private_data_slot: PrivateDataSlot,
        data: u64,
    ) -> crate::VkResult<()> {
        let __result = (self.commands().set_private_data)(
            self.handle(),
            object_type,
            object_handle,
            private_data_slot,
            data,
        );

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }
}

impl<C: DeviceV1_0 + ?Sized> DeviceV1_3 for C {}

pub trait EntryV1_4: EntryV1_3 {}

impl<C: EntryV1_0 + ?Sized> EntryV1_4 for C {}

pub trait InstanceV1_4: InstanceV1_3 {}

impl<C: InstanceV1_0 + ?Sized> InstanceV1_4 for C {}

pub trait DeviceV1_4: DeviceV1_3 {
    #[inline]
    unsafe fn cmd_bind_descriptor_sets2(
        &self,
        command_buffer: CommandBuffer,
        bind_descriptor_sets_info: &BindDescriptorSetsInfo,
    ) {
        let __result =
            (self.commands().cmd_bind_descriptor_sets2)(command_buffer, bind_descriptor_sets_info);
    }

    #[inline]
    unsafe fn cmd_bind_index_buffer2(
        &self,
        command_buffer: CommandBuffer,
        buffer: Buffer,
        offset: DeviceSize,
        size: DeviceSize,
        index_type: IndexType,
    ) {
        let __result = (self.commands().cmd_bind_index_buffer2)(
            command_buffer,
            buffer,
            offset,
            size,
            index_type,
        );
    }

    #[inline]
    unsafe fn cmd_push_constants2(
        &self,
        command_buffer: CommandBuffer,
        push_constants_info: &PushConstantsInfo,
    ) {
        let __result = (self.commands().cmd_push_constants2)(command_buffer, push_constants_info);
    }

    #[inline]
    unsafe fn cmd_push_descriptor_set(
        &self,
        command_buffer: CommandBuffer,
        pipeline_bind_point: PipelineBindPoint,
        layout: PipelineLayout,
        set: u32,
        descriptor_writes: &[impl Cast<Target = WriteDescriptorSet>],
    ) {
        let __result = (self.commands().cmd_push_descriptor_set)(
            command_buffer,
            pipeline_bind_point,
            layout,
            set,
            descriptor_writes.len() as u32,
            descriptor_writes.as_ptr().cast(),
        );
    }

    #[inline]
    unsafe fn cmd_push_descriptor_set2(
        &self,
        command_buffer: CommandBuffer,
        push_descriptor_set_info: &PushDescriptorSetInfo,
    ) {
        let __result =
            (self.commands().cmd_push_descriptor_set2)(command_buffer, push_descriptor_set_info);
    }

    #[inline]
    unsafe fn cmd_push_descriptor_set_with_template(
        &self,
        command_buffer: CommandBuffer,
        descriptor_update_template: DescriptorUpdateTemplate,
        layout: PipelineLayout,
        set: u32,
        data: *const c_void,
    ) {
        let __result = (self.commands().cmd_push_descriptor_set_with_template)(
            command_buffer,
            descriptor_update_template,
            layout,
            set,
            data,
        );
    }

    #[inline]
    unsafe fn cmd_push_descriptor_set_with_template2(
        &self,
        command_buffer: CommandBuffer,
        push_descriptor_set_with_template_info: &PushDescriptorSetWithTemplateInfo,
    ) {
        let __result = (self.commands().cmd_push_descriptor_set_with_template2)(
            command_buffer,
            push_descriptor_set_with_template_info,
        );
    }

    #[inline]
    unsafe fn cmd_set_line_stipple(
        &self,
        command_buffer: CommandBuffer,
        line_stipple_factor: u32,
        line_stipple_pattern: u16,
    ) {
        let __result = (self.commands().cmd_set_line_stipple)(
            command_buffer,
            line_stipple_factor,
            line_stipple_pattern,
        );
    }

    #[inline]
    unsafe fn cmd_set_rendering_attachment_locations(
        &self,
        command_buffer: CommandBuffer,
        location_info: &RenderingAttachmentLocationInfo,
    ) {
        let __result =
            (self.commands().cmd_set_rendering_attachment_locations)(command_buffer, location_info);
    }

    #[inline]
    unsafe fn cmd_set_rendering_input_attachment_indices(
        &self,
        command_buffer: CommandBuffer,
        input_attachment_index_info: &RenderingInputAttachmentIndexInfo,
    ) {
        let __result = (self.commands().cmd_set_rendering_input_attachment_indices)(
            command_buffer,
            input_attachment_index_info,
        );
    }

    #[inline]
    unsafe fn copy_image_to_image(
        &self,
        copy_image_to_image_info: &CopyImageToImageInfo,
    ) -> crate::VkResult<()> {
        let __result =
            (self.commands().copy_image_to_image)(self.handle(), copy_image_to_image_info);

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn copy_image_to_memory(
        &self,
        copy_image_to_memory_info: &CopyImageToMemoryInfo,
    ) -> crate::VkResult<()> {
        let __result =
            (self.commands().copy_image_to_memory)(self.handle(), copy_image_to_memory_info);

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn copy_memory_to_image(
        &self,
        copy_memory_to_image_info: &CopyMemoryToImageInfo,
    ) -> crate::VkResult<()> {
        let __result =
            (self.commands().copy_memory_to_image)(self.handle(), copy_memory_to_image_info);

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn get_device_image_subresource_layout(
        &self,
        info: &DeviceImageSubresourceInfo,
        layout: &mut SubresourceLayout2,
    ) {
        let __result =
            (self.commands().get_device_image_subresource_layout)(self.handle(), info, layout);
    }

    #[inline]
    unsafe fn get_image_subresource_layout2(
        &self,
        image: Image,
        subresource: &ImageSubresource2,
        layout: &mut SubresourceLayout2,
    ) {
        let __result = (self.commands().get_image_subresource_layout2)(
            self.handle(),
            image,
            subresource,
            layout,
        );
    }

    #[inline]
    unsafe fn get_rendering_area_granularity(
        &self,
        rendering_area_info: &RenderingAreaInfo,
    ) -> Extent2D {
        let mut granularity = MaybeUninit::<Extent2D>::uninit();

        let __result = (self.commands().get_rendering_area_granularity)(
            self.handle(),
            rendering_area_info,
            granularity.as_mut_ptr(),
        );

        granularity.assume_init()
    }

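    /// Maps device memory as described by `memory_map_info` and returns
    /// the host-visible pointer. This is the extensible, struct-based
    /// analogue of `map_memory` (promoted from `VK_KHR_map_memory2`);
    /// the mapping is released with `unmap_memory2`.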
    #[inline]
    unsafe fn map_memory2(&self, memory_map_info: &MemoryMapInfo) -> crate::VkResult<*mut c_void> {
        let mut data = MaybeUninit::<*mut c_void>::uninit();

        let __result =
            (self.commands().map_memory2)(self.handle(), memory_map_info, data.as_mut_ptr());

        if __result == Result::SUCCESS {
            Ok(data.assume_init())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn transition_image_layout(
        &self,
        transitions: &[impl Cast<Target = HostImageLayoutTransitionInfo>],
    ) -> crate::VkResult<()> {
        let __result = (self.commands().transition_image_layout)(
            self.handle(),
            transitions.len() as u32,
            transitions.as_ptr().cast(),
        );

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }

    #[inline]
    unsafe fn unmap_memory2(&self, memory_unmap_info: &MemoryUnmapInfo) -> crate::VkResult<()> {
        let __result = (self.commands().unmap_memory2)(self.handle(), memory_unmap_info);

        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }
}

impl<C: DeviceV1_0 + ?Sized> DeviceV1_4 for C {}