1#![allow(clippy::trivially_copy_pass_by_ref)]
2use crate::RawPtr;
3use crate::prelude::*;
4use crate::vk;
5use alloc::vec::Vec;
6use core::ffi;
7use core::mem;
8use core::ptr;
9
/// Owned wrapper around a Vulkan logical device: the raw `vk::Device` handle
/// together with the device-level function-pointer tables loaded for core
/// versions 1.0 through 1.3.
#[derive(Clone)]
pub struct Device {
    // Raw VkDevice handle all wrapped commands dispatch against.
    pub(crate) handle: vk::Device,

    // Function-pointer tables, one per core Vulkan version.
    pub(crate) device_fn_1_0: crate::DeviceFnV1_0,
    pub(crate) device_fn_1_1: crate::DeviceFnV1_1,
    pub(crate) device_fn_1_2: crate::DeviceFnV1_2,
    pub(crate) device_fn_1_3: crate::DeviceFnV1_3,
}
20
impl Device {
    /// Loads all device-level function pointers for `device` through the
    /// instance's `vkGetDeviceProcAddr`.
    ///
    /// # Safety
    /// `device` must be a valid `VkDevice` belonging to the instance that
    /// `instance_fn` was loaded from.
    pub unsafe fn load(instance_fn: &crate::InstanceFnV1_0, device: vk::Device) -> Self {
        unsafe {
            Self::load_with(
                // Cast the opaque PFN_vkVoidFunction returned by
                // vkGetDeviceProcAddr to the concrete pointer type each
                // per-command loader expects.
                |name| mem::transmute((instance_fn.get_device_proc_addr)(device, name.as_ptr())),
                device,
            )
        }
    }

    /// Like [`Self::load`], but resolves command names through a
    /// caller-supplied loader closure.
    ///
    /// # Safety
    /// `load_fn` must return, for each requested command name, either null or
    /// a function pointer with the correct signature, valid for `device`.
    pub unsafe fn load_with(
        mut load_fn: impl FnMut(&ffi::CStr) -> *const ffi::c_void,
        device: vk::Device,
    ) -> Self {
        Self::from_parts_1_3(
            device,
            crate::DeviceFnV1_0::load(&mut load_fn),
            crate::DeviceFnV1_1::load(&mut load_fn),
            crate::DeviceFnV1_2::load(&mut load_fn),
            crate::DeviceFnV1_3::load(&mut load_fn),
        )
    }

    /// Assembles a [`Device`] from an existing handle and pre-loaded
    /// function-pointer tables for Vulkan 1.0 through 1.3.
    #[inline]
    pub fn from_parts_1_3(
        handle: vk::Device,
        device_fn_1_0: crate::DeviceFnV1_0,
        device_fn_1_1: crate::DeviceFnV1_1,
        device_fn_1_2: crate::DeviceFnV1_2,
        device_fn_1_3: crate::DeviceFnV1_3,
    ) -> Self {
        Self {
            handle,

            device_fn_1_0,
            device_fn_1_1,
            device_fn_1_2,
            device_fn_1_3,
        }
    }

    /// Returns the underlying raw `vk::Device` handle.
    #[inline]
    pub fn handle(&self) -> vk::Device {
        self.handle
    }
}
67
/// Vulkan core 1.3 device commands.
impl Device {
    /// Returns the raw function-pointer table for Vulkan 1.3 device commands.
    #[inline]
    pub fn fp_v1_3(&self) -> &crate::DeviceFnV1_3 {
        &self.device_fn_1_3
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreatePrivateDataSlot.html>
    #[inline]
    pub unsafe fn create_private_data_slot(
        &self,
        create_info: &vk::PrivateDataSlotCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::PrivateDataSlot> {
        unsafe {
            // Output handle is only assumed initialized on VK_SUCCESS.
            let mut private_data_slot = mem::MaybeUninit::uninit();
            (self.device_fn_1_3.create_private_data_slot)(
                self.handle,
                create_info,
                allocation_callbacks.as_raw_ptr(),
                private_data_slot.as_mut_ptr(),
            )
            .assume_init_on_success(private_data_slot)
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPrivateDataSlot.html>
    #[inline]
    pub unsafe fn destroy_private_data_slot(
        &self,
        private_data_slot: vk::PrivateDataSlot,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_3.destroy_private_data_slot)(
                self.handle,
                private_data_slot,
                allocation_callbacks.as_raw_ptr(),
            )
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetPrivateData.html>
    #[inline]
    pub unsafe fn set_private_data<T: vk::Handle>(
        &self,
        object: T,
        private_data_slot: vk::PrivateDataSlot,
        data: u64,
    ) -> VkResult<()> {
        unsafe {
            (self.device_fn_1_3.set_private_data)(
                self.handle,
                T::TYPE,
                object.as_raw(),
                private_data_slot,
                data,
            )
            .result()
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPrivateData.html>
    #[inline]
    pub unsafe fn get_private_data<T: vk::Handle>(
        &self,
        object: T,
        private_data_slot: vk::PrivateDataSlot,
    ) -> u64 {
        unsafe {
            // vkGetPrivateData always writes the output, so reading the
            // MaybeUninit afterwards is sound.
            let mut data = mem::MaybeUninit::uninit();
            (self.device_fn_1_3.get_private_data)(
                self.handle,
                T::TYPE,
                object.as_raw(),
                private_data_slot,
                data.as_mut_ptr(),
            );
            data.assume_init()
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdPipelineBarrier2.html>
    #[inline]
    pub unsafe fn cmd_pipeline_barrier2(
        &self,
        command_buffer: vk::CommandBuffer,
        dependency_info: &vk::DependencyInfo<'_>,
    ) {
        unsafe { (self.device_fn_1_3.cmd_pipeline_barrier2)(command_buffer, dependency_info) }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetEvent2.html>
    #[inline]
    pub unsafe fn cmd_reset_event2(
        &self,
        command_buffer: vk::CommandBuffer,
        event: vk::Event,
        stage_mask: vk::PipelineStageFlags2,
    ) {
        unsafe { (self.device_fn_1_3.cmd_reset_event2)(command_buffer, event, stage_mask) }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent2.html>
    #[inline]
    pub unsafe fn cmd_set_event2(
        &self,
        command_buffer: vk::CommandBuffer,
        event: vk::Event,
        dependency_info: &vk::DependencyInfo<'_>,
    ) {
        unsafe { (self.device_fn_1_3.cmd_set_event2)(command_buffer, event, dependency_info) }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents2.html>
    ///
    /// Panics if `events` and `dependency_infos` differ in length, since the
    /// API pairs them element-wise.
    #[inline]
    pub unsafe fn cmd_wait_events2(
        &self,
        command_buffer: vk::CommandBuffer,
        events: &[vk::Event],
        dependency_infos: &[vk::DependencyInfo<'_>],
    ) {
        unsafe {
            assert_eq!(events.len(), dependency_infos.len());
            (self.device_fn_1_3.cmd_wait_events2)(
                command_buffer,
                events.len() as u32,
                events.as_ptr(),
                dependency_infos.as_ptr(),
            )
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWriteTimestamp2.html>
    #[inline]
    pub unsafe fn cmd_write_timestamp2(
        &self,
        command_buffer: vk::CommandBuffer,
        stage: vk::PipelineStageFlags2,
        query_pool: vk::QueryPool,
        query: u32,
    ) {
        unsafe {
            (self.device_fn_1_3.cmd_write_timestamp2)(command_buffer, stage, query_pool, query)
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkQueueSubmit2.html>
    #[inline]
    pub unsafe fn queue_submit2(
        &self,
        queue: vk::Queue,
        submits: &[vk::SubmitInfo2<'_>],
        fence: vk::Fence,
    ) -> VkResult<()> {
        unsafe {
            (self.device_fn_1_3.queue_submit2)(queue, submits.len() as u32, submits.as_ptr(), fence)
                .result()
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBuffer2.html>
    #[inline]
    pub unsafe fn cmd_copy_buffer2(
        &self,
        command_buffer: vk::CommandBuffer,
        copy_buffer_info: &vk::CopyBufferInfo2<'_>,
    ) {
        unsafe { (self.device_fn_1_3.cmd_copy_buffer2)(command_buffer, copy_buffer_info) }
    }
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImage2.html>
    #[inline]
    pub unsafe fn cmd_copy_image2(
        &self,
        command_buffer: vk::CommandBuffer,
        copy_image_info: &vk::CopyImageInfo2<'_>,
    ) {
        unsafe { (self.device_fn_1_3.cmd_copy_image2)(command_buffer, copy_image_info) }
    }
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyBufferToImage2.html>
    #[inline]
    pub unsafe fn cmd_copy_buffer_to_image2(
        &self,
        command_buffer: vk::CommandBuffer,
        copy_buffer_to_image_info: &vk::CopyBufferToImageInfo2<'_>,
    ) {
        unsafe {
            (self.device_fn_1_3.cmd_copy_buffer_to_image2)(
                command_buffer,
                copy_buffer_to_image_info,
            )
        }
    }
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyImageToBuffer2.html>
    #[inline]
    pub unsafe fn cmd_copy_image_to_buffer2(
        &self,
        command_buffer: vk::CommandBuffer,
        copy_image_to_buffer_info: &vk::CopyImageToBufferInfo2<'_>,
    ) {
        unsafe {
            (self.device_fn_1_3.cmd_copy_image_to_buffer2)(
                command_buffer,
                copy_image_to_buffer_info,
            )
        }
    }
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBlitImage2.html>
    #[inline]
    pub unsafe fn cmd_blit_image2(
        &self,
        command_buffer: vk::CommandBuffer,
        blit_image_info: &vk::BlitImageInfo2<'_>,
    ) {
        unsafe { (self.device_fn_1_3.cmd_blit_image2)(command_buffer, blit_image_info) }
    }
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResolveImage2.html>
    #[inline]
    pub unsafe fn cmd_resolve_image2(
        &self,
        command_buffer: vk::CommandBuffer,
        resolve_image_info: &vk::ResolveImageInfo2<'_>,
    ) {
        unsafe { (self.device_fn_1_3.cmd_resolve_image2)(command_buffer, resolve_image_info) }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRendering.html>
    #[inline]
    pub unsafe fn cmd_begin_rendering(
        &self,
        command_buffer: vk::CommandBuffer,
        rendering_info: &vk::RenderingInfo<'_>,
    ) {
        unsafe { (self.device_fn_1_3.cmd_begin_rendering)(command_buffer, rendering_info) }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRendering.html>
    #[inline]
    pub unsafe fn cmd_end_rendering(&self, command_buffer: vk::CommandBuffer) {
        unsafe { (self.device_fn_1_3.cmd_end_rendering)(command_buffer) }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCullMode.html>
    #[inline]
    pub unsafe fn cmd_set_cull_mode(
        &self,
        command_buffer: vk::CommandBuffer,
        cull_mode: vk::CullModeFlags,
    ) {
        unsafe { (self.device_fn_1_3.cmd_set_cull_mode)(command_buffer, cull_mode) }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetFrontFace.html>
    #[inline]
    pub unsafe fn cmd_set_front_face(
        &self,
        command_buffer: vk::CommandBuffer,
        front_face: vk::FrontFace,
    ) {
        unsafe { (self.device_fn_1_3.cmd_set_front_face)(command_buffer, front_face) }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveTopology.html>
    #[inline]
    pub unsafe fn cmd_set_primitive_topology(
        &self,
        command_buffer: vk::CommandBuffer,
        primitive_topology: vk::PrimitiveTopology,
    ) {
        unsafe {
            (self.device_fn_1_3.cmd_set_primitive_topology)(command_buffer, primitive_topology)
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetViewportWithCount.html>
    #[inline]
    pub unsafe fn cmd_set_viewport_with_count(
        &self,
        command_buffer: vk::CommandBuffer,
        viewports: &[vk::Viewport],
    ) {
        unsafe {
            (self.device_fn_1_3.cmd_set_viewport_with_count)(
                command_buffer,
                viewports.len() as u32,
                viewports.as_ptr(),
            )
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetScissorWithCount.html>
    #[inline]
    pub unsafe fn cmd_set_scissor_with_count(
        &self,
        command_buffer: vk::CommandBuffer,
        scissors: &[vk::Rect2D],
    ) {
        unsafe {
            (self.device_fn_1_3.cmd_set_scissor_with_count)(
                command_buffer,
                scissors.len() as u32,
                scissors.as_ptr(),
            )
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBindVertexBuffers2.html>
    ///
    /// `sizes`/`strides` are optional; `None` is forwarded as a null pointer.
    /// Panics if any provided slice's length differs from `buffers.len()`.
    #[inline]
    pub unsafe fn cmd_bind_vertex_buffers2(
        &self,
        command_buffer: vk::CommandBuffer,
        first_binding: u32,
        buffers: &[vk::Buffer],
        offsets: &[vk::DeviceSize],
        sizes: Option<&[vk::DeviceSize]>,
        strides: Option<&[vk::DeviceSize]>,
    ) {
        unsafe {
            assert_eq!(offsets.len(), buffers.len());
            let p_sizes = if let Some(sizes) = sizes {
                assert_eq!(sizes.len(), buffers.len());
                sizes.as_ptr()
            } else {
                ptr::null()
            };
            let p_strides = if let Some(strides) = strides {
                assert_eq!(strides.len(), buffers.len());
                strides.as_ptr()
            } else {
                ptr::null()
            };
            (self.device_fn_1_3.cmd_bind_vertex_buffers2)(
                command_buffer,
                first_binding,
                buffers.len() as u32,
                buffers.as_ptr(),
                offsets.as_ptr(),
                p_sizes,
                p_strides,
            )
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthTestEnable.html>
    #[inline]
    pub unsafe fn cmd_set_depth_test_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        depth_test_enable: bool,
    ) {
        unsafe {
            (self.device_fn_1_3.cmd_set_depth_test_enable)(command_buffer, depth_test_enable.into())
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthWriteEnable.html>
    #[inline]
    pub unsafe fn cmd_set_depth_write_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        depth_write_enable: bool,
    ) {
        unsafe {
            (self.device_fn_1_3.cmd_set_depth_write_enable)(
                command_buffer,
                depth_write_enable.into(),
            )
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthCompareOp.html>
    #[inline]
    pub unsafe fn cmd_set_depth_compare_op(
        &self,
        command_buffer: vk::CommandBuffer,
        depth_compare_op: vk::CompareOp,
    ) {
        unsafe { (self.device_fn_1_3.cmd_set_depth_compare_op)(command_buffer, depth_compare_op) }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBoundsTestEnable.html>
    #[inline]
    pub unsafe fn cmd_set_depth_bounds_test_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        depth_bounds_test_enable: bool,
    ) {
        unsafe {
            (self.device_fn_1_3.cmd_set_depth_bounds_test_enable)(
                command_buffer,
                depth_bounds_test_enable.into(),
            )
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilTestEnable.html>
    #[inline]
    pub unsafe fn cmd_set_stencil_test_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        stencil_test_enable: bool,
    ) {
        unsafe {
            (self.device_fn_1_3.cmd_set_stencil_test_enable)(
                command_buffer,
                stencil_test_enable.into(),
            )
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetStencilOp.html>
    #[inline]
    pub unsafe fn cmd_set_stencil_op(
        &self,
        command_buffer: vk::CommandBuffer,
        face_mask: vk::StencilFaceFlags,
        fail_op: vk::StencilOp,
        pass_op: vk::StencilOp,
        depth_fail_op: vk::StencilOp,
        compare_op: vk::CompareOp,
    ) {
        unsafe {
            (self.device_fn_1_3.cmd_set_stencil_op)(
                command_buffer,
                face_mask,
                fail_op,
                pass_op,
                depth_fail_op,
                compare_op,
            )
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetRasterizerDiscardEnable.html>
    #[inline]
    pub unsafe fn cmd_set_rasterizer_discard_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        rasterizer_discard_enable: bool,
    ) {
        unsafe {
            (self.device_fn_1_3.cmd_set_rasterizer_discard_enable)(
                command_buffer,
                rasterizer_discard_enable.into(),
            )
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDepthBiasEnable.html>
    #[inline]
    pub unsafe fn cmd_set_depth_bias_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        depth_bias_enable: bool,
    ) {
        unsafe {
            (self.device_fn_1_3.cmd_set_depth_bias_enable)(command_buffer, depth_bias_enable.into())
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetPrimitiveRestartEnable.html>
    #[inline]
    pub unsafe fn cmd_set_primitive_restart_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        primitive_restart_enable: bool,
    ) {
        unsafe {
            (self.device_fn_1_3.cmd_set_primitive_restart_enable)(
                command_buffer,
                primitive_restart_enable.into(),
            )
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceBufferMemoryRequirements.html>
    #[inline]
    pub unsafe fn get_device_buffer_memory_requirements(
        &self,
        memory_requirements: &vk::DeviceBufferMemoryRequirements<'_>,
        out: &mut vk::MemoryRequirements2<'_>,
    ) {
        unsafe {
            (self.device_fn_1_3.get_device_buffer_memory_requirements)(
                self.handle,
                memory_requirements,
                out,
            )
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageMemoryRequirements.html>
    #[inline]
    pub unsafe fn get_device_image_memory_requirements(
        &self,
        memory_requirements: &vk::DeviceImageMemoryRequirements<'_>,
        out: &mut vk::MemoryRequirements2<'_>,
    ) {
        unsafe {
            (self.device_fn_1_3.get_device_image_memory_requirements)(
                self.handle,
                memory_requirements,
                out,
            )
        }
    }

    /// Queries only the element count for
    /// [`Self::get_device_image_sparse_memory_requirements`], by passing a
    /// null output array pointer.
    #[inline]
    pub unsafe fn get_device_image_sparse_memory_requirements_len(
        &self,
        memory_requirements: &vk::DeviceImageMemoryRequirements<'_>,
    ) -> usize {
        unsafe {
            let mut count = mem::MaybeUninit::uninit();
            (self
                .device_fn_1_3
                .get_device_image_sparse_memory_requirements)(
                self.handle,
                memory_requirements,
                count.as_mut_ptr(),
                ptr::null_mut(),
            );
            count.assume_init() as usize
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceImageSparseMemoryRequirements.html>
    ///
    /// Size `out` with
    /// [`Self::get_device_image_sparse_memory_requirements_len`]; asserts the
    /// driver filled exactly `out.len()` entries.
    #[inline]
    pub unsafe fn get_device_image_sparse_memory_requirements(
        &self,
        memory_requirements: &vk::DeviceImageMemoryRequirements<'_>,
        out: &mut [vk::SparseImageMemoryRequirements2<'_>],
    ) {
        unsafe {
            let mut count = out.len() as u32;
            (self
                .device_fn_1_3
                .get_device_image_sparse_memory_requirements)(
                self.handle,
                memory_requirements,
                &mut count,
                out.as_mut_ptr(),
            );
            assert_eq!(count as usize, out.len());
        }
    }
}
619
/// Vulkan core 1.2 device commands.
impl Device {
    /// Returns the raw function-pointer table for Vulkan 1.2 device commands.
    #[inline]
    pub fn fp_v1_2(&self) -> &crate::DeviceFnV1_2 {
        &self.device_fn_1_2
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndirectCount.html>
    #[inline]
    pub unsafe fn cmd_draw_indirect_count(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        count_buffer: vk::Buffer,
        count_buffer_offset: vk::DeviceSize,
        max_draw_count: u32,
        stride: u32,
    ) {
        unsafe {
            (self.device_fn_1_2.cmd_draw_indirect_count)(
                command_buffer,
                buffer,
                offset,
                count_buffer,
                count_buffer_offset,
                max_draw_count,
                stride,
            );
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDrawIndexedIndirectCount.html>
    #[inline]
    pub unsafe fn cmd_draw_indexed_indirect_count(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        count_buffer: vk::Buffer,
        count_buffer_offset: vk::DeviceSize,
        max_draw_count: u32,
        stride: u32,
    ) {
        unsafe {
            (self.device_fn_1_2.cmd_draw_indexed_indirect_count)(
                command_buffer,
                buffer,
                offset,
                count_buffer,
                count_buffer_offset,
                max_draw_count,
                stride,
            );
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateRenderPass2.html>
    #[inline]
    pub unsafe fn create_render_pass2(
        &self,
        create_info: &vk::RenderPassCreateInfo2<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::RenderPass> {
        unsafe {
            // Output handle is only assumed initialized on VK_SUCCESS.
            let mut renderpass = mem::MaybeUninit::uninit();
            (self.device_fn_1_2.create_render_pass2)(
                self.handle(),
                create_info,
                allocation_callbacks.as_raw_ptr(),
                renderpass.as_mut_ptr(),
            )
            .assume_init_on_success(renderpass)
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdBeginRenderPass2.html>
    #[inline]
    pub unsafe fn cmd_begin_render_pass2(
        &self,
        command_buffer: vk::CommandBuffer,
        render_pass_begin_info: &vk::RenderPassBeginInfo<'_>,
        subpass_begin_info: &vk::SubpassBeginInfo<'_>,
    ) {
        unsafe {
            (self.device_fn_1_2.cmd_begin_render_pass2)(
                command_buffer,
                render_pass_begin_info,
                subpass_begin_info,
            );
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdNextSubpass2.html>
    #[inline]
    pub unsafe fn cmd_next_subpass2(
        &self,
        command_buffer: vk::CommandBuffer,
        subpass_begin_info: &vk::SubpassBeginInfo<'_>,
        subpass_end_info: &vk::SubpassEndInfo<'_>,
    ) {
        unsafe {
            (self.device_fn_1_2.cmd_next_subpass2)(
                command_buffer,
                subpass_begin_info,
                subpass_end_info,
            );
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdEndRenderPass2.html>
    #[inline]
    pub unsafe fn cmd_end_render_pass2(
        &self,
        command_buffer: vk::CommandBuffer,
        subpass_end_info: &vk::SubpassEndInfo<'_>,
    ) {
        unsafe {
            (self.device_fn_1_2.cmd_end_render_pass2)(command_buffer, subpass_end_info);
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetQueryPool.html>
    #[inline]
    pub unsafe fn reset_query_pool(
        &self,
        query_pool: vk::QueryPool,
        first_query: u32,
        query_count: u32,
    ) {
        unsafe {
            (self.device_fn_1_2.reset_query_pool)(
                self.handle(),
                query_pool,
                first_query,
                query_count,
            );
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetSemaphoreCounterValue.html>
    #[inline]
    pub unsafe fn get_semaphore_counter_value(&self, semaphore: vk::Semaphore) -> VkResult<u64> {
        unsafe {
            let mut value = mem::MaybeUninit::uninit();
            (self.device_fn_1_2.get_semaphore_counter_value)(
                self.handle(),
                semaphore,
                value.as_mut_ptr(),
            )
            .assume_init_on_success(value)
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitSemaphores.html>
    #[inline]
    pub unsafe fn wait_semaphores(
        &self,
        wait_info: &vk::SemaphoreWaitInfo<'_>,
        timeout: u64,
    ) -> VkResult<()> {
        unsafe { (self.device_fn_1_2.wait_semaphores)(self.handle(), wait_info, timeout).result() }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkSignalSemaphore.html>
    #[inline]
    pub unsafe fn signal_semaphore(
        &self,
        signal_info: &vk::SemaphoreSignalInfo<'_>,
    ) -> VkResult<()> {
        unsafe { (self.device_fn_1_2.signal_semaphore)(self.handle(), signal_info).result() }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferDeviceAddress.html>
    #[inline]
    pub unsafe fn get_buffer_device_address(
        &self,
        info: &vk::BufferDeviceAddressInfo<'_>,
    ) -> vk::DeviceAddress {
        unsafe { (self.device_fn_1_2.get_buffer_device_address)(self.handle(), info) }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferOpaqueCaptureAddress.html>
    #[inline]
    pub unsafe fn get_buffer_opaque_capture_address(
        &self,
        info: &vk::BufferDeviceAddressInfo<'_>,
    ) -> u64 {
        unsafe { (self.device_fn_1_2.get_buffer_opaque_capture_address)(self.handle(), info) }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceMemoryOpaqueCaptureAddress.html>
    #[inline]
    pub unsafe fn get_device_memory_opaque_capture_address(
        &self,
        info: &vk::DeviceMemoryOpaqueCaptureAddressInfo<'_>,
    ) -> u64 {
        unsafe {
            (self.device_fn_1_2.get_device_memory_opaque_capture_address)(self.handle(), info)
        }
    }
}
822
/// Vulkan core 1.1 device commands.
impl Device {
    /// Returns the raw function-pointer table for Vulkan 1.1 device commands.
    #[inline]
    pub fn fp_v1_1(&self) -> &crate::DeviceFnV1_1 {
        &self.device_fn_1_1
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindBufferMemory2.html>
    #[inline]
    pub unsafe fn bind_buffer_memory2(
        &self,
        bind_infos: &[vk::BindBufferMemoryInfo<'_>],
    ) -> VkResult<()> {
        unsafe {
            (self.device_fn_1_1.bind_buffer_memory2)(
                self.handle(),
                bind_infos.len() as _,
                bind_infos.as_ptr(),
            )
            .result()
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindImageMemory2.html>
    #[inline]
    pub unsafe fn bind_image_memory2(
        &self,
        bind_infos: &[vk::BindImageMemoryInfo<'_>],
    ) -> VkResult<()> {
        unsafe {
            (self.device_fn_1_1.bind_image_memory2)(
                self.handle(),
                bind_infos.len() as _,
                bind_infos.as_ptr(),
            )
            .result()
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceGroupPeerMemoryFeatures.html>
    #[inline]
    pub unsafe fn get_device_group_peer_memory_features(
        &self,
        heap_index: u32,
        local_device_index: u32,
        remote_device_index: u32,
    ) -> vk::PeerMemoryFeatureFlags {
        unsafe {
            let mut peer_memory_features = mem::MaybeUninit::uninit();
            (self.device_fn_1_1.get_device_group_peer_memory_features)(
                self.handle(),
                heap_index,
                local_device_index,
                remote_device_index,
                peer_memory_features.as_mut_ptr(),
            );
            peer_memory_features.assume_init()
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetDeviceMask.html>
    #[inline]
    pub unsafe fn cmd_set_device_mask(&self, command_buffer: vk::CommandBuffer, device_mask: u32) {
        unsafe {
            (self.device_fn_1_1.cmd_set_device_mask)(command_buffer, device_mask);
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdDispatchBase.html>
    #[inline]
    pub unsafe fn cmd_dispatch_base(
        &self,
        command_buffer: vk::CommandBuffer,
        base_group_x: u32,
        base_group_y: u32,
        base_group_z: u32,
        group_count_x: u32,
        group_count_y: u32,
        group_count_z: u32,
    ) {
        unsafe {
            (self.device_fn_1_1.cmd_dispatch_base)(
                command_buffer,
                base_group_x,
                base_group_y,
                base_group_z,
                group_count_x,
                group_count_y,
                group_count_z,
            );
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageMemoryRequirements2.html>
    #[inline]
    pub unsafe fn get_image_memory_requirements2(
        &self,
        info: &vk::ImageMemoryRequirementsInfo2<'_>,
        out: &mut vk::MemoryRequirements2<'_>,
    ) {
        unsafe {
            (self.device_fn_1_1.get_image_memory_requirements2)(self.handle(), info, out);
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetBufferMemoryRequirements2.html>
    #[inline]
    pub unsafe fn get_buffer_memory_requirements2(
        &self,
        info: &vk::BufferMemoryRequirementsInfo2<'_>,
        out: &mut vk::MemoryRequirements2<'_>,
    ) {
        unsafe {
            (self.device_fn_1_1.get_buffer_memory_requirements2)(self.handle(), info, out);
        }
    }

    /// Queries only the element count for
    /// [`Self::get_image_sparse_memory_requirements2`], by passing a null
    /// output array pointer.
    #[inline]
    pub unsafe fn get_image_sparse_memory_requirements2_len(
        &self,
        info: &vk::ImageSparseMemoryRequirementsInfo2<'_>,
    ) -> usize {
        unsafe {
            let mut count = mem::MaybeUninit::uninit();
            (self.device_fn_1_1.get_image_sparse_memory_requirements2)(
                self.handle(),
                info,
                count.as_mut_ptr(),
                ptr::null_mut(),
            );
            count.assume_init() as usize
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetImageSparseMemoryRequirements2.html>
    ///
    /// Size `out` with [`Self::get_image_sparse_memory_requirements2_len`];
    /// asserts the driver filled exactly `out.len()` entries.
    #[inline]
    pub unsafe fn get_image_sparse_memory_requirements2(
        &self,
        info: &vk::ImageSparseMemoryRequirementsInfo2<'_>,
        out: &mut [vk::SparseImageMemoryRequirements2<'_>],
    ) {
        unsafe {
            let mut count = out.len() as u32;
            (self.device_fn_1_1.get_image_sparse_memory_requirements2)(
                self.handle(),
                info,
                &mut count,
                out.as_mut_ptr(),
            );
            assert_eq!(count as usize, out.len());
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkTrimCommandPool.html>
    #[inline]
    pub unsafe fn trim_command_pool(
        &self,
        command_pool: vk::CommandPool,
        flags: vk::CommandPoolTrimFlags,
    ) {
        unsafe {
            (self.device_fn_1_1.trim_command_pool)(self.handle(), command_pool, flags);
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceQueue2.html>
    #[inline]
    pub unsafe fn get_device_queue2(&self, queue_info: &vk::DeviceQueueInfo2<'_>) -> vk::Queue {
        unsafe {
            let mut queue = mem::MaybeUninit::uninit();
            (self.device_fn_1_1.get_device_queue2)(self.handle(), queue_info, queue.as_mut_ptr());
            queue.assume_init()
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateSamplerYcbcrConversion.html>
    #[inline]
    pub unsafe fn create_sampler_ycbcr_conversion(
        &self,
        create_info: &vk::SamplerYcbcrConversionCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::SamplerYcbcrConversion> {
        unsafe {
            let mut ycbcr_conversion = mem::MaybeUninit::uninit();
            (self.device_fn_1_1.create_sampler_ycbcr_conversion)(
                self.handle(),
                create_info,
                allocation_callbacks.as_raw_ptr(),
                ycbcr_conversion.as_mut_ptr(),
            )
            .assume_init_on_success(ycbcr_conversion)
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySamplerYcbcrConversion.html>
    #[inline]
    pub unsafe fn destroy_sampler_ycbcr_conversion(
        &self,
        ycbcr_conversion: vk::SamplerYcbcrConversion,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_1.destroy_sampler_ycbcr_conversion)(
                self.handle(),
                ycbcr_conversion,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateDescriptorUpdateTemplate.html>
    #[inline]
    pub unsafe fn create_descriptor_update_template(
        &self,
        create_info: &vk::DescriptorUpdateTemplateCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::DescriptorUpdateTemplate> {
        unsafe {
            let mut descriptor_update_template = mem::MaybeUninit::uninit();
            (self.device_fn_1_1.create_descriptor_update_template)(
                self.handle(),
                create_info,
                allocation_callbacks.as_raw_ptr(),
                descriptor_update_template.as_mut_ptr(),
            )
            .assume_init_on_success(descriptor_update_template)
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDescriptorUpdateTemplate.html>
    #[inline]
    pub unsafe fn destroy_descriptor_update_template(
        &self,
        descriptor_update_template: vk::DescriptorUpdateTemplate,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_1.destroy_descriptor_update_template)(
                self.handle(),
                descriptor_update_template,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkUpdateDescriptorSetWithTemplate.html>
    ///
    /// # Safety
    /// `data` must point to memory laid out as described by the update
    /// template.
    #[inline]
    pub unsafe fn update_descriptor_set_with_template(
        &self,
        descriptor_set: vk::DescriptorSet,
        descriptor_update_template: vk::DescriptorUpdateTemplate,
        data: *const ffi::c_void,
    ) {
        unsafe {
            (self.device_fn_1_1.update_descriptor_set_with_template)(
                self.handle(),
                descriptor_set,
                descriptor_update_template,
                data,
            );
        }
    }

    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDescriptorSetLayoutSupport.html>
    #[inline]
    pub unsafe fn get_descriptor_set_layout_support(
        &self,
        create_info: &vk::DescriptorSetLayoutCreateInfo<'_>,
        out: &mut vk::DescriptorSetLayoutSupport<'_>,
    ) {
        unsafe {
            (self.device_fn_1_1.get_descriptor_set_layout_support)(self.handle(), create_info, out);
        }
    }
}
1102
1103impl Device {
    /// Returns the raw function-pointer table for Vulkan 1.0 device commands.
    #[inline]
    pub fn fp_v1_0(&self) -> &crate::DeviceFnV1_0 {
        &self.device_fn_1_0
    }
1109
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyDevice.html>
    #[inline]
    pub unsafe fn destroy_device(
        &self,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_0.destroy_device)(self.handle(), allocation_callbacks.as_raw_ptr());
        }
    }
1120
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroySampler.html>
    #[inline]
    pub unsafe fn destroy_sampler(
        &self,
        sampler: vk::Sampler,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_0.destroy_sampler)(
                self.handle(),
                sampler,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }
1136
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeMemory.html>
    #[inline]
    pub unsafe fn free_memory(
        &self,
        memory: vk::DeviceMemory,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_0.free_memory)(
                self.handle(),
                memory,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }
1152
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkFreeCommandBuffers.html>
    #[inline]
    pub unsafe fn free_command_buffers(
        &self,
        command_pool: vk::CommandPool,
        command_buffers: &[vk::CommandBuffer],
    ) {
        unsafe {
            (self.device_fn_1_0.free_command_buffers)(
                self.handle(),
                command_pool,
                command_buffers.len() as u32,
                command_buffers.as_ptr(),
            );
        }
    }
1169
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateEvent.html>
    #[inline]
    pub unsafe fn create_event(
        &self,
        create_info: &vk::EventCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::Event> {
        unsafe {
            // Output handle is only assumed initialized on VK_SUCCESS.
            let mut event = mem::MaybeUninit::uninit();
            (self.device_fn_1_0.create_event)(
                self.handle(),
                create_info,
                allocation_callbacks.as_raw_ptr(),
                event.as_mut_ptr(),
            )
            .assume_init_on_success(event)
        }
    }
1188
1189 #[inline]
1193 pub unsafe fn get_event_status(&self, event: vk::Event) -> VkResult<bool> {
1194 unsafe {
1195 let err_code = (self.device_fn_1_0.get_event_status)(self.handle(), event);
1196 match err_code {
1197 vk::Result::EVENT_SET => Ok(true),
1198 vk::Result::EVENT_RESET => Ok(false),
1199 _ => Err(err_code),
1200 }
1201 }
1202 }
1203
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetEvent.html>
    #[inline]
    pub unsafe fn set_event(&self, event: vk::Event) -> VkResult<()> {
        unsafe { (self.device_fn_1_0.set_event)(self.handle(), event).result() }
    }
1209
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkResetEvent.html>
    #[inline]
    pub unsafe fn reset_event(&self, event: vk::Event) -> VkResult<()> {
        unsafe { (self.device_fn_1_0.reset_event)(self.handle(), event).result() }
    }
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetEvent.html>
    #[inline]
    pub unsafe fn cmd_set_event(
        &self,
        command_buffer: vk::CommandBuffer,
        event: vk::Event,
        stage_mask: vk::PipelineStageFlags,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_set_event)(command_buffer, event, stage_mask);
        }
    }
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdResetEvent.html>
    #[inline]
    pub unsafe fn cmd_reset_event(
        &self,
        command_buffer: vk::CommandBuffer,
        event: vk::Event,
        stage_mask: vk::PipelineStageFlags,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_reset_event)(command_buffer, event, stage_mask);
        }
    }
1239
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdWaitEvents.html>
    #[inline]
    pub unsafe fn cmd_wait_events(
        &self,
        command_buffer: vk::CommandBuffer,
        events: &[vk::Event],
        src_stage_mask: vk::PipelineStageFlags,
        dst_stage_mask: vk::PipelineStageFlags,
        memory_barriers: &[vk::MemoryBarrier<'_>],
        buffer_memory_barriers: &[vk::BufferMemoryBarrier<'_>],
        image_memory_barriers: &[vk::ImageMemoryBarrier<'_>],
    ) {
        unsafe {
            // Each slice is forwarded as its own (count, pointer) pair.
            (self.device_fn_1_0.cmd_wait_events)(
                command_buffer,
                events.len() as _,
                events.as_ptr(),
                src_stage_mask,
                dst_stage_mask,
                memory_barriers.len() as _,
                memory_barriers.as_ptr(),
                buffer_memory_barriers.len() as _,
                buffer_memory_barriers.as_ptr(),
                image_memory_barriers.len() as _,
                image_memory_barriers.as_ptr(),
            );
        }
    }
1268
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFence.html>
    #[inline]
    pub unsafe fn destroy_fence(
        &self,
        fence: vk::Fence,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_0.destroy_fence)(
                self.handle(),
                fence,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }
1284
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyEvent.html>
    #[inline]
    pub unsafe fn destroy_event(
        &self,
        event: vk::Event,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_0.destroy_event)(
                self.handle(),
                event,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }
1300
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImage.html>
    #[inline]
    pub unsafe fn destroy_image(
        &self,
        image: vk::Image,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_0.destroy_image)(
                self.handle(),
                image,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }
1316
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyCommandPool.html>
    #[inline]
    pub unsafe fn destroy_command_pool(
        &self,
        pool: vk::CommandPool,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_0.destroy_command_pool)(
                self.handle(),
                pool,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }
1332
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyImageView.html>
    #[inline]
    pub unsafe fn destroy_image_view(
        &self,
        image_view: vk::ImageView,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_0.destroy_image_view)(
                self.handle(),
                image_view,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }
1348
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyRenderPass.html>
    #[inline]
    pub unsafe fn destroy_render_pass(
        &self,
        renderpass: vk::RenderPass,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_0.destroy_render_pass)(
                self.handle(),
                renderpass,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }
1364
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyFramebuffer.html>
    #[inline]
    pub unsafe fn destroy_framebuffer(
        &self,
        framebuffer: vk::Framebuffer,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_0.destroy_framebuffer)(
                self.handle(),
                framebuffer,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }
1380
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineLayout.html>
    #[inline]
    pub unsafe fn destroy_pipeline_layout(
        &self,
        pipeline_layout: vk::PipelineLayout,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_0.destroy_pipeline_layout)(
                self.handle(),
                pipeline_layout,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }
1396
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyPipelineCache.html>
    #[inline]
    pub unsafe fn destroy_pipeline_cache(
        &self,
        pipeline_cache: vk::PipelineCache,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_0.destroy_pipeline_cache)(
                self.handle(),
                pipeline_cache,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }
1412
    /// <https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyBuffer.html>
    #[inline]
    pub unsafe fn destroy_buffer(
        &self,
        buffer: vk::Buffer,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_0.destroy_buffer)(
                self.handle(),
                buffer,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }
1428
    /// Destroys `shader` through the loaded `vkDestroyShaderModule` (core 1.0) pointer.
    #[inline]
    pub unsafe fn destroy_shader_module(
        &self,
        shader: vk::ShaderModule,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_0.destroy_shader_module)(
                self.handle(),
                shader,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }
1444
    /// Destroys `pipeline` through the loaded `vkDestroyPipeline` (core 1.0) pointer.
    #[inline]
    pub unsafe fn destroy_pipeline(
        &self,
        pipeline: vk::Pipeline,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_0.destroy_pipeline)(
                self.handle(),
                pipeline,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }
1460
    /// Destroys `semaphore` through the loaded `vkDestroySemaphore` (core 1.0) pointer.
    #[inline]
    pub unsafe fn destroy_semaphore(
        &self,
        semaphore: vk::Semaphore,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_0.destroy_semaphore)(
                self.handle(),
                semaphore,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }
1476
    /// Destroys `pool` through the loaded `vkDestroyDescriptorPool` (core 1.0) pointer.
    #[inline]
    pub unsafe fn destroy_descriptor_pool(
        &self,
        pool: vk::DescriptorPool,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_0.destroy_descriptor_pool)(
                self.handle(),
                pool,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }
1492
    /// Destroys `pool` through the loaded `vkDestroyQueryPool` (core 1.0) pointer.
    #[inline]
    pub unsafe fn destroy_query_pool(
        &self,
        pool: vk::QueryPool,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_0.destroy_query_pool)(
                self.handle(),
                pool,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }
1508
    /// Destroys `layout` through the loaded `vkDestroyDescriptorSetLayout` (core 1.0) pointer.
    #[inline]
    pub unsafe fn destroy_descriptor_set_layout(
        &self,
        layout: vk::DescriptorSetLayout,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.device_fn_1_0.destroy_descriptor_set_layout)(
                self.handle(),
                layout,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }
1524
    /// Returns `descriptor_sets` to `pool` via `vkFreeDescriptorSets`; the slice length and
    /// base pointer are passed as the count/array pair expected by the C ABI.
    #[inline]
    pub unsafe fn free_descriptor_sets(
        &self,
        pool: vk::DescriptorPool,
        descriptor_sets: &[vk::DescriptorSet],
    ) -> VkResult<()> {
        unsafe {
            (self.device_fn_1_0.free_descriptor_sets)(
                self.handle(),
                pool,
                descriptor_sets.len() as u32,
                descriptor_sets.as_ptr(),
            )
            .result()
        }
    }
1542
    /// Applies write and copy descriptor updates via `vkUpdateDescriptorSets`; either slice
    /// may be empty (count 0 with a dangling-but-unread pointer is accepted by the API).
    #[inline]
    pub unsafe fn update_descriptor_sets(
        &self,
        descriptor_writes: &[vk::WriteDescriptorSet<'_>],
        descriptor_copies: &[vk::CopyDescriptorSet<'_>],
    ) {
        unsafe {
            (self.device_fn_1_0.update_descriptor_sets)(
                self.handle(),
                descriptor_writes.len() as u32,
                descriptor_writes.as_ptr(),
                descriptor_copies.len() as u32,
                descriptor_copies.as_ptr(),
            );
        }
    }
1560
    /// Creates a sampler via `vkCreateSampler`; the output handle is written into a
    /// `MaybeUninit` and only assumed initialized when the call returns `SUCCESS`.
    #[inline]
    pub unsafe fn create_sampler(
        &self,
        create_info: &vk::SamplerCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::Sampler> {
        unsafe {
            let mut sampler = mem::MaybeUninit::uninit();
            (self.device_fn_1_0.create_sampler)(
                self.handle(),
                create_info,
                allocation_callbacks.as_raw_ptr(),
                sampler.as_mut_ptr(),
            )
            .assume_init_on_success(sampler)
        }
    }
1579
    /// Records `vkCmdBlitImage` into `command_buffer`, blitting (scaling/filtering)
    /// `regions` from `src_image` to `dst_image`.
    #[inline]
    pub unsafe fn cmd_blit_image(
        &self,
        command_buffer: vk::CommandBuffer,
        src_image: vk::Image,
        src_image_layout: vk::ImageLayout,
        dst_image: vk::Image,
        dst_image_layout: vk::ImageLayout,
        regions: &[vk::ImageBlit],
        filter: vk::Filter,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_blit_image)(
                command_buffer,
                src_image,
                src_image_layout,
                dst_image,
                dst_image_layout,
                regions.len() as _,
                regions.as_ptr(),
                filter,
            );
        }
    }
1605
    /// Records `vkCmdResolveImage` into `command_buffer`, resolving multisampled
    /// `regions` from `src_image` into `dst_image`.
    #[inline]
    pub unsafe fn cmd_resolve_image(
        &self,
        command_buffer: vk::CommandBuffer,
        src_image: vk::Image,
        src_image_layout: vk::ImageLayout,
        dst_image: vk::Image,
        dst_image_layout: vk::ImageLayout,
        regions: &[vk::ImageResolve],
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_resolve_image)(
                command_buffer,
                src_image,
                src_image_layout,
                dst_image,
                dst_image_layout,
                regions.len() as u32,
                regions.as_ptr(),
            );
        }
    }
1629
    /// Records `vkCmdFillBuffer`, filling `size` bytes of `buffer` at `offset` with the
    /// repeated 32-bit word `data`.
    #[inline]
    pub unsafe fn cmd_fill_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        size: vk::DeviceSize,
        data: u32,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_fill_buffer)(command_buffer, buffer, offset, size, data);
        }
    }
1644
    /// Records `vkCmdUpdateBuffer`, writing the bytes of `data` into `buffer` at `offset`;
    /// the byte slice supplies both the size (as u64) and the untyped data pointer.
    #[inline]
    pub unsafe fn cmd_update_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        data: &[u8],
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_update_buffer)(
                command_buffer,
                buffer,
                offset,
                data.len() as u64,
                data.as_ptr() as _,
            );
        }
    }
1664
    /// Records `vkCmdCopyBuffer`, copying `regions` from `src_buffer` to `dst_buffer`.
    #[inline]
    pub unsafe fn cmd_copy_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        src_buffer: vk::Buffer,
        dst_buffer: vk::Buffer,
        regions: &[vk::BufferCopy],
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_copy_buffer)(
                command_buffer,
                src_buffer,
                dst_buffer,
                regions.len() as u32,
                regions.as_ptr(),
            );
        }
    }
1684
    /// Records `vkCmdCopyImageToBuffer`, copying `regions` of `src_image` into `dst_buffer`.
    #[inline]
    pub unsafe fn cmd_copy_image_to_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        src_image: vk::Image,
        src_image_layout: vk::ImageLayout,
        dst_buffer: vk::Buffer,
        regions: &[vk::BufferImageCopy],
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_copy_image_to_buffer)(
                command_buffer,
                src_image,
                src_image_layout,
                dst_buffer,
                regions.len() as u32,
                regions.as_ptr(),
            );
        }
    }
1706
    /// Records `vkCmdCopyBufferToImage`, copying `regions` of `src_buffer` into `dst_image`.
    #[inline]
    pub unsafe fn cmd_copy_buffer_to_image(
        &self,
        command_buffer: vk::CommandBuffer,
        src_buffer: vk::Buffer,
        dst_image: vk::Image,
        dst_image_layout: vk::ImageLayout,
        regions: &[vk::BufferImageCopy],
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_copy_buffer_to_image)(
                command_buffer,
                src_buffer,
                dst_image,
                dst_image_layout,
                regions.len() as u32,
                regions.as_ptr(),
            );
        }
    }
1728
    /// Records `vkCmdCopyImage`, copying `regions` from `src_image` to `dst_image`.
    #[inline]
    pub unsafe fn cmd_copy_image(
        &self,
        command_buffer: vk::CommandBuffer,
        src_image: vk::Image,
        src_image_layout: vk::ImageLayout,
        dst_image: vk::Image,
        dst_image_layout: vk::ImageLayout,
        regions: &[vk::ImageCopy],
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_copy_image)(
                command_buffer,
                src_image,
                src_image_layout,
                dst_image,
                dst_image_layout,
                regions.len() as u32,
                regions.as_ptr(),
            );
        }
    }
1752
    /// Allocates descriptor sets via `vkAllocateDescriptorSets`. The output `Vec` is sized
    /// from `allocate_info.descriptor_set_count` and its length is only set after the driver
    /// reports success (the driver writes exactly that many handles on success).
    #[inline]
    pub unsafe fn allocate_descriptor_sets(
        &self,
        allocate_info: &vk::DescriptorSetAllocateInfo<'_>,
    ) -> VkResult<Vec<vk::DescriptorSet>> {
        unsafe {
            let mut desc_set = Vec::with_capacity(allocate_info.descriptor_set_count as usize);
            (self.device_fn_1_0.allocate_descriptor_sets)(
                self.handle(),
                allocate_info,
                desc_set.as_mut_ptr(),
            )
            .set_vec_len_on_success(desc_set, allocate_info.descriptor_set_count as usize)
        }
    }
1769
    /// Creates a descriptor set layout via `vkCreateDescriptorSetLayout`; the handle is
    /// only assumed initialized when the call returns `SUCCESS`.
    #[inline]
    pub unsafe fn create_descriptor_set_layout(
        &self,
        create_info: &vk::DescriptorSetLayoutCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::DescriptorSetLayout> {
        unsafe {
            let mut layout = mem::MaybeUninit::uninit();
            (self.device_fn_1_0.create_descriptor_set_layout)(
                self.handle(),
                create_info,
                allocation_callbacks.as_raw_ptr(),
                layout.as_mut_ptr(),
            )
            .assume_init_on_success(layout)
        }
    }
1788
    /// Blocks until all queues of this device are idle (`vkDeviceWaitIdle`).
    #[inline]
    pub unsafe fn device_wait_idle(&self) -> VkResult<()> {
        unsafe { (self.device_fn_1_0.device_wait_idle)(self.handle()).result() }
    }
1794
    /// Creates a descriptor pool via `vkCreateDescriptorPool`; the handle is only assumed
    /// initialized when the call returns `SUCCESS`.
    #[inline]
    pub unsafe fn create_descriptor_pool(
        &self,
        create_info: &vk::DescriptorPoolCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::DescriptorPool> {
        unsafe {
            let mut pool = mem::MaybeUninit::uninit();
            (self.device_fn_1_0.create_descriptor_pool)(
                self.handle(),
                create_info,
                allocation_callbacks.as_raw_ptr(),
                pool.as_mut_ptr(),
            )
            .assume_init_on_success(pool)
        }
    }
1813
    /// Returns all descriptor sets in `pool` to the pool (`vkResetDescriptorPool`).
    #[inline]
    pub unsafe fn reset_descriptor_pool(
        &self,
        pool: vk::DescriptorPool,
        flags: vk::DescriptorPoolResetFlags,
    ) -> VkResult<()> {
        unsafe { (self.device_fn_1_0.reset_descriptor_pool)(self.handle(), pool, flags).result() }
    }
1823
    /// Resets `command_pool` and all command buffers allocated from it (`vkResetCommandPool`).
    #[inline]
    pub unsafe fn reset_command_pool(
        &self,
        command_pool: vk::CommandPool,
        flags: vk::CommandPoolResetFlags,
    ) -> VkResult<()> {
        unsafe {
            (self.device_fn_1_0.reset_command_pool)(self.handle(), command_pool, flags).result()
        }
    }
1835
    /// Resets a single command buffer (`vkResetCommandBuffer`); note the device handle is
    /// not passed — the command buffer is a dispatchable handle itself.
    #[inline]
    pub unsafe fn reset_command_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        flags: vk::CommandBufferResetFlags,
    ) -> VkResult<()> {
        unsafe { (self.device_fn_1_0.reset_command_buffer)(command_buffer, flags).result() }
    }
1845
    /// Resets every fence in `fences` to the unsignaled state (`vkResetFences`).
    #[inline]
    pub unsafe fn reset_fences(&self, fences: &[vk::Fence]) -> VkResult<()> {
        unsafe {
            (self.device_fn_1_0.reset_fences)(self.handle(), fences.len() as u32, fences.as_ptr())
                .result()
        }
    }
1854
    /// Records `vkCmdBindIndexBuffer`, binding `buffer` at `offset` with the given index type.
    #[inline]
    pub unsafe fn cmd_bind_index_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        index_type: vk::IndexType,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_bind_index_buffer)(command_buffer, buffer, offset, index_type);
        }
    }
1868
    /// Records `vkCmdClearColorImage`, clearing `ranges` of `image` to `clear_color_value`.
    #[inline]
    pub unsafe fn cmd_clear_color_image(
        &self,
        command_buffer: vk::CommandBuffer,
        image: vk::Image,
        image_layout: vk::ImageLayout,
        clear_color_value: &vk::ClearColorValue,
        ranges: &[vk::ImageSubresourceRange],
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_clear_color_image)(
                command_buffer,
                image,
                image_layout,
                clear_color_value,
                ranges.len() as u32,
                ranges.as_ptr(),
            );
        }
    }
1890
    /// Records `vkCmdClearDepthStencilImage`, clearing `ranges` of `image` to the given
    /// depth/stencil value.
    #[inline]
    pub unsafe fn cmd_clear_depth_stencil_image(
        &self,
        command_buffer: vk::CommandBuffer,
        image: vk::Image,
        image_layout: vk::ImageLayout,
        clear_depth_stencil_value: &vk::ClearDepthStencilValue,
        ranges: &[vk::ImageSubresourceRange],
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_clear_depth_stencil_image)(
                command_buffer,
                image,
                image_layout,
                clear_depth_stencil_value,
                ranges.len() as u32,
                ranges.as_ptr(),
            );
        }
    }
1912
    /// Records `vkCmdClearAttachments`, clearing `attachments` within `rects` of the
    /// current render pass instance.
    #[inline]
    pub unsafe fn cmd_clear_attachments(
        &self,
        command_buffer: vk::CommandBuffer,
        attachments: &[vk::ClearAttachment],
        rects: &[vk::ClearRect],
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_clear_attachments)(
                command_buffer,
                attachments.len() as u32,
                attachments.as_ptr(),
                rects.len() as u32,
                rects.as_ptr(),
            );
        }
    }
1931
    /// Records `vkCmdDrawIndexed` with the given index/instance counts and offsets.
    #[inline]
    pub unsafe fn cmd_draw_indexed(
        &self,
        command_buffer: vk::CommandBuffer,
        index_count: u32,
        instance_count: u32,
        first_index: u32,
        vertex_offset: i32,
        first_instance: u32,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_draw_indexed)(
                command_buffer,
                index_count,
                instance_count,
                first_index,
                vertex_offset,
                first_instance,
            );
        }
    }
1954
    /// Records `vkCmdDrawIndexedIndirect`, sourcing `draw_count` draw commands from
    /// `buffer` at `offset` with the given `stride`.
    #[inline]
    pub unsafe fn cmd_draw_indexed_indirect(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        draw_count: u32,
        stride: u32,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_draw_indexed_indirect)(
                command_buffer,
                buffer,
                offset,
                draw_count,
                stride,
            );
        }
    }
1975
    /// Records `vkCmdExecuteCommands`, executing `secondary_command_buffers` from within
    /// `primary_command_buffer`.
    #[inline]
    pub unsafe fn cmd_execute_commands(
        &self,
        primary_command_buffer: vk::CommandBuffer,
        secondary_command_buffers: &[vk::CommandBuffer],
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_execute_commands)(
                primary_command_buffer,
                secondary_command_buffers.len() as u32,
                secondary_command_buffers.as_ptr(),
            );
        }
    }
1991
    /// Records `vkCmdBindDescriptorSets`, binding `descriptor_sets` starting at set index
    /// `first_set` with the given dynamic offsets.
    #[inline]
    pub unsafe fn cmd_bind_descriptor_sets(
        &self,
        command_buffer: vk::CommandBuffer,
        pipeline_bind_point: vk::PipelineBindPoint,
        layout: vk::PipelineLayout,
        first_set: u32,
        descriptor_sets: &[vk::DescriptorSet],
        dynamic_offsets: &[u32],
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_bind_descriptor_sets)(
                command_buffer,
                pipeline_bind_point,
                layout,
                first_set,
                descriptor_sets.len() as u32,
                descriptor_sets.as_ptr(),
                dynamic_offsets.len() as u32,
                dynamic_offsets.as_ptr(),
            );
        }
    }
2016
    /// Records `vkCmdCopyQueryPoolResults`, copying results of `query_count` queries
    /// (starting at `first_query`) into `dst_buffer` at `dst_offset` with `stride` bytes
    /// between results.
    #[inline]
    pub unsafe fn cmd_copy_query_pool_results(
        &self,
        command_buffer: vk::CommandBuffer,
        query_pool: vk::QueryPool,
        first_query: u32,
        query_count: u32,
        dst_buffer: vk::Buffer,
        dst_offset: vk::DeviceSize,
        stride: vk::DeviceSize,
        flags: vk::QueryResultFlags,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_copy_query_pool_results)(
                command_buffer,
                query_pool,
                first_query,
                query_count,
                dst_buffer,
                dst_offset,
                stride,
                flags,
            );
        }
    }
2043
    /// Records `vkCmdPushConstants`, uploading the bytes of `constants` at byte `offset`
    /// for the stages in `stage_flags`; the slice supplies both size and untyped pointer.
    #[inline]
    pub unsafe fn cmd_push_constants(
        &self,
        command_buffer: vk::CommandBuffer,
        layout: vk::PipelineLayout,
        stage_flags: vk::ShaderStageFlags,
        offset: u32,
        constants: &[u8],
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_push_constants)(
                command_buffer,
                layout,
                stage_flags,
                offset,
                constants.len() as _,
                constants.as_ptr() as _,
            );
        }
    }
2065
    /// Records `vkCmdBeginRenderPass`, starting the render pass instance described by
    /// `render_pass_begin`.
    #[inline]
    pub unsafe fn cmd_begin_render_pass(
        &self,
        command_buffer: vk::CommandBuffer,
        render_pass_begin: &vk::RenderPassBeginInfo<'_>,
        contents: vk::SubpassContents,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_begin_render_pass)(command_buffer, render_pass_begin, contents);
        }
    }
2078
    /// Records `vkCmdNextSubpass`, advancing to the next subpass of the current render pass.
    #[inline]
    pub unsafe fn cmd_next_subpass(
        &self,
        command_buffer: vk::CommandBuffer,
        contents: vk::SubpassContents,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_next_subpass)(command_buffer, contents);
        }
    }
2090
    /// Records `vkCmdBindPipeline`, binding `pipeline` at the given bind point.
    #[inline]
    pub unsafe fn cmd_bind_pipeline(
        &self,
        command_buffer: vk::CommandBuffer,
        pipeline_bind_point: vk::PipelineBindPoint,
        pipeline: vk::Pipeline,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_bind_pipeline)(command_buffer, pipeline_bind_point, pipeline);
        }
    }
2103
    /// Records `vkCmdSetScissor`, setting `scissors` starting at index `first_scissor`.
    #[inline]
    pub unsafe fn cmd_set_scissor(
        &self,
        command_buffer: vk::CommandBuffer,
        first_scissor: u32,
        scissors: &[vk::Rect2D],
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_set_scissor)(
                command_buffer,
                first_scissor,
                scissors.len() as u32,
                scissors.as_ptr(),
            );
        }
    }
2121
    /// Records `vkCmdSetLineWidth`, setting the dynamic rasterization line width.
    #[inline]
    pub unsafe fn cmd_set_line_width(&self, command_buffer: vk::CommandBuffer, line_width: f32) {
        unsafe {
            (self.device_fn_1_0.cmd_set_line_width)(command_buffer, line_width);
        }
    }
2129
    /// Records `vkCmdBindVertexBuffers`, binding `buffers` (paired one-to-one with
    /// `offsets`) starting at binding `first_binding`. The debug assertion catches
    /// mismatched slice lengths in debug builds; in release the FFI reads
    /// `buffers.len()` entries from both pointers.
    #[inline]
    pub unsafe fn cmd_bind_vertex_buffers(
        &self,
        command_buffer: vk::CommandBuffer,
        first_binding: u32,
        buffers: &[vk::Buffer],
        offsets: &[vk::DeviceSize],
    ) {
        unsafe {
            debug_assert_eq!(buffers.len(), offsets.len());
            (self.device_fn_1_0.cmd_bind_vertex_buffers)(
                command_buffer,
                first_binding,
                buffers.len() as u32,
                buffers.as_ptr(),
                offsets.as_ptr(),
            );
        }
    }
2150
    /// Records `vkCmdEndRenderPass`, ending the current render pass instance.
    #[inline]
    pub unsafe fn cmd_end_render_pass(&self, command_buffer: vk::CommandBuffer) {
        unsafe {
            (self.device_fn_1_0.cmd_end_render_pass)(command_buffer);
        }
    }
2158
    /// Records `vkCmdDraw` with the given vertex/instance counts and offsets.
    #[inline]
    pub unsafe fn cmd_draw(
        &self,
        command_buffer: vk::CommandBuffer,
        vertex_count: u32,
        instance_count: u32,
        first_vertex: u32,
        first_instance: u32,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_draw)(
                command_buffer,
                vertex_count,
                instance_count,
                first_vertex,
                first_instance,
            );
        }
    }
2179
    /// Records `vkCmdDrawIndirect`, sourcing `draw_count` draw commands from `buffer`
    /// at `offset` with the given `stride`.
    #[inline]
    pub unsafe fn cmd_draw_indirect(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        draw_count: u32,
        stride: u32,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_draw_indirect)(
                command_buffer,
                buffer,
                offset,
                draw_count,
                stride,
            );
        }
    }
2200
    /// Records `vkCmdDispatch`, launching a compute grid of the given workgroup counts.
    #[inline]
    pub unsafe fn cmd_dispatch(
        &self,
        command_buffer: vk::CommandBuffer,
        group_count_x: u32,
        group_count_y: u32,
        group_count_z: u32,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_dispatch)(
                command_buffer,
                group_count_x,
                group_count_y,
                group_count_z,
            );
        }
    }
2219
    /// Records `vkCmdDispatchIndirect`, reading the workgroup counts from `buffer` at `offset`.
    #[inline]
    pub unsafe fn cmd_dispatch_indirect(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_dispatch_indirect)(command_buffer, buffer, offset);
        }
    }
2232
    /// Records `vkCmdSetViewport`, setting `viewports` starting at index `first_viewport`.
    #[inline]
    pub unsafe fn cmd_set_viewport(
        &self,
        command_buffer: vk::CommandBuffer,
        first_viewport: u32,
        viewports: &[vk::Viewport],
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_set_viewport)(
                command_buffer,
                first_viewport,
                viewports.len() as u32,
                viewports.as_ptr(),
            );
        }
    }
2250
    /// Records `vkCmdSetDepthBias` with the given constant/clamp/slope factors.
    #[inline]
    pub unsafe fn cmd_set_depth_bias(
        &self,
        command_buffer: vk::CommandBuffer,
        constant_factor: f32,
        clamp: f32,
        slope_factor: f32,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_set_depth_bias)(
                command_buffer,
                constant_factor,
                clamp,
                slope_factor,
            );
        }
    }
2269
    /// Records `vkCmdSetBlendConstants`, setting the four RGBA blend constants.
    #[inline]
    pub unsafe fn cmd_set_blend_constants(
        &self,
        command_buffer: vk::CommandBuffer,
        blend_constants: &[f32; 4],
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_set_blend_constants)(command_buffer, blend_constants);
        }
    }
2281
    /// Records `vkCmdSetDepthBounds` with the given min/max depth bounds.
    #[inline]
    pub unsafe fn cmd_set_depth_bounds(
        &self,
        command_buffer: vk::CommandBuffer,
        min_depth_bounds: f32,
        max_depth_bounds: f32,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_set_depth_bounds)(
                command_buffer,
                min_depth_bounds,
                max_depth_bounds,
            );
        }
    }
2298
    /// Records `vkCmdSetStencilCompareMask` for the faces in `face_mask`.
    #[inline]
    pub unsafe fn cmd_set_stencil_compare_mask(
        &self,
        command_buffer: vk::CommandBuffer,
        face_mask: vk::StencilFaceFlags,
        compare_mask: u32,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_set_stencil_compare_mask)(
                command_buffer,
                face_mask,
                compare_mask,
            );
        }
    }
2315
    /// Records `vkCmdSetStencilWriteMask` for the faces in `face_mask`.
    #[inline]
    pub unsafe fn cmd_set_stencil_write_mask(
        &self,
        command_buffer: vk::CommandBuffer,
        face_mask: vk::StencilFaceFlags,
        write_mask: u32,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_set_stencil_write_mask)(command_buffer, face_mask, write_mask);
        }
    }
2328
    /// Records `vkCmdSetStencilReference` for the faces in `face_mask`.
    #[inline]
    pub unsafe fn cmd_set_stencil_reference(
        &self,
        command_buffer: vk::CommandBuffer,
        face_mask: vk::StencilFaceFlags,
        reference: u32,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_set_stencil_reference)(command_buffer, face_mask, reference);
        }
    }
2341
    /// Fetches query results via `vkGetQueryPoolResults`, writing one `T` per query into
    /// `data` (`query_count` = `data.len()`, stride = `size_of::<T>()`).
    /// NOTE(review): this assumes each query produces exactly one `T`; with flags such as
    /// `QUERY_RESULT_WITH_AVAILABILITY` a query yields extra values — callers must size
    /// `T` accordingly (confirm against the Vulkan spec for the chosen flags).
    #[inline]
    pub unsafe fn get_query_pool_results<T>(
        &self,
        query_pool: vk::QueryPool,
        first_query: u32,
        data: &mut [T],
        flags: vk::QueryResultFlags,
    ) -> VkResult<()> {
        unsafe {
            let data_size = size_of_val(data);
            (self.device_fn_1_0.get_query_pool_results)(
                self.handle(),
                query_pool,
                first_query,
                data.len() as u32,
                data_size,
                data.as_mut_ptr().cast(),
                size_of::<T>() as _,
                flags,
            )
            .result()
        }
    }
2366
    /// Records `vkCmdBeginQuery`, beginning query `query` of `query_pool`.
    #[inline]
    pub unsafe fn cmd_begin_query(
        &self,
        command_buffer: vk::CommandBuffer,
        query_pool: vk::QueryPool,
        query: u32,
        flags: vk::QueryControlFlags,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_begin_query)(command_buffer, query_pool, query, flags);
        }
    }
2380
    /// Records `vkCmdEndQuery`, ending query `query` of `query_pool`.
    #[inline]
    pub unsafe fn cmd_end_query(
        &self,
        command_buffer: vk::CommandBuffer,
        query_pool: vk::QueryPool,
        query: u32,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_end_query)(command_buffer, query_pool, query);
        }
    }
2393
    /// Records `vkCmdResetQueryPool`, resetting `query_count` queries starting at `first_query`.
    #[inline]
    pub unsafe fn cmd_reset_query_pool(
        &self,
        command_buffer: vk::CommandBuffer,
        pool: vk::QueryPool,
        first_query: u32,
        query_count: u32,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_reset_query_pool)(
                command_buffer,
                pool,
                first_query,
                query_count,
            );
        }
    }
2412
    /// Records `vkCmdWriteTimestamp`, writing a timestamp into `query` after `pipeline_stage`.
    #[inline]
    pub unsafe fn cmd_write_timestamp(
        &self,
        command_buffer: vk::CommandBuffer,
        pipeline_stage: vk::PipelineStageFlags,
        query_pool: vk::QueryPool,
        query: u32,
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_write_timestamp)(
                command_buffer,
                pipeline_stage,
                query_pool,
                query,
            );
        }
    }
2431
    /// Creates a semaphore via `vkCreateSemaphore`; the handle is only assumed
    /// initialized when the call returns `SUCCESS`.
    #[inline]
    pub unsafe fn create_semaphore(
        &self,
        create_info: &vk::SemaphoreCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::Semaphore> {
        unsafe {
            let mut semaphore = mem::MaybeUninit::uninit();
            (self.device_fn_1_0.create_semaphore)(
                self.handle(),
                create_info,
                allocation_callbacks.as_raw_ptr(),
                semaphore.as_mut_ptr(),
            )
            .assume_init_on_success(semaphore)
        }
    }
2450
    /// Creates one pipeline per entry of `create_infos` via `vkCreateGraphicsPipelines`.
    /// On failure the partially-created handle vector is returned alongside the error so
    /// callers can destroy any valid handles. `set_len` is sound even on error because
    /// the spec requires the driver to write all `create_infos.len()` output entries
    /// (failed entries become `VK_NULL_HANDLE`) — TODO confirm against the targeted spec
    /// version.
    #[inline]
    pub unsafe fn create_graphics_pipelines(
        &self,
        pipeline_cache: vk::PipelineCache,
        create_infos: &[vk::GraphicsPipelineCreateInfo<'_>],
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> Result<Vec<vk::Pipeline>, (Vec<vk::Pipeline>, vk::Result)> {
        unsafe {
            let mut pipelines = Vec::with_capacity(create_infos.len());
            let err_code = (self.device_fn_1_0.create_graphics_pipelines)(
                self.handle(),
                pipeline_cache,
                create_infos.len() as u32,
                create_infos.as_ptr(),
                allocation_callbacks.as_raw_ptr(),
                pipelines.as_mut_ptr(),
            );
            pipelines.set_len(create_infos.len());
            match err_code {
                vk::Result::SUCCESS => Ok(pipelines),
                _ => Err((pipelines, err_code)),
            }
        }
    }
2480
    /// Creates one pipeline per entry of `create_infos` via `vkCreateComputePipelines`.
    /// Mirrors `create_graphics_pipelines`: on failure the handle vector is returned with
    /// the error, and `set_len` relies on the driver writing every output entry.
    #[inline]
    pub unsafe fn create_compute_pipelines(
        &self,
        pipeline_cache: vk::PipelineCache,
        create_infos: &[vk::ComputePipelineCreateInfo<'_>],
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> Result<Vec<vk::Pipeline>, (Vec<vk::Pipeline>, vk::Result)> {
        unsafe {
            let mut pipelines = Vec::with_capacity(create_infos.len());
            let err_code = (self.device_fn_1_0.create_compute_pipelines)(
                self.handle(),
                pipeline_cache,
                create_infos.len() as u32,
                create_infos.as_ptr(),
                allocation_callbacks.as_raw_ptr(),
                pipelines.as_mut_ptr(),
            );
            pipelines.set_len(create_infos.len());
            match err_code {
                vk::Result::SUCCESS => Ok(pipelines),
                _ => Err((pipelines, err_code)),
            }
        }
    }
2510
    /// Creates a buffer via `vkCreateBuffer`; the handle is only assumed initialized
    /// when the call returns `SUCCESS`.
    #[inline]
    pub unsafe fn create_buffer(
        &self,
        create_info: &vk::BufferCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::Buffer> {
        unsafe {
            let mut buffer = mem::MaybeUninit::uninit();
            (self.device_fn_1_0.create_buffer)(
                self.handle(),
                create_info,
                allocation_callbacks.as_raw_ptr(),
                buffer.as_mut_ptr(),
            )
            .assume_init_on_success(buffer)
        }
    }
2529
    /// Creates a pipeline layout via `vkCreatePipelineLayout`; the handle is only
    /// assumed initialized when the call returns `SUCCESS`.
    #[inline]
    pub unsafe fn create_pipeline_layout(
        &self,
        create_info: &vk::PipelineLayoutCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::PipelineLayout> {
        unsafe {
            let mut pipeline_layout = mem::MaybeUninit::uninit();
            (self.device_fn_1_0.create_pipeline_layout)(
                self.handle(),
                create_info,
                allocation_callbacks.as_raw_ptr(),
                pipeline_layout.as_mut_ptr(),
            )
            .assume_init_on_success(pipeline_layout)
        }
    }
2548
    /// Creates a pipeline cache via `vkCreatePipelineCache`; the handle is only
    /// assumed initialized when the call returns `SUCCESS`.
    #[inline]
    pub unsafe fn create_pipeline_cache(
        &self,
        create_info: &vk::PipelineCacheCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::PipelineCache> {
        unsafe {
            let mut pipeline_cache = mem::MaybeUninit::uninit();
            (self.device_fn_1_0.create_pipeline_cache)(
                self.handle(),
                create_info,
                allocation_callbacks.as_raw_ptr(),
                pipeline_cache.as_mut_ptr(),
            )
            .assume_init_on_success(pipeline_cache)
        }
    }
2567
    /// Retrieves the serialized cache blob via `vkGetPipelineCacheData`, using the
    /// two-call size-query pattern handled by `read_into_uninitialized_vector` (which
    /// retries on `INCOMPLETE`-style races between the size query and the data fetch).
    #[inline]
    pub unsafe fn get_pipeline_cache_data(
        &self,
        pipeline_cache: vk::PipelineCache,
    ) -> VkResult<Vec<u8>> {
        unsafe {
            read_into_uninitialized_vector(|count, data: *mut u8| {
                (self.device_fn_1_0.get_pipeline_cache_data)(
                    self.handle(),
                    pipeline_cache,
                    count,
                    data.cast(),
                )
            })
        }
    }
2585
    /// Merges `src_caches` into `dst_cache` via `vkMergePipelineCaches`.
    #[inline]
    pub unsafe fn merge_pipeline_caches(
        &self,
        dst_cache: vk::PipelineCache,
        src_caches: &[vk::PipelineCache],
    ) -> VkResult<()> {
        unsafe {
            (self.device_fn_1_0.merge_pipeline_caches)(
                self.handle(),
                dst_cache,
                src_caches.len() as u32,
                src_caches.as_ptr(),
            )
            .result()
        }
    }
2603
    /// Maps `size` bytes of `memory` at `offset` via `vkMapMemory`, returning the host
    /// pointer on success. The pointer stays valid until `unmap_memory` is called.
    #[inline]
    pub unsafe fn map_memory(
        &self,
        memory: vk::DeviceMemory,
        offset: vk::DeviceSize,
        size: vk::DeviceSize,
        flags: vk::MemoryMapFlags,
    ) -> VkResult<*mut ffi::c_void> {
        unsafe {
            let mut data = mem::MaybeUninit::uninit();
            (self.device_fn_1_0.map_memory)(
                self.handle(),
                memory,
                offset,
                size,
                flags,
                data.as_mut_ptr(),
            )
            .assume_init_on_success(data)
        }
    }
2626
    /// Unmaps a previously mapped `memory` allocation (`vkUnmapMemory`), invalidating
    /// any pointer returned by `map_memory`.
    #[inline]
    pub unsafe fn unmap_memory(&self, memory: vk::DeviceMemory) {
        unsafe {
            (self.device_fn_1_0.unmap_memory)(self.handle(), memory);
        }
    }
2634
    /// Invalidates host caches for `ranges` via `vkInvalidateMappedMemoryRanges`, making
    /// device writes to non-coherent memory visible to the host.
    #[inline]
    pub unsafe fn invalidate_mapped_memory_ranges(
        &self,
        ranges: &[vk::MappedMemoryRange<'_>],
    ) -> VkResult<()> {
        unsafe {
            (self.device_fn_1_0.invalidate_mapped_memory_ranges)(
                self.handle(),
                ranges.len() as u32,
                ranges.as_ptr(),
            )
            .result()
        }
    }
2650
    /// Flushes host writes in `ranges` via `vkFlushMappedMemoryRanges`, making them
    /// visible to the device for non-coherent memory.
    #[inline]
    pub unsafe fn flush_mapped_memory_ranges(
        &self,
        ranges: &[vk::MappedMemoryRange<'_>],
    ) -> VkResult<()> {
        unsafe {
            (self.device_fn_1_0.flush_mapped_memory_ranges)(
                self.handle(),
                ranges.len() as u32,
                ranges.as_ptr(),
            )
            .result()
        }
    }
2666
    /// Creates a framebuffer via `vkCreateFramebuffer`; the handle is only assumed
    /// initialized when the call returns `SUCCESS`.
    #[inline]
    pub unsafe fn create_framebuffer(
        &self,
        create_info: &vk::FramebufferCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::Framebuffer> {
        unsafe {
            let mut framebuffer = mem::MaybeUninit::uninit();
            (self.device_fn_1_0.create_framebuffer)(
                self.handle(),
                create_info,
                allocation_callbacks.as_raw_ptr(),
                framebuffer.as_mut_ptr(),
            )
            .assume_init_on_success(framebuffer)
        }
    }
2685
    /// Retrieves the queue at (`queue_family_index`, `queue_index`) via `vkGetDeviceQueue`.
    /// `assume_init` is unconditional because the call returns no result code and always
    /// writes the output handle.
    #[inline]
    pub unsafe fn get_device_queue(&self, queue_family_index: u32, queue_index: u32) -> vk::Queue {
        unsafe {
            let mut queue = mem::MaybeUninit::uninit();
            (self.device_fn_1_0.get_device_queue)(
                self.handle(),
                queue_family_index,
                queue_index,
                queue.as_mut_ptr(),
            );
            queue.assume_init()
        }
    }
2700
    /// Records `vkCmdPipelineBarrier` with the three barrier arrays (global memory,
    /// buffer, and image barriers), each passed as a count/pointer pair.
    #[inline]
    pub unsafe fn cmd_pipeline_barrier(
        &self,
        command_buffer: vk::CommandBuffer,
        src_stage_mask: vk::PipelineStageFlags,
        dst_stage_mask: vk::PipelineStageFlags,
        dependency_flags: vk::DependencyFlags,
        memory_barriers: &[vk::MemoryBarrier<'_>],
        buffer_memory_barriers: &[vk::BufferMemoryBarrier<'_>],
        image_memory_barriers: &[vk::ImageMemoryBarrier<'_>],
    ) {
        unsafe {
            (self.device_fn_1_0.cmd_pipeline_barrier)(
                command_buffer,
                src_stage_mask,
                dst_stage_mask,
                dependency_flags,
                memory_barriers.len() as u32,
                memory_barriers.as_ptr(),
                buffer_memory_barriers.len() as u32,
                buffer_memory_barriers.as_ptr(),
                image_memory_barriers.len() as u32,
                image_memory_barriers.as_ptr(),
            );
        }
    }
2728
    /// Creates a render pass via `vkCreateRenderPass`; the handle is only assumed
    /// initialized when the call returns `SUCCESS`.
    #[inline]
    pub unsafe fn create_render_pass(
        &self,
        create_info: &vk::RenderPassCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::RenderPass> {
        unsafe {
            let mut renderpass = mem::MaybeUninit::uninit();
            (self.device_fn_1_0.create_render_pass)(
                self.handle(),
                create_info,
                allocation_callbacks.as_raw_ptr(),
                renderpass.as_mut_ptr(),
            )
            .assume_init_on_success(renderpass)
        }
    }
2747
    /// Puts `command_buffer` into the recording state (`vkBeginCommandBuffer`).
    #[inline]
    pub unsafe fn begin_command_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        begin_info: &vk::CommandBufferBeginInfo<'_>,
    ) -> VkResult<()> {
        unsafe { (self.device_fn_1_0.begin_command_buffer)(command_buffer, begin_info).result() }
    }
2757
    /// Finishes recording `command_buffer` (`vkEndCommandBuffer`).
    #[inline]
    pub unsafe fn end_command_buffer(&self, command_buffer: vk::CommandBuffer) -> VkResult<()> {
        unsafe { (self.device_fn_1_0.end_command_buffer)(command_buffer).result() }
    }
2763
    /// Waits on `fences` via `vkWaitForFences`; `wait_all` is converted to the API's
    /// `VkBool32` (`true` = wait for all, `false` = wait for any), `timeout` is in
    /// nanoseconds.
    #[inline]
    pub unsafe fn wait_for_fences(
        &self,
        fences: &[vk::Fence],
        wait_all: bool,
        timeout: u64,
    ) -> VkResult<()> {
        unsafe {
            (self.device_fn_1_0.wait_for_fences)(
                self.handle(),
                fences.len() as u32,
                fences.as_ptr(),
                wait_all as u32,
                timeout,
            )
            .result()
        }
    }
2783
    /// Polls `fence` via `vkGetFenceStatus`: `SUCCESS` maps to `Ok(true)` (signaled),
    /// `NOT_READY` to `Ok(false)` (unsignaled), anything else is an error.
    #[inline]
    pub unsafe fn get_fence_status(&self, fence: vk::Fence) -> VkResult<bool> {
        unsafe {
            let err_code = (self.device_fn_1_0.get_fence_status)(self.handle(), fence);
            match err_code {
                vk::Result::SUCCESS => Ok(true),
                vk::Result::NOT_READY => Ok(false),
                _ => Err(err_code),
            }
        }
    }
2796
    /// Blocks until `queue` has finished all submitted work (`vkQueueWaitIdle`).
    #[inline]
    pub unsafe fn queue_wait_idle(&self, queue: vk::Queue) -> VkResult<()> {
        unsafe { (self.device_fn_1_0.queue_wait_idle)(queue).result() }
    }
2802
    /// Submits `submits` to `queue` via `vkQueueSubmit`, optionally signaling `fence`
    /// on completion (pass a null handle for no fence).
    #[inline]
    pub unsafe fn queue_submit(
        &self,
        queue: vk::Queue,
        submits: &[vk::SubmitInfo<'_>],
        fence: vk::Fence,
    ) -> VkResult<()> {
        unsafe {
            (self.device_fn_1_0.queue_submit)(queue, submits.len() as u32, submits.as_ptr(), fence)
                .result()
        }
    }
2816
    /// Submits sparse binding operations to `queue` via `vkQueueBindSparse`, optionally
    /// signaling `fence` on completion.
    #[inline]
    pub unsafe fn queue_bind_sparse(
        &self,
        queue: vk::Queue,
        bind_info: &[vk::BindSparseInfo<'_>],
        fence: vk::Fence,
    ) -> VkResult<()> {
        unsafe {
            (self.device_fn_1_0.queue_bind_sparse)(
                queue,
                bind_info.len() as u32,
                bind_info.as_ptr(),
                fence,
            )
            .result()
        }
    }
2835
    /// Creates a buffer view via `vkCreateBufferView`; the handle is only assumed
    /// initialized when the call returns `SUCCESS`.
    #[inline]
    pub unsafe fn create_buffer_view(
        &self,
        create_info: &vk::BufferViewCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::BufferView> {
        unsafe {
            let mut buffer_view = mem::MaybeUninit::uninit();
            (self.device_fn_1_0.create_buffer_view)(
                self.handle(),
                create_info,
                allocation_callbacks.as_raw_ptr(),
                buffer_view.as_mut_ptr(),
            )
            .assume_init_on_success(buffer_view)
        }
    }
2854
2855 #[inline]
2857 pub unsafe fn destroy_buffer_view(
2858 &self,
2859 buffer_view: vk::BufferView,
2860 allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
2861 ) {
2862 unsafe {
2863 (self.device_fn_1_0.destroy_buffer_view)(
2864 self.handle(),
2865 buffer_view,
2866 allocation_callbacks.as_raw_ptr(),
2867 );
2868 }
2869 }
2870
2871 #[inline]
2873 pub unsafe fn create_image_view(
2874 &self,
2875 create_info: &vk::ImageViewCreateInfo<'_>,
2876 allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
2877 ) -> VkResult<vk::ImageView> {
2878 unsafe {
2879 let mut image_view = mem::MaybeUninit::uninit();
2880 (self.device_fn_1_0.create_image_view)(
2881 self.handle(),
2882 create_info,
2883 allocation_callbacks.as_raw_ptr(),
2884 image_view.as_mut_ptr(),
2885 )
2886 .assume_init_on_success(image_view)
2887 }
2888 }
2889
2890 #[inline]
2892 pub unsafe fn allocate_command_buffers(
2893 &self,
2894 allocate_info: &vk::CommandBufferAllocateInfo<'_>,
2895 ) -> VkResult<Vec<vk::CommandBuffer>> {
2896 unsafe {
2897 let mut buffers = Vec::with_capacity(allocate_info.command_buffer_count as usize);
2898 (self.device_fn_1_0.allocate_command_buffers)(
2899 self.handle(),
2900 allocate_info,
2901 buffers.as_mut_ptr(),
2902 )
2903 .set_vec_len_on_success(buffers, allocate_info.command_buffer_count as usize)
2904 }
2905 }
2906
2907 #[inline]
2909 pub unsafe fn create_command_pool(
2910 &self,
2911 create_info: &vk::CommandPoolCreateInfo<'_>,
2912 allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
2913 ) -> VkResult<vk::CommandPool> {
2914 unsafe {
2915 let mut pool = mem::MaybeUninit::uninit();
2916 (self.device_fn_1_0.create_command_pool)(
2917 self.handle(),
2918 create_info,
2919 allocation_callbacks.as_raw_ptr(),
2920 pool.as_mut_ptr(),
2921 )
2922 .assume_init_on_success(pool)
2923 }
2924 }
2925
2926 #[inline]
2928 pub unsafe fn create_query_pool(
2929 &self,
2930 create_info: &vk::QueryPoolCreateInfo<'_>,
2931 allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
2932 ) -> VkResult<vk::QueryPool> {
2933 unsafe {
2934 let mut pool = mem::MaybeUninit::uninit();
2935 (self.device_fn_1_0.create_query_pool)(
2936 self.handle(),
2937 create_info,
2938 allocation_callbacks.as_raw_ptr(),
2939 pool.as_mut_ptr(),
2940 )
2941 .assume_init_on_success(pool)
2942 }
2943 }
2944
2945 #[inline]
2947 pub unsafe fn create_image(
2948 &self,
2949 create_info: &vk::ImageCreateInfo<'_>,
2950 allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
2951 ) -> VkResult<vk::Image> {
2952 unsafe {
2953 let mut image = mem::MaybeUninit::uninit();
2954 (self.device_fn_1_0.create_image)(
2955 self.handle(),
2956 create_info,
2957 allocation_callbacks.as_raw_ptr(),
2958 image.as_mut_ptr(),
2959 )
2960 .assume_init_on_success(image)
2961 }
2962 }
2963
2964 #[inline]
2966 pub unsafe fn get_image_subresource_layout(
2967 &self,
2968 image: vk::Image,
2969 subresource: vk::ImageSubresource,
2970 ) -> vk::SubresourceLayout {
2971 unsafe {
2972 let mut layout = mem::MaybeUninit::uninit();
2973 (self.device_fn_1_0.get_image_subresource_layout)(
2974 self.handle(),
2975 image,
2976 &subresource,
2977 layout.as_mut_ptr(),
2978 );
2979 layout.assume_init()
2980 }
2981 }
2982
2983 #[inline]
2985 pub unsafe fn get_image_memory_requirements(&self, image: vk::Image) -> vk::MemoryRequirements {
2986 unsafe {
2987 let mut mem_req = mem::MaybeUninit::uninit();
2988 (self.device_fn_1_0.get_image_memory_requirements)(
2989 self.handle(),
2990 image,
2991 mem_req.as_mut_ptr(),
2992 );
2993 mem_req.assume_init()
2994 }
2995 }
2996
2997 #[inline]
2999 pub unsafe fn get_buffer_memory_requirements(
3000 &self,
3001 buffer: vk::Buffer,
3002 ) -> vk::MemoryRequirements {
3003 unsafe {
3004 let mut mem_req = mem::MaybeUninit::uninit();
3005 (self.device_fn_1_0.get_buffer_memory_requirements)(
3006 self.handle(),
3007 buffer,
3008 mem_req.as_mut_ptr(),
3009 );
3010 mem_req.assume_init()
3011 }
3012 }
3013
3014 #[inline]
3016 pub unsafe fn allocate_memory(
3017 &self,
3018 allocate_info: &vk::MemoryAllocateInfo<'_>,
3019 allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
3020 ) -> VkResult<vk::DeviceMemory> {
3021 unsafe {
3022 let mut memory = mem::MaybeUninit::uninit();
3023 (self.device_fn_1_0.allocate_memory)(
3024 self.handle(),
3025 allocate_info,
3026 allocation_callbacks.as_raw_ptr(),
3027 memory.as_mut_ptr(),
3028 )
3029 .assume_init_on_success(memory)
3030 }
3031 }
3032
3033 #[inline]
3035 pub unsafe fn create_shader_module(
3036 &self,
3037 create_info: &vk::ShaderModuleCreateInfo<'_>,
3038 allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
3039 ) -> VkResult<vk::ShaderModule> {
3040 unsafe {
3041 let mut shader = mem::MaybeUninit::uninit();
3042 (self.device_fn_1_0.create_shader_module)(
3043 self.handle(),
3044 create_info,
3045 allocation_callbacks.as_raw_ptr(),
3046 shader.as_mut_ptr(),
3047 )
3048 .assume_init_on_success(shader)
3049 }
3050 }
3051
3052 #[inline]
3054 pub unsafe fn create_fence(
3055 &self,
3056 create_info: &vk::FenceCreateInfo<'_>,
3057 allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
3058 ) -> VkResult<vk::Fence> {
3059 unsafe {
3060 let mut fence = mem::MaybeUninit::uninit();
3061 (self.device_fn_1_0.create_fence)(
3062 self.handle(),
3063 create_info,
3064 allocation_callbacks.as_raw_ptr(),
3065 fence.as_mut_ptr(),
3066 )
3067 .assume_init_on_success(fence)
3068 }
3069 }
3070
3071 #[inline]
3073 pub unsafe fn bind_buffer_memory(
3074 &self,
3075 buffer: vk::Buffer,
3076 device_memory: vk::DeviceMemory,
3077 offset: vk::DeviceSize,
3078 ) -> VkResult<()> {
3079 unsafe {
3080 (self.device_fn_1_0.bind_buffer_memory)(self.handle(), buffer, device_memory, offset)
3081 .result()
3082 }
3083 }
3084
3085 #[inline]
3087 pub unsafe fn bind_image_memory(
3088 &self,
3089 image: vk::Image,
3090 device_memory: vk::DeviceMemory,
3091 offset: vk::DeviceSize,
3092 ) -> VkResult<()> {
3093 unsafe {
3094 (self.device_fn_1_0.bind_image_memory)(self.handle(), image, device_memory, offset)
3095 .result()
3096 }
3097 }
3098
3099 #[inline]
3101 pub unsafe fn get_render_area_granularity(&self, render_pass: vk::RenderPass) -> vk::Extent2D {
3102 unsafe {
3103 let mut granularity = mem::MaybeUninit::uninit();
3104 (self.device_fn_1_0.get_render_area_granularity)(
3105 self.handle(),
3106 render_pass,
3107 granularity.as_mut_ptr(),
3108 );
3109 granularity.assume_init()
3110 }
3111 }
3112
3113 #[inline]
3115 pub unsafe fn get_device_memory_commitment(&self, memory: vk::DeviceMemory) -> vk::DeviceSize {
3116 unsafe {
3117 let mut committed_memory_in_bytes = mem::MaybeUninit::uninit();
3118 (self.device_fn_1_0.get_device_memory_commitment)(
3119 self.handle(),
3120 memory,
3121 committed_memory_in_bytes.as_mut_ptr(),
3122 );
3123 committed_memory_in_bytes.assume_init()
3124 }
3125 }
3126
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetImageSparseMemoryRequirements.html>
    #[inline]
    pub unsafe fn get_image_sparse_memory_requirements(
        &self,
        image: vk::Image,
    ) -> Vec<vk::SparseImageMemoryRequirements> {
        unsafe {
            // Two-call enumeration: the helper first invokes the closure with a
            // null data pointer to obtain the element count, then again with a
            // sized buffer. The C entry point returns no VkResult, so SUCCESS
            // is reported unconditionally and the `unwrap` below cannot fail.
            read_into_uninitialized_vector(|count, data| {
                (self.device_fn_1_0.get_image_sparse_memory_requirements)(
                    self.handle(),
                    image,
                    count,
                    data,
                );
                vk::Result::SUCCESS
            })
            .unwrap()
        }
    }
3147}