1#![allow(unused_imports)]
2use crate::vk::*;
3use core::ffi::*;
4#[doc = "Extensions tagged AMD"]
5pub mod amd {
6 #[doc = "VK_AMD_rasterization_order"]
7 pub mod rasterization_order {
8 use super::super::*;
9 pub use {
10 crate::vk::AMD_RASTERIZATION_ORDER_EXTENSION_NAME as NAME,
11 crate::vk::AMD_RASTERIZATION_ORDER_SPEC_VERSION as SPEC_VERSION,
12 };
13 }
14 #[doc = "VK_AMD_shader_trinary_minmax"]
15 pub mod shader_trinary_minmax {
16 use super::super::*;
17 pub use {
18 crate::vk::AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME as NAME,
19 crate::vk::AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION as SPEC_VERSION,
20 };
21 }
22 #[doc = "VK_AMD_shader_explicit_vertex_parameter"]
23 pub mod shader_explicit_vertex_parameter {
24 use super::super::*;
25 pub use {
26 crate::vk::AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME as NAME,
27 crate::vk::AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION as SPEC_VERSION,
28 };
29 }
30 #[doc = "VK_AMD_gcn_shader"]
31 pub mod gcn_shader {
32 use super::super::*;
33 pub use {
34 crate::vk::AMD_GCN_SHADER_EXTENSION_NAME as NAME,
35 crate::vk::AMD_GCN_SHADER_SPEC_VERSION as SPEC_VERSION,
36 };
37 }
    #[doc = "VK_AMD_draw_indirect_count"]
    pub mod draw_indirect_count {
        use super::super::*;
        pub use {
            crate::vk::AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME as NAME,
            crate::vk::AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_AMD_draw_indirect_count device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded extension entry points.
            pub(crate) fp: DeviceFn,
            // Device the entry points were loaded for; only valid with it.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's device-level commands for `device`
            /// through `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // SAFETY: reinterprets the optional fn pointer returned by
                    // the loader as a raw pointer; both are pointer-sized.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Handle the function pointers were loaded with.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_AMD_draw_indirect_count device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_draw_indirect_count_amd: PFN_vkCmdDrawIndirectCount,
            pub cmd_draw_indexed_indirect_count_amd: PFN_vkCmdDrawIndexedIndirectCount,
        }
        // SAFETY: the table holds only plain function pointers, which are fine
        // to share and send across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Resolves each entry point by name via `f`; a name resolving to
            /// null gets a panicking fallback stub instead.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body, compiled once for all closure types.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_draw_indirect_count_amd: unsafe {
                        // Fallback stub: panics if the entry point was absent
                        // at load time and is still called.
                        unsafe extern "system" fn cmd_draw_indirect_count_amd(
                            _command_buffer: CommandBuffer,
                            _buffer: Buffer,
                            _offset: DeviceSize,
                            _count_buffer: Buffer,
                            _count_buffer_offset: DeviceSize,
                            _max_draw_count: u32,
                            _stride: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_draw_indirect_count_amd)
                            ))
                        }
                        let val = _f(c"vkCmdDrawIndirectCountAMD");
                        if val.is_null() {
                            cmd_draw_indirect_count_amd
                        } else {
                            // SAFETY: non-null pointer was resolved for this
                            // exact symbol, so it has the expected signature.
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDrawIndirectCount>(val)
                        }
                    },
                    cmd_draw_indexed_indirect_count_amd: unsafe {
                        // Fallback stub (see above).
                        unsafe extern "system" fn cmd_draw_indexed_indirect_count_amd(
                            _command_buffer: CommandBuffer,
                            _buffer: Buffer,
                            _offset: DeviceSize,
                            _count_buffer: Buffer,
                            _count_buffer_offset: DeviceSize,
                            _max_draw_count: u32,
                            _stride: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_draw_indexed_indirect_count_amd)
                            ))
                        }
                        let val = _f(c"vkCmdDrawIndexedIndirectCountAMD");
                        if val.is_null() {
                            cmd_draw_indexed_indirect_count_amd
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDrawIndexedIndirectCount>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
134 #[doc = "VK_AMD_negative_viewport_height"]
135 pub mod negative_viewport_height {
136 use super::super::*;
137 pub use {
138 crate::vk::AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME as NAME,
139 crate::vk::AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION as SPEC_VERSION,
140 };
141 }
142 #[doc = "VK_AMD_gpu_shader_half_float"]
143 pub mod gpu_shader_half_float {
144 use super::super::*;
145 pub use {
146 crate::vk::AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME as NAME,
147 crate::vk::AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION as SPEC_VERSION,
148 };
149 }
150 #[doc = "VK_AMD_shader_ballot"]
151 pub mod shader_ballot {
152 use super::super::*;
153 pub use {
154 crate::vk::AMD_SHADER_BALLOT_EXTENSION_NAME as NAME,
155 crate::vk::AMD_SHADER_BALLOT_SPEC_VERSION as SPEC_VERSION,
156 };
157 }
158 #[doc = "VK_AMD_texture_gather_bias_lod"]
159 pub mod texture_gather_bias_lod {
160 use super::super::*;
161 pub use {
162 crate::vk::AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME as NAME,
163 crate::vk::AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION as SPEC_VERSION,
164 };
165 }
    #[doc = "VK_AMD_shader_info"]
    pub mod shader_info {
        use super::super::*;
        pub use {
            crate::vk::AMD_SHADER_INFO_EXTENSION_NAME as NAME,
            crate::vk::AMD_SHADER_INFO_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_AMD_shader_info device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded extension entry points.
            pub(crate) fp: DeviceFn,
            // Device the entry points were loaded for; only valid with it.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's device-level commands for `device`
            /// through `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // SAFETY: reinterprets the optional fn pointer returned by
                    // the loader as a raw pointer; both are pointer-sized.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Handle the function pointers were loaded with.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_AMD_shader_info device-level function pointers"]
        pub struct DeviceFn {
            pub get_shader_info_amd: PFN_vkGetShaderInfoAMD,
        }
        // SAFETY: the table holds only plain function pointers, which are fine
        // to share and send across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Resolves each entry point by name via `f`; a name resolving to
            /// null gets a panicking fallback stub instead.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body, compiled once for all closure types.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_shader_info_amd: unsafe {
                        // Fallback stub: panics if the entry point was absent
                        // at load time and is still called.
                        unsafe extern "system" fn get_shader_info_amd(
                            _device: crate::vk::Device,
                            _pipeline: Pipeline,
                            _shader_stage: ShaderStageFlags,
                            _info_type: ShaderInfoTypeAMD,
                            _p_info_size: *mut usize,
                            _p_info: *mut c_void,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(get_shader_info_amd)))
                        }
                        let val = _f(c"vkGetShaderInfoAMD");
                        if val.is_null() {
                            get_shader_info_amd
                        } else {
                            // SAFETY: non-null pointer was resolved for this
                            // exact symbol, so it has the expected signature.
                            ::core::mem::transmute::<*const c_void, PFN_vkGetShaderInfoAMD>(val)
                        }
                    },
                }
            }
        }
    }
233 #[doc = "VK_AMD_shader_image_load_store_lod"]
234 pub mod shader_image_load_store_lod {
235 use super::super::*;
236 pub use {
237 crate::vk::AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME as NAME,
238 crate::vk::AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION as SPEC_VERSION,
239 };
240 }
241 #[doc = "VK_AMD_gpu_shader_int16"]
242 pub mod gpu_shader_int16 {
243 use super::super::*;
244 pub use {
245 crate::vk::AMD_GPU_SHADER_INT16_EXTENSION_NAME as NAME,
246 crate::vk::AMD_GPU_SHADER_INT16_SPEC_VERSION as SPEC_VERSION,
247 };
248 }
249 #[doc = "VK_AMD_mixed_attachment_samples"]
250 pub mod mixed_attachment_samples {
251 use super::super::*;
252 pub use {
253 crate::vk::AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME as NAME,
254 crate::vk::AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION as SPEC_VERSION,
255 };
256 }
257 #[doc = "VK_AMD_shader_fragment_mask"]
258 pub mod shader_fragment_mask {
259 use super::super::*;
260 pub use {
261 crate::vk::AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME as NAME,
262 crate::vk::AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION as SPEC_VERSION,
263 };
264 }
    #[doc = "VK_AMD_buffer_marker"]
    pub mod buffer_marker {
        use super::super::*;
        pub use {
            crate::vk::AMD_BUFFER_MARKER_EXTENSION_NAME as NAME,
            crate::vk::AMD_BUFFER_MARKER_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_AMD_buffer_marker device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded extension entry points.
            pub(crate) fp: DeviceFn,
            // Device the entry points were loaded for; only valid with it.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's device-level commands for `device`
            /// through `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // SAFETY: reinterprets the optional fn pointer returned by
                    // the loader as a raw pointer; both are pointer-sized.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Handle the function pointers were loaded with.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_AMD_buffer_marker device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_write_buffer_marker_amd: PFN_vkCmdWriteBufferMarkerAMD,
            pub cmd_write_buffer_marker2_amd: PFN_vkCmdWriteBufferMarker2AMD,
        }
        // SAFETY: the table holds only plain function pointers, which are fine
        // to share and send across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Resolves each entry point by name via `f`; a name resolving to
            /// null gets a panicking fallback stub instead.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body, compiled once for all closure types.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_write_buffer_marker_amd: unsafe {
                        // Fallback stub: panics if the entry point was absent
                        // at load time and is still called.
                        unsafe extern "system" fn cmd_write_buffer_marker_amd(
                            _command_buffer: CommandBuffer,
                            _pipeline_stage: PipelineStageFlags,
                            _dst_buffer: Buffer,
                            _dst_offset: DeviceSize,
                            _marker: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_write_buffer_marker_amd)
                            ))
                        }
                        let val = _f(c"vkCmdWriteBufferMarkerAMD");
                        if val.is_null() {
                            cmd_write_buffer_marker_amd
                        } else {
                            // SAFETY: non-null pointer was resolved for this
                            // exact symbol, so it has the expected signature.
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdWriteBufferMarkerAMD>(
                                val,
                            )
                        }
                    },
                    cmd_write_buffer_marker2_amd: unsafe {
                        // Fallback stub (see above).
                        unsafe extern "system" fn cmd_write_buffer_marker2_amd(
                            _command_buffer: CommandBuffer,
                            _stage: PipelineStageFlags2,
                            _dst_buffer: Buffer,
                            _dst_offset: DeviceSize,
                            _marker: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_write_buffer_marker2_amd)
                            ))
                        }
                        let val = _f(c"vkCmdWriteBufferMarker2AMD");
                        if val.is_null() {
                            cmd_write_buffer_marker2_amd
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdWriteBufferMarker2AMD>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
359 #[doc = "VK_AMD_pipeline_compiler_control"]
360 pub mod pipeline_compiler_control {
361 use super::super::*;
362 pub use {
363 crate::vk::AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME as NAME,
364 crate::vk::AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION as SPEC_VERSION,
365 };
366 }
367 #[doc = "VK_AMD_shader_core_properties"]
368 pub mod shader_core_properties {
369 use super::super::*;
370 pub use {
371 crate::vk::AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME as NAME,
372 crate::vk::AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION as SPEC_VERSION,
373 };
374 }
375 #[doc = "VK_AMD_memory_overallocation_behavior"]
376 pub mod memory_overallocation_behavior {
377 use super::super::*;
378 pub use {
379 crate::vk::AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME as NAME,
380 crate::vk::AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION as SPEC_VERSION,
381 };
382 }
    #[doc = "VK_AMD_display_native_hdr"]
    pub mod display_native_hdr {
        use super::super::*;
        pub use {
            crate::vk::AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME as NAME,
            crate::vk::AMD_DISPLAY_NATIVE_HDR_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_AMD_display_native_hdr device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded extension entry points.
            pub(crate) fp: DeviceFn,
            // Device the entry points were loaded for; only valid with it.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's device-level commands for `device`
            /// through `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // SAFETY: reinterprets the optional fn pointer returned by
                    // the loader as a raw pointer; both are pointer-sized.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Handle the function pointers were loaded with.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_AMD_display_native_hdr device-level function pointers"]
        pub struct DeviceFn {
            pub set_local_dimming_amd: PFN_vkSetLocalDimmingAMD,
        }
        // SAFETY: the table holds only plain function pointers, which are fine
        // to share and send across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Resolves each entry point by name via `f`; a name resolving to
            /// null gets a panicking fallback stub instead.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body, compiled once for all closure types.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    set_local_dimming_amd: unsafe {
                        // Fallback stub: panics if the entry point was absent
                        // at load time and is still called.
                        unsafe extern "system" fn set_local_dimming_amd(
                            _device: crate::vk::Device,
                            _swap_chain: SwapchainKHR,
                            _local_dimming_enable: Bool32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(set_local_dimming_amd)
                            ))
                        }
                        let val = _f(c"vkSetLocalDimmingAMD");
                        if val.is_null() {
                            set_local_dimming_amd
                        } else {
                            // SAFETY: non-null pointer was resolved for this
                            // exact symbol, so it has the expected signature.
                            ::core::mem::transmute::<*const c_void, PFN_vkSetLocalDimmingAMD>(val)
                        }
                    },
                }
            }
        }
    }
450 #[doc = "VK_AMD_shader_core_properties2"]
451 pub mod shader_core_properties2 {
452 use super::super::*;
453 pub use {
454 crate::vk::AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME as NAME,
455 crate::vk::AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION as SPEC_VERSION,
456 };
457 }
458 #[doc = "VK_AMD_device_coherent_memory"]
459 pub mod device_coherent_memory {
460 use super::super::*;
461 pub use {
462 crate::vk::AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME as NAME,
463 crate::vk::AMD_DEVICE_COHERENT_MEMORY_SPEC_VERSION as SPEC_VERSION,
464 };
465 }
466 #[doc = "VK_AMD_shader_early_and_late_fragment_tests"]
467 pub mod shader_early_and_late_fragment_tests {
468 use super::super::*;
469 pub use {
470 crate::vk::AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_EXTENSION_NAME as NAME,
471 crate::vk::AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_SPEC_VERSION as SPEC_VERSION,
472 };
473 }
    #[doc = "VK_AMD_anti_lag"]
    pub mod anti_lag {
        use super::super::*;
        pub use {
            crate::vk::AMD_ANTI_LAG_EXTENSION_NAME as NAME,
            crate::vk::AMD_ANTI_LAG_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_AMD_anti_lag device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded extension entry points.
            pub(crate) fp: DeviceFn,
            // Device the entry points were loaded for; only valid with it.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's device-level commands for `device`
            /// through `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // SAFETY: reinterprets the optional fn pointer returned by
                    // the loader as a raw pointer; both are pointer-sized.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Handle the function pointers were loaded with.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_AMD_anti_lag device-level function pointers"]
        pub struct DeviceFn {
            pub anti_lag_update_amd: PFN_vkAntiLagUpdateAMD,
        }
        // SAFETY: the table holds only plain function pointers, which are fine
        // to share and send across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Resolves each entry point by name via `f`; a name resolving to
            /// null gets a panicking fallback stub instead.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body, compiled once for all closure types.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    anti_lag_update_amd: unsafe {
                        // Fallback stub: panics if the entry point was absent
                        // at load time and is still called.
                        unsafe extern "system" fn anti_lag_update_amd(
                            _device: crate::vk::Device,
                            _p_data: *const AntiLagDataAMD<'_>,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(anti_lag_update_amd)))
                        }
                        let val = _f(c"vkAntiLagUpdateAMD");
                        if val.is_null() {
                            anti_lag_update_amd
                        } else {
                            // SAFETY: non-null pointer was resolved for this
                            // exact symbol, so it has the expected signature.
                            ::core::mem::transmute::<*const c_void, PFN_vkAntiLagUpdateAMD>(val)
                        }
                    },
                }
            }
        }
    }
537}
538#[doc = "Extensions tagged AMDX"]
539pub mod amdx {
    #[doc = "VK_AMDX_shader_enqueue"]
    pub mod shader_enqueue {
        use super::super::*;
        pub use {
            crate::vk::AMDX_SHADER_ENQUEUE_EXTENSION_NAME as NAME,
            crate::vk::AMDX_SHADER_ENQUEUE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_AMDX_shader_enqueue device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded extension entry points.
            pub(crate) fp: DeviceFn,
            // Device the entry points were loaded for; only valid with it.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's device-level commands for `device`
            /// through `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // SAFETY: reinterprets the optional fn pointer returned by
                    // the loader as a raw pointer; both are pointer-sized.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Handle the function pointers were loaded with.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_AMDX_shader_enqueue device-level function pointers"]
        pub struct DeviceFn {
            pub create_execution_graph_pipelines_amdx: PFN_vkCreateExecutionGraphPipelinesAMDX,
            pub get_execution_graph_pipeline_scratch_size_amdx:
                PFN_vkGetExecutionGraphPipelineScratchSizeAMDX,
            pub get_execution_graph_pipeline_node_index_amdx:
                PFN_vkGetExecutionGraphPipelineNodeIndexAMDX,
            pub cmd_initialize_graph_scratch_memory_amdx: PFN_vkCmdInitializeGraphScratchMemoryAMDX,
            pub cmd_dispatch_graph_amdx: PFN_vkCmdDispatchGraphAMDX,
            pub cmd_dispatch_graph_indirect_amdx: PFN_vkCmdDispatchGraphIndirectAMDX,
            pub cmd_dispatch_graph_indirect_count_amdx: PFN_vkCmdDispatchGraphIndirectCountAMDX,
        }
        // SAFETY: the table holds only plain function pointers, which are fine
        // to share and send across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Resolves each entry point by name via `f`; a name resolving to
            /// null gets a panicking fallback stub instead.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body, compiled once for all closure types.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_execution_graph_pipelines_amdx: unsafe {
                        // Fallback stub: panics if the entry point was absent
                        // at load time and is still called. (Each field below
                        // follows the same stub-or-transmute pattern.)
                        unsafe extern "system" fn create_execution_graph_pipelines_amdx(
                            _device: crate::vk::Device,
                            _pipeline_cache: PipelineCache,
                            _create_info_count: u32,
                            _p_create_infos: *const ExecutionGraphPipelineCreateInfoAMDX<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_pipelines: *mut Pipeline,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_execution_graph_pipelines_amdx)
                            ))
                        }
                        let val = _f(c"vkCreateExecutionGraphPipelinesAMDX");
                        if val.is_null() {
                            create_execution_graph_pipelines_amdx
                        } else {
                            // SAFETY: non-null pointer was resolved for this
                            // exact symbol, so it has the expected signature.
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCreateExecutionGraphPipelinesAMDX,
                            >(val)
                        }
                    },
                    get_execution_graph_pipeline_scratch_size_amdx: unsafe {
                        unsafe extern "system" fn get_execution_graph_pipeline_scratch_size_amdx(
                            _device: crate::vk::Device,
                            _execution_graph: Pipeline,
                            _p_size_info: *mut ExecutionGraphPipelineScratchSizeAMDX<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_execution_graph_pipeline_scratch_size_amdx)
                            ))
                        }
                        let val = _f(c"vkGetExecutionGraphPipelineScratchSizeAMDX");
                        if val.is_null() {
                            get_execution_graph_pipeline_scratch_size_amdx
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetExecutionGraphPipelineScratchSizeAMDX,
                            >(val)
                        }
                    },
                    get_execution_graph_pipeline_node_index_amdx: unsafe {
                        unsafe extern "system" fn get_execution_graph_pipeline_node_index_amdx(
                            _device: crate::vk::Device,
                            _execution_graph: Pipeline,
                            _p_node_info: *const PipelineShaderStageNodeCreateInfoAMDX<'_>,
                            _p_node_index: *mut u32,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_execution_graph_pipeline_node_index_amdx)
                            ))
                        }
                        let val = _f(c"vkGetExecutionGraphPipelineNodeIndexAMDX");
                        if val.is_null() {
                            get_execution_graph_pipeline_node_index_amdx
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetExecutionGraphPipelineNodeIndexAMDX,
                            >(val)
                        }
                    },
                    cmd_initialize_graph_scratch_memory_amdx: unsafe {
                        unsafe extern "system" fn cmd_initialize_graph_scratch_memory_amdx(
                            _command_buffer: CommandBuffer,
                            _execution_graph: Pipeline,
                            _scratch: DeviceAddress,
                            _scratch_size: DeviceSize,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_initialize_graph_scratch_memory_amdx)
                            ))
                        }
                        let val = _f(c"vkCmdInitializeGraphScratchMemoryAMDX");
                        if val.is_null() {
                            cmd_initialize_graph_scratch_memory_amdx
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdInitializeGraphScratchMemoryAMDX,
                            >(val)
                        }
                    },
                    cmd_dispatch_graph_amdx: unsafe {
                        unsafe extern "system" fn cmd_dispatch_graph_amdx(
                            _command_buffer: CommandBuffer,
                            _scratch: DeviceAddress,
                            _scratch_size: DeviceSize,
                            _p_count_info: *const DispatchGraphCountInfoAMDX,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_dispatch_graph_amdx)
                            ))
                        }
                        let val = _f(c"vkCmdDispatchGraphAMDX");
                        if val.is_null() {
                            cmd_dispatch_graph_amdx
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDispatchGraphAMDX>(val)
                        }
                    },
                    cmd_dispatch_graph_indirect_amdx: unsafe {
                        unsafe extern "system" fn cmd_dispatch_graph_indirect_amdx(
                            _command_buffer: CommandBuffer,
                            _scratch: DeviceAddress,
                            _scratch_size: DeviceSize,
                            _p_count_info: *const DispatchGraphCountInfoAMDX,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_dispatch_graph_indirect_amdx)
                            ))
                        }
                        let val = _f(c"vkCmdDispatchGraphIndirectAMDX");
                        if val.is_null() {
                            cmd_dispatch_graph_indirect_amdx
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdDispatchGraphIndirectAMDX,
                            >(val)
                        }
                    },
                    cmd_dispatch_graph_indirect_count_amdx: unsafe {
                        unsafe extern "system" fn cmd_dispatch_graph_indirect_count_amdx(
                            _command_buffer: CommandBuffer,
                            _scratch: DeviceAddress,
                            _scratch_size: DeviceSize,
                            _count_info: DeviceAddress,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_dispatch_graph_indirect_count_amdx)
                            ))
                        }
                        let val = _f(c"vkCmdDispatchGraphIndirectCountAMDX");
                        if val.is_null() {
                            cmd_dispatch_graph_indirect_count_amdx
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdDispatchGraphIndirectCountAMDX,
                            >(val)
                        }
                    },
                }
            }
        }
    }
749 #[doc = "VK_AMDX_dense_geometry_format"]
750 pub mod dense_geometry_format {
751 use super::super::*;
752 pub use {
753 crate::vk::AMDX_DENSE_GEOMETRY_FORMAT_EXTENSION_NAME as NAME,
754 crate::vk::AMDX_DENSE_GEOMETRY_FORMAT_SPEC_VERSION as SPEC_VERSION,
755 };
756 }
757}
758#[doc = "Extensions tagged ANDROID"]
759pub mod android {
    #[doc = "VK_ANDROID_native_buffer"]
    pub mod native_buffer {
        use super::super::*;
        pub use {
            crate::vk::ANDROID_NATIVE_BUFFER_EXTENSION_NAME as NAME,
            crate::vk::ANDROID_NATIVE_BUFFER_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_ANDROID_native_buffer device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded extension entry points.
            pub(crate) fp: DeviceFn,
            // Device the entry points were loaded for; only valid with it.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's device-level commands for `device`
            /// through `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // SAFETY: reinterprets the optional fn pointer returned by
                    // the loader as a raw pointer; both are pointer-sized.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Handle the function pointers were loaded with.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_ANDROID_native_buffer device-level function pointers"]
        pub struct DeviceFn {
            pub get_swapchain_gralloc_usage_android: PFN_vkGetSwapchainGrallocUsageANDROID,
            pub acquire_image_android: PFN_vkAcquireImageANDROID,
            pub queue_signal_release_image_android: PFN_vkQueueSignalReleaseImageANDROID,
            pub get_swapchain_gralloc_usage2_android: PFN_vkGetSwapchainGrallocUsage2ANDROID,
        }
        // SAFETY: the table holds only plain function pointers, which are fine
        // to share and send across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Resolves each entry point by name via `f`; a name resolving to
            /// null gets a panicking fallback stub instead.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body, compiled once for all closure types.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_swapchain_gralloc_usage_android: unsafe {
                        // Fallback stub: panics if the entry point was absent
                        // at load time and is still called. (Each field below
                        // follows the same stub-or-transmute pattern.)
                        unsafe extern "system" fn get_swapchain_gralloc_usage_android(
                            _device: crate::vk::Device,
                            _format: Format,
                            _image_usage: ImageUsageFlags,
                            _gralloc_usage: *mut c_int,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_swapchain_gralloc_usage_android)
                            ))
                        }
                        let val = _f(c"vkGetSwapchainGrallocUsageANDROID");
                        if val.is_null() {
                            get_swapchain_gralloc_usage_android
                        } else {
                            // SAFETY: non-null pointer was resolved for this
                            // exact symbol, so it has the expected signature.
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetSwapchainGrallocUsageANDROID,
                            >(val)
                        }
                    },
                    acquire_image_android: unsafe {
                        unsafe extern "system" fn acquire_image_android(
                            _device: crate::vk::Device,
                            _image: Image,
                            _native_fence_fd: c_int,
                            _semaphore: Semaphore,
                            _fence: Fence,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(acquire_image_android)
                            ))
                        }
                        let val = _f(c"vkAcquireImageANDROID");
                        if val.is_null() {
                            acquire_image_android
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkAcquireImageANDROID>(val)
                        }
                    },
                    queue_signal_release_image_android: unsafe {
                        unsafe extern "system" fn queue_signal_release_image_android(
                            _queue: Queue,
                            _wait_semaphore_count: u32,
                            _p_wait_semaphores: *const Semaphore,
                            _image: Image,
                            _p_native_fence_fd: *mut c_int,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(queue_signal_release_image_android)
                            ))
                        }
                        let val = _f(c"vkQueueSignalReleaseImageANDROID");
                        if val.is_null() {
                            queue_signal_release_image_android
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkQueueSignalReleaseImageANDROID,
                            >(val)
                        }
                    },
                    get_swapchain_gralloc_usage2_android: unsafe {
                        unsafe extern "system" fn get_swapchain_gralloc_usage2_android(
                            _device: crate::vk::Device,
                            _format: Format,
                            _image_usage: ImageUsageFlags,
                            _swapchain_image_usage: SwapchainImageUsageFlagsANDROID,
                            _gralloc_consumer_usage: *mut u64,
                            _gralloc_producer_usage: *mut u64,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_swapchain_gralloc_usage2_android)
                            ))
                        }
                        let val = _f(c"vkGetSwapchainGrallocUsage2ANDROID");
                        if val.is_null() {
                            get_swapchain_gralloc_usage2_android
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetSwapchainGrallocUsage2ANDROID,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_ANDROID_external_memory_android_hardware_buffer"]
    pub mod external_memory_android_hardware_buffer {
        use super::super::*;
        pub use {
            crate::vk::ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME as NAME,
            crate::vk::ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_ANDROID_external_memory_android_hardware_buffer device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded extension entry points.
            pub(crate) fp: DeviceFn,
            // Device the entry points were loaded for; only valid with it.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's device-level commands for `device`
            /// through `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // SAFETY: reinterprets the optional fn pointer returned by
                    // the loader as a raw pointer; both are pointer-sized.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Handle the function pointers were loaded with.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_ANDROID_external_memory_android_hardware_buffer device-level function pointers"]
        pub struct DeviceFn {
            pub get_android_hardware_buffer_properties_android:
                PFN_vkGetAndroidHardwareBufferPropertiesANDROID,
            pub get_memory_android_hardware_buffer_android:
                PFN_vkGetMemoryAndroidHardwareBufferANDROID,
        }
        // SAFETY: the table holds only plain function pointers, which are fine
        // to share and send across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Resolves each entry point by name via `f`; a name resolving to
            /// null gets a panicking fallback stub instead.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body, compiled once for all closure types.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_android_hardware_buffer_properties_android: unsafe {
                        // Fallback stub: panics if the entry point was absent
                        // at load time and is still called.
                        unsafe extern "system" fn get_android_hardware_buffer_properties_android(
                            _device: crate::vk::Device,
                            _buffer: *const AHardwareBuffer,
                            _p_properties: *mut AndroidHardwareBufferPropertiesANDROID<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_android_hardware_buffer_properties_android)
                            ))
                        }
                        let val = _f(c"vkGetAndroidHardwareBufferPropertiesANDROID");
                        if val.is_null() {
                            get_android_hardware_buffer_properties_android
                        } else {
                            // SAFETY: non-null pointer was resolved for this
                            // exact symbol, so it has the expected signature.
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetAndroidHardwareBufferPropertiesANDROID,
                            >(val)
                        }
                    },
                    get_memory_android_hardware_buffer_android: unsafe {
                        // Fallback stub (see above).
                        unsafe extern "system" fn get_memory_android_hardware_buffer_android(
                            _device: crate::vk::Device,
                            _p_info: *const MemoryGetAndroidHardwareBufferInfoANDROID<'_>,
                            _p_buffer: *mut *mut AHardwareBuffer,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_memory_android_hardware_buffer_android)
                            ))
                        }
                        let val = _f(c"vkGetMemoryAndroidHardwareBufferANDROID");
                        if val.is_null() {
                            get_memory_android_hardware_buffer_android
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetMemoryAndroidHardwareBufferANDROID,
                            >(val)
                        }
                    },
                }
            }
        }
    }
995 #[doc = "VK_ANDROID_external_format_resolve"]
996 pub mod external_format_resolve {
997 use super::super::*;
998 pub use {
999 crate::vk::ANDROID_EXTERNAL_FORMAT_RESOLVE_EXTENSION_NAME as NAME,
1000 crate::vk::ANDROID_EXTERNAL_FORMAT_RESOLVE_SPEC_VERSION as SPEC_VERSION,
1001 };
1002 }
1003}
1004#[doc = "Extensions tagged ARM"]
1005pub mod arm {
1006 #[doc = "VK_ARM_rasterization_order_attachment_access"]
1007 pub mod rasterization_order_attachment_access {
1008 use super::super::*;
1009 pub use {
1010 crate::vk::ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME as NAME,
1011 crate::vk::ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION as SPEC_VERSION,
1012 };
1013 }
1014 #[doc = "VK_ARM_shader_core_properties"]
1015 pub mod shader_core_properties {
1016 use super::super::*;
1017 pub use {
1018 crate::vk::ARM_SHADER_CORE_PROPERTIES_EXTENSION_NAME as NAME,
1019 crate::vk::ARM_SHADER_CORE_PROPERTIES_SPEC_VERSION as SPEC_VERSION,
1020 };
1021 }
1022 #[doc = "VK_ARM_scheduling_controls"]
1023 pub mod scheduling_controls {
1024 use super::super::*;
1025 pub use {
1026 crate::vk::ARM_SCHEDULING_CONTROLS_EXTENSION_NAME as NAME,
1027 crate::vk::ARM_SCHEDULING_CONTROLS_SPEC_VERSION as SPEC_VERSION,
1028 };
1029 }
1030 #[doc = "VK_ARM_render_pass_striped"]
1031 pub mod render_pass_striped {
1032 use super::super::*;
1033 pub use {
1034 crate::vk::ARM_RENDER_PASS_STRIPED_EXTENSION_NAME as NAME,
1035 crate::vk::ARM_RENDER_PASS_STRIPED_SPEC_VERSION as SPEC_VERSION,
1036 };
1037 }
1038 #[doc = "VK_ARM_tensors"]
1039 pub mod tensors {
1040 use super::super::*;
1041 pub use {
1042 crate::vk::ARM_TENSORS_EXTENSION_NAME as NAME,
1043 crate::vk::ARM_TENSORS_SPEC_VERSION as SPEC_VERSION,
1044 };
1045 #[doc = "VK_ARM_tensors instance-level functions"]
1046 #[derive(Clone)]
1047 pub struct Instance {
1048 pub(crate) fp: InstanceFn,
1049 pub(crate) handle: crate::vk::Instance,
1050 }
1051 impl Instance {
1052 pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
1053 let handle = instance.handle();
1054 let fp = InstanceFn::load(|name| unsafe {
1055 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
1056 entry.get_instance_proc_addr(handle, name.as_ptr()),
1057 )
1058 });
1059 Self { handle, fp }
1060 }
1061 #[inline]
1062 pub fn fp(&self) -> &InstanceFn {
1063 &self.fp
1064 }
1065 #[inline]
1066 pub fn instance(&self) -> crate::vk::Instance {
1067 self.handle
1068 }
1069 }
        #[derive(Clone)]
        #[doc = "Raw VK_ARM_tensors instance-level function pointers"]
        pub struct InstanceFn {
            pub get_physical_device_external_tensor_properties_arm:
                PFN_vkGetPhysicalDeviceExternalTensorPropertiesARM,
        }
        // SAFETY: the struct holds only plain C function pointers, which carry
        // no thread-affine state themselves. NOTE(review): assumes the loaded
        // Vulkan entry points may be called from any thread per the spec's
        // external-synchronization rules — confirm for each command.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Build the table by querying `f` for each entry point's C symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body: compiled once regardless of the concrete
            // closure type passed to `load`. A missing symbol is replaced by a
            // stub that panics if it is ever invoked.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_physical_device_external_tensor_properties_arm: unsafe {
                        // Panicking fallback used when the driver does not expose the symbol.
                        unsafe extern "system" fn get_physical_device_external_tensor_properties_arm(
                            _physical_device: PhysicalDevice,
                            _p_external_tensor_info: *const PhysicalDeviceExternalTensorInfoARM<'_>,
                            _p_external_tensor_properties: *mut ExternalTensorPropertiesARM<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_external_tensor_properties_arm)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceExternalTensorPropertiesARM");
                        if val.is_null() {
                            get_physical_device_external_tensor_properties_arm
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceExternalTensorPropertiesARM,
                            >(val)
                        }
                    },
                }
            }
        }
1108 #[doc = "VK_ARM_tensors device-level functions"]
1109 #[derive(Clone)]
1110 pub struct Device {
1111 pub(crate) fp: DeviceFn,
1112 pub(crate) handle: crate::vk::Device,
1113 }
1114 impl Device {
1115 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
1116 let handle = device.handle();
1117 let fp = DeviceFn::load(|name| unsafe {
1118 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
1119 instance.get_device_proc_addr(handle, name.as_ptr()),
1120 )
1121 });
1122 Self { handle, fp }
1123 }
1124 #[inline]
1125 pub fn fp(&self) -> &DeviceFn {
1126 &self.fp
1127 }
1128 #[inline]
1129 pub fn device(&self) -> crate::vk::Device {
1130 self.handle
1131 }
1132 }
        #[derive(Clone)]
        #[doc = "Raw VK_ARM_tensors device-level function pointers"]
        pub struct DeviceFn {
            pub create_tensor_arm: PFN_vkCreateTensorARM,
            pub destroy_tensor_arm: PFN_vkDestroyTensorARM,
            pub create_tensor_view_arm: PFN_vkCreateTensorViewARM,
            pub destroy_tensor_view_arm: PFN_vkDestroyTensorViewARM,
            pub get_tensor_memory_requirements_arm: PFN_vkGetTensorMemoryRequirementsARM,
            pub bind_tensor_memory_arm: PFN_vkBindTensorMemoryARM,
            pub get_device_tensor_memory_requirements_arm:
                PFN_vkGetDeviceTensorMemoryRequirementsARM,
            pub cmd_copy_tensor_arm: PFN_vkCmdCopyTensorARM,
            pub get_tensor_opaque_capture_descriptor_data_arm:
                PFN_vkGetTensorOpaqueCaptureDescriptorDataARM,
            pub get_tensor_view_opaque_capture_descriptor_data_arm:
                PFN_vkGetTensorViewOpaqueCaptureDescriptorDataARM,
        }
        // SAFETY: the struct holds only plain C function pointers. NOTE(review):
        // thread-safety of the underlying commands follows the Vulkan spec's
        // external-synchronization rules, not this impl — confirm per command.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Build the table by querying `f` for each entry point's C symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body: compiled once regardless of the concrete
            // closure type. Every entry uses the same pattern — a local panicking
            // stub is installed whenever the driver does not expose the symbol.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_tensor_arm: unsafe {
                        unsafe extern "system" fn create_tensor_arm(
                            _device: crate::vk::Device,
                            _p_create_info: *const TensorCreateInfoARM<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_tensor: *mut TensorARM,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(create_tensor_arm)))
                        }
                        let val = _f(c"vkCreateTensorARM");
                        if val.is_null() {
                            create_tensor_arm
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateTensorARM>(val)
                        }
                    },
                    destroy_tensor_arm: unsafe {
                        unsafe extern "system" fn destroy_tensor_arm(
                            _device: crate::vk::Device,
                            _tensor: TensorARM,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(destroy_tensor_arm)))
                        }
                        let val = _f(c"vkDestroyTensorARM");
                        if val.is_null() {
                            destroy_tensor_arm
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkDestroyTensorARM>(val)
                        }
                    },
                    create_tensor_view_arm: unsafe {
                        unsafe extern "system" fn create_tensor_view_arm(
                            _device: crate::vk::Device,
                            _p_create_info: *const TensorViewCreateInfoARM<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_view: *mut TensorViewARM,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_tensor_view_arm)
                            ))
                        }
                        let val = _f(c"vkCreateTensorViewARM");
                        if val.is_null() {
                            create_tensor_view_arm
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateTensorViewARM>(val)
                        }
                    },
                    destroy_tensor_view_arm: unsafe {
                        unsafe extern "system" fn destroy_tensor_view_arm(
                            _device: crate::vk::Device,
                            _tensor_view: TensorViewARM,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(destroy_tensor_view_arm)
                            ))
                        }
                        let val = _f(c"vkDestroyTensorViewARM");
                        if val.is_null() {
                            destroy_tensor_view_arm
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkDestroyTensorViewARM>(val)
                        }
                    },
                    get_tensor_memory_requirements_arm: unsafe {
                        unsafe extern "system" fn get_tensor_memory_requirements_arm(
                            _device: crate::vk::Device,
                            _p_info: *const TensorMemoryRequirementsInfoARM<'_>,
                            _p_memory_requirements: *mut MemoryRequirements2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_tensor_memory_requirements_arm)
                            ))
                        }
                        let val = _f(c"vkGetTensorMemoryRequirementsARM");
                        if val.is_null() {
                            get_tensor_memory_requirements_arm
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetTensorMemoryRequirementsARM,
                            >(val)
                        }
                    },
                    bind_tensor_memory_arm: unsafe {
                        unsafe extern "system" fn bind_tensor_memory_arm(
                            _device: crate::vk::Device,
                            _bind_info_count: u32,
                            _p_bind_infos: *const BindTensorMemoryInfoARM<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(bind_tensor_memory_arm)
                            ))
                        }
                        let val = _f(c"vkBindTensorMemoryARM");
                        if val.is_null() {
                            bind_tensor_memory_arm
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkBindTensorMemoryARM>(val)
                        }
                    },
                    get_device_tensor_memory_requirements_arm: unsafe {
                        unsafe extern "system" fn get_device_tensor_memory_requirements_arm(
                            _device: crate::vk::Device,
                            _p_info: *const DeviceTensorMemoryRequirementsARM<'_>,
                            _p_memory_requirements: *mut MemoryRequirements2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_device_tensor_memory_requirements_arm)
                            ))
                        }
                        let val = _f(c"vkGetDeviceTensorMemoryRequirementsARM");
                        if val.is_null() {
                            get_device_tensor_memory_requirements_arm
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDeviceTensorMemoryRequirementsARM,
                            >(val)
                        }
                    },
                    cmd_copy_tensor_arm: unsafe {
                        unsafe extern "system" fn cmd_copy_tensor_arm(
                            _command_buffer: CommandBuffer,
                            _p_copy_tensor_info: *const CopyTensorInfoARM<'_>,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(cmd_copy_tensor_arm)))
                        }
                        let val = _f(c"vkCmdCopyTensorARM");
                        if val.is_null() {
                            cmd_copy_tensor_arm
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdCopyTensorARM>(val)
                        }
                    },
                    get_tensor_opaque_capture_descriptor_data_arm: unsafe {
                        unsafe extern "system" fn get_tensor_opaque_capture_descriptor_data_arm(
                            _device: crate::vk::Device,
                            _p_info: *const TensorCaptureDescriptorDataInfoARM<'_>,
                            _p_data: *mut c_void,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_tensor_opaque_capture_descriptor_data_arm)
                            ))
                        }
                        let val = _f(c"vkGetTensorOpaqueCaptureDescriptorDataARM");
                        if val.is_null() {
                            get_tensor_opaque_capture_descriptor_data_arm
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetTensorOpaqueCaptureDescriptorDataARM,
                            >(val)
                        }
                    },
                    get_tensor_view_opaque_capture_descriptor_data_arm: unsafe {
                        unsafe extern "system" fn get_tensor_view_opaque_capture_descriptor_data_arm(
                            _device: crate::vk::Device,
                            _p_info: *const TensorViewCaptureDescriptorDataInfoARM<'_>,
                            _p_data: *mut c_void,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_tensor_view_opaque_capture_descriptor_data_arm)
                            ))
                        }
                        let val = _f(c"vkGetTensorViewOpaqueCaptureDescriptorDataARM");
                        if val.is_null() {
                            get_tensor_view_opaque_capture_descriptor_data_arm
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetTensorViewOpaqueCaptureDescriptorDataARM,
                            >(val)
                        }
                    },
                }
            }
        }
1345 }
1346 #[doc = "VK_ARM_shader_core_builtins"]
1347 pub mod shader_core_builtins {
1348 use super::super::*;
1349 pub use {
1350 crate::vk::ARM_SHADER_CORE_BUILTINS_EXTENSION_NAME as NAME,
1351 crate::vk::ARM_SHADER_CORE_BUILTINS_SPEC_VERSION as SPEC_VERSION,
1352 };
1353 }
1354 #[doc = "VK_ARM_data_graph"]
1355 pub mod data_graph {
1356 use super::super::*;
1357 pub use {
1358 crate::vk::ARM_DATA_GRAPH_EXTENSION_NAME as NAME,
1359 crate::vk::ARM_DATA_GRAPH_SPEC_VERSION as SPEC_VERSION,
1360 };
1361 #[doc = "VK_ARM_data_graph instance-level functions"]
1362 #[derive(Clone)]
1363 pub struct Instance {
1364 pub(crate) fp: InstanceFn,
1365 pub(crate) handle: crate::vk::Instance,
1366 }
1367 impl Instance {
1368 pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
1369 let handle = instance.handle();
1370 let fp = InstanceFn::load(|name| unsafe {
1371 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
1372 entry.get_instance_proc_addr(handle, name.as_ptr()),
1373 )
1374 });
1375 Self { handle, fp }
1376 }
1377 #[inline]
1378 pub fn fp(&self) -> &InstanceFn {
1379 &self.fp
1380 }
1381 #[inline]
1382 pub fn instance(&self) -> crate::vk::Instance {
1383 self.handle
1384 }
1385 }
        #[derive(Clone)]
        #[doc = "Raw VK_ARM_data_graph instance-level function pointers"]
        pub struct InstanceFn {
            pub get_physical_device_queue_family_data_graph_properties_arm:
                PFN_vkGetPhysicalDeviceQueueFamilyDataGraphPropertiesARM,
            pub get_physical_device_queue_family_data_graph_processing_engine_properties_arm:
                PFN_vkGetPhysicalDeviceQueueFamilyDataGraphProcessingEnginePropertiesARM,
        }
        // SAFETY: the struct holds only plain C function pointers. NOTE(review):
        // thread-safety of the underlying commands follows the Vulkan spec's
        // external-synchronization rules, not this impl — confirm per command.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Build the table by querying `f` for each entry point's C symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body: compiled once regardless of the concrete
            // closure type. Missing symbols fall back to stubs that panic if
            // they are ever invoked.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_physical_device_queue_family_data_graph_properties_arm: unsafe {
                        unsafe extern "system" fn get_physical_device_queue_family_data_graph_properties_arm(
                            _physical_device: PhysicalDevice,
                            _queue_family_index: u32,
                            _p_queue_family_data_graph_property_count: *mut u32,
                            _p_queue_family_data_graph_properties : * mut QueueFamilyDataGraphPropertiesARM < '_ >,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(
                                    get_physical_device_queue_family_data_graph_properties_arm
                                )
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceQueueFamilyDataGraphPropertiesARM");
                        if val.is_null() {
                            get_physical_device_queue_family_data_graph_properties_arm
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceQueueFamilyDataGraphPropertiesARM,
                            >(val)
                        }
                    },
                    get_physical_device_queue_family_data_graph_processing_engine_properties_arm: unsafe {
                        unsafe extern "system" fn get_physical_device_queue_family_data_graph_processing_engine_properties_arm(
                            _physical_device: PhysicalDevice,
                            _p_queue_family_data_graph_processing_engine_info : * const PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM < '_ >,
                            _p_queue_family_data_graph_processing_engine_properties : * mut QueueFamilyDataGraphProcessingEnginePropertiesARM < '_ >,
                        ) {
                            panic ! (concat ! ("Unable to load " , stringify ! (get_physical_device_queue_family_data_graph_processing_engine_properties_arm)))
                        }
                        let val = _f(
                            c"vkGetPhysicalDeviceQueueFamilyDataGraphProcessingEnginePropertiesARM",
                        );
                        if val.is_null() {
                            get_physical_device_queue_family_data_graph_processing_engine_properties_arm
                        } else {
                            :: core :: mem :: transmute :: < * const c_void , PFN_vkGetPhysicalDeviceQueueFamilyDataGraphProcessingEnginePropertiesARM > (val)
                        }
                    },
                }
            }
        }
1446 #[doc = "VK_ARM_data_graph device-level functions"]
1447 #[derive(Clone)]
1448 pub struct Device {
1449 pub(crate) fp: DeviceFn,
1450 pub(crate) handle: crate::vk::Device,
1451 }
1452 impl Device {
1453 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
1454 let handle = device.handle();
1455 let fp = DeviceFn::load(|name| unsafe {
1456 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
1457 instance.get_device_proc_addr(handle, name.as_ptr()),
1458 )
1459 });
1460 Self { handle, fp }
1461 }
1462 #[inline]
1463 pub fn fp(&self) -> &DeviceFn {
1464 &self.fp
1465 }
1466 #[inline]
1467 pub fn device(&self) -> crate::vk::Device {
1468 self.handle
1469 }
1470 }
        #[derive(Clone)]
        #[doc = "Raw VK_ARM_data_graph device-level function pointers"]
        pub struct DeviceFn {
            pub create_data_graph_pipelines_arm: PFN_vkCreateDataGraphPipelinesARM,
            pub create_data_graph_pipeline_session_arm: PFN_vkCreateDataGraphPipelineSessionARM,
            pub get_data_graph_pipeline_session_bind_point_requirements_arm:
                PFN_vkGetDataGraphPipelineSessionBindPointRequirementsARM,
            pub get_data_graph_pipeline_session_memory_requirements_arm:
                PFN_vkGetDataGraphPipelineSessionMemoryRequirementsARM,
            pub bind_data_graph_pipeline_session_memory_arm:
                PFN_vkBindDataGraphPipelineSessionMemoryARM,
            pub destroy_data_graph_pipeline_session_arm: PFN_vkDestroyDataGraphPipelineSessionARM,
            pub cmd_dispatch_data_graph_arm: PFN_vkCmdDispatchDataGraphARM,
            pub get_data_graph_pipeline_available_properties_arm:
                PFN_vkGetDataGraphPipelineAvailablePropertiesARM,
            pub get_data_graph_pipeline_properties_arm: PFN_vkGetDataGraphPipelinePropertiesARM,
        }
        // SAFETY: the struct holds only plain C function pointers. NOTE(review):
        // thread-safety of the underlying commands follows the Vulkan spec's
        // external-synchronization rules, not this impl — confirm per command.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Build the table by querying `f` for each entry point's C symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body: compiled once regardless of the concrete
            // closure type. Every entry uses the same pattern — a local panicking
            // stub is installed whenever the driver does not expose the symbol.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_data_graph_pipelines_arm: unsafe {
                        unsafe extern "system" fn create_data_graph_pipelines_arm(
                            _device: crate::vk::Device,
                            _deferred_operation: DeferredOperationKHR,
                            _pipeline_cache: PipelineCache,
                            _create_info_count: u32,
                            _p_create_infos: *const DataGraphPipelineCreateInfoARM<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_pipelines: *mut Pipeline,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_data_graph_pipelines_arm)
                            ))
                        }
                        let val = _f(c"vkCreateDataGraphPipelinesARM");
                        if val.is_null() {
                            create_data_graph_pipelines_arm
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateDataGraphPipelinesARM>(
                                val,
                            )
                        }
                    },
                    create_data_graph_pipeline_session_arm: unsafe {
                        unsafe extern "system" fn create_data_graph_pipeline_session_arm(
                            _device: crate::vk::Device,
                            _p_create_info: *const DataGraphPipelineSessionCreateInfoARM<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_session: *mut DataGraphPipelineSessionARM,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_data_graph_pipeline_session_arm)
                            ))
                        }
                        let val = _f(c"vkCreateDataGraphPipelineSessionARM");
                        if val.is_null() {
                            create_data_graph_pipeline_session_arm
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCreateDataGraphPipelineSessionARM,
                            >(val)
                        }
                    },
                    get_data_graph_pipeline_session_bind_point_requirements_arm: unsafe {
                        unsafe extern "system" fn get_data_graph_pipeline_session_bind_point_requirements_arm(
                            _device: crate::vk::Device,
                            _p_info: *const DataGraphPipelineSessionBindPointRequirementsInfoARM<
                                '_,
                            >,
                            _p_bind_point_requirement_count: *mut u32,
                            _p_bind_point_requirements : * mut DataGraphPipelineSessionBindPointRequirementARM < '_ >,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(
                                    get_data_graph_pipeline_session_bind_point_requirements_arm
                                )
                            ))
                        }
                        let val = _f(c"vkGetDataGraphPipelineSessionBindPointRequirementsARM");
                        if val.is_null() {
                            get_data_graph_pipeline_session_bind_point_requirements_arm
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDataGraphPipelineSessionBindPointRequirementsARM,
                            >(val)
                        }
                    },
                    get_data_graph_pipeline_session_memory_requirements_arm: unsafe {
                        unsafe extern "system" fn get_data_graph_pipeline_session_memory_requirements_arm(
                            _device: crate::vk::Device,
                            _p_info: *const DataGraphPipelineSessionMemoryRequirementsInfoARM<'_>,
                            _p_memory_requirements: *mut MemoryRequirements2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_data_graph_pipeline_session_memory_requirements_arm)
                            ))
                        }
                        let val = _f(c"vkGetDataGraphPipelineSessionMemoryRequirementsARM");
                        if val.is_null() {
                            get_data_graph_pipeline_session_memory_requirements_arm
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDataGraphPipelineSessionMemoryRequirementsARM,
                            >(val)
                        }
                    },
                    bind_data_graph_pipeline_session_memory_arm: unsafe {
                        unsafe extern "system" fn bind_data_graph_pipeline_session_memory_arm(
                            _device: crate::vk::Device,
                            _bind_info_count: u32,
                            _p_bind_infos: *const BindDataGraphPipelineSessionMemoryInfoARM<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(bind_data_graph_pipeline_session_memory_arm)
                            ))
                        }
                        let val = _f(c"vkBindDataGraphPipelineSessionMemoryARM");
                        if val.is_null() {
                            bind_data_graph_pipeline_session_memory_arm
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkBindDataGraphPipelineSessionMemoryARM,
                            >(val)
                        }
                    },
                    destroy_data_graph_pipeline_session_arm: unsafe {
                        unsafe extern "system" fn destroy_data_graph_pipeline_session_arm(
                            _device: crate::vk::Device,
                            _session: DataGraphPipelineSessionARM,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(destroy_data_graph_pipeline_session_arm)
                            ))
                        }
                        let val = _f(c"vkDestroyDataGraphPipelineSessionARM");
                        if val.is_null() {
                            destroy_data_graph_pipeline_session_arm
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkDestroyDataGraphPipelineSessionARM,
                            >(val)
                        }
                    },
                    cmd_dispatch_data_graph_arm: unsafe {
                        unsafe extern "system" fn cmd_dispatch_data_graph_arm(
                            _command_buffer: CommandBuffer,
                            _session: DataGraphPipelineSessionARM,
                            _p_info: *const DataGraphPipelineDispatchInfoARM<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_dispatch_data_graph_arm)
                            ))
                        }
                        let val = _f(c"vkCmdDispatchDataGraphARM");
                        if val.is_null() {
                            cmd_dispatch_data_graph_arm
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDispatchDataGraphARM>(
                                val,
                            )
                        }
                    },
                    get_data_graph_pipeline_available_properties_arm: unsafe {
                        unsafe extern "system" fn get_data_graph_pipeline_available_properties_arm(
                            _device: crate::vk::Device,
                            _p_pipeline_info: *const DataGraphPipelineInfoARM<'_>,
                            _p_properties_count: *mut u32,
                            _p_properties: *mut DataGraphPipelinePropertyARM,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_data_graph_pipeline_available_properties_arm)
                            ))
                        }
                        let val = _f(c"vkGetDataGraphPipelineAvailablePropertiesARM");
                        if val.is_null() {
                            get_data_graph_pipeline_available_properties_arm
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDataGraphPipelineAvailablePropertiesARM,
                            >(val)
                        }
                    },
                    get_data_graph_pipeline_properties_arm: unsafe {
                        unsafe extern "system" fn get_data_graph_pipeline_properties_arm(
                            _device: crate::vk::Device,
                            _p_pipeline_info: *const DataGraphPipelineInfoARM<'_>,
                            _properties_count: u32,
                            _p_properties: *mut DataGraphPipelinePropertyQueryResultARM<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_data_graph_pipeline_properties_arm)
                            ))
                        }
                        let val = _f(c"vkGetDataGraphPipelinePropertiesARM");
                        if val.is_null() {
                            get_data_graph_pipeline_properties_arm
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDataGraphPipelinePropertiesARM,
                            >(val)
                        }
                    },
                }
            }
        }
1698 }
1699 #[doc = "VK_ARM_pipeline_opacity_micromap"]
1700 pub mod pipeline_opacity_micromap {
1701 use super::super::*;
1702 pub use {
1703 crate::vk::ARM_PIPELINE_OPACITY_MICROMAP_EXTENSION_NAME as NAME,
1704 crate::vk::ARM_PIPELINE_OPACITY_MICROMAP_SPEC_VERSION as SPEC_VERSION,
1705 };
1706 }
1707 #[doc = "VK_ARM_performance_counters_by_region"]
1708 pub mod performance_counters_by_region {
1709 use super::super::*;
1710 pub use {
1711 crate::vk::ARM_PERFORMANCE_COUNTERS_BY_REGION_EXTENSION_NAME as NAME,
1712 crate::vk::ARM_PERFORMANCE_COUNTERS_BY_REGION_SPEC_VERSION as SPEC_VERSION,
1713 };
1714 #[doc = "VK_ARM_performance_counters_by_region instance-level functions"]
1715 #[derive(Clone)]
1716 pub struct Instance {
1717 pub(crate) fp: InstanceFn,
1718 pub(crate) handle: crate::vk::Instance,
1719 }
1720 impl Instance {
1721 pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
1722 let handle = instance.handle();
1723 let fp = InstanceFn::load(|name| unsafe {
1724 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
1725 entry.get_instance_proc_addr(handle, name.as_ptr()),
1726 )
1727 });
1728 Self { handle, fp }
1729 }
1730 #[inline]
1731 pub fn fp(&self) -> &InstanceFn {
1732 &self.fp
1733 }
1734 #[inline]
1735 pub fn instance(&self) -> crate::vk::Instance {
1736 self.handle
1737 }
1738 }
        #[derive(Clone)]
        #[doc = "Raw VK_ARM_performance_counters_by_region instance-level function pointers"]
        pub struct InstanceFn {
            pub enumerate_physical_device_queue_family_performance_counters_by_region_arm:
                PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceCountersByRegionARM,
        }
        // SAFETY: the struct holds only plain C function pointers. NOTE(review):
        // thread-safety of the underlying command follows the Vulkan spec's
        // external-synchronization rules, not this impl — confirm per command.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Build the table by querying `f` for each entry point's C symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body: compiled once regardless of the concrete
            // closure type. A missing symbol falls back to a stub that panics
            // if it is ever invoked.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    enumerate_physical_device_queue_family_performance_counters_by_region_arm: unsafe {
                        unsafe extern "system" fn enumerate_physical_device_queue_family_performance_counters_by_region_arm(
                            _physical_device: PhysicalDevice,
                            _queue_family_index: u32,
                            _p_counter_count: *mut u32,
                            _p_counters: *mut PerformanceCounterARM<'_>,
                            _p_counter_descriptions: *mut PerformanceCounterDescriptionARM<'_>,
                        ) -> Result {
                            panic ! (concat ! ("Unable to load " , stringify ! (enumerate_physical_device_queue_family_performance_counters_by_region_arm)))
                        }
                        let val = _f(
                            c"vkEnumeratePhysicalDeviceQueueFamilyPerformanceCountersByRegionARM",
                        );
                        if val.is_null() {
                            enumerate_physical_device_queue_family_performance_counters_by_region_arm
                        } else {
                            :: core :: mem :: transmute :: < * const c_void , PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceCountersByRegionARM > (val)
                        }
                    },
                }
            }
        }
1775 }
1776 #[doc = "VK_ARM_format_pack"]
1777 pub mod format_pack {
1778 use super::super::*;
1779 pub use {
1780 crate::vk::ARM_FORMAT_PACK_EXTENSION_NAME as NAME,
1781 crate::vk::ARM_FORMAT_PACK_SPEC_VERSION as SPEC_VERSION,
1782 };
1783 }
1784}
1785#[doc = "Extensions tagged EXT"]
1786pub mod ext {
1787 #[doc = "VK_EXT_debug_report"]
1788 pub mod debug_report {
1789 use super::super::*;
1790 pub use {
1791 crate::vk::EXT_DEBUG_REPORT_EXTENSION_NAME as NAME,
1792 crate::vk::EXT_DEBUG_REPORT_SPEC_VERSION as SPEC_VERSION,
1793 };
1794 #[doc = "VK_EXT_debug_report instance-level functions"]
1795 #[derive(Clone)]
1796 pub struct Instance {
1797 pub(crate) fp: InstanceFn,
1798 pub(crate) handle: crate::vk::Instance,
1799 }
1800 impl Instance {
1801 pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
1802 let handle = instance.handle();
1803 let fp = InstanceFn::load(|name| unsafe {
1804 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
1805 entry.get_instance_proc_addr(handle, name.as_ptr()),
1806 )
1807 });
1808 Self { handle, fp }
1809 }
1810 #[inline]
1811 pub fn fp(&self) -> &InstanceFn {
1812 &self.fp
1813 }
1814 #[inline]
1815 pub fn instance(&self) -> crate::vk::Instance {
1816 self.handle
1817 }
1818 }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_debug_report instance-level function pointers"]
        pub struct InstanceFn {
            pub create_debug_report_callback_ext: PFN_vkCreateDebugReportCallbackEXT,
            pub destroy_debug_report_callback_ext: PFN_vkDestroyDebugReportCallbackEXT,
            pub debug_report_message_ext: PFN_vkDebugReportMessageEXT,
        }
        // SAFETY: the struct holds only plain C function pointers. NOTE(review):
        // thread-safety of the underlying commands follows the Vulkan spec's
        // external-synchronization rules, not this impl — confirm per command.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Build the table by querying `f` for each entry point's C symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body: compiled once regardless of the concrete
            // closure type. Every entry uses the same pattern — a local panicking
            // stub is installed whenever the driver does not expose the symbol.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_debug_report_callback_ext: unsafe {
                        unsafe extern "system" fn create_debug_report_callback_ext(
                            _instance: crate::vk::Instance,
                            _p_create_info: *const DebugReportCallbackCreateInfoEXT<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_callback: *mut DebugReportCallbackEXT,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_debug_report_callback_ext)
                            ))
                        }
                        let val = _f(c"vkCreateDebugReportCallbackEXT");
                        if val.is_null() {
                            create_debug_report_callback_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCreateDebugReportCallbackEXT,
                            >(val)
                        }
                    },
                    destroy_debug_report_callback_ext: unsafe {
                        unsafe extern "system" fn destroy_debug_report_callback_ext(
                            _instance: crate::vk::Instance,
                            _callback: DebugReportCallbackEXT,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(destroy_debug_report_callback_ext)
                            ))
                        }
                        let val = _f(c"vkDestroyDebugReportCallbackEXT");
                        if val.is_null() {
                            destroy_debug_report_callback_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkDestroyDebugReportCallbackEXT,
                            >(val)
                        }
                    },
                    debug_report_message_ext: unsafe {
                        unsafe extern "system" fn debug_report_message_ext(
                            _instance: crate::vk::Instance,
                            _flags: DebugReportFlagsEXT,
                            _object_type: DebugReportObjectTypeEXT,
                            _object: u64,
                            _location: usize,
                            _message_code: i32,
                            _p_layer_prefix: *const c_char,
                            _p_message: *const c_char,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(debug_report_message_ext)
                            ))
                        }
                        let val = _f(c"vkDebugReportMessageEXT");
                        if val.is_null() {
                            debug_report_message_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkDebugReportMessageEXT>(
                                val,
                            )
                        }
                    },
                }
            }
        }
1905 }
1906 #[doc = "VK_EXT_depth_range_unrestricted"]
1907 pub mod depth_range_unrestricted {
1908 use super::super::*;
1909 pub use {
1910 crate::vk::EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME as NAME,
1911 crate::vk::EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION as SPEC_VERSION,
1912 };
1913 }
1914 #[doc = "VK_EXT_debug_marker"]
1915 pub mod debug_marker {
1916 use super::super::*;
1917 pub use {
1918 crate::vk::EXT_DEBUG_MARKER_EXTENSION_NAME as NAME,
1919 crate::vk::EXT_DEBUG_MARKER_SPEC_VERSION as SPEC_VERSION,
1920 };
1921 #[doc = "VK_EXT_debug_marker device-level functions"]
1922 #[derive(Clone)]
1923 pub struct Device {
1924 pub(crate) fp: DeviceFn,
1925 pub(crate) handle: crate::vk::Device,
1926 }
1927 impl Device {
1928 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
1929 let handle = device.handle();
1930 let fp = DeviceFn::load(|name| unsafe {
1931 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
1932 instance.get_device_proc_addr(handle, name.as_ptr()),
1933 )
1934 });
1935 Self { handle, fp }
1936 }
1937 #[inline]
1938 pub fn fp(&self) -> &DeviceFn {
1939 &self.fp
1940 }
1941 #[inline]
1942 pub fn device(&self) -> crate::vk::Device {
1943 self.handle
1944 }
1945 }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_debug_marker device-level function pointers"]
        pub struct DeviceFn {
            pub debug_marker_set_object_tag_ext: PFN_vkDebugMarkerSetObjectTagEXT,
            pub debug_marker_set_object_name_ext: PFN_vkDebugMarkerSetObjectNameEXT,
            pub cmd_debug_marker_begin_ext: PFN_vkCmdDebugMarkerBeginEXT,
            pub cmd_debug_marker_end_ext: PFN_vkCmdDebugMarkerEndEXT,
            pub cmd_debug_marker_insert_ext: PFN_vkCmdDebugMarkerInsertEXT,
        }
        // SAFETY: the struct holds only plain C function pointers. NOTE(review):
        // thread-safety of the underlying commands follows the Vulkan spec's
        // external-synchronization rules, not this impl — confirm per command.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Build the table by querying `f` for each entry point's C symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body: compiled once regardless of the concrete
            // closure type. Every entry uses the same pattern — a local panicking
            // stub is installed whenever the driver does not expose the symbol.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    debug_marker_set_object_tag_ext: unsafe {
                        unsafe extern "system" fn debug_marker_set_object_tag_ext(
                            _device: crate::vk::Device,
                            _p_tag_info: *const DebugMarkerObjectTagInfoEXT<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(debug_marker_set_object_tag_ext)
                            ))
                        }
                        let val = _f(c"vkDebugMarkerSetObjectTagEXT");
                        if val.is_null() {
                            debug_marker_set_object_tag_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkDebugMarkerSetObjectTagEXT>(
                                val,
                            )
                        }
                    },
                    debug_marker_set_object_name_ext: unsafe {
                        unsafe extern "system" fn debug_marker_set_object_name_ext(
                            _device: crate::vk::Device,
                            _p_name_info: *const DebugMarkerObjectNameInfoEXT<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(debug_marker_set_object_name_ext)
                            ))
                        }
                        let val = _f(c"vkDebugMarkerSetObjectNameEXT");
                        if val.is_null() {
                            debug_marker_set_object_name_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkDebugMarkerSetObjectNameEXT>(
                                val,
                            )
                        }
                    },
                    cmd_debug_marker_begin_ext: unsafe {
                        unsafe extern "system" fn cmd_debug_marker_begin_ext(
                            _command_buffer: CommandBuffer,
                            _p_marker_info: *const DebugMarkerMarkerInfoEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_debug_marker_begin_ext)
                            ))
                        }
                        let val = _f(c"vkCmdDebugMarkerBeginEXT");
                        if val.is_null() {
                            cmd_debug_marker_begin_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDebugMarkerBeginEXT>(
                                val,
                            )
                        }
                    },
                    cmd_debug_marker_end_ext: unsafe {
                        unsafe extern "system" fn cmd_debug_marker_end_ext(
                            _command_buffer: CommandBuffer,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_debug_marker_end_ext)
                            ))
                        }
                        let val = _f(c"vkCmdDebugMarkerEndEXT");
                        if val.is_null() {
                            cmd_debug_marker_end_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDebugMarkerEndEXT>(val)
                        }
                    },
                    cmd_debug_marker_insert_ext: unsafe {
                        unsafe extern "system" fn cmd_debug_marker_insert_ext(
                            _command_buffer: CommandBuffer,
                            _p_marker_info: *const DebugMarkerMarkerInfoEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_debug_marker_insert_ext)
                            ))
                        }
                        let val = _f(c"vkCmdDebugMarkerInsertEXT");
                        if val.is_null() {
                            cmd_debug_marker_insert_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDebugMarkerInsertEXT>(
                                val,
                            )
                        }
                    },
                }
            }
        }
2058 }
    #[doc = "VK_EXT_transform_feedback"]
    pub mod transform_feedback {
        use super::super::*;
        pub use {
            crate::vk::EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME as NAME,
            crate::vk::EXT_TRANSFORM_FEEDBACK_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_transform_feedback device-level functions"]
        #[derive(Clone)]
        // Wrapper pairing the loaded function-pointer table with the device
        // handle it was loaded for.
        pub struct Device {
            pub(crate) fp: DeviceFn,
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves the extension's commands for `device` through
            // vkGetDeviceProcAddr obtained from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Borrow of the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // The device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_transform_feedback device-level function pointers"]
        // Populated by `DeviceFn::load`; unresolved entries become panicking stubs.
        pub struct DeviceFn {
            pub cmd_bind_transform_feedback_buffers_ext: PFN_vkCmdBindTransformFeedbackBuffersEXT,
            pub cmd_begin_transform_feedback_ext: PFN_vkCmdBeginTransformFeedbackEXT,
            pub cmd_end_transform_feedback_ext: PFN_vkCmdEndTransformFeedbackEXT,
            pub cmd_begin_query_indexed_ext: PFN_vkCmdBeginQueryIndexedEXT,
            pub cmd_end_query_indexed_ext: PFN_vkCmdEndQueryIndexedEXT,
            pub cmd_draw_indirect_byte_count_ext: PFN_vkCmdDrawIndirectByteCountEXT,
        }
        // SAFETY: only plain function pointers are stored; no thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            // Resolves every command through `f` (name -> address, null if absent).
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased body so the loader is compiled once, not per closure type.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    // SAFETY (each field): null lookups fall back to a panicking stub
                    // with the matching signature, since transmuting null to a bare
                    // `fn` pointer would be undefined behavior.
                    cmd_bind_transform_feedback_buffers_ext: unsafe {
                        unsafe extern "system" fn cmd_bind_transform_feedback_buffers_ext(
                            _command_buffer: CommandBuffer,
                            _first_binding: u32,
                            _binding_count: u32,
                            _p_buffers: *const Buffer,
                            _p_offsets: *const DeviceSize,
                            _p_sizes: *const DeviceSize,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_bind_transform_feedback_buffers_ext)
                            ))
                        }
                        let val = _f(c"vkCmdBindTransformFeedbackBuffersEXT");
                        if val.is_null() {
                            cmd_bind_transform_feedback_buffers_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdBindTransformFeedbackBuffersEXT,
                            >(val)
                        }
                    },
                    cmd_begin_transform_feedback_ext: unsafe {
                        unsafe extern "system" fn cmd_begin_transform_feedback_ext(
                            _command_buffer: CommandBuffer,
                            _first_counter_buffer: u32,
                            _counter_buffer_count: u32,
                            _p_counter_buffers: *const Buffer,
                            _p_counter_buffer_offsets: *const DeviceSize,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_begin_transform_feedback_ext)
                            ))
                        }
                        let val = _f(c"vkCmdBeginTransformFeedbackEXT");
                        if val.is_null() {
                            cmd_begin_transform_feedback_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdBeginTransformFeedbackEXT,
                            >(val)
                        }
                    },
                    cmd_end_transform_feedback_ext: unsafe {
                        unsafe extern "system" fn cmd_end_transform_feedback_ext(
                            _command_buffer: CommandBuffer,
                            _first_counter_buffer: u32,
                            _counter_buffer_count: u32,
                            _p_counter_buffers: *const Buffer,
                            _p_counter_buffer_offsets: *const DeviceSize,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_end_transform_feedback_ext)
                            ))
                        }
                        let val = _f(c"vkCmdEndTransformFeedbackEXT");
                        if val.is_null() {
                            cmd_end_transform_feedback_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdEndTransformFeedbackEXT>(
                                val,
                            )
                        }
                    },
                    cmd_begin_query_indexed_ext: unsafe {
                        unsafe extern "system" fn cmd_begin_query_indexed_ext(
                            _command_buffer: CommandBuffer,
                            _query_pool: QueryPool,
                            _query: u32,
                            _flags: QueryControlFlags,
                            _index: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_begin_query_indexed_ext)
                            ))
                        }
                        let val = _f(c"vkCmdBeginQueryIndexedEXT");
                        if val.is_null() {
                            cmd_begin_query_indexed_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdBeginQueryIndexedEXT>(
                                val,
                            )
                        }
                    },
                    cmd_end_query_indexed_ext: unsafe {
                        unsafe extern "system" fn cmd_end_query_indexed_ext(
                            _command_buffer: CommandBuffer,
                            _query_pool: QueryPool,
                            _query: u32,
                            _index: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_end_query_indexed_ext)
                            ))
                        }
                        let val = _f(c"vkCmdEndQueryIndexedEXT");
                        if val.is_null() {
                            cmd_end_query_indexed_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdEndQueryIndexedEXT>(
                                val,
                            )
                        }
                    },
                    cmd_draw_indirect_byte_count_ext: unsafe {
                        unsafe extern "system" fn cmd_draw_indirect_byte_count_ext(
                            _command_buffer: CommandBuffer,
                            _instance_count: u32,
                            _first_instance: u32,
                            _counter_buffer: Buffer,
                            _counter_buffer_offset: DeviceSize,
                            _counter_offset: u32,
                            _vertex_stride: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_draw_indirect_byte_count_ext)
                            ))
                        }
                        let val = _f(c"vkCmdDrawIndirectByteCountEXT");
                        if val.is_null() {
                            cmd_draw_indirect_byte_count_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDrawIndirectByteCountEXT>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_validation_flags"]
    pub mod validation_flags {
        use super::super::*;
        // No commands visible for this extension here; only the extension name and
        // spec-version constants are re-exported.
        pub use {
            crate::vk::EXT_VALIDATION_FLAGS_EXTENSION_NAME as NAME,
            crate::vk::EXT_VALIDATION_FLAGS_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_shader_subgroup_ballot"]
    pub mod shader_subgroup_ballot {
        use super::super::*;
        // Re-exports only: this extension defines no commands in this module.
        pub use {
            crate::vk::EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME as NAME,
            crate::vk::EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_shader_subgroup_vote"]
    pub mod shader_subgroup_vote {
        use super::super::*;
        // Re-exports only: this extension defines no commands in this module.
        pub use {
            crate::vk::EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME as NAME,
            crate::vk::EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_texture_compression_astc_hdr"]
    pub mod texture_compression_astc_hdr {
        use super::super::*;
        // Re-exports only: this extension defines no commands in this module.
        pub use {
            crate::vk::EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME as NAME,
            crate::vk::EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_astc_decode_mode"]
    pub mod astc_decode_mode {
        use super::super::*;
        // Re-exports only: this extension defines no commands in this module.
        pub use {
            crate::vk::EXT_ASTC_DECODE_MODE_EXTENSION_NAME as NAME,
            crate::vk::EXT_ASTC_DECODE_MODE_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_pipeline_robustness"]
    pub mod pipeline_robustness {
        use super::super::*;
        // Re-exports only: this extension defines no commands in this module.
        pub use {
            crate::vk::EXT_PIPELINE_ROBUSTNESS_EXTENSION_NAME as NAME,
            crate::vk::EXT_PIPELINE_ROBUSTNESS_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_conditional_rendering"]
    pub mod conditional_rendering {
        use super::super::*;
        pub use {
            crate::vk::EXT_CONDITIONAL_RENDERING_EXTENSION_NAME as NAME,
            crate::vk::EXT_CONDITIONAL_RENDERING_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_conditional_rendering device-level functions"]
        #[derive(Clone)]
        // Wrapper pairing the loaded function-pointer table with its device handle.
        pub struct Device {
            pub(crate) fp: DeviceFn,
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves the extension's commands for `device` via vkGetDeviceProcAddr.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Borrow of the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // The device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_conditional_rendering device-level function pointers"]
        // Populated by `DeviceFn::load`; unresolved entries become panicking stubs.
        pub struct DeviceFn {
            pub cmd_begin_conditional_rendering_ext: PFN_vkCmdBeginConditionalRenderingEXT,
            pub cmd_end_conditional_rendering_ext: PFN_vkCmdEndConditionalRenderingEXT,
        }
        // SAFETY: only plain function pointers are stored; no thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            // Resolves every command through `f` (name -> address, null if absent).
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased body so the loader is compiled once, not per closure type.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    // SAFETY (each field): null lookups fall back to a panicking stub;
                    // transmuting null to a bare `fn` pointer would be UB.
                    cmd_begin_conditional_rendering_ext: unsafe {
                        unsafe extern "system" fn cmd_begin_conditional_rendering_ext(
                            _command_buffer: CommandBuffer,
                            _p_conditional_rendering_begin: *const ConditionalRenderingBeginInfoEXT<
                                '_,
                            >,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_begin_conditional_rendering_ext)
                            ))
                        }
                        let val = _f(c"vkCmdBeginConditionalRenderingEXT");
                        if val.is_null() {
                            cmd_begin_conditional_rendering_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdBeginConditionalRenderingEXT,
                            >(val)
                        }
                    },
                    cmd_end_conditional_rendering_ext: unsafe {
                        unsafe extern "system" fn cmd_end_conditional_rendering_ext(
                            _command_buffer: CommandBuffer,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_end_conditional_rendering_ext)
                            ))
                        }
                        let val = _f(c"vkCmdEndConditionalRenderingEXT");
                        if val.is_null() {
                            cmd_end_conditional_rendering_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdEndConditionalRenderingEXT,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_direct_mode_display"]
    pub mod direct_mode_display {
        use super::super::*;
        pub use {
            crate::vk::EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME as NAME,
            crate::vk::EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_direct_mode_display instance-level functions"]
        #[derive(Clone)]
        // Wrapper pairing the loaded function-pointer table with its instance handle.
        pub struct Instance {
            pub(crate) fp: InstanceFn,
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            // Resolves the extension's commands for `instance` via
            // vkGetInstanceProcAddr obtained from `entry`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Borrow of the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            // The instance handle the table was loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_direct_mode_display instance-level function pointers"]
        // Populated by `InstanceFn::load`; unresolved entries become panicking stubs.
        pub struct InstanceFn {
            pub release_display_ext: PFN_vkReleaseDisplayEXT,
        }
        // SAFETY: only plain function pointers are stored; no thread-affine state.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            // Resolves every command through `f` (name -> address, null if absent).
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased body so the loader is compiled once, not per closure type.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    // SAFETY: null lookups fall back to a panicking stub; transmuting
                    // null to a bare `fn` pointer would be UB.
                    release_display_ext: unsafe {
                        unsafe extern "system" fn release_display_ext(
                            _physical_device: PhysicalDevice,
                            _display: DisplayKHR,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(release_display_ext)))
                        }
                        let val = _f(c"vkReleaseDisplayEXT");
                        if val.is_null() {
                            release_display_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkReleaseDisplayEXT>(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_acquire_xlib_display"]
    pub mod acquire_xlib_display {
        use super::super::*;
        pub use {
            crate::vk::EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME as NAME,
            crate::vk::EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_acquire_xlib_display instance-level functions"]
        #[derive(Clone)]
        // Wrapper pairing the loaded function-pointer table with its instance handle.
        pub struct Instance {
            pub(crate) fp: InstanceFn,
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            // Resolves the extension's commands for `instance` via
            // vkGetInstanceProcAddr obtained from `entry`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Borrow of the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            // The instance handle the table was loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_acquire_xlib_display instance-level function pointers"]
        // Populated by `InstanceFn::load`; unresolved entries become panicking stubs.
        pub struct InstanceFn {
            pub acquire_xlib_display_ext: PFN_vkAcquireXlibDisplayEXT,
            pub get_rand_r_output_display_ext: PFN_vkGetRandROutputDisplayEXT,
        }
        // SAFETY: only plain function pointers are stored; no thread-affine state.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            // Resolves every command through `f` (name -> address, null if absent).
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased body so the loader is compiled once, not per closure type.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    // SAFETY (each field): null lookups fall back to a panicking stub;
                    // transmuting null to a bare `fn` pointer would be UB.
                    acquire_xlib_display_ext: unsafe {
                        unsafe extern "system" fn acquire_xlib_display_ext(
                            _physical_device: PhysicalDevice,
                            _dpy: *mut Display,
                            _display: DisplayKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(acquire_xlib_display_ext)
                            ))
                        }
                        let val = _f(c"vkAcquireXlibDisplayEXT");
                        if val.is_null() {
                            acquire_xlib_display_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkAcquireXlibDisplayEXT>(
                                val,
                            )
                        }
                    },
                    get_rand_r_output_display_ext: unsafe {
                        unsafe extern "system" fn get_rand_r_output_display_ext(
                            _physical_device: PhysicalDevice,
                            _dpy: *mut Display,
                            _rr_output: RROutput,
                            _p_display: *mut DisplayKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_rand_r_output_display_ext)
                            ))
                        }
                        let val = _f(c"vkGetRandROutputDisplayEXT");
                        if val.is_null() {
                            get_rand_r_output_display_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetRandROutputDisplayEXT>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_display_surface_counter"]
    pub mod display_surface_counter {
        use super::super::*;
        pub use {
            crate::vk::EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME as NAME,
            crate::vk::EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_display_surface_counter instance-level functions"]
        #[derive(Clone)]
        // Wrapper pairing the loaded function-pointer table with its instance handle.
        pub struct Instance {
            pub(crate) fp: InstanceFn,
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            // Resolves the extension's commands for `instance` via
            // vkGetInstanceProcAddr obtained from `entry`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Borrow of the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            // The instance handle the table was loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_display_surface_counter instance-level function pointers"]
        // Populated by `InstanceFn::load`; unresolved entries become panicking stubs.
        pub struct InstanceFn {
            pub get_physical_device_surface_capabilities2_ext:
                PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT,
        }
        // SAFETY: only plain function pointers are stored; no thread-affine state.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            // Resolves every command through `f` (name -> address, null if absent).
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased body so the loader is compiled once, not per closure type.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    // SAFETY: null lookups fall back to a panicking stub; transmuting
                    // null to a bare `fn` pointer would be UB.
                    get_physical_device_surface_capabilities2_ext: unsafe {
                        unsafe extern "system" fn get_physical_device_surface_capabilities2_ext(
                            _physical_device: PhysicalDevice,
                            _surface: SurfaceKHR,
                            _p_surface_capabilities: *mut SurfaceCapabilities2EXT<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_surface_capabilities2_ext)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceSurfaceCapabilities2EXT");
                        if val.is_null() {
                            get_physical_device_surface_capabilities2_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_display_control"]
    pub mod display_control {
        use super::super::*;
        pub use {
            crate::vk::EXT_DISPLAY_CONTROL_EXTENSION_NAME as NAME,
            crate::vk::EXT_DISPLAY_CONTROL_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_display_control device-level functions"]
        #[derive(Clone)]
        // Wrapper pairing the loaded function-pointer table with its device handle.
        pub struct Device {
            pub(crate) fp: DeviceFn,
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves the extension's commands for `device` via vkGetDeviceProcAddr.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Borrow of the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // The device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_display_control device-level function pointers"]
        // Populated by `DeviceFn::load`; unresolved entries become panicking stubs.
        pub struct DeviceFn {
            pub display_power_control_ext: PFN_vkDisplayPowerControlEXT,
            pub register_device_event_ext: PFN_vkRegisterDeviceEventEXT,
            pub register_display_event_ext: PFN_vkRegisterDisplayEventEXT,
            pub get_swapchain_counter_ext: PFN_vkGetSwapchainCounterEXT,
        }
        // SAFETY: only plain function pointers are stored; no thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            // Resolves every command through `f` (name -> address, null if absent).
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased body so the loader is compiled once, not per closure type.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    // SAFETY (each field): null lookups fall back to a panicking stub;
                    // transmuting null to a bare `fn` pointer would be UB.
                    display_power_control_ext: unsafe {
                        unsafe extern "system" fn display_power_control_ext(
                            _device: crate::vk::Device,
                            _display: DisplayKHR,
                            _p_display_power_info: *const DisplayPowerInfoEXT<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(display_power_control_ext)
                            ))
                        }
                        let val = _f(c"vkDisplayPowerControlEXT");
                        if val.is_null() {
                            display_power_control_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkDisplayPowerControlEXT>(
                                val,
                            )
                        }
                    },
                    register_device_event_ext: unsafe {
                        unsafe extern "system" fn register_device_event_ext(
                            _device: crate::vk::Device,
                            _p_device_event_info: *const DeviceEventInfoEXT<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_fence: *mut Fence,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(register_device_event_ext)
                            ))
                        }
                        let val = _f(c"vkRegisterDeviceEventEXT");
                        if val.is_null() {
                            register_device_event_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkRegisterDeviceEventEXT>(
                                val,
                            )
                        }
                    },
                    register_display_event_ext: unsafe {
                        unsafe extern "system" fn register_display_event_ext(
                            _device: crate::vk::Device,
                            _display: DisplayKHR,
                            _p_display_event_info: *const DisplayEventInfoEXT<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_fence: *mut Fence,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(register_display_event_ext)
                            ))
                        }
                        let val = _f(c"vkRegisterDisplayEventEXT");
                        if val.is_null() {
                            register_display_event_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkRegisterDisplayEventEXT>(
                                val,
                            )
                        }
                    },
                    get_swapchain_counter_ext: unsafe {
                        unsafe extern "system" fn get_swapchain_counter_ext(
                            _device: crate::vk::Device,
                            _swapchain: SwapchainKHR,
                            _counter: SurfaceCounterFlagsEXT,
                            _p_counter_value: *mut u64,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_swapchain_counter_ext)
                            ))
                        }
                        let val = _f(c"vkGetSwapchainCounterEXT");
                        if val.is_null() {
                            get_swapchain_counter_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetSwapchainCounterEXT>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_discard_rectangles"]
    pub mod discard_rectangles {
        use super::super::*;
        pub use {
            crate::vk::EXT_DISCARD_RECTANGLES_EXTENSION_NAME as NAME,
            crate::vk::EXT_DISCARD_RECTANGLES_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_discard_rectangles device-level functions"]
        #[derive(Clone)]
        // Wrapper pairing the loaded function-pointer table with its device handle.
        pub struct Device {
            pub(crate) fp: DeviceFn,
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves the extension's commands for `device` via vkGetDeviceProcAddr.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Borrow of the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // The device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_discard_rectangles device-level function pointers"]
        // Populated by `DeviceFn::load`; unresolved entries become panicking stubs.
        pub struct DeviceFn {
            pub cmd_set_discard_rectangle_ext: PFN_vkCmdSetDiscardRectangleEXT,
            pub cmd_set_discard_rectangle_enable_ext: PFN_vkCmdSetDiscardRectangleEnableEXT,
            pub cmd_set_discard_rectangle_mode_ext: PFN_vkCmdSetDiscardRectangleModeEXT,
        }
        // SAFETY: only plain function pointers are stored; no thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            // Resolves every command through `f` (name -> address, null if absent).
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased body so the loader is compiled once, not per closure type.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    // SAFETY (each field): null lookups fall back to a panicking stub;
                    // transmuting null to a bare `fn` pointer would be UB.
                    cmd_set_discard_rectangle_ext: unsafe {
                        unsafe extern "system" fn cmd_set_discard_rectangle_ext(
                            _command_buffer: CommandBuffer,
                            _first_discard_rectangle: u32,
                            _discard_rectangle_count: u32,
                            _p_discard_rectangles: *const Rect2D,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_discard_rectangle_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetDiscardRectangleEXT");
                        if val.is_null() {
                            cmd_set_discard_rectangle_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetDiscardRectangleEXT>(
                                val,
                            )
                        }
                    },
                    cmd_set_discard_rectangle_enable_ext: unsafe {
                        unsafe extern "system" fn cmd_set_discard_rectangle_enable_ext(
                            _command_buffer: CommandBuffer,
                            _discard_rectangle_enable: Bool32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_discard_rectangle_enable_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetDiscardRectangleEnableEXT");
                        if val.is_null() {
                            cmd_set_discard_rectangle_enable_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdSetDiscardRectangleEnableEXT,
                            >(val)
                        }
                    },
                    cmd_set_discard_rectangle_mode_ext: unsafe {
                        unsafe extern "system" fn cmd_set_discard_rectangle_mode_ext(
                            _command_buffer: CommandBuffer,
                            _discard_rectangle_mode: DiscardRectangleModeEXT,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_discard_rectangle_mode_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetDiscardRectangleModeEXT");
                        if val.is_null() {
                            cmd_set_discard_rectangle_mode_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdSetDiscardRectangleModeEXT,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_conservative_rasterization"]
    pub mod conservative_rasterization {
        use super::super::*;
        // Re-exports only: this extension defines no commands in this module.
        pub use {
            crate::vk::EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME as NAME,
            crate::vk::EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_depth_clip_enable"]
    pub mod depth_clip_enable {
        use super::super::*;
        // Re-exports only: this extension defines no commands in this module.
        pub use {
            crate::vk::EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME as NAME,
            crate::vk::EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_swapchain_colorspace"]
    pub mod swapchain_colorspace {
        use super::super::*;
        // Re-exports only. NOTE: the module is named `swapchain_colorspace` while
        // the constants use COLOR_SPACE — both spellings come from the Vulkan registry.
        pub use {
            crate::vk::EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME as NAME,
            crate::vk::EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_hdr_metadata"]
    pub mod hdr_metadata {
        use super::super::*;
        pub use {
            crate::vk::EXT_HDR_METADATA_EXTENSION_NAME as NAME,
            crate::vk::EXT_HDR_METADATA_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_hdr_metadata device-level functions"]
        #[derive(Clone)]
        // Wrapper pairing the loaded function-pointer table with its device handle.
        pub struct Device {
            pub(crate) fp: DeviceFn,
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves the extension's commands for `device` via vkGetDeviceProcAddr.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Borrow of the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // The device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_hdr_metadata device-level function pointers"]
        // Populated by `DeviceFn::load`; unresolved entries become panicking stubs.
        pub struct DeviceFn {
            pub set_hdr_metadata_ext: PFN_vkSetHdrMetadataEXT,
        }
        // SAFETY: only plain function pointers are stored; no thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            // Resolves every command through `f` (name -> address, null if absent).
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased body so the loader is compiled once, not per closure type.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    // SAFETY: null lookups fall back to a panicking stub; transmuting
                    // null to a bare `fn` pointer would be UB.
                    set_hdr_metadata_ext: unsafe {
                        unsafe extern "system" fn set_hdr_metadata_ext(
                            _device: crate::vk::Device,
                            _swapchain_count: u32,
                            _p_swapchains: *const SwapchainKHR,
                            _p_metadata: *const HdrMetadataEXT<'_>,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(set_hdr_metadata_ext)))
                        }
                        let val = _f(c"vkSetHdrMetadataEXT");
                        if val.is_null() {
                            set_hdr_metadata_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkSetHdrMetadataEXT>(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_external_memory_dma_buf"]
    pub mod external_memory_dma_buf {
        use super::super::*;
        // Re-exports only: this extension defines no commands in this module.
        pub use {
            crate::vk::EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME as NAME,
            crate::vk::EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_queue_family_foreign"]
    pub mod queue_family_foreign {
        use super::super::*;
        // Re-exports only: this extension defines no commands in this module.
        pub use {
            crate::vk::EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME as NAME,
            crate::vk::EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION as SPEC_VERSION,
        };
    }
2966 #[doc = "VK_EXT_debug_utils"]
2967 pub mod debug_utils {
2968 use super::super::*;
2969 pub use {
2970 crate::vk::EXT_DEBUG_UTILS_EXTENSION_NAME as NAME,
2971 crate::vk::EXT_DEBUG_UTILS_SPEC_VERSION as SPEC_VERSION,
2972 };
        #[doc = "VK_EXT_debug_utils instance-level functions"]
        #[derive(Clone)]
        // Wrapper pairing the loaded function-pointer table with its instance handle.
        pub struct Instance {
            pub(crate) fp: InstanceFn,
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            // Resolves the extension's instance-level commands for `instance` via
            // vkGetInstanceProcAddr obtained from `entry`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Borrow of the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            // The instance handle the table was loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_debug_utils instance-level function pointers"]
        // Populated by `InstanceFn::load`; unresolved entries become panicking stubs.
        pub struct InstanceFn {
            pub create_debug_utils_messenger_ext: PFN_vkCreateDebugUtilsMessengerEXT,
            pub destroy_debug_utils_messenger_ext: PFN_vkDestroyDebugUtilsMessengerEXT,
            pub submit_debug_utils_message_ext: PFN_vkSubmitDebugUtilsMessageEXT,
        }
        // SAFETY: only plain function pointers are stored; no thread-affine state.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            // Resolves every command through `f` (name -> address, null if absent).
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased body so the loader is compiled once, not per closure type.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    // SAFETY (each field): null lookups fall back to a panicking stub
                    // with the matching signature; transmuting null to a bare `fn`
                    // pointer would be undefined behavior.
                    create_debug_utils_messenger_ext: unsafe {
                        unsafe extern "system" fn create_debug_utils_messenger_ext(
                            _instance: crate::vk::Instance,
                            _p_create_info: *const DebugUtilsMessengerCreateInfoEXT<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_messenger: *mut DebugUtilsMessengerEXT,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_debug_utils_messenger_ext)
                            ))
                        }
                        let val = _f(c"vkCreateDebugUtilsMessengerEXT");
                        if val.is_null() {
                            create_debug_utils_messenger_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCreateDebugUtilsMessengerEXT,
                            >(val)
                        }
                    },
                    destroy_debug_utils_messenger_ext: unsafe {
                        unsafe extern "system" fn destroy_debug_utils_messenger_ext(
                            _instance: crate::vk::Instance,
                            _messenger: DebugUtilsMessengerEXT,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(destroy_debug_utils_messenger_ext)
                            ))
                        }
                        let val = _f(c"vkDestroyDebugUtilsMessengerEXT");
                        if val.is_null() {
                            destroy_debug_utils_messenger_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkDestroyDebugUtilsMessengerEXT,
                            >(val)
                        }
                    },
                    submit_debug_utils_message_ext: unsafe {
                        unsafe extern "system" fn submit_debug_utils_message_ext(
                            _instance: crate::vk::Instance,
                            _message_severity: DebugUtilsMessageSeverityFlagsEXT,
                            _message_types: DebugUtilsMessageTypeFlagsEXT,
                            _p_callback_data: *const DebugUtilsMessengerCallbackDataEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(submit_debug_utils_message_ext)
                            ))
                        }
                        let val = _f(c"vkSubmitDebugUtilsMessageEXT");
                        if val.is_null() {
                            submit_debug_utils_message_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkSubmitDebugUtilsMessageEXT>(
                                val,
                            )
                        }
                    },
                }
            }
        }
3080 #[doc = "VK_EXT_debug_utils device-level functions"]
3081 #[derive(Clone)]
3082 pub struct Device {
3083 pub(crate) fp: DeviceFn,
3084 pub(crate) handle: crate::vk::Device,
3085 }
3086 impl Device {
3087 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
3088 let handle = device.handle();
3089 let fp = DeviceFn::load(|name| unsafe {
3090 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
3091 instance.get_device_proc_addr(handle, name.as_ptr()),
3092 )
3093 });
3094 Self { handle, fp }
3095 }
3096 #[inline]
3097 pub fn fp(&self) -> &DeviceFn {
3098 &self.fp
3099 }
3100 #[inline]
3101 pub fn device(&self) -> crate::vk::Device {
3102 self.handle
3103 }
3104 }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_debug_utils device-level function pointers"]
        pub struct DeviceFn {
            pub set_debug_utils_object_name_ext: PFN_vkSetDebugUtilsObjectNameEXT,
            pub set_debug_utils_object_tag_ext: PFN_vkSetDebugUtilsObjectTagEXT,
            pub queue_begin_debug_utils_label_ext: PFN_vkQueueBeginDebugUtilsLabelEXT,
            pub queue_end_debug_utils_label_ext: PFN_vkQueueEndDebugUtilsLabelEXT,
            pub queue_insert_debug_utils_label_ext: PFN_vkQueueInsertDebugUtilsLabelEXT,
            pub cmd_begin_debug_utils_label_ext: PFN_vkCmdBeginDebugUtilsLabelEXT,
            pub cmd_end_debug_utils_label_ext: PFN_vkCmdEndDebugUtilsLabelEXT,
            pub cmd_insert_debug_utils_label_ext: PFN_vkCmdInsertDebugUtilsLabelEXT,
        }
        // SAFETY: the table holds only PFN_* function pointers, which carry no
        // thread-affine state, so sharing it across threads is sound.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads every entry point in this table through `f` (typically a wrapper
            /// around `vkGetDeviceProcAddr`).
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body: compiled once instead of once per caller
            // closure type, to keep code size down.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    // Pattern for every field below: ask the loader for the symbol;
                    // if it returns null, install a same-signature stub that panics
                    // with "Unable to load ..." only when the missing function is
                    // actually called.
                    set_debug_utils_object_name_ext: unsafe {
                        unsafe extern "system" fn set_debug_utils_object_name_ext(
                            _device: crate::vk::Device,
                            _p_name_info: *const DebugUtilsObjectNameInfoEXT<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(set_debug_utils_object_name_ext)
                            ))
                        }
                        let val = _f(c"vkSetDebugUtilsObjectNameEXT");
                        if val.is_null() {
                            set_debug_utils_object_name_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkSetDebugUtilsObjectNameEXT>(
                                val,
                            )
                        }
                    },
                    set_debug_utils_object_tag_ext: unsafe {
                        unsafe extern "system" fn set_debug_utils_object_tag_ext(
                            _device: crate::vk::Device,
                            _p_tag_info: *const DebugUtilsObjectTagInfoEXT<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(set_debug_utils_object_tag_ext)
                            ))
                        }
                        let val = _f(c"vkSetDebugUtilsObjectTagEXT");
                        if val.is_null() {
                            set_debug_utils_object_tag_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkSetDebugUtilsObjectTagEXT>(
                                val,
                            )
                        }
                    },
                    queue_begin_debug_utils_label_ext: unsafe {
                        unsafe extern "system" fn queue_begin_debug_utils_label_ext(
                            _queue: Queue,
                            _p_label_info: *const DebugUtilsLabelEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(queue_begin_debug_utils_label_ext)
                            ))
                        }
                        let val = _f(c"vkQueueBeginDebugUtilsLabelEXT");
                        if val.is_null() {
                            queue_begin_debug_utils_label_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkQueueBeginDebugUtilsLabelEXT,
                            >(val)
                        }
                    },
                    queue_end_debug_utils_label_ext: unsafe {
                        unsafe extern "system" fn queue_end_debug_utils_label_ext(_queue: Queue) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(queue_end_debug_utils_label_ext)
                            ))
                        }
                        let val = _f(c"vkQueueEndDebugUtilsLabelEXT");
                        if val.is_null() {
                            queue_end_debug_utils_label_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkQueueEndDebugUtilsLabelEXT>(
                                val,
                            )
                        }
                    },
                    queue_insert_debug_utils_label_ext: unsafe {
                        unsafe extern "system" fn queue_insert_debug_utils_label_ext(
                            _queue: Queue,
                            _p_label_info: *const DebugUtilsLabelEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(queue_insert_debug_utils_label_ext)
                            ))
                        }
                        let val = _f(c"vkQueueInsertDebugUtilsLabelEXT");
                        if val.is_null() {
                            queue_insert_debug_utils_label_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkQueueInsertDebugUtilsLabelEXT,
                            >(val)
                        }
                    },
                    cmd_begin_debug_utils_label_ext: unsafe {
                        unsafe extern "system" fn cmd_begin_debug_utils_label_ext(
                            _command_buffer: CommandBuffer,
                            _p_label_info: *const DebugUtilsLabelEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_begin_debug_utils_label_ext)
                            ))
                        }
                        let val = _f(c"vkCmdBeginDebugUtilsLabelEXT");
                        if val.is_null() {
                            cmd_begin_debug_utils_label_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdBeginDebugUtilsLabelEXT>(
                                val,
                            )
                        }
                    },
                    cmd_end_debug_utils_label_ext: unsafe {
                        unsafe extern "system" fn cmd_end_debug_utils_label_ext(
                            _command_buffer: CommandBuffer,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_end_debug_utils_label_ext)
                            ))
                        }
                        let val = _f(c"vkCmdEndDebugUtilsLabelEXT");
                        if val.is_null() {
                            cmd_end_debug_utils_label_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdEndDebugUtilsLabelEXT>(
                                val,
                            )
                        }
                    },
                    cmd_insert_debug_utils_label_ext: unsafe {
                        unsafe extern "system" fn cmd_insert_debug_utils_label_ext(
                            _command_buffer: CommandBuffer,
                            _p_label_info: *const DebugUtilsLabelEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_insert_debug_utils_label_ext)
                            ))
                        }
                        let val = _f(c"vkCmdInsertDebugUtilsLabelEXT");
                        if val.is_null() {
                            cmd_insert_debug_utils_label_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdInsertDebugUtilsLabelEXT>(
                                val,
                            )
                        }
                    },
                }
            }
        }
3278 }
3279 #[doc = "VK_EXT_sampler_filter_minmax"]
3280 pub mod sampler_filter_minmax {
3281 use super::super::*;
3282 pub use {
3283 crate::vk::EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME as NAME,
3284 crate::vk::EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION as SPEC_VERSION,
3285 };
3286 }
    #[doc = "VK_EXT_descriptor_heap"]
    pub mod descriptor_heap {
        use super::super::*;
        pub use {
            crate::vk::EXT_DESCRIPTOR_HEAP_EXTENSION_NAME as NAME,
            crate::vk::EXT_DESCRIPTOR_HEAP_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_descriptor_heap instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            pub(crate) fp: InstanceFn,
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Loads the instance-level entry points of this extension for `instance`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // Reinterpret the returned function pointer as a raw pointer so
                    // the loader can null-check it.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// The raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            /// The instance handle the table was loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_descriptor_heap instance-level function pointers"]
        pub struct InstanceFn {
            pub get_physical_device_descriptor_size_ext: PFN_vkGetPhysicalDeviceDescriptorSizeEXT,
        }
        // SAFETY: the table holds only PFN_* function pointers, which carry no
        // thread-affine state.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Loads every entry point in this table through `f` (typically a wrapper
            /// around `vkGetInstanceProcAddr`).
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body: compiled once instead of once per caller
            // closure type.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    // Ask the loader for the symbol; on a null result install a
                    // same-signature stub that panics only if it is ever called.
                    get_physical_device_descriptor_size_ext: unsafe {
                        unsafe extern "system" fn get_physical_device_descriptor_size_ext(
                            _physical_device: PhysicalDevice,
                            _descriptor_type: DescriptorType,
                        ) -> DeviceSize {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_descriptor_size_ext)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceDescriptorSizeEXT");
                        if val.is_null() {
                            get_physical_device_descriptor_size_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceDescriptorSizeEXT,
                            >(val)
                        }
                    },
                }
            }
        }
        #[doc = "VK_EXT_descriptor_heap device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            pub(crate) fp: DeviceFn,
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads the device-level entry points of this extension for `device`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// The raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// The device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_descriptor_heap device-level function pointers"]
        pub struct DeviceFn {
            pub write_sampler_descriptors_ext: PFN_vkWriteSamplerDescriptorsEXT,
            pub write_resource_descriptors_ext: PFN_vkWriteResourceDescriptorsEXT,
            pub cmd_bind_sampler_heap_ext: PFN_vkCmdBindSamplerHeapEXT,
            pub cmd_bind_resource_heap_ext: PFN_vkCmdBindResourceHeapEXT,
            pub cmd_push_data_ext: PFN_vkCmdPushDataEXT,
            pub get_image_opaque_capture_data_ext: PFN_vkGetImageOpaqueCaptureDataEXT,
            pub register_custom_border_color_ext: PFN_vkRegisterCustomBorderColorEXT,
            pub unregister_custom_border_color_ext: PFN_vkUnregisterCustomBorderColorEXT,
            pub get_tensor_opaque_capture_data_arm: PFN_vkGetTensorOpaqueCaptureDataARM,
        }
        // SAFETY: the table holds only PFN_* function pointers, which carry no
        // thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads every entry point in this table through `f` (typically a wrapper
            /// around `vkGetDeviceProcAddr`).
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body: compiled once instead of once per caller
            // closure type.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    // Pattern for every field below: ask the loader for the symbol;
                    // if it returns null, install a same-signature stub that panics
                    // with "Unable to load ..." only when the missing function is
                    // actually called.
                    write_sampler_descriptors_ext: unsafe {
                        unsafe extern "system" fn write_sampler_descriptors_ext(
                            _device: crate::vk::Device,
                            _sampler_count: u32,
                            _p_samplers: *const SamplerCreateInfo<'_>,
                            _p_descriptors: *const HostAddressRangeEXT<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(write_sampler_descriptors_ext)
                            ))
                        }
                        let val = _f(c"vkWriteSamplerDescriptorsEXT");
                        if val.is_null() {
                            write_sampler_descriptors_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkWriteSamplerDescriptorsEXT>(
                                val,
                            )
                        }
                    },
                    write_resource_descriptors_ext: unsafe {
                        unsafe extern "system" fn write_resource_descriptors_ext(
                            _device: crate::vk::Device,
                            _resource_count: u32,
                            _p_resources: *const ResourceDescriptorInfoEXT<'_>,
                            _p_descriptors: *const HostAddressRangeEXT<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(write_resource_descriptors_ext)
                            ))
                        }
                        let val = _f(c"vkWriteResourceDescriptorsEXT");
                        if val.is_null() {
                            write_resource_descriptors_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkWriteResourceDescriptorsEXT>(
                                val,
                            )
                        }
                    },
                    cmd_bind_sampler_heap_ext: unsafe {
                        unsafe extern "system" fn cmd_bind_sampler_heap_ext(
                            _command_buffer: CommandBuffer,
                            _p_bind_info: *const BindHeapInfoEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_bind_sampler_heap_ext)
                            ))
                        }
                        let val = _f(c"vkCmdBindSamplerHeapEXT");
                        if val.is_null() {
                            cmd_bind_sampler_heap_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdBindSamplerHeapEXT>(
                                val,
                            )
                        }
                    },
                    cmd_bind_resource_heap_ext: unsafe {
                        unsafe extern "system" fn cmd_bind_resource_heap_ext(
                            _command_buffer: CommandBuffer,
                            _p_bind_info: *const BindHeapInfoEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_bind_resource_heap_ext)
                            ))
                        }
                        let val = _f(c"vkCmdBindResourceHeapEXT");
                        if val.is_null() {
                            cmd_bind_resource_heap_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdBindResourceHeapEXT>(
                                val,
                            )
                        }
                    },
                    cmd_push_data_ext: unsafe {
                        unsafe extern "system" fn cmd_push_data_ext(
                            _command_buffer: CommandBuffer,
                            _p_push_data_info: *const PushDataInfoEXT<'_>,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(cmd_push_data_ext)))
                        }
                        let val = _f(c"vkCmdPushDataEXT");
                        if val.is_null() {
                            cmd_push_data_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdPushDataEXT>(val)
                        }
                    },
                    get_image_opaque_capture_data_ext: unsafe {
                        unsafe extern "system" fn get_image_opaque_capture_data_ext(
                            _device: crate::vk::Device,
                            _image_count: u32,
                            _p_images: *const Image,
                            _p_datas: *mut HostAddressRangeEXT<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_image_opaque_capture_data_ext)
                            ))
                        }
                        let val = _f(c"vkGetImageOpaqueCaptureDataEXT");
                        if val.is_null() {
                            get_image_opaque_capture_data_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetImageOpaqueCaptureDataEXT,
                            >(val)
                        }
                    },
                    register_custom_border_color_ext: unsafe {
                        unsafe extern "system" fn register_custom_border_color_ext(
                            _device: crate::vk::Device,
                            _p_border_color: *const SamplerCustomBorderColorCreateInfoEXT<'_>,
                            _request_index: Bool32,
                            _p_index: *mut u32,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(register_custom_border_color_ext)
                            ))
                        }
                        let val = _f(c"vkRegisterCustomBorderColorEXT");
                        if val.is_null() {
                            register_custom_border_color_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkRegisterCustomBorderColorEXT,
                            >(val)
                        }
                    },
                    unregister_custom_border_color_ext: unsafe {
                        unsafe extern "system" fn unregister_custom_border_color_ext(
                            _device: crate::vk::Device,
                            _index: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(unregister_custom_border_color_ext)
                            ))
                        }
                        let val = _f(c"vkUnregisterCustomBorderColorEXT");
                        if val.is_null() {
                            unregister_custom_border_color_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkUnregisterCustomBorderColorEXT,
                            >(val)
                        }
                    },
                    get_tensor_opaque_capture_data_arm: unsafe {
                        unsafe extern "system" fn get_tensor_opaque_capture_data_arm(
                            _device: crate::vk::Device,
                            _tensor_count: u32,
                            _p_tensors: *const TensorARM,
                            _p_datas: *mut HostAddressRangeEXT<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_tensor_opaque_capture_data_arm)
                            ))
                        }
                        let val = _f(c"vkGetTensorOpaqueCaptureDataARM");
                        if val.is_null() {
                            get_tensor_opaque_capture_data_arm
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetTensorOpaqueCaptureDataARM,
                            >(val)
                        }
                    },
                }
            }
        }
    }
3585 #[doc = "VK_EXT_inline_uniform_block"]
3586 pub mod inline_uniform_block {
3587 use super::super::*;
3588 pub use {
3589 crate::vk::EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME as NAME,
3590 crate::vk::EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION as SPEC_VERSION,
3591 };
3592 }
3593 #[doc = "VK_EXT_shader_stencil_export"]
3594 pub mod shader_stencil_export {
3595 use super::super::*;
3596 pub use {
3597 crate::vk::EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME as NAME,
3598 crate::vk::EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION as SPEC_VERSION,
3599 };
3600 }
    #[doc = "VK_EXT_sample_locations"]
    pub mod sample_locations {
        use super::super::*;
        pub use {
            crate::vk::EXT_SAMPLE_LOCATIONS_EXTENSION_NAME as NAME,
            crate::vk::EXT_SAMPLE_LOCATIONS_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_sample_locations instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            pub(crate) fp: InstanceFn,
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Loads the instance-level entry points of this extension for `instance`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // Reinterpret the returned function pointer as a raw pointer so
                    // the loader can null-check it.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// The raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            /// The instance handle the table was loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_sample_locations instance-level function pointers"]
        pub struct InstanceFn {
            pub get_physical_device_multisample_properties_ext:
                PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT,
        }
        // SAFETY: the table holds only PFN_* function pointers, which carry no
        // thread-affine state.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Loads every entry point in this table through `f` (typically a wrapper
            /// around `vkGetInstanceProcAddr`).
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body: compiled once instead of once per caller
            // closure type.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    // Ask the loader for the symbol; on a null result install a
                    // same-signature stub that panics only if it is ever called.
                    get_physical_device_multisample_properties_ext: unsafe {
                        unsafe extern "system" fn get_physical_device_multisample_properties_ext(
                            _physical_device: PhysicalDevice,
                            _samples: SampleCountFlags,
                            _p_multisample_properties: *mut MultisamplePropertiesEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_multisample_properties_ext)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceMultisamplePropertiesEXT");
                        if val.is_null() {
                            get_physical_device_multisample_properties_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT,
                            >(val)
                        }
                    },
                }
            }
        }
        #[doc = "VK_EXT_sample_locations device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            pub(crate) fp: DeviceFn,
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads the device-level entry points of this extension for `device`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// The raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// The device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_sample_locations device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_set_sample_locations_ext: PFN_vkCmdSetSampleLocationsEXT,
        }
        // SAFETY: the table holds only PFN_* function pointers, which carry no
        // thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads every entry point in this table through `f` (typically a wrapper
            /// around `vkGetDeviceProcAddr`).
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body: compiled once instead of once per caller
            // closure type.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    // Ask the loader for the symbol; on a null result install a
                    // same-signature stub that panics only if it is ever called.
                    cmd_set_sample_locations_ext: unsafe {
                        unsafe extern "system" fn cmd_set_sample_locations_ext(
                            _command_buffer: CommandBuffer,
                            _p_sample_locations_info: *const SampleLocationsInfoEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_sample_locations_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetSampleLocationsEXT");
                        if val.is_null() {
                            cmd_set_sample_locations_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetSampleLocationsEXT>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
3732 #[doc = "VK_EXT_blend_operation_advanced"]
3733 pub mod blend_operation_advanced {
3734 use super::super::*;
3735 pub use {
3736 crate::vk::EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME as NAME,
3737 crate::vk::EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION as SPEC_VERSION,
3738 };
3739 }
3740 #[doc = "VK_EXT_post_depth_coverage"]
3741 pub mod post_depth_coverage {
3742 use super::super::*;
3743 pub use {
3744 crate::vk::EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME as NAME,
3745 crate::vk::EXT_POST_DEPTH_COVERAGE_SPEC_VERSION as SPEC_VERSION,
3746 };
3747 }
    #[doc = "VK_EXT_image_drm_format_modifier"]
    pub mod image_drm_format_modifier {
        use super::super::*;
        pub use {
            crate::vk::EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME as NAME,
            crate::vk::EXT_IMAGE_DRM_FORMAT_MODIFIER_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_image_drm_format_modifier device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            pub(crate) fp: DeviceFn,
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads the device-level entry points of this extension for `device`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the returned function pointer as a raw pointer so
                    // the loader can null-check it.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// The raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// The device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_image_drm_format_modifier device-level function pointers"]
        pub struct DeviceFn {
            pub get_image_drm_format_modifier_properties_ext:
                PFN_vkGetImageDrmFormatModifierPropertiesEXT,
        }
        // SAFETY: the table holds only PFN_* function pointers, which carry no
        // thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads every entry point in this table through `f` (typically a wrapper
            /// around `vkGetDeviceProcAddr`).
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body: compiled once instead of once per caller
            // closure type.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    // Ask the loader for the symbol; on a null result install a
                    // same-signature stub that panics only if it is ever called.
                    get_image_drm_format_modifier_properties_ext: unsafe {
                        unsafe extern "system" fn get_image_drm_format_modifier_properties_ext(
                            _device: crate::vk::Device,
                            _image: Image,
                            _p_properties: *mut ImageDrmFormatModifierPropertiesEXT<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_image_drm_format_modifier_properties_ext)
                            ))
                        }
                        let val = _f(c"vkGetImageDrmFormatModifierPropertiesEXT");
                        if val.is_null() {
                            get_image_drm_format_modifier_properties_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetImageDrmFormatModifierPropertiesEXT,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_validation_cache"]
    pub mod validation_cache {
        use super::super::*;
        pub use {
            crate::vk::EXT_VALIDATION_CACHE_EXTENSION_NAME as NAME,
            crate::vk::EXT_VALIDATION_CACHE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_validation_cache device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            pub(crate) fp: DeviceFn,
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads the device-level entry points of this extension for `device`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the returned function pointer as a raw pointer so
                    // the loader can null-check it.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// The raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// The device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_validation_cache device-level function pointers"]
        pub struct DeviceFn {
            pub create_validation_cache_ext: PFN_vkCreateValidationCacheEXT,
            pub destroy_validation_cache_ext: PFN_vkDestroyValidationCacheEXT,
            pub merge_validation_caches_ext: PFN_vkMergeValidationCachesEXT,
            pub get_validation_cache_data_ext: PFN_vkGetValidationCacheDataEXT,
        }
        // SAFETY: the table holds only PFN_* function pointers, which carry no
        // thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads every entry point in this table through `f` (typically a wrapper
            /// around `vkGetDeviceProcAddr`).
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body: compiled once instead of once per caller
            // closure type.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    // Pattern for every field below: ask the loader for the symbol;
                    // if it returns null, install a same-signature stub that panics
                    // with "Unable to load ..." only when the missing function is
                    // actually called.
                    create_validation_cache_ext: unsafe {
                        unsafe extern "system" fn create_validation_cache_ext(
                            _device: crate::vk::Device,
                            _p_create_info: *const ValidationCacheCreateInfoEXT<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_validation_cache: *mut ValidationCacheEXT,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_validation_cache_ext)
                            ))
                        }
                        let val = _f(c"vkCreateValidationCacheEXT");
                        if val.is_null() {
                            create_validation_cache_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateValidationCacheEXT>(
                                val,
                            )
                        }
                    },
                    destroy_validation_cache_ext: unsafe {
                        unsafe extern "system" fn destroy_validation_cache_ext(
                            _device: crate::vk::Device,
                            _validation_cache: ValidationCacheEXT,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(destroy_validation_cache_ext)
                            ))
                        }
                        let val = _f(c"vkDestroyValidationCacheEXT");
                        if val.is_null() {
                            destroy_validation_cache_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkDestroyValidationCacheEXT>(
                                val,
                            )
                        }
                    },
                    merge_validation_caches_ext: unsafe {
                        unsafe extern "system" fn merge_validation_caches_ext(
                            _device: crate::vk::Device,
                            _dst_cache: ValidationCacheEXT,
                            _src_cache_count: u32,
                            _p_src_caches: *const ValidationCacheEXT,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(merge_validation_caches_ext)
                            ))
                        }
                        let val = _f(c"vkMergeValidationCachesEXT");
                        if val.is_null() {
                            merge_validation_caches_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkMergeValidationCachesEXT>(
                                val,
                            )
                        }
                    },
                    get_validation_cache_data_ext: unsafe {
                        unsafe extern "system" fn get_validation_cache_data_ext(
                            _device: crate::vk::Device,
                            _validation_cache: ValidationCacheEXT,
                            _p_data_size: *mut usize,
                            _p_data: *mut c_void,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_validation_cache_data_ext)
                            ))
                        }
                        let val = _f(c"vkGetValidationCacheDataEXT");
                        if val.is_null() {
                            get_validation_cache_data_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetValidationCacheDataEXT>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
3954 #[doc = "VK_EXT_descriptor_indexing"]
3955 pub mod descriptor_indexing {
3956 use super::super::*;
3957 pub use {
3958 crate::vk::EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME as NAME,
3959 crate::vk::EXT_DESCRIPTOR_INDEXING_SPEC_VERSION as SPEC_VERSION,
3960 };
3961 }
3962 #[doc = "VK_EXT_shader_viewport_index_layer"]
3963 pub mod shader_viewport_index_layer {
3964 use super::super::*;
3965 pub use {
3966 crate::vk::EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME as NAME,
3967 crate::vk::EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION as SPEC_VERSION,
3968 };
3969 }
3970 #[doc = "VK_EXT_filter_cubic"]
3971 pub mod filter_cubic {
3972 use super::super::*;
3973 pub use {
3974 crate::vk::EXT_FILTER_CUBIC_EXTENSION_NAME as NAME,
3975 crate::vk::EXT_FILTER_CUBIC_SPEC_VERSION as SPEC_VERSION,
3976 };
3977 }
3978 #[doc = "VK_EXT_global_priority"]
3979 pub mod global_priority {
3980 use super::super::*;
3981 pub use {
3982 crate::vk::EXT_GLOBAL_PRIORITY_EXTENSION_NAME as NAME,
3983 crate::vk::EXT_GLOBAL_PRIORITY_SPEC_VERSION as SPEC_VERSION,
3984 };
3985 }
    #[doc = "VK_EXT_external_memory_host"]
    pub mod external_memory_host {
        use super::super::*;
        pub use {
            crate::vk::EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME as NAME,
            crate::vk::EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_external_memory_host device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            pub(crate) fp: DeviceFn,
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads the device-level entry points of this extension for `device`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the returned function pointer as a raw pointer so
                    // the loader can null-check it.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// The raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// The device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_external_memory_host device-level function pointers"]
        pub struct DeviceFn {
            pub get_memory_host_pointer_properties_ext: PFN_vkGetMemoryHostPointerPropertiesEXT,
        }
        // SAFETY: the table holds only PFN_* function pointers, which carry no
        // thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads every entry point in this table through `f` (typically a wrapper
            /// around `vkGetDeviceProcAddr`).
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body: compiled once instead of once per caller
            // closure type.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    // Ask the loader for the symbol; on a null result install a
                    // same-signature stub that panics only if it is ever called.
                    get_memory_host_pointer_properties_ext: unsafe {
                        unsafe extern "system" fn get_memory_host_pointer_properties_ext(
                            _device: crate::vk::Device,
                            _handle_type: ExternalMemoryHandleTypeFlags,
                            _p_host_pointer: *const c_void,
                            _p_memory_host_pointer_properties: *mut MemoryHostPointerPropertiesEXT<
                                '_,
                            >,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_memory_host_pointer_properties_ext)
                            ))
                        }
                        let val = _f(c"vkGetMemoryHostPointerPropertiesEXT");
                        if val.is_null() {
                            get_memory_host_pointer_properties_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetMemoryHostPointerPropertiesEXT,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_calibrated_timestamps"]
    // Generated wrapper module: re-exports the extension's NAME/SPEC_VERSION
    // constants and provides loaders for its instance- and device-level commands.
    pub mod calibrated_timestamps {
        use super::super::*;
        pub use {
            crate::vk::EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME as NAME,
            crate::vk::EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_calibrated_timestamps instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded function pointers for this extension.
            pub(crate) fp: InstanceFn,
            // The VkInstance the pointers were loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Loads this extension's instance-level commands from `instance`
            /// via vkGetInstanceProcAddr; unresolved commands become panicking stubs.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // PFN_vkVoidFunction (an Option of a fn pointer) is widened to a
                    // raw pointer; None becomes null and keeps the fallback stub.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_calibrated_timestamps instance-level function pointers"]
        pub struct InstanceFn {
            // NOTE(review): the KHR-suffixed PFN type is the promoted alias of the
            // EXT command; the generator reuses it since the signatures match.
            pub get_physical_device_calibrateable_time_domains_ext:
                PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR,
        }
        // SAFETY: plain function pointers only; safe to send/share across threads.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Builds the table; null lookups keep a panicking stub.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // `dyn FnMut` avoids per-closure monomorphization of the loader body.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_physical_device_calibrateable_time_domains_ext: unsafe {
                        unsafe extern "system" fn get_physical_device_calibrateable_time_domains_ext(
                            _physical_device: PhysicalDevice,
                            _p_time_domain_count: *mut u32,
                            _p_time_domains: *mut TimeDomainKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_calibrateable_time_domains_ext)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceCalibrateableTimeDomainsEXT");
                        if val.is_null() {
                            get_physical_device_calibrateable_time_domains_ext
                        } else {
                            // SAFETY: the loader contract guarantees the returned
                            // address matches this command's PFN signature.
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR,
                            >(val)
                        }
                    },
                }
            }
        }
        #[doc = "VK_EXT_calibrated_timestamps device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function pointers for this extension.
            pub(crate) fp: DeviceFn,
            // The VkDevice the pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's device-level commands from `device`
            /// via vkGetDeviceProcAddr; unresolved commands become panicking stubs.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_calibrated_timestamps device-level function pointers"]
        pub struct DeviceFn {
            // KHR-typed alias of the EXT command (promotion; identical signature).
            pub get_calibrated_timestamps_ext: PFN_vkGetCalibratedTimestampsKHR,
        }
        // SAFETY: plain function pointers only; safe to send/share across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Builds the table; null lookups keep a panicking stub.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_calibrated_timestamps_ext: unsafe {
                        unsafe extern "system" fn get_calibrated_timestamps_ext(
                            _device: crate::vk::Device,
                            _timestamp_count: u32,
                            _p_timestamp_infos: *const CalibratedTimestampInfoKHR<'_>,
                            _p_timestamps: *mut u64,
                            _p_max_deviation: *mut u64,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_calibrated_timestamps_ext)
                            ))
                        }
                        let val = _f(c"vkGetCalibratedTimestampsEXT");
                        if val.is_null() {
                            get_calibrated_timestamps_ext
                        } else {
                            // SAFETY: address resolved for this exact command name.
                            ::core::mem::transmute::<*const c_void, PFN_vkGetCalibratedTimestampsKHR>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_vertex_attribute_divisor"]
    // Constant-only extension module: no commands, just NAME/SPEC_VERSION re-exports.
    pub mod vertex_attribute_divisor {
        use super::super::*;
        pub use {
            crate::vk::EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME as NAME,
            crate::vk::EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_pipeline_creation_feedback"]
    // Constant-only extension module: no commands, just NAME/SPEC_VERSION re-exports.
    pub mod pipeline_creation_feedback {
        use super::super::*;
        pub use {
            crate::vk::EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME as NAME,
            crate::vk::EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_present_timing"]
    // Generated wrapper module for the extension's device-level commands.
    pub mod present_timing {
        use super::super::*;
        pub use {
            crate::vk::EXT_PRESENT_TIMING_EXTENSION_NAME as NAME,
            crate::vk::EXT_PRESENT_TIMING_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_present_timing device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function pointers for this extension.
            pub(crate) fp: DeviceFn,
            // The VkDevice the pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's device-level commands via vkGetDeviceProcAddr;
            /// unresolved commands become panicking stubs.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Option<fn> -> raw pointer; None becomes null (keeps the stub).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_present_timing device-level function pointers"]
        pub struct DeviceFn {
            pub set_swapchain_present_timing_queue_size_ext:
                PFN_vkSetSwapchainPresentTimingQueueSizeEXT,
            pub get_swapchain_timing_properties_ext: PFN_vkGetSwapchainTimingPropertiesEXT,
            pub get_swapchain_time_domain_properties_ext: PFN_vkGetSwapchainTimeDomainPropertiesEXT,
            pub get_past_presentation_timing_ext: PFN_vkGetPastPresentationTimingEXT,
        }
        // SAFETY: plain function pointers only; safe to send/share across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Builds the table; null lookups keep panicking stubs.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // `dyn FnMut` avoids per-closure monomorphization of the loader body.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    set_swapchain_present_timing_queue_size_ext: unsafe {
                        unsafe extern "system" fn set_swapchain_present_timing_queue_size_ext(
                            _device: crate::vk::Device,
                            _swapchain: SwapchainKHR,
                            _size: u32,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(set_swapchain_present_timing_queue_size_ext)
                            ))
                        }
                        let val = _f(c"vkSetSwapchainPresentTimingQueueSizeEXT");
                        if val.is_null() {
                            set_swapchain_present_timing_queue_size_ext
                        } else {
                            // SAFETY: address resolved for this exact command name.
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkSetSwapchainPresentTimingQueueSizeEXT,
                            >(val)
                        }
                    },
                    get_swapchain_timing_properties_ext: unsafe {
                        unsafe extern "system" fn get_swapchain_timing_properties_ext(
                            _device: crate::vk::Device,
                            _swapchain: SwapchainKHR,
                            _p_swapchain_timing_properties: *mut SwapchainTimingPropertiesEXT<'_>,
                            _p_swapchain_timing_properties_counter: *mut u64,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_swapchain_timing_properties_ext)
                            ))
                        }
                        let val = _f(c"vkGetSwapchainTimingPropertiesEXT");
                        if val.is_null() {
                            get_swapchain_timing_properties_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetSwapchainTimingPropertiesEXT,
                            >(val)
                        }
                    },
                    get_swapchain_time_domain_properties_ext: unsafe {
                        unsafe extern "system" fn get_swapchain_time_domain_properties_ext(
                            _device: crate::vk::Device,
                            _swapchain: SwapchainKHR,
                            _p_swapchain_time_domain_properties : * mut SwapchainTimeDomainPropertiesEXT < '_ >,
                            _p_time_domains_counter: *mut u64,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_swapchain_time_domain_properties_ext)
                            ))
                        }
                        let val = _f(c"vkGetSwapchainTimeDomainPropertiesEXT");
                        if val.is_null() {
                            get_swapchain_time_domain_properties_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetSwapchainTimeDomainPropertiesEXT,
                            >(val)
                        }
                    },
                    get_past_presentation_timing_ext: unsafe {
                        unsafe extern "system" fn get_past_presentation_timing_ext(
                            _device: crate::vk::Device,
                            _p_past_presentation_timing_info: *const PastPresentationTimingInfoEXT<
                                '_,
                            >,
                            _p_past_presentation_timing_properties : * mut PastPresentationTimingPropertiesEXT < '_ >,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_past_presentation_timing_ext)
                            ))
                        }
                        let val = _f(c"vkGetPastPresentationTimingEXT");
                        if val.is_null() {
                            get_past_presentation_timing_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPastPresentationTimingEXT,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_pci_bus_info"]
    // Constant-only extension module: no commands, just NAME/SPEC_VERSION re-exports.
    pub mod pci_bus_info {
        use super::super::*;
        pub use {
            crate::vk::EXT_PCI_BUS_INFO_EXTENSION_NAME as NAME,
            crate::vk::EXT_PCI_BUS_INFO_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_metal_surface"]
    // Generated wrapper module for the extension's instance-level commands.
    pub mod metal_surface {
        use super::super::*;
        pub use {
            crate::vk::EXT_METAL_SURFACE_EXTENSION_NAME as NAME,
            crate::vk::EXT_METAL_SURFACE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_metal_surface instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded function pointers for this extension.
            pub(crate) fp: InstanceFn,
            // The VkInstance the pointers were loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Loads this extension's instance-level commands via
            /// vkGetInstanceProcAddr; unresolved commands become panicking stubs.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // Option<fn> -> raw pointer; None becomes null (keeps the stub).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_metal_surface instance-level function pointers"]
        pub struct InstanceFn {
            pub create_metal_surface_ext: PFN_vkCreateMetalSurfaceEXT,
        }
        // SAFETY: plain function pointers only; safe to send/share across threads.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Builds the table; a null lookup keeps the panicking stub.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_metal_surface_ext: unsafe {
                        unsafe extern "system" fn create_metal_surface_ext(
                            _instance: crate::vk::Instance,
                            _p_create_info: *const MetalSurfaceCreateInfoEXT<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_surface: *mut SurfaceKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_metal_surface_ext)
                            ))
                        }
                        let val = _f(c"vkCreateMetalSurfaceEXT");
                        if val.is_null() {
                            create_metal_surface_ext
                        } else {
                            // SAFETY: address resolved for this exact command name.
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateMetalSurfaceEXT>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_fragment_density_map"]
    // The following are constant-only extension modules: no commands,
    // just NAME/SPEC_VERSION re-exports.
    pub mod fragment_density_map {
        use super::super::*;
        pub use {
            crate::vk::EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME as NAME,
            crate::vk::EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_scalar_block_layout"]
    pub mod scalar_block_layout {
        use super::super::*;
        pub use {
            crate::vk::EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME as NAME,
            crate::vk::EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_subgroup_size_control"]
    pub mod subgroup_size_control {
        use super::super::*;
        pub use {
            crate::vk::EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME as NAME,
            crate::vk::EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_shader_image_atomic_int64"]
    pub mod shader_image_atomic_int64 {
        use super::super::*;
        pub use {
            crate::vk::EXT_SHADER_IMAGE_ATOMIC_INT64_EXTENSION_NAME as NAME,
            crate::vk::EXT_SHADER_IMAGE_ATOMIC_INT64_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_memory_budget"]
    pub mod memory_budget {
        use super::super::*;
        pub use {
            crate::vk::EXT_MEMORY_BUDGET_EXTENSION_NAME as NAME,
            crate::vk::EXT_MEMORY_BUDGET_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_memory_priority"]
    pub mod memory_priority {
        use super::super::*;
        pub use {
            crate::vk::EXT_MEMORY_PRIORITY_EXTENSION_NAME as NAME,
            crate::vk::EXT_MEMORY_PRIORITY_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_buffer_device_address"]
    // Generated wrapper module for the extension's device-level commands.
    pub mod buffer_device_address {
        use super::super::*;
        pub use {
            crate::vk::EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME as NAME,
            crate::vk::EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_buffer_device_address device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function pointers for this extension.
            pub(crate) fp: DeviceFn,
            // The VkDevice the pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's device-level commands via vkGetDeviceProcAddr;
            /// unresolved commands become panicking stubs.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Option<fn> -> raw pointer; None becomes null (keeps the stub).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_buffer_device_address device-level function pointers"]
        pub struct DeviceFn {
            // NOTE(review): un-suffixed PFN type — the command was promoted to core;
            // the generator reuses the core alias since the signature is identical.
            pub get_buffer_device_address_ext: PFN_vkGetBufferDeviceAddress,
        }
        // SAFETY: plain function pointers only; safe to send/share across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Builds the table; a null lookup keeps the panicking stub.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_buffer_device_address_ext: unsafe {
                        unsafe extern "system" fn get_buffer_device_address_ext(
                            _device: crate::vk::Device,
                            _p_info: *const BufferDeviceAddressInfo<'_>,
                        ) -> DeviceAddress {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_buffer_device_address_ext)
                            ))
                        }
                        let val = _f(c"vkGetBufferDeviceAddressEXT");
                        if val.is_null() {
                            get_buffer_device_address_ext
                        } else {
                            // SAFETY: address resolved for this exact command name.
                            ::core::mem::transmute::<*const c_void, PFN_vkGetBufferDeviceAddress>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_tooling_info"]
    // Generated wrapper module for the extension's instance-level commands.
    pub mod tooling_info {
        use super::super::*;
        pub use {
            crate::vk::EXT_TOOLING_INFO_EXTENSION_NAME as NAME,
            crate::vk::EXT_TOOLING_INFO_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_tooling_info instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded function pointers for this extension.
            pub(crate) fp: InstanceFn,
            // The VkInstance the pointers were loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Loads this extension's instance-level commands via
            /// vkGetInstanceProcAddr; unresolved commands become panicking stubs.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // Option<fn> -> raw pointer; None becomes null (keeps the stub).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_tooling_info instance-level function pointers"]
        pub struct InstanceFn {
            // Core-promoted PFN alias; signature matches the EXT command.
            pub get_physical_device_tool_properties_ext: PFN_vkGetPhysicalDeviceToolProperties,
        }
        // SAFETY: plain function pointers only; safe to send/share across threads.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Builds the table; a null lookup keeps the panicking stub.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_physical_device_tool_properties_ext: unsafe {
                        unsafe extern "system" fn get_physical_device_tool_properties_ext(
                            _physical_device: PhysicalDevice,
                            _p_tool_count: *mut u32,
                            _p_tool_properties: *mut PhysicalDeviceToolProperties<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_tool_properties_ext)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceToolPropertiesEXT");
                        if val.is_null() {
                            get_physical_device_tool_properties_ext
                        } else {
                            // SAFETY: address resolved for this exact command name.
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceToolProperties,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_separate_stencil_usage"]
    // The following are constant-only extension modules: no commands,
    // just NAME/SPEC_VERSION re-exports.
    pub mod separate_stencil_usage {
        use super::super::*;
        pub use {
            crate::vk::EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME as NAME,
            crate::vk::EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_validation_features"]
    pub mod validation_features {
        use super::super::*;
        pub use {
            crate::vk::EXT_VALIDATION_FEATURES_EXTENSION_NAME as NAME,
            crate::vk::EXT_VALIDATION_FEATURES_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_fragment_shader_interlock"]
    pub mod fragment_shader_interlock {
        use super::super::*;
        pub use {
            crate::vk::EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME as NAME,
            crate::vk::EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_ycbcr_image_arrays"]
    pub mod ycbcr_image_arrays {
        use super::super::*;
        pub use {
            crate::vk::EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME as NAME,
            crate::vk::EXT_YCBCR_IMAGE_ARRAYS_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_provoking_vertex"]
    pub mod provoking_vertex {
        use super::super::*;
        pub use {
            crate::vk::EXT_PROVOKING_VERTEX_EXTENSION_NAME as NAME,
            crate::vk::EXT_PROVOKING_VERTEX_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_full_screen_exclusive"]
    // Generated wrapper module: this extension has both instance-level and
    // device-level commands, so both loader pairs are emitted.
    pub mod full_screen_exclusive {
        use super::super::*;
        pub use {
            crate::vk::EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME as NAME,
            crate::vk::EXT_FULL_SCREEN_EXCLUSIVE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_full_screen_exclusive instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded function pointers for this extension.
            pub(crate) fp: InstanceFn,
            // The VkInstance the pointers were loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Loads this extension's instance-level commands via
            /// vkGetInstanceProcAddr; unresolved commands become panicking stubs.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // Option<fn> -> raw pointer; None becomes null (keeps the stub).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_full_screen_exclusive instance-level function pointers"]
        pub struct InstanceFn {
            pub get_physical_device_surface_present_modes2_ext:
                PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT,
        }
        // SAFETY: plain function pointers only; safe to send/share across threads.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Builds the table; a null lookup keeps the panicking stub.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_physical_device_surface_present_modes2_ext: unsafe {
                        unsafe extern "system" fn get_physical_device_surface_present_modes2_ext(
                            _physical_device: PhysicalDevice,
                            _p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR<'_>,
                            _p_present_mode_count: *mut u32,
                            _p_present_modes: *mut PresentModeKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_surface_present_modes2_ext)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceSurfacePresentModes2EXT");
                        if val.is_null() {
                            get_physical_device_surface_present_modes2_ext
                        } else {
                            // SAFETY: address resolved for this exact command name.
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT,
                            >(val)
                        }
                    },
                }
            }
        }
        #[doc = "VK_EXT_full_screen_exclusive device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function pointers for this extension.
            pub(crate) fp: DeviceFn,
            // The VkDevice the pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's device-level commands via vkGetDeviceProcAddr;
            /// unresolved commands become panicking stubs.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_full_screen_exclusive device-level function pointers"]
        pub struct DeviceFn {
            pub acquire_full_screen_exclusive_mode_ext: PFN_vkAcquireFullScreenExclusiveModeEXT,
            pub release_full_screen_exclusive_mode_ext: PFN_vkReleaseFullScreenExclusiveModeEXT,
            pub get_device_group_surface_present_modes2_ext:
                PFN_vkGetDeviceGroupSurfacePresentModes2EXT,
        }
        // SAFETY: plain function pointers only; safe to send/share across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Builds the table; null lookups keep panicking stubs.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    acquire_full_screen_exclusive_mode_ext: unsafe {
                        unsafe extern "system" fn acquire_full_screen_exclusive_mode_ext(
                            _device: crate::vk::Device,
                            _swapchain: SwapchainKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(acquire_full_screen_exclusive_mode_ext)
                            ))
                        }
                        let val = _f(c"vkAcquireFullScreenExclusiveModeEXT");
                        if val.is_null() {
                            acquire_full_screen_exclusive_mode_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkAcquireFullScreenExclusiveModeEXT,
                            >(val)
                        }
                    },
                    release_full_screen_exclusive_mode_ext: unsafe {
                        unsafe extern "system" fn release_full_screen_exclusive_mode_ext(
                            _device: crate::vk::Device,
                            _swapchain: SwapchainKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(release_full_screen_exclusive_mode_ext)
                            ))
                        }
                        let val = _f(c"vkReleaseFullScreenExclusiveModeEXT");
                        if val.is_null() {
                            release_full_screen_exclusive_mode_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkReleaseFullScreenExclusiveModeEXT,
                            >(val)
                        }
                    },
                    get_device_group_surface_present_modes2_ext: unsafe {
                        unsafe extern "system" fn get_device_group_surface_present_modes2_ext(
                            _device: crate::vk::Device,
                            _p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR<'_>,
                            _p_modes: *mut DeviceGroupPresentModeFlagsKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_device_group_surface_present_modes2_ext)
                            ))
                        }
                        let val = _f(c"vkGetDeviceGroupSurfacePresentModes2EXT");
                        if val.is_null() {
                            get_device_group_surface_present_modes2_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDeviceGroupSurfacePresentModes2EXT,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_headless_surface"]
    // Generated wrapper module for the extension's instance-level commands.
    pub mod headless_surface {
        use super::super::*;
        pub use {
            crate::vk::EXT_HEADLESS_SURFACE_EXTENSION_NAME as NAME,
            crate::vk::EXT_HEADLESS_SURFACE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_headless_surface instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded function pointers for this extension.
            pub(crate) fp: InstanceFn,
            // The VkInstance the pointers were loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Loads this extension's instance-level commands via
            /// vkGetInstanceProcAddr; unresolved commands become panicking stubs.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // Option<fn> -> raw pointer; None becomes null (keeps the stub).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_headless_surface instance-level function pointers"]
        pub struct InstanceFn {
            pub create_headless_surface_ext: PFN_vkCreateHeadlessSurfaceEXT,
        }
        // SAFETY: plain function pointers only; safe to send/share across threads.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Builds the table; a null lookup keeps the panicking stub.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_headless_surface_ext: unsafe {
                        unsafe extern "system" fn create_headless_surface_ext(
                            _instance: crate::vk::Instance,
                            _p_create_info: *const HeadlessSurfaceCreateInfoEXT<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_surface: *mut SurfaceKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_headless_surface_ext)
                            ))
                        }
                        let val = _f(c"vkCreateHeadlessSurfaceEXT");
                        if val.is_null() {
                            create_headless_surface_ext
                        } else {
                            // SAFETY: address resolved for this exact command name.
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateHeadlessSurfaceEXT>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_line_rasterization"]
    // Generated wrapper module for the extension's device-level commands.
    pub mod line_rasterization {
        use super::super::*;
        pub use {
            crate::vk::EXT_LINE_RASTERIZATION_EXTENSION_NAME as NAME,
            crate::vk::EXT_LINE_RASTERIZATION_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_line_rasterization device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function pointers for this extension.
            pub(crate) fp: DeviceFn,
            // The VkDevice the pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's device-level commands via vkGetDeviceProcAddr;
            /// unresolved commands become panicking stubs.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Option<fn> -> raw pointer; None becomes null (keeps the stub).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_line_rasterization device-level function pointers"]
        pub struct DeviceFn {
            // Core-promoted PFN alias; signature matches the EXT command.
            pub cmd_set_line_stipple_ext: PFN_vkCmdSetLineStipple,
        }
        // SAFETY: plain function pointers only; safe to send/share across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Builds the table; a null lookup keeps the panicking stub.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_set_line_stipple_ext: unsafe {
                        unsafe extern "system" fn cmd_set_line_stipple_ext(
                            _command_buffer: CommandBuffer,
                            _line_stipple_factor: u32,
                            _line_stipple_pattern: u16,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_line_stipple_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetLineStippleEXT");
                        if val.is_null() {
                            cmd_set_line_stipple_ext
                        } else {
                            // SAFETY: address resolved for this exact command name.
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetLineStipple>(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_shader_atomic_float"]
    // Constant-only extension module: no commands, just NAME/SPEC_VERSION re-exports.
    pub mod shader_atomic_float {
        use super::super::*;
        pub use {
            crate::vk::EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME as NAME,
            crate::vk::EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_host_query_reset"]
    // Generated wrapper module for the extension's device-level commands.
    pub mod host_query_reset {
        use super::super::*;
        pub use {
            crate::vk::EXT_HOST_QUERY_RESET_EXTENSION_NAME as NAME,
            crate::vk::EXT_HOST_QUERY_RESET_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_host_query_reset device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function pointers for this extension.
            pub(crate) fp: DeviceFn,
            // The VkDevice the pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's device-level commands via vkGetDeviceProcAddr;
            /// unresolved commands become panicking stubs.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Option<fn> -> raw pointer; None becomes null (keeps the stub).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_host_query_reset device-level function pointers"]
        pub struct DeviceFn {
            // Core-promoted PFN alias; signature matches the EXT command.
            pub reset_query_pool_ext: PFN_vkResetQueryPool,
        }
        // SAFETY: plain function pointers only; safe to send/share across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Builds the table; a null lookup keeps the panicking stub.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    reset_query_pool_ext: unsafe {
                        unsafe extern "system" fn reset_query_pool_ext(
                            _device: crate::vk::Device,
                            _query_pool: QueryPool,
                            _first_query: u32,
                            _query_count: u32,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(reset_query_pool_ext)))
                        }
                        let val = _f(c"vkResetQueryPoolEXT");
                        if val.is_null() {
                            reset_query_pool_ext
                        } else {
                            // SAFETY: address resolved for this exact command name.
                            ::core::mem::transmute::<*const c_void, PFN_vkResetQueryPool>(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_index_type_uint8"]
    // Constant-only extension module: no commands, just NAME/SPEC_VERSION re-exports.
    pub mod index_type_uint8 {
        use super::super::*;
        pub use {
            crate::vk::EXT_INDEX_TYPE_UINT8_EXTENSION_NAME as NAME,
            crate::vk::EXT_INDEX_TYPE_UINT8_SPEC_VERSION as SPEC_VERSION,
        };
    }
    #[doc = "VK_EXT_extended_dynamic_state"]
    pub mod extended_dynamic_state {
        use super::super::*;
        // Extension name / spec-version constants under the conventional aliases.
        pub use {
            crate::vk::EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME as NAME,
            crate::vk::EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_extended_dynamic_state device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Raw device handle the table was loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level entry points for `device`
            // via `vkGetDeviceProcAddr` obtained from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the returned PFN_vkVoidFunction (an Option'd
                    // fn pointer) as a raw pointer so null can be tested.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Access the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // The device handle this table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_extended_dynamic_state device-level function pointers"]
        // NOTE: the EXT commands were promoted to core, so each field uses the
        // core (unsuffixed) PFN alias even though the EXT-suffixed symbol is
        // what gets loaded below.
        pub struct DeviceFn {
            pub cmd_set_cull_mode_ext: PFN_vkCmdSetCullMode,
            pub cmd_set_front_face_ext: PFN_vkCmdSetFrontFace,
            pub cmd_set_primitive_topology_ext: PFN_vkCmdSetPrimitiveTopology,
            pub cmd_set_viewport_with_count_ext: PFN_vkCmdSetViewportWithCount,
            pub cmd_set_scissor_with_count_ext: PFN_vkCmdSetScissorWithCount,
            pub cmd_bind_vertex_buffers2_ext: PFN_vkCmdBindVertexBuffers2,
            pub cmd_set_depth_test_enable_ext: PFN_vkCmdSetDepthTestEnable,
            pub cmd_set_depth_write_enable_ext: PFN_vkCmdSetDepthWriteEnable,
            pub cmd_set_depth_compare_op_ext: PFN_vkCmdSetDepthCompareOp,
            pub cmd_set_depth_bounds_test_enable_ext: PFN_vkCmdSetDepthBoundsTestEnable,
            pub cmd_set_stencil_test_enable_ext: PFN_vkCmdSetStencilTestEnable,
            pub cmd_set_stencil_op_ext: PFN_vkCmdSetStencilOp,
        }
        // SAFETY: the table only holds plain `extern "system"` function
        // pointers, which carry no thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            // Generic front-end; immediately erases `F` so the loader body
            // below is compiled once instead of per closure type.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // For every command: ask the loader for the EXT-suffixed symbol;
            // if it returns null, install a stub that panics with the command
            // name when called, so a missing extension fails loudly at call
            // time rather than by null-pointer jump.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_set_cull_mode_ext: unsafe {
                        unsafe extern "system" fn cmd_set_cull_mode_ext(
                            _command_buffer: CommandBuffer,
                            _cull_mode: CullModeFlags,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_cull_mode_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetCullModeEXT");
                        if val.is_null() {
                            cmd_set_cull_mode_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetCullMode>(val)
                        }
                    },
                    cmd_set_front_face_ext: unsafe {
                        unsafe extern "system" fn cmd_set_front_face_ext(
                            _command_buffer: CommandBuffer,
                            _front_face: FrontFace,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_front_face_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetFrontFaceEXT");
                        if val.is_null() {
                            cmd_set_front_face_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetFrontFace>(val)
                        }
                    },
                    cmd_set_primitive_topology_ext: unsafe {
                        unsafe extern "system" fn cmd_set_primitive_topology_ext(
                            _command_buffer: CommandBuffer,
                            _primitive_topology: PrimitiveTopology,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_primitive_topology_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetPrimitiveTopologyEXT");
                        if val.is_null() {
                            cmd_set_primitive_topology_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetPrimitiveTopology>(
                                val,
                            )
                        }
                    },
                    cmd_set_viewport_with_count_ext: unsafe {
                        unsafe extern "system" fn cmd_set_viewport_with_count_ext(
                            _command_buffer: CommandBuffer,
                            _viewport_count: u32,
                            _p_viewports: *const Viewport,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_viewport_with_count_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetViewportWithCountEXT");
                        if val.is_null() {
                            cmd_set_viewport_with_count_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetViewportWithCount>(
                                val,
                            )
                        }
                    },
                    cmd_set_scissor_with_count_ext: unsafe {
                        unsafe extern "system" fn cmd_set_scissor_with_count_ext(
                            _command_buffer: CommandBuffer,
                            _scissor_count: u32,
                            _p_scissors: *const Rect2D,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_scissor_with_count_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetScissorWithCountEXT");
                        if val.is_null() {
                            cmd_set_scissor_with_count_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetScissorWithCount>(
                                val,
                            )
                        }
                    },
                    cmd_bind_vertex_buffers2_ext: unsafe {
                        unsafe extern "system" fn cmd_bind_vertex_buffers2_ext(
                            _command_buffer: CommandBuffer,
                            _first_binding: u32,
                            _binding_count: u32,
                            _p_buffers: *const Buffer,
                            _p_offsets: *const DeviceSize,
                            _p_sizes: *const DeviceSize,
                            _p_strides: *const DeviceSize,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_bind_vertex_buffers2_ext)
                            ))
                        }
                        let val = _f(c"vkCmdBindVertexBuffers2EXT");
                        if val.is_null() {
                            cmd_bind_vertex_buffers2_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdBindVertexBuffers2>(
                                val,
                            )
                        }
                    },
                    cmd_set_depth_test_enable_ext: unsafe {
                        unsafe extern "system" fn cmd_set_depth_test_enable_ext(
                            _command_buffer: CommandBuffer,
                            _depth_test_enable: Bool32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_depth_test_enable_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetDepthTestEnableEXT");
                        if val.is_null() {
                            cmd_set_depth_test_enable_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetDepthTestEnable>(
                                val,
                            )
                        }
                    },
                    cmd_set_depth_write_enable_ext: unsafe {
                        unsafe extern "system" fn cmd_set_depth_write_enable_ext(
                            _command_buffer: CommandBuffer,
                            _depth_write_enable: Bool32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_depth_write_enable_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetDepthWriteEnableEXT");
                        if val.is_null() {
                            cmd_set_depth_write_enable_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetDepthWriteEnable>(
                                val,
                            )
                        }
                    },
                    cmd_set_depth_compare_op_ext: unsafe {
                        unsafe extern "system" fn cmd_set_depth_compare_op_ext(
                            _command_buffer: CommandBuffer,
                            _depth_compare_op: CompareOp,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_depth_compare_op_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetDepthCompareOpEXT");
                        if val.is_null() {
                            cmd_set_depth_compare_op_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetDepthCompareOp>(val)
                        }
                    },
                    cmd_set_depth_bounds_test_enable_ext: unsafe {
                        unsafe extern "system" fn cmd_set_depth_bounds_test_enable_ext(
                            _command_buffer: CommandBuffer,
                            _depth_bounds_test_enable: Bool32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_depth_bounds_test_enable_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetDepthBoundsTestEnableEXT");
                        if val.is_null() {
                            cmd_set_depth_bounds_test_enable_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetDepthBoundsTestEnable>(
                                val,
                            )
                        }
                    },
                    cmd_set_stencil_test_enable_ext: unsafe {
                        unsafe extern "system" fn cmd_set_stencil_test_enable_ext(
                            _command_buffer: CommandBuffer,
                            _stencil_test_enable: Bool32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_stencil_test_enable_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetStencilTestEnableEXT");
                        if val.is_null() {
                            cmd_set_stencil_test_enable_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetStencilTestEnable>(
                                val,
                            )
                        }
                    },
                    cmd_set_stencil_op_ext: unsafe {
                        unsafe extern "system" fn cmd_set_stencil_op_ext(
                            _command_buffer: CommandBuffer,
                            _face_mask: StencilFaceFlags,
                            _fail_op: StencilOp,
                            _pass_op: StencilOp,
                            _depth_fail_op: StencilOp,
                            _compare_op: CompareOp,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_stencil_op_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetStencilOpEXT");
                        if val.is_null() {
                            cmd_set_stencil_op_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetStencilOp>(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_host_image_copy"]
    pub mod host_image_copy {
        use super::super::*;
        // Extension name / spec-version constants under the conventional aliases.
        pub use {
            crate::vk::EXT_HOST_IMAGE_COPY_EXTENSION_NAME as NAME,
            crate::vk::EXT_HOST_IMAGE_COPY_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_host_image_copy device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Raw device handle the table was loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level entry points for `device`
            // via `vkGetDeviceProcAddr` obtained from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Access the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // The device handle this table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_host_image_copy device-level function pointers"]
        // Fields use the core (unsuffixed) PFN aliases; the EXT-suffixed
        // symbols are what get loaded below (the commands were promoted).
        pub struct DeviceFn {
            pub copy_memory_to_image_ext: PFN_vkCopyMemoryToImage,
            pub copy_image_to_memory_ext: PFN_vkCopyImageToMemory,
            pub copy_image_to_image_ext: PFN_vkCopyImageToImage,
            pub transition_image_layout_ext: PFN_vkTransitionImageLayout,
            pub get_image_subresource_layout2_ext: PFN_vkGetImageSubresourceLayout2,
        }
        // SAFETY: only plain `extern "system"` function pointers are stored.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            // Generic front-end; erases `F` so the loader body compiles once.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Each pointer falls back to a panicking stub when the loader
            // returns null for the EXT-suffixed symbol.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    copy_memory_to_image_ext: unsafe {
                        unsafe extern "system" fn copy_memory_to_image_ext(
                            _device: crate::vk::Device,
                            _p_copy_memory_to_image_info: *const CopyMemoryToImageInfo<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(copy_memory_to_image_ext)
                            ))
                        }
                        let val = _f(c"vkCopyMemoryToImageEXT");
                        if val.is_null() {
                            copy_memory_to_image_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCopyMemoryToImage>(val)
                        }
                    },
                    copy_image_to_memory_ext: unsafe {
                        unsafe extern "system" fn copy_image_to_memory_ext(
                            _device: crate::vk::Device,
                            _p_copy_image_to_memory_info: *const CopyImageToMemoryInfo<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(copy_image_to_memory_ext)
                            ))
                        }
                        let val = _f(c"vkCopyImageToMemoryEXT");
                        if val.is_null() {
                            copy_image_to_memory_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCopyImageToMemory>(val)
                        }
                    },
                    copy_image_to_image_ext: unsafe {
                        unsafe extern "system" fn copy_image_to_image_ext(
                            _device: crate::vk::Device,
                            _p_copy_image_to_image_info: *const CopyImageToImageInfo<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(copy_image_to_image_ext)
                            ))
                        }
                        let val = _f(c"vkCopyImageToImageEXT");
                        if val.is_null() {
                            copy_image_to_image_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCopyImageToImage>(val)
                        }
                    },
                    transition_image_layout_ext: unsafe {
                        unsafe extern "system" fn transition_image_layout_ext(
                            _device: crate::vk::Device,
                            _transition_count: u32,
                            _p_transitions: *const HostImageLayoutTransitionInfo<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(transition_image_layout_ext)
                            ))
                        }
                        let val = _f(c"vkTransitionImageLayoutEXT");
                        if val.is_null() {
                            transition_image_layout_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkTransitionImageLayout>(
                                val,
                            )
                        }
                    },
                    get_image_subresource_layout2_ext: unsafe {
                        unsafe extern "system" fn get_image_subresource_layout2_ext(
                            _device: crate::vk::Device,
                            _image: Image,
                            _p_subresource: *const ImageSubresource2<'_>,
                            _p_layout: *mut SubresourceLayout2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_image_subresource_layout2_ext)
                            ))
                        }
                        let val = _f(c"vkGetImageSubresourceLayout2EXT");
                        if val.is_null() {
                            get_image_subresource_layout2_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetImageSubresourceLayout2>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
5485 #[doc = "VK_EXT_map_memory_placed"]
5486 pub mod map_memory_placed {
5487 use super::super::*;
5488 pub use {
5489 crate::vk::EXT_MAP_MEMORY_PLACED_EXTENSION_NAME as NAME,
5490 crate::vk::EXT_MAP_MEMORY_PLACED_SPEC_VERSION as SPEC_VERSION,
5491 };
5492 }
5493 #[doc = "VK_EXT_shader_atomic_float2"]
5494 pub mod shader_atomic_float2 {
5495 use super::super::*;
5496 pub use {
5497 crate::vk::EXT_SHADER_ATOMIC_FLOAT_2_EXTENSION_NAME as NAME,
5498 crate::vk::EXT_SHADER_ATOMIC_FLOAT_2_SPEC_VERSION as SPEC_VERSION,
5499 };
5500 }
5501 #[doc = "VK_EXT_surface_maintenance1"]
5502 pub mod surface_maintenance1 {
5503 use super::super::*;
5504 pub use {
5505 crate::vk::EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME as NAME,
5506 crate::vk::EXT_SURFACE_MAINTENANCE_1_SPEC_VERSION as SPEC_VERSION,
5507 };
5508 }
    #[doc = "VK_EXT_swapchain_maintenance1"]
    pub mod swapchain_maintenance1 {
        use super::super::*;
        // Extension name / spec-version constants under the conventional aliases.
        pub use {
            crate::vk::EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME as NAME,
            crate::vk::EXT_SWAPCHAIN_MAINTENANCE_1_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_swapchain_maintenance1 device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Raw device handle the table was loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level entry points for `device`
            // via `vkGetDeviceProcAddr` obtained from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Access the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // The device handle this table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_swapchain_maintenance1 device-level function pointers"]
        pub struct DeviceFn {
            // The EXT command shares the KHR function-pointer type; the
            // EXT-suffixed symbol is loaded into it below.
            pub release_swapchain_images_ext: PFN_vkReleaseSwapchainImagesKHR,
        }
        // SAFETY: only plain `extern "system"` function pointers are stored.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            // Generic front-end; erases `F` so the loader body compiles once.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Falls back to a panicking stub when the loader returns null.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    release_swapchain_images_ext: unsafe {
                        unsafe extern "system" fn release_swapchain_images_ext(
                            _device: crate::vk::Device,
                            _p_release_info: *const ReleaseSwapchainImagesInfoKHR<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(release_swapchain_images_ext)
                            ))
                        }
                        let val = _f(c"vkReleaseSwapchainImagesEXT");
                        if val.is_null() {
                            release_swapchain_images_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkReleaseSwapchainImagesKHR>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
5577 #[doc = "VK_EXT_shader_demote_to_helper_invocation"]
5578 pub mod shader_demote_to_helper_invocation {
5579 use super::super::*;
5580 pub use {
5581 crate::vk::EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME as NAME,
5582 crate::vk::EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION as SPEC_VERSION,
5583 };
5584 }
5585 #[doc = "VK_EXT_texel_buffer_alignment"]
5586 pub mod texel_buffer_alignment {
5587 use super::super::*;
5588 pub use {
5589 crate::vk::EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME as NAME,
5590 crate::vk::EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION as SPEC_VERSION,
5591 };
5592 }
    #[doc = "VK_EXT_depth_bias_control"]
    pub mod depth_bias_control {
        use super::super::*;
        // Extension name / spec-version constants under the conventional aliases.
        pub use {
            crate::vk::EXT_DEPTH_BIAS_CONTROL_EXTENSION_NAME as NAME,
            crate::vk::EXT_DEPTH_BIAS_CONTROL_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_depth_bias_control device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Raw device handle the table was loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level entry points for `device`
            // via `vkGetDeviceProcAddr` obtained from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Access the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // The device handle this table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_depth_bias_control device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_set_depth_bias2_ext: PFN_vkCmdSetDepthBias2EXT,
        }
        // SAFETY: only plain `extern "system"` function pointers are stored.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            // Generic front-end; erases `F` so the loader body compiles once.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Falls back to a panicking stub when the loader returns null.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_set_depth_bias2_ext: unsafe {
                        unsafe extern "system" fn cmd_set_depth_bias2_ext(
                            _command_buffer: CommandBuffer,
                            _p_depth_bias_info: *const DepthBiasInfoEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_depth_bias2_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetDepthBias2EXT");
                        if val.is_null() {
                            cmd_set_depth_bias2_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetDepthBias2EXT>(val)
                        }
                    },
                }
            }
        }
    }
5659 #[doc = "VK_EXT_device_memory_report"]
5660 pub mod device_memory_report {
5661 use super::super::*;
5662 pub use {
5663 crate::vk::EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME as NAME,
5664 crate::vk::EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION as SPEC_VERSION,
5665 };
5666 }
    #[doc = "VK_EXT_acquire_drm_display"]
    pub mod acquire_drm_display {
        use super::super::*;
        // Extension name / spec-version constants under the conventional aliases.
        pub use {
            crate::vk::EXT_ACQUIRE_DRM_DISPLAY_EXTENSION_NAME as NAME,
            crate::vk::EXT_ACQUIRE_DRM_DISPLAY_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_acquire_drm_display instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: InstanceFn,
            // Raw instance handle the table was loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            // Resolves this extension's instance-level entry points for
            // `instance` via `vkGetInstanceProcAddr` obtained from `entry`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Access the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            // The instance handle this table was loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_acquire_drm_display instance-level function pointers"]
        pub struct InstanceFn {
            pub acquire_drm_display_ext: PFN_vkAcquireDrmDisplayEXT,
            pub get_drm_display_ext: PFN_vkGetDrmDisplayEXT,
        }
        // SAFETY: only plain `extern "system"` function pointers are stored.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            // Generic front-end; erases `F` so the loader body compiles once.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Each pointer falls back to a panicking stub when the loader
            // returns null for the symbol.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    acquire_drm_display_ext: unsafe {
                        unsafe extern "system" fn acquire_drm_display_ext(
                            _physical_device: PhysicalDevice,
                            _drm_fd: i32,
                            _display: DisplayKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(acquire_drm_display_ext)
                            ))
                        }
                        let val = _f(c"vkAcquireDrmDisplayEXT");
                        if val.is_null() {
                            acquire_drm_display_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkAcquireDrmDisplayEXT>(val)
                        }
                    },
                    get_drm_display_ext: unsafe {
                        unsafe extern "system" fn get_drm_display_ext(
                            _physical_device: PhysicalDevice,
                            _drm_fd: i32,
                            _connector_id: u32,
                            _display: *mut DisplayKHR,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(get_drm_display_ext)))
                        }
                        let val = _f(c"vkGetDrmDisplayEXT");
                        if val.is_null() {
                            get_drm_display_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetDrmDisplayEXT>(val)
                        }
                    },
                }
            }
        }
    }
5751 #[doc = "VK_EXT_robustness2"]
5752 pub mod robustness2 {
5753 use super::super::*;
5754 pub use {
5755 crate::vk::EXT_ROBUSTNESS_2_EXTENSION_NAME as NAME,
5756 crate::vk::EXT_ROBUSTNESS_2_SPEC_VERSION as SPEC_VERSION,
5757 };
5758 }
5759 #[doc = "VK_EXT_custom_border_color"]
5760 pub mod custom_border_color {
5761 use super::super::*;
5762 pub use {
5763 crate::vk::EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME as NAME,
5764 crate::vk::EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION as SPEC_VERSION,
5765 };
5766 }
5767 #[doc = "VK_EXT_texture_compression_astc_3d"]
5768 pub mod texture_compression_astc_3d {
5769 use super::super::*;
5770 pub use {
5771 crate::vk::EXT_TEXTURE_COMPRESSION_ASTC_3D_EXTENSION_NAME as NAME,
5772 crate::vk::EXT_TEXTURE_COMPRESSION_ASTC_3D_SPEC_VERSION as SPEC_VERSION,
5773 };
5774 }
    #[doc = "VK_EXT_private_data"]
    pub mod private_data {
        use super::super::*;
        // Extension name / spec-version constants under the conventional aliases.
        pub use {
            crate::vk::EXT_PRIVATE_DATA_EXTENSION_NAME as NAME,
            crate::vk::EXT_PRIVATE_DATA_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_private_data device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Raw device handle the table was loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level entry points for `device`
            // via `vkGetDeviceProcAddr` obtained from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Access the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // The device handle this table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_private_data device-level function pointers"]
        // Fields use the core (unsuffixed) PFN aliases; the EXT-suffixed
        // symbols are what get loaded below (the commands were promoted).
        pub struct DeviceFn {
            pub create_private_data_slot_ext: PFN_vkCreatePrivateDataSlot,
            pub destroy_private_data_slot_ext: PFN_vkDestroyPrivateDataSlot,
            pub set_private_data_ext: PFN_vkSetPrivateData,
            pub get_private_data_ext: PFN_vkGetPrivateData,
        }
        // SAFETY: only plain `extern "system"` function pointers are stored.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            // Generic front-end; erases `F` so the loader body compiles once.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Each pointer falls back to a panicking stub when the loader
            // returns null for the EXT-suffixed symbol.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_private_data_slot_ext: unsafe {
                        unsafe extern "system" fn create_private_data_slot_ext(
                            _device: crate::vk::Device,
                            _p_create_info: *const PrivateDataSlotCreateInfo<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_private_data_slot: *mut PrivateDataSlot,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_private_data_slot_ext)
                            ))
                        }
                        let val = _f(c"vkCreatePrivateDataSlotEXT");
                        if val.is_null() {
                            create_private_data_slot_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCreatePrivateDataSlot>(
                                val,
                            )
                        }
                    },
                    destroy_private_data_slot_ext: unsafe {
                        unsafe extern "system" fn destroy_private_data_slot_ext(
                            _device: crate::vk::Device,
                            _private_data_slot: PrivateDataSlot,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(destroy_private_data_slot_ext)
                            ))
                        }
                        let val = _f(c"vkDestroyPrivateDataSlotEXT");
                        if val.is_null() {
                            destroy_private_data_slot_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkDestroyPrivateDataSlot>(
                                val,
                            )
                        }
                    },
                    set_private_data_ext: unsafe {
                        unsafe extern "system" fn set_private_data_ext(
                            _device: crate::vk::Device,
                            _object_type: ObjectType,
                            _object_handle: u64,
                            _private_data_slot: PrivateDataSlot,
                            _data: u64,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(set_private_data_ext)))
                        }
                        let val = _f(c"vkSetPrivateDataEXT");
                        if val.is_null() {
                            set_private_data_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkSetPrivateData>(val)
                        }
                    },
                    get_private_data_ext: unsafe {
                        unsafe extern "system" fn get_private_data_ext(
                            _device: crate::vk::Device,
                            _object_type: ObjectType,
                            _object_handle: u64,
                            _private_data_slot: PrivateDataSlot,
                            _p_data: *mut u64,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(get_private_data_ext)))
                        }
                        let val = _f(c"vkGetPrivateDataEXT");
                        if val.is_null() {
                            get_private_data_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetPrivateData>(val)
                        }
                    },
                }
            }
        }
    }
5902 #[doc = "VK_EXT_pipeline_creation_cache_control"]
5903 pub mod pipeline_creation_cache_control {
5904 use super::super::*;
5905 pub use {
5906 crate::vk::EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME as NAME,
5907 crate::vk::EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION as SPEC_VERSION,
5908 };
5909 }
    #[doc = "VK_EXT_metal_objects"]
    pub mod metal_objects {
        use super::super::*;
        // Extension name / spec-version constants under the conventional aliases.
        pub use {
            crate::vk::EXT_METAL_OBJECTS_EXTENSION_NAME as NAME,
            crate::vk::EXT_METAL_OBJECTS_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_metal_objects device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Raw device handle the table was loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level entry points for `device`
            // via `vkGetDeviceProcAddr` obtained from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Access the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // The device handle this table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_metal_objects device-level function pointers"]
        pub struct DeviceFn {
            pub export_metal_objects_ext: PFN_vkExportMetalObjectsEXT,
        }
        // SAFETY: only plain `extern "system"` function pointers are stored.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            // Generic front-end; erases `F` so the loader body compiles once.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Falls back to a panicking stub when the loader returns null.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    export_metal_objects_ext: unsafe {
                        // Note the `*mut` info pointer: the structure chain is
                        // written into by the implementation.
                        unsafe extern "system" fn export_metal_objects_ext(
                            _device: crate::vk::Device,
                            _p_metal_objects_info: *mut ExportMetalObjectsInfoEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(export_metal_objects_ext)
                            ))
                        }
                        let val = _f(c"vkExportMetalObjectsEXT");
                        if val.is_null() {
                            export_metal_objects_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkExportMetalObjectsEXT>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
5978 #[doc = "VK_EXT_descriptor_buffer"]
5979 pub mod descriptor_buffer {
5980 use super::super::*;
5981 pub use {
5982 crate::vk::EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME as NAME,
5983 crate::vk::EXT_DESCRIPTOR_BUFFER_SPEC_VERSION as SPEC_VERSION,
5984 };
5985 #[doc = "VK_EXT_descriptor_buffer device-level functions"]
5986 #[derive(Clone)]
5987 pub struct Device {
5988 pub(crate) fp: DeviceFn,
5989 pub(crate) handle: crate::vk::Device,
5990 }
5991 impl Device {
5992 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
5993 let handle = device.handle();
5994 let fp = DeviceFn::load(|name| unsafe {
5995 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
5996 instance.get_device_proc_addr(handle, name.as_ptr()),
5997 )
5998 });
5999 Self { handle, fp }
6000 }
6001 #[inline]
6002 pub fn fp(&self) -> &DeviceFn {
6003 &self.fp
6004 }
6005 #[inline]
6006 pub fn device(&self) -> crate::vk::Device {
6007 self.handle
6008 }
6009 }
6010 #[derive(Clone)]
6011 #[doc = "Raw VK_EXT_descriptor_buffer device-level function pointers"]
6012 pub struct DeviceFn {
6013 pub get_descriptor_set_layout_size_ext: PFN_vkGetDescriptorSetLayoutSizeEXT,
6014 pub get_descriptor_set_layout_binding_offset_ext:
6015 PFN_vkGetDescriptorSetLayoutBindingOffsetEXT,
6016 pub get_descriptor_ext: PFN_vkGetDescriptorEXT,
6017 pub cmd_bind_descriptor_buffers_ext: PFN_vkCmdBindDescriptorBuffersEXT,
6018 pub cmd_set_descriptor_buffer_offsets_ext: PFN_vkCmdSetDescriptorBufferOffsetsEXT,
6019 pub cmd_bind_descriptor_buffer_embedded_samplers_ext:
6020 PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT,
6021 pub get_buffer_opaque_capture_descriptor_data_ext:
6022 PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT,
6023 pub get_image_opaque_capture_descriptor_data_ext:
6024 PFN_vkGetImageOpaqueCaptureDescriptorDataEXT,
6025 pub get_image_view_opaque_capture_descriptor_data_ext:
6026 PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT,
6027 pub get_sampler_opaque_capture_descriptor_data_ext:
6028 PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT,
6029 pub get_acceleration_structure_opaque_capture_descriptor_data_ext:
6030 PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT,
6031 }
6032 unsafe impl Send for DeviceFn {}
6033 unsafe impl Sync for DeviceFn {}
6034 impl DeviceFn {
6035 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
6036 Self::load_erased(&mut f)
6037 }
6038 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
6039 Self {
6040 get_descriptor_set_layout_size_ext: unsafe {
6041 unsafe extern "system" fn get_descriptor_set_layout_size_ext(
6042 _device: crate::vk::Device,
6043 _layout: DescriptorSetLayout,
6044 _p_layout_size_in_bytes: *mut DeviceSize,
6045 ) {
6046 panic!(concat!(
6047 "Unable to load ",
6048 stringify!(get_descriptor_set_layout_size_ext)
6049 ))
6050 }
6051 let val = _f(c"vkGetDescriptorSetLayoutSizeEXT");
6052 if val.is_null() {
6053 get_descriptor_set_layout_size_ext
6054 } else {
6055 ::core::mem::transmute::<
6056 *const c_void,
6057 PFN_vkGetDescriptorSetLayoutSizeEXT,
6058 >(val)
6059 }
6060 },
6061 get_descriptor_set_layout_binding_offset_ext: unsafe {
6062 unsafe extern "system" fn get_descriptor_set_layout_binding_offset_ext(
6063 _device: crate::vk::Device,
6064 _layout: DescriptorSetLayout,
6065 _binding: u32,
6066 _p_offset: *mut DeviceSize,
6067 ) {
6068 panic!(concat!(
6069 "Unable to load ",
6070 stringify!(get_descriptor_set_layout_binding_offset_ext)
6071 ))
6072 }
6073 let val = _f(c"vkGetDescriptorSetLayoutBindingOffsetEXT");
6074 if val.is_null() {
6075 get_descriptor_set_layout_binding_offset_ext
6076 } else {
6077 ::core::mem::transmute::<
6078 *const c_void,
6079 PFN_vkGetDescriptorSetLayoutBindingOffsetEXT,
6080 >(val)
6081 }
6082 },
6083 get_descriptor_ext: unsafe {
6084 unsafe extern "system" fn get_descriptor_ext(
6085 _device: crate::vk::Device,
6086 _p_descriptor_info: *const DescriptorGetInfoEXT<'_>,
6087 _data_size: usize,
6088 _p_descriptor: *mut c_void,
6089 ) {
6090 panic!(concat!("Unable to load ", stringify!(get_descriptor_ext)))
6091 }
6092 let val = _f(c"vkGetDescriptorEXT");
6093 if val.is_null() {
6094 get_descriptor_ext
6095 } else {
6096 ::core::mem::transmute::<*const c_void, PFN_vkGetDescriptorEXT>(val)
6097 }
6098 },
6099 cmd_bind_descriptor_buffers_ext: unsafe {
6100 unsafe extern "system" fn cmd_bind_descriptor_buffers_ext(
6101 _command_buffer: CommandBuffer,
6102 _buffer_count: u32,
6103 _p_binding_infos: *const DescriptorBufferBindingInfoEXT<'_>,
6104 ) {
6105 panic!(concat!(
6106 "Unable to load ",
6107 stringify!(cmd_bind_descriptor_buffers_ext)
6108 ))
6109 }
6110 let val = _f(c"vkCmdBindDescriptorBuffersEXT");
6111 if val.is_null() {
6112 cmd_bind_descriptor_buffers_ext
6113 } else {
6114 ::core::mem::transmute::<*const c_void, PFN_vkCmdBindDescriptorBuffersEXT>(
6115 val,
6116 )
6117 }
6118 },
6119 cmd_set_descriptor_buffer_offsets_ext: unsafe {
6120 unsafe extern "system" fn cmd_set_descriptor_buffer_offsets_ext(
6121 _command_buffer: CommandBuffer,
6122 _pipeline_bind_point: PipelineBindPoint,
6123 _layout: PipelineLayout,
6124 _first_set: u32,
6125 _set_count: u32,
6126 _p_buffer_indices: *const u32,
6127 _p_offsets: *const DeviceSize,
6128 ) {
6129 panic!(concat!(
6130 "Unable to load ",
6131 stringify!(cmd_set_descriptor_buffer_offsets_ext)
6132 ))
6133 }
6134 let val = _f(c"vkCmdSetDescriptorBufferOffsetsEXT");
6135 if val.is_null() {
6136 cmd_set_descriptor_buffer_offsets_ext
6137 } else {
6138 ::core::mem::transmute::<
6139 *const c_void,
6140 PFN_vkCmdSetDescriptorBufferOffsetsEXT,
6141 >(val)
6142 }
6143 },
6144 cmd_bind_descriptor_buffer_embedded_samplers_ext: unsafe {
6145 unsafe extern "system" fn cmd_bind_descriptor_buffer_embedded_samplers_ext(
6146 _command_buffer: CommandBuffer,
6147 _pipeline_bind_point: PipelineBindPoint,
6148 _layout: PipelineLayout,
6149 _set: u32,
6150 ) {
6151 panic!(concat!(
6152 "Unable to load ",
6153 stringify!(cmd_bind_descriptor_buffer_embedded_samplers_ext)
6154 ))
6155 }
6156 let val = _f(c"vkCmdBindDescriptorBufferEmbeddedSamplersEXT");
6157 if val.is_null() {
6158 cmd_bind_descriptor_buffer_embedded_samplers_ext
6159 } else {
6160 ::core::mem::transmute::<
6161 *const c_void,
6162 PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT,
6163 >(val)
6164 }
6165 },
6166 get_buffer_opaque_capture_descriptor_data_ext: unsafe {
6167 unsafe extern "system" fn get_buffer_opaque_capture_descriptor_data_ext(
6168 _device: crate::vk::Device,
6169 _p_info: *const BufferCaptureDescriptorDataInfoEXT<'_>,
6170 _p_data: *mut c_void,
6171 ) -> Result {
6172 panic!(concat!(
6173 "Unable to load ",
6174 stringify!(get_buffer_opaque_capture_descriptor_data_ext)
6175 ))
6176 }
6177 let val = _f(c"vkGetBufferOpaqueCaptureDescriptorDataEXT");
6178 if val.is_null() {
6179 get_buffer_opaque_capture_descriptor_data_ext
6180 } else {
6181 ::core::mem::transmute::<
6182 *const c_void,
6183 PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT,
6184 >(val)
6185 }
6186 },
6187 get_image_opaque_capture_descriptor_data_ext: unsafe {
6188 unsafe extern "system" fn get_image_opaque_capture_descriptor_data_ext(
6189 _device: crate::vk::Device,
6190 _p_info: *const ImageCaptureDescriptorDataInfoEXT<'_>,
6191 _p_data: *mut c_void,
6192 ) -> Result {
6193 panic!(concat!(
6194 "Unable to load ",
6195 stringify!(get_image_opaque_capture_descriptor_data_ext)
6196 ))
6197 }
6198 let val = _f(c"vkGetImageOpaqueCaptureDescriptorDataEXT");
6199 if val.is_null() {
6200 get_image_opaque_capture_descriptor_data_ext
6201 } else {
6202 ::core::mem::transmute::<
6203 *const c_void,
6204 PFN_vkGetImageOpaqueCaptureDescriptorDataEXT,
6205 >(val)
6206 }
6207 },
6208 get_image_view_opaque_capture_descriptor_data_ext: unsafe {
6209 unsafe extern "system" fn get_image_view_opaque_capture_descriptor_data_ext(
6210 _device: crate::vk::Device,
6211 _p_info: *const ImageViewCaptureDescriptorDataInfoEXT<'_>,
6212 _p_data: *mut c_void,
6213 ) -> Result {
6214 panic!(concat!(
6215 "Unable to load ",
6216 stringify!(get_image_view_opaque_capture_descriptor_data_ext)
6217 ))
6218 }
6219 let val = _f(c"vkGetImageViewOpaqueCaptureDescriptorDataEXT");
6220 if val.is_null() {
6221 get_image_view_opaque_capture_descriptor_data_ext
6222 } else {
6223 ::core::mem::transmute::<
6224 *const c_void,
6225 PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT,
6226 >(val)
6227 }
6228 },
6229 get_sampler_opaque_capture_descriptor_data_ext: unsafe {
6230 unsafe extern "system" fn get_sampler_opaque_capture_descriptor_data_ext(
6231 _device: crate::vk::Device,
6232 _p_info: *const SamplerCaptureDescriptorDataInfoEXT<'_>,
6233 _p_data: *mut c_void,
6234 ) -> Result {
6235 panic!(concat!(
6236 "Unable to load ",
6237 stringify!(get_sampler_opaque_capture_descriptor_data_ext)
6238 ))
6239 }
6240 let val = _f(c"vkGetSamplerOpaqueCaptureDescriptorDataEXT");
6241 if val.is_null() {
6242 get_sampler_opaque_capture_descriptor_data_ext
6243 } else {
6244 ::core::mem::transmute::<
6245 *const c_void,
6246 PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT,
6247 >(val)
6248 }
6249 },
6250 get_acceleration_structure_opaque_capture_descriptor_data_ext: unsafe {
6251 unsafe extern "system" fn get_acceleration_structure_opaque_capture_descriptor_data_ext(
6252 _device: crate::vk::Device,
6253 _p_info: *const AccelerationStructureCaptureDescriptorDataInfoEXT<'_>,
6254 _p_data: *mut c_void,
6255 ) -> Result {
6256 panic!(concat!(
6257 "Unable to load ",
6258 stringify!(
6259 get_acceleration_structure_opaque_capture_descriptor_data_ext
6260 )
6261 ))
6262 }
6263 let val = _f(c"vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT");
6264 if val.is_null() {
6265 get_acceleration_structure_opaque_capture_descriptor_data_ext
6266 } else {
6267 ::core::mem::transmute::<
6268 *const c_void,
6269 PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT,
6270 >(val)
6271 }
6272 },
6273 }
6274 }
6275 }
6276 }
6277 #[doc = "VK_EXT_graphics_pipeline_library"]
6278 pub mod graphics_pipeline_library {
6279 use super::super::*;
6280 pub use {
6281 crate::vk::EXT_GRAPHICS_PIPELINE_LIBRARY_EXTENSION_NAME as NAME,
6282 crate::vk::EXT_GRAPHICS_PIPELINE_LIBRARY_SPEC_VERSION as SPEC_VERSION,
6283 };
6284 }
    #[doc = "VK_EXT_mesh_shader"]
    pub mod mesh_shader {
        use super::super::*;
        pub use {
            crate::vk::EXT_MESH_SHADER_EXTENSION_NAME as NAME,
            crate::vk::EXT_MESH_SHADER_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_mesh_shader device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Resolved function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves the extension's commands through
            // `vkGetDeviceProcAddr`, yielding device-specific pointers.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the nullable `PFN_vkVoidFunction` as a raw
                    // pointer so the loader closure can test it for null.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_mesh_shader device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_draw_mesh_tasks_ext: PFN_vkCmdDrawMeshTasksEXT,
            pub cmd_draw_mesh_tasks_indirect_ext: PFN_vkCmdDrawMeshTasksIndirectEXT,
            pub cmd_draw_mesh_tasks_indirect_count_ext: PFN_vkCmdDrawMeshTasksIndirectCountEXT,
        }
        // All fields are `PFN_*` function pointers, so sharing the table
        // across threads is sound.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader (one instantiation regardless of `F`). Each
            // command is fetched by name; if the loader returns null, a stub
            // that panics on call is installed instead, so a missing command
            // fails loudly at call time rather than at load time.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_draw_mesh_tasks_ext: unsafe {
                        unsafe extern "system" fn cmd_draw_mesh_tasks_ext(
                            _command_buffer: CommandBuffer,
                            _group_count_x: u32,
                            _group_count_y: u32,
                            _group_count_z: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_draw_mesh_tasks_ext)
                            ))
                        }
                        let val = _f(c"vkCmdDrawMeshTasksEXT");
                        if val.is_null() {
                            cmd_draw_mesh_tasks_ext
                        } else {
                            // SAFETY: non-null pointer returned by the loader for
                            // this exact command name; trusted to match the PFN type.
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDrawMeshTasksEXT>(val)
                        }
                    },
                    cmd_draw_mesh_tasks_indirect_ext: unsafe {
                        unsafe extern "system" fn cmd_draw_mesh_tasks_indirect_ext(
                            _command_buffer: CommandBuffer,
                            _buffer: Buffer,
                            _offset: DeviceSize,
                            _draw_count: u32,
                            _stride: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_draw_mesh_tasks_indirect_ext)
                            ))
                        }
                        let val = _f(c"vkCmdDrawMeshTasksIndirectEXT");
                        if val.is_null() {
                            cmd_draw_mesh_tasks_indirect_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDrawMeshTasksIndirectEXT>(
                                val,
                            )
                        }
                    },
                    cmd_draw_mesh_tasks_indirect_count_ext: unsafe {
                        unsafe extern "system" fn cmd_draw_mesh_tasks_indirect_count_ext(
                            _command_buffer: CommandBuffer,
                            _buffer: Buffer,
                            _offset: DeviceSize,
                            _count_buffer: Buffer,
                            _count_buffer_offset: DeviceSize,
                            _max_draw_count: u32,
                            _stride: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_draw_mesh_tasks_indirect_count_ext)
                            ))
                        }
                        let val = _f(c"vkCmdDrawMeshTasksIndirectCountEXT");
                        if val.is_null() {
                            cmd_draw_mesh_tasks_indirect_count_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdDrawMeshTasksIndirectCountEXT,
                            >(val)
                        }
                    },
                }
            }
        }
    }
6402 #[doc = "VK_EXT_ycbcr_2plane_444_formats"]
6403 pub mod ycbcr_2plane_444_formats {
6404 use super::super::*;
6405 pub use {
6406 crate::vk::EXT_YCBCR_2PLANE_444_FORMATS_EXTENSION_NAME as NAME,
6407 crate::vk::EXT_YCBCR_2PLANE_444_FORMATS_SPEC_VERSION as SPEC_VERSION,
6408 };
6409 }
6410 #[doc = "VK_EXT_fragment_density_map2"]
6411 pub mod fragment_density_map2 {
6412 use super::super::*;
6413 pub use {
6414 crate::vk::EXT_FRAGMENT_DENSITY_MAP_2_EXTENSION_NAME as NAME,
6415 crate::vk::EXT_FRAGMENT_DENSITY_MAP_2_SPEC_VERSION as SPEC_VERSION,
6416 };
6417 }
6418 #[doc = "VK_EXT_image_robustness"]
6419 pub mod image_robustness {
6420 use super::super::*;
6421 pub use {
6422 crate::vk::EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME as NAME,
6423 crate::vk::EXT_IMAGE_ROBUSTNESS_SPEC_VERSION as SPEC_VERSION,
6424 };
6425 }
    #[doc = "VK_EXT_image_compression_control"]
    pub mod image_compression_control {
        use super::super::*;
        pub use {
            crate::vk::EXT_IMAGE_COMPRESSION_CONTROL_EXTENSION_NAME as NAME,
            crate::vk::EXT_IMAGE_COMPRESSION_CONTROL_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_image_compression_control device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Resolved function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves the extension's commands through
            // `vkGetDeviceProcAddr`, yielding device-specific pointers.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the nullable `PFN_vkVoidFunction` as a raw
                    // pointer so the loader closure can test it for null.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_image_compression_control device-level function pointers"]
        pub struct DeviceFn {
            // Note: the field reuses the unsuffixed (core) PFN alias even
            // though the `...EXT` entry point is loaded below.
            pub get_image_subresource_layout2_ext: PFN_vkGetImageSubresourceLayout2,
        }
        // All fields are `PFN_*` function pointers, so sharing the table
        // across threads is sound.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader: a null result from `_f` is replaced by a
            // stub that panics on call, so a missing command fails loudly at
            // call time rather than at load time.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_image_subresource_layout2_ext: unsafe {
                        unsafe extern "system" fn get_image_subresource_layout2_ext(
                            _device: crate::vk::Device,
                            _image: Image,
                            _p_subresource: *const ImageSubresource2<'_>,
                            _p_layout: *mut SubresourceLayout2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_image_subresource_layout2_ext)
                            ))
                        }
                        let val = _f(c"vkGetImageSubresourceLayout2EXT");
                        if val.is_null() {
                            get_image_subresource_layout2_ext
                        } else {
                            // SAFETY: non-null pointer returned by the loader for
                            // this exact command name; trusted to match the PFN type.
                            ::core::mem::transmute::<*const c_void, PFN_vkGetImageSubresourceLayout2>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
6496 #[doc = "VK_EXT_attachment_feedback_loop_layout"]
6497 pub mod attachment_feedback_loop_layout {
6498 use super::super::*;
6499 pub use {
6500 crate::vk::EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_EXTENSION_NAME as NAME,
6501 crate::vk::EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_SPEC_VERSION as SPEC_VERSION,
6502 };
6503 }
6504 #[doc = "VK_EXT_4444_formats"]
6505 pub mod _4444_formats {
6506 use super::super::*;
6507 pub use {
6508 crate::vk::EXT_4444_FORMATS_EXTENSION_NAME as NAME,
6509 crate::vk::EXT_4444_FORMATS_SPEC_VERSION as SPEC_VERSION,
6510 };
6511 }
    #[doc = "VK_EXT_device_fault"]
    pub mod device_fault {
        use super::super::*;
        pub use {
            crate::vk::EXT_DEVICE_FAULT_EXTENSION_NAME as NAME,
            crate::vk::EXT_DEVICE_FAULT_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_device_fault device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Resolved function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves the extension's commands through
            // `vkGetDeviceProcAddr`, yielding device-specific pointers.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the nullable `PFN_vkVoidFunction` as a raw
                    // pointer so the loader closure can test it for null.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_device_fault device-level function pointers"]
        pub struct DeviceFn {
            pub get_device_fault_info_ext: PFN_vkGetDeviceFaultInfoEXT,
        }
        // All fields are `PFN_*` function pointers, so sharing the table
        // across threads is sound.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader: a null result from `_f` is replaced by a
            // stub that panics on call, so a missing command fails loudly at
            // call time rather than at load time.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_device_fault_info_ext: unsafe {
                        unsafe extern "system" fn get_device_fault_info_ext(
                            _device: crate::vk::Device,
                            _p_fault_counts: *mut DeviceFaultCountsEXT<'_>,
                            _p_fault_info: *mut DeviceFaultInfoEXT<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_device_fault_info_ext)
                            ))
                        }
                        let val = _f(c"vkGetDeviceFaultInfoEXT");
                        if val.is_null() {
                            get_device_fault_info_ext
                        } else {
                            // SAFETY: non-null pointer returned by the loader for
                            // this exact command name; trusted to match the PFN type.
                            ::core::mem::transmute::<*const c_void, PFN_vkGetDeviceFaultInfoEXT>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
6581 #[doc = "VK_EXT_rgba10x6_formats"]
6582 pub mod rgba10x6_formats {
6583 use super::super::*;
6584 pub use {
6585 crate::vk::EXT_RGBA10X6_FORMATS_EXTENSION_NAME as NAME,
6586 crate::vk::EXT_RGBA10X6_FORMATS_SPEC_VERSION as SPEC_VERSION,
6587 };
6588 }
    #[doc = "VK_EXT_directfb_surface"]
    pub mod directfb_surface {
        use super::super::*;
        pub use {
            crate::vk::EXT_DIRECTFB_SURFACE_EXTENSION_NAME as NAME,
            crate::vk::EXT_DIRECTFB_SURFACE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_directfb_surface instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Resolved function-pointer table for this extension.
            pub(crate) fp: InstanceFn,
            // Raw instance handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            // Instance-level extension: commands are resolved through
            // `vkGetInstanceProcAddr` (from the entry loader), not per-device.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // Reinterpret the nullable `PFN_vkVoidFunction` as a raw
                    // pointer so the loader closure can test it for null.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_directfb_surface instance-level function pointers"]
        pub struct InstanceFn {
            pub create_direct_fb_surface_ext: PFN_vkCreateDirectFBSurfaceEXT,
            pub get_physical_device_direct_fb_presentation_support_ext:
                PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT,
        }
        // All fields are `PFN_*` function pointers, so sharing the table
        // across threads is sound.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader: a null result from `_f` is replaced by a
            // stub that panics on call, so a missing command fails loudly at
            // call time rather than at load time.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_direct_fb_surface_ext: unsafe {
                        unsafe extern "system" fn create_direct_fb_surface_ext(
                            _instance: crate::vk::Instance,
                            _p_create_info: *const DirectFBSurfaceCreateInfoEXT<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_surface: *mut SurfaceKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_direct_fb_surface_ext)
                            ))
                        }
                        let val = _f(c"vkCreateDirectFBSurfaceEXT");
                        if val.is_null() {
                            create_direct_fb_surface_ext
                        } else {
                            // SAFETY: non-null pointer returned by the loader for
                            // this exact command name; trusted to match the PFN type.
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateDirectFBSurfaceEXT>(
                                val,
                            )
                        }
                    },
                    get_physical_device_direct_fb_presentation_support_ext: unsafe {
                        unsafe extern "system" fn get_physical_device_direct_fb_presentation_support_ext(
                            _physical_device: PhysicalDevice,
                            _queue_family_index: u32,
                            _dfb: *mut IDirectFB,
                        ) -> Bool32 {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_direct_fb_presentation_support_ext)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceDirectFBPresentationSupportEXT");
                        if val.is_null() {
                            get_physical_device_direct_fb_presentation_support_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_vertex_input_dynamic_state"]
    pub mod vertex_input_dynamic_state {
        use super::super::*;
        pub use {
            crate::vk::EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME as NAME,
            crate::vk::EXT_VERTEX_INPUT_DYNAMIC_STATE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_vertex_input_dynamic_state device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Resolved function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves the extension's commands through
            // `vkGetDeviceProcAddr`, yielding device-specific pointers.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the nullable `PFN_vkVoidFunction` as a raw
                    // pointer so the loader closure can test it for null.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_vertex_input_dynamic_state device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_set_vertex_input_ext: PFN_vkCmdSetVertexInputEXT,
        }
        // All fields are `PFN_*` function pointers, so sharing the table
        // across threads is sound.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader: a null result from `_f` is replaced by a
            // stub that panics on call, so a missing command fails loudly at
            // call time rather than at load time.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_set_vertex_input_ext: unsafe {
                        unsafe extern "system" fn cmd_set_vertex_input_ext(
                            _command_buffer: CommandBuffer,
                            _vertex_binding_description_count: u32,
                            _p_vertex_binding_descriptions: *const VertexInputBindingDescription2EXT<'_>,
                            _vertex_attribute_description_count: u32,
                            _p_vertex_attribute_descriptions: *const VertexInputAttributeDescription2EXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_vertex_input_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetVertexInputEXT");
                        if val.is_null() {
                            cmd_set_vertex_input_ext
                        } else {
                            // SAFETY: non-null pointer returned by the loader for
                            // this exact command name; trusted to match the PFN type.
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetVertexInputEXT>(val)
                        }
                    },
                }
            }
        }
    }
6751 #[doc = "VK_EXT_physical_device_drm"]
6752 pub mod physical_device_drm {
6753 use super::super::*;
6754 pub use {
6755 crate::vk::EXT_PHYSICAL_DEVICE_DRM_EXTENSION_NAME as NAME,
6756 crate::vk::EXT_PHYSICAL_DEVICE_DRM_SPEC_VERSION as SPEC_VERSION,
6757 };
6758 }
6759 #[doc = "VK_EXT_device_address_binding_report"]
6760 pub mod device_address_binding_report {
6761 use super::super::*;
6762 pub use {
6763 crate::vk::EXT_DEVICE_ADDRESS_BINDING_REPORT_EXTENSION_NAME as NAME,
6764 crate::vk::EXT_DEVICE_ADDRESS_BINDING_REPORT_SPEC_VERSION as SPEC_VERSION,
6765 };
6766 }
6767 #[doc = "VK_EXT_depth_clip_control"]
6768 pub mod depth_clip_control {
6769 use super::super::*;
6770 pub use {
6771 crate::vk::EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME as NAME,
6772 crate::vk::EXT_DEPTH_CLIP_CONTROL_SPEC_VERSION as SPEC_VERSION,
6773 };
6774 }
6775 #[doc = "VK_EXT_primitive_topology_list_restart"]
6776 pub mod primitive_topology_list_restart {
6777 use super::super::*;
6778 pub use {
6779 crate::vk::EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_EXTENSION_NAME as NAME,
6780 crate::vk::EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_SPEC_VERSION as SPEC_VERSION,
6781 };
6782 }
6783 #[doc = "VK_EXT_present_mode_fifo_latest_ready"]
6784 pub mod present_mode_fifo_latest_ready {
6785 use super::super::*;
6786 pub use {
6787 crate::vk::EXT_PRESENT_MODE_FIFO_LATEST_READY_EXTENSION_NAME as NAME,
6788 crate::vk::EXT_PRESENT_MODE_FIFO_LATEST_READY_SPEC_VERSION as SPEC_VERSION,
6789 };
6790 }
    #[doc = "VK_EXT_pipeline_properties"]
    pub mod pipeline_properties {
        use super::super::*;
        pub use {
            crate::vk::EXT_PIPELINE_PROPERTIES_EXTENSION_NAME as NAME,
            crate::vk::EXT_PIPELINE_PROPERTIES_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_pipeline_properties device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Resolved function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves the extension's commands through
            // `vkGetDeviceProcAddr`, yielding device-specific pointers.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the nullable `PFN_vkVoidFunction` as a raw
                    // pointer so the loader closure can test it for null.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[allow(non_camel_case_types)]
        #[doc = "Implemented for all types that can be passed as argument to `pipeline_properties` in [`PFN_vkGetPipelinePropertiesEXT`]"]
        // Marker trait: restricts which structures may be written through the
        // `p_pipeline_properties` out-parameter.
        pub unsafe trait GetPipelinePropertiesEXTParamPipelineProperties {}
        unsafe impl GetPipelinePropertiesEXTParamPipelineProperties
            for PipelinePropertiesIdentifierEXT<'_>
        {
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_pipeline_properties device-level function pointers"]
        pub struct DeviceFn {
            pub get_pipeline_properties_ext: PFN_vkGetPipelinePropertiesEXT,
        }
        // All fields are `PFN_*` function pointers, so sharing the table
        // across threads is sound.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader: a null result from `_f` is replaced by a
            // stub that panics on call, so a missing command fails loudly at
            // call time rather than at load time.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_pipeline_properties_ext: unsafe {
                        unsafe extern "system" fn get_pipeline_properties_ext(
                            _device: crate::vk::Device,
                            _p_pipeline_info: *const PipelineInfoEXT<'_>,
                            _p_pipeline_properties: *mut BaseOutStructure<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_pipeline_properties_ext)
                            ))
                        }
                        let val = _f(c"vkGetPipelinePropertiesEXT");
                        if val.is_null() {
                            get_pipeline_properties_ext
                        } else {
                            // SAFETY: non-null pointer returned by the loader for
                            // this exact command name; trusted to match the PFN type.
                            ::core::mem::transmute::<*const c_void, PFN_vkGetPipelinePropertiesEXT>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
6867 #[doc = "VK_EXT_frame_boundary"]
6868 pub mod frame_boundary {
6869 use super::super::*;
6870 pub use {
6871 crate::vk::EXT_FRAME_BOUNDARY_EXTENSION_NAME as NAME,
6872 crate::vk::EXT_FRAME_BOUNDARY_SPEC_VERSION as SPEC_VERSION,
6873 };
6874 }
6875 #[doc = "VK_EXT_multisampled_render_to_single_sampled"]
6876 pub mod multisampled_render_to_single_sampled {
6877 use super::super::*;
6878 pub use {
6879 crate::vk::EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXTENSION_NAME as NAME,
6880 crate::vk::EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_SPEC_VERSION as SPEC_VERSION,
6881 };
6882 }
    #[doc = "VK_EXT_extended_dynamic_state2"]
    pub mod extended_dynamic_state2 {
        use super::super::*;
        pub use {
            crate::vk::EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME as NAME,
            crate::vk::EXT_EXTENDED_DYNAMIC_STATE_2_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_extended_dynamic_state2 device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Resolved function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves the extension's commands through
            // `vkGetDeviceProcAddr`, yielding device-specific pointers.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the nullable `PFN_vkVoidFunction` as a raw
                    // pointer so the loader closure can test it for null.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_extended_dynamic_state2 device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_set_patch_control_points_ext: PFN_vkCmdSetPatchControlPointsEXT,
            // Note: some fields reuse the unsuffixed (core) PFN aliases even
            // though the `...EXT` entry points are loaded below.
            pub cmd_set_rasterizer_discard_enable_ext: PFN_vkCmdSetRasterizerDiscardEnable,
            pub cmd_set_depth_bias_enable_ext: PFN_vkCmdSetDepthBiasEnable,
            pub cmd_set_logic_op_ext: PFN_vkCmdSetLogicOpEXT,
            pub cmd_set_primitive_restart_enable_ext: PFN_vkCmdSetPrimitiveRestartEnable,
        }
        // All fields are `PFN_*` function pointers, so sharing the table
        // across threads is sound.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader: a null result from `_f` is replaced by a
            // stub that panics on call, so a missing command fails loudly at
            // call time rather than at load time.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_set_patch_control_points_ext: unsafe {
                        unsafe extern "system" fn cmd_set_patch_control_points_ext(
                            _command_buffer: CommandBuffer,
                            _patch_control_points: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_patch_control_points_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetPatchControlPointsEXT");
                        if val.is_null() {
                            cmd_set_patch_control_points_ext
                        } else {
                            // SAFETY: non-null pointer returned by the loader for
                            // this exact command name; trusted to match the PFN type.
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetPatchControlPointsEXT>(
                                val,
                            )
                        }
                    },
                    cmd_set_rasterizer_discard_enable_ext: unsafe {
                        unsafe extern "system" fn cmd_set_rasterizer_discard_enable_ext(
                            _command_buffer: CommandBuffer,
                            _rasterizer_discard_enable: Bool32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_rasterizer_discard_enable_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetRasterizerDiscardEnableEXT");
                        if val.is_null() {
                            cmd_set_rasterizer_discard_enable_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdSetRasterizerDiscardEnable,
                            >(val)
                        }
                    },
                    cmd_set_depth_bias_enable_ext: unsafe {
                        unsafe extern "system" fn cmd_set_depth_bias_enable_ext(
                            _command_buffer: CommandBuffer,
                            _depth_bias_enable: Bool32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_depth_bias_enable_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetDepthBiasEnableEXT");
                        if val.is_null() {
                            cmd_set_depth_bias_enable_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetDepthBiasEnable>(
                                val,
                            )
                        }
                    },
                    cmd_set_logic_op_ext: unsafe {
                        unsafe extern "system" fn cmd_set_logic_op_ext(
                            _command_buffer: CommandBuffer,
                            _logic_op: LogicOp,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(cmd_set_logic_op_ext)))
                        }
                        let val = _f(c"vkCmdSetLogicOpEXT");
                        if val.is_null() {
                            cmd_set_logic_op_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetLogicOpEXT>(val)
                        }
                    },
                    cmd_set_primitive_restart_enable_ext: unsafe {
                        unsafe extern "system" fn cmd_set_primitive_restart_enable_ext(
                            _command_buffer: CommandBuffer,
                            _primitive_restart_enable: Bool32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_primitive_restart_enable_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetPrimitiveRestartEnableEXT");
                        if val.is_null() {
                            cmd_set_primitive_restart_enable_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdSetPrimitiveRestartEnable,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_color_write_enable"]
    pub mod color_write_enable {
        use super::super::*;
        pub use {
            crate::vk::EXT_COLOR_WRITE_ENABLE_EXTENSION_NAME as NAME,
            crate::vk::EXT_COLOR_WRITE_ENABLE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_color_write_enable device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Resolved function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves the extension's commands through
            // `vkGetDeviceProcAddr`, yielding device-specific pointers.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the nullable `PFN_vkVoidFunction` as a raw
                    // pointer so the loader closure can test it for null.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_color_write_enable device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_set_color_write_enable_ext: PFN_vkCmdSetColorWriteEnableEXT,
        }
        // All fields are `PFN_*` function pointers, so sharing the table
        // across threads is sound.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader: a null result from `_f` is replaced by a
            // stub that panics on call, so a missing command fails loudly at
            // call time rather than at load time.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_set_color_write_enable_ext: unsafe {
                        unsafe extern "system" fn cmd_set_color_write_enable_ext(
                            _command_buffer: CommandBuffer,
                            _attachment_count: u32,
                            _p_color_write_enables: *const Bool32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_color_write_enable_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetColorWriteEnableEXT");
                        if val.is_null() {
                            cmd_set_color_write_enable_ext
                        } else {
                            // SAFETY: non-null pointer returned by the loader for
                            // this exact command name; trusted to match the PFN type.
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetColorWriteEnableEXT>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
7097 #[doc = "VK_EXT_primitives_generated_query"]
7098 pub mod primitives_generated_query {
7099 use super::super::*;
7100 pub use {
7101 crate::vk::EXT_PRIMITIVES_GENERATED_QUERY_EXTENSION_NAME as NAME,
7102 crate::vk::EXT_PRIMITIVES_GENERATED_QUERY_SPEC_VERSION as SPEC_VERSION,
7103 };
7104 }
7105 #[doc = "VK_EXT_global_priority_query"]
7106 pub mod global_priority_query {
7107 use super::super::*;
7108 pub use {
7109 crate::vk::EXT_GLOBAL_PRIORITY_QUERY_EXTENSION_NAME as NAME,
7110 crate::vk::EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION as SPEC_VERSION,
7111 };
7112 }
7113 #[doc = "VK_EXT_image_view_min_lod"]
7114 pub mod image_view_min_lod {
7115 use super::super::*;
7116 pub use {
7117 crate::vk::EXT_IMAGE_VIEW_MIN_LOD_EXTENSION_NAME as NAME,
7118 crate::vk::EXT_IMAGE_VIEW_MIN_LOD_SPEC_VERSION as SPEC_VERSION,
7119 };
7120 }
    #[doc = "VK_EXT_multi_draw"]
    pub mod multi_draw {
        use super::super::*;
        pub use {
            crate::vk::EXT_MULTI_DRAW_EXTENSION_NAME as NAME,
            crate::vk::EXT_MULTI_DRAW_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_multi_draw device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Table of resolved extension entry points.
            pub(crate) fp: DeviceFn,
            // The VkDevice the table was resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves `vkCmdDrawMultiEXT` / `vkCmdDrawMultiIndexedEXT` for
            /// `device` via `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                // Cast the loader's PFN_vkVoidFunction result to a plain
                // pointer so DeviceFn::load can null-check it uniformly.
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Returns the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Returns the `VkDevice` handle the pointers were loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_multi_draw device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_draw_multi_ext: PFN_vkCmdDrawMultiEXT,
            pub cmd_draw_multi_indexed_ext: PFN_vkCmdDrawMultiIndexedEXT,
        }
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Resolves each symbol through `f`; a symbol the loader cannot
            /// find is replaced by a stub that panics when called.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased worker: the generic `load` forwards here through
            // `dyn FnMut`. Each entry defines a panicking placeholder, asks
            // the loader for the symbol, and transmutes the returned pointer
            // to the matching PFN type on success.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_draw_multi_ext: unsafe {
                        // Placeholder: called only if the symbol was missing.
                        unsafe extern "system" fn cmd_draw_multi_ext(
                            _command_buffer: CommandBuffer,
                            _draw_count: u32,
                            _p_vertex_info: *const MultiDrawInfoEXT,
                            _instance_count: u32,
                            _first_instance: u32,
                            _stride: u32,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(cmd_draw_multi_ext)))
                        }
                        let val = _f(c"vkCmdDrawMultiEXT");
                        if val.is_null() {
                            cmd_draw_multi_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDrawMultiEXT>(val)
                        }
                    },
                    cmd_draw_multi_indexed_ext: unsafe {
                        // Placeholder: called only if the symbol was missing.
                        unsafe extern "system" fn cmd_draw_multi_indexed_ext(
                            _command_buffer: CommandBuffer,
                            _draw_count: u32,
                            _p_index_info: *const MultiDrawIndexedInfoEXT,
                            _instance_count: u32,
                            _first_instance: u32,
                            _stride: u32,
                            _p_vertex_offset: *const i32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_draw_multi_indexed_ext)
                            ))
                        }
                        let val = _f(c"vkCmdDrawMultiIndexedEXT");
                        if val.is_null() {
                            cmd_draw_multi_indexed_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDrawMultiIndexedEXT>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
7213 #[doc = "VK_EXT_image_2d_view_of_3d"]
7214 pub mod image_2d_view_of_3d {
7215 use super::super::*;
7216 pub use {
7217 crate::vk::EXT_IMAGE_2D_VIEW_OF_3D_EXTENSION_NAME as NAME,
7218 crate::vk::EXT_IMAGE_2D_VIEW_OF_3D_SPEC_VERSION as SPEC_VERSION,
7219 };
7220 }
7221 #[doc = "VK_EXT_shader_tile_image"]
7222 pub mod shader_tile_image {
7223 use super::super::*;
7224 pub use {
7225 crate::vk::EXT_SHADER_TILE_IMAGE_EXTENSION_NAME as NAME,
7226 crate::vk::EXT_SHADER_TILE_IMAGE_SPEC_VERSION as SPEC_VERSION,
7227 };
7228 }
    #[doc = "VK_EXT_opacity_micromap"]
    pub mod opacity_micromap {
        use super::super::*;
        pub use {
            crate::vk::EXT_OPACITY_MICROMAP_EXTENSION_NAME as NAME,
            crate::vk::EXT_OPACITY_MICROMAP_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_opacity_micromap device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Table of resolved extension entry points.
            pub(crate) fp: DeviceFn,
            // The VkDevice the table was resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves all `VK_EXT_opacity_micromap` device-level entry
            /// points for `device` via `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                // Cast the loader's PFN_vkVoidFunction result to a plain
                // pointer so DeviceFn::load can null-check it uniformly.
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Returns the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Returns the `VkDevice` handle the pointers were loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_opacity_micromap device-level function pointers"]
        pub struct DeviceFn {
            pub create_micromap_ext: PFN_vkCreateMicromapEXT,
            pub destroy_micromap_ext: PFN_vkDestroyMicromapEXT,
            pub cmd_build_micromaps_ext: PFN_vkCmdBuildMicromapsEXT,
            pub build_micromaps_ext: PFN_vkBuildMicromapsEXT,
            pub copy_micromap_ext: PFN_vkCopyMicromapEXT,
            pub copy_micromap_to_memory_ext: PFN_vkCopyMicromapToMemoryEXT,
            pub copy_memory_to_micromap_ext: PFN_vkCopyMemoryToMicromapEXT,
            pub write_micromaps_properties_ext: PFN_vkWriteMicromapsPropertiesEXT,
            pub cmd_copy_micromap_ext: PFN_vkCmdCopyMicromapEXT,
            pub cmd_copy_micromap_to_memory_ext: PFN_vkCmdCopyMicromapToMemoryEXT,
            pub cmd_copy_memory_to_micromap_ext: PFN_vkCmdCopyMemoryToMicromapEXT,
            pub cmd_write_micromaps_properties_ext: PFN_vkCmdWriteMicromapsPropertiesEXT,
            pub get_device_micromap_compatibility_ext: PFN_vkGetDeviceMicromapCompatibilityEXT,
            pub get_micromap_build_sizes_ext: PFN_vkGetMicromapBuildSizesEXT,
        }
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Resolves each symbol through `f`; a symbol the loader cannot
            /// find is replaced by a stub that panics when called.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased worker: the generic `load` forwards here through
            // `dyn FnMut`. Each entry follows the same pattern: define a
            // panicking placeholder with the exact PFN signature, ask the
            // loader for the symbol, and transmute the returned pointer to
            // the matching PFN type on success.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_micromap_ext: unsafe {
                        unsafe extern "system" fn create_micromap_ext(
                            _device: crate::vk::Device,
                            _p_create_info: *const MicromapCreateInfoEXT<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_micromap: *mut MicromapEXT,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(create_micromap_ext)))
                        }
                        let val = _f(c"vkCreateMicromapEXT");
                        if val.is_null() {
                            create_micromap_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateMicromapEXT>(val)
                        }
                    },
                    destroy_micromap_ext: unsafe {
                        unsafe extern "system" fn destroy_micromap_ext(
                            _device: crate::vk::Device,
                            _micromap: MicromapEXT,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(destroy_micromap_ext)))
                        }
                        let val = _f(c"vkDestroyMicromapEXT");
                        if val.is_null() {
                            destroy_micromap_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkDestroyMicromapEXT>(val)
                        }
                    },
                    cmd_build_micromaps_ext: unsafe {
                        unsafe extern "system" fn cmd_build_micromaps_ext(
                            _command_buffer: CommandBuffer,
                            _info_count: u32,
                            _p_infos: *const MicromapBuildInfoEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_build_micromaps_ext)
                            ))
                        }
                        let val = _f(c"vkCmdBuildMicromapsEXT");
                        if val.is_null() {
                            cmd_build_micromaps_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdBuildMicromapsEXT>(val)
                        }
                    },
                    build_micromaps_ext: unsafe {
                        unsafe extern "system" fn build_micromaps_ext(
                            _device: crate::vk::Device,
                            _deferred_operation: DeferredOperationKHR,
                            _info_count: u32,
                            _p_infos: *const MicromapBuildInfoEXT<'_>,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(build_micromaps_ext)))
                        }
                        let val = _f(c"vkBuildMicromapsEXT");
                        if val.is_null() {
                            build_micromaps_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkBuildMicromapsEXT>(val)
                        }
                    },
                    copy_micromap_ext: unsafe {
                        unsafe extern "system" fn copy_micromap_ext(
                            _device: crate::vk::Device,
                            _deferred_operation: DeferredOperationKHR,
                            _p_info: *const CopyMicromapInfoEXT<'_>,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(copy_micromap_ext)))
                        }
                        let val = _f(c"vkCopyMicromapEXT");
                        if val.is_null() {
                            copy_micromap_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCopyMicromapEXT>(val)
                        }
                    },
                    copy_micromap_to_memory_ext: unsafe {
                        unsafe extern "system" fn copy_micromap_to_memory_ext(
                            _device: crate::vk::Device,
                            _deferred_operation: DeferredOperationKHR,
                            _p_info: *const CopyMicromapToMemoryInfoEXT<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(copy_micromap_to_memory_ext)
                            ))
                        }
                        let val = _f(c"vkCopyMicromapToMemoryEXT");
                        if val.is_null() {
                            copy_micromap_to_memory_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCopyMicromapToMemoryEXT>(
                                val,
                            )
                        }
                    },
                    copy_memory_to_micromap_ext: unsafe {
                        unsafe extern "system" fn copy_memory_to_micromap_ext(
                            _device: crate::vk::Device,
                            _deferred_operation: DeferredOperationKHR,
                            _p_info: *const CopyMemoryToMicromapInfoEXT<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(copy_memory_to_micromap_ext)
                            ))
                        }
                        let val = _f(c"vkCopyMemoryToMicromapEXT");
                        if val.is_null() {
                            copy_memory_to_micromap_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCopyMemoryToMicromapEXT>(
                                val,
                            )
                        }
                    },
                    write_micromaps_properties_ext: unsafe {
                        unsafe extern "system" fn write_micromaps_properties_ext(
                            _device: crate::vk::Device,
                            _micromap_count: u32,
                            _p_micromaps: *const MicromapEXT,
                            _query_type: QueryType,
                            _data_size: usize,
                            _p_data: *mut c_void,
                            _stride: usize,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(write_micromaps_properties_ext)
                            ))
                        }
                        let val = _f(c"vkWriteMicromapsPropertiesEXT");
                        if val.is_null() {
                            write_micromaps_properties_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkWriteMicromapsPropertiesEXT>(
                                val,
                            )
                        }
                    },
                    cmd_copy_micromap_ext: unsafe {
                        unsafe extern "system" fn cmd_copy_micromap_ext(
                            _command_buffer: CommandBuffer,
                            _p_info: *const CopyMicromapInfoEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_copy_micromap_ext)
                            ))
                        }
                        let val = _f(c"vkCmdCopyMicromapEXT");
                        if val.is_null() {
                            cmd_copy_micromap_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdCopyMicromapEXT>(val)
                        }
                    },
                    cmd_copy_micromap_to_memory_ext: unsafe {
                        unsafe extern "system" fn cmd_copy_micromap_to_memory_ext(
                            _command_buffer: CommandBuffer,
                            _p_info: *const CopyMicromapToMemoryInfoEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_copy_micromap_to_memory_ext)
                            ))
                        }
                        let val = _f(c"vkCmdCopyMicromapToMemoryEXT");
                        if val.is_null() {
                            cmd_copy_micromap_to_memory_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdCopyMicromapToMemoryEXT>(
                                val,
                            )
                        }
                    },
                    cmd_copy_memory_to_micromap_ext: unsafe {
                        unsafe extern "system" fn cmd_copy_memory_to_micromap_ext(
                            _command_buffer: CommandBuffer,
                            _p_info: *const CopyMemoryToMicromapInfoEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_copy_memory_to_micromap_ext)
                            ))
                        }
                        let val = _f(c"vkCmdCopyMemoryToMicromapEXT");
                        if val.is_null() {
                            cmd_copy_memory_to_micromap_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdCopyMemoryToMicromapEXT>(
                                val,
                            )
                        }
                    },
                    cmd_write_micromaps_properties_ext: unsafe {
                        unsafe extern "system" fn cmd_write_micromaps_properties_ext(
                            _command_buffer: CommandBuffer,
                            _micromap_count: u32,
                            _p_micromaps: *const MicromapEXT,
                            _query_type: QueryType,
                            _query_pool: QueryPool,
                            _first_query: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_write_micromaps_properties_ext)
                            ))
                        }
                        let val = _f(c"vkCmdWriteMicromapsPropertiesEXT");
                        if val.is_null() {
                            cmd_write_micromaps_properties_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdWriteMicromapsPropertiesEXT,
                            >(val)
                        }
                    },
                    get_device_micromap_compatibility_ext: unsafe {
                        unsafe extern "system" fn get_device_micromap_compatibility_ext(
                            _device: crate::vk::Device,
                            _p_version_info: *const MicromapVersionInfoEXT<'_>,
                            _p_compatibility: *mut AccelerationStructureCompatibilityKHR,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_device_micromap_compatibility_ext)
                            ))
                        }
                        let val = _f(c"vkGetDeviceMicromapCompatibilityEXT");
                        if val.is_null() {
                            get_device_micromap_compatibility_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDeviceMicromapCompatibilityEXT,
                            >(val)
                        }
                    },
                    get_micromap_build_sizes_ext: unsafe {
                        unsafe extern "system" fn get_micromap_build_sizes_ext(
                            _device: crate::vk::Device,
                            _build_type: AccelerationStructureBuildTypeKHR,
                            _p_build_info: *const MicromapBuildInfoEXT<'_>,
                            _p_size_info: *mut MicromapBuildSizesInfoEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_micromap_build_sizes_ext)
                            ))
                        }
                        let val = _f(c"vkGetMicromapBuildSizesEXT");
                        if val.is_null() {
                            get_micromap_build_sizes_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetMicromapBuildSizesEXT>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
7556 #[doc = "VK_EXT_load_store_op_none"]
7557 pub mod load_store_op_none {
7558 use super::super::*;
7559 pub use {
7560 crate::vk::EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME as NAME,
7561 crate::vk::EXT_LOAD_STORE_OP_NONE_SPEC_VERSION as SPEC_VERSION,
7562 };
7563 }
7564 #[doc = "VK_EXT_border_color_swizzle"]
7565 pub mod border_color_swizzle {
7566 use super::super::*;
7567 pub use {
7568 crate::vk::EXT_BORDER_COLOR_SWIZZLE_EXTENSION_NAME as NAME,
7569 crate::vk::EXT_BORDER_COLOR_SWIZZLE_SPEC_VERSION as SPEC_VERSION,
7570 };
7571 }
7572 #[doc = "VK_EXT_pageable_device_local_memory"]
7573 pub mod pageable_device_local_memory {
7574 use super::super::*;
7575 pub use {
7576 crate::vk::EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_EXTENSION_NAME as NAME,
7577 crate::vk::EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_SPEC_VERSION as SPEC_VERSION,
7578 };
7579 #[doc = "VK_EXT_pageable_device_local_memory device-level functions"]
7580 #[derive(Clone)]
7581 pub struct Device {
7582 pub(crate) fp: DeviceFn,
7583 pub(crate) handle: crate::vk::Device,
7584 }
7585 impl Device {
7586 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
7587 let handle = device.handle();
7588 let fp = DeviceFn::load(|name| unsafe {
7589 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
7590 instance.get_device_proc_addr(handle, name.as_ptr()),
7591 )
7592 });
7593 Self { handle, fp }
7594 }
7595 #[inline]
7596 pub fn fp(&self) -> &DeviceFn {
7597 &self.fp
7598 }
7599 #[inline]
7600 pub fn device(&self) -> crate::vk::Device {
7601 self.handle
7602 }
7603 }
7604 #[derive(Clone)]
7605 #[doc = "Raw VK_EXT_pageable_device_local_memory device-level function pointers"]
7606 pub struct DeviceFn {
7607 pub set_device_memory_priority_ext: PFN_vkSetDeviceMemoryPriorityEXT,
7608 }
7609 unsafe impl Send for DeviceFn {}
7610 unsafe impl Sync for DeviceFn {}
7611 impl DeviceFn {
7612 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
7613 Self::load_erased(&mut f)
7614 }
7615 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
7616 Self {
7617 set_device_memory_priority_ext: unsafe {
7618 unsafe extern "system" fn set_device_memory_priority_ext(
7619 _device: crate::vk::Device,
7620 _memory: DeviceMemory,
7621 _priority: f32,
7622 ) {
7623 panic!(concat!(
7624 "Unable to load ",
7625 stringify!(set_device_memory_priority_ext)
7626 ))
7627 }
7628 let val = _f(c"vkSetDeviceMemoryPriorityEXT");
7629 if val.is_null() {
7630 set_device_memory_priority_ext
7631 } else {
7632 ::core::mem::transmute::<*const c_void, PFN_vkSetDeviceMemoryPriorityEXT>(
7633 val,
7634 )
7635 }
7636 },
7637 }
7638 }
7639 }
7640 }
7641 #[doc = "VK_EXT_image_sliced_view_of_3d"]
7642 pub mod image_sliced_view_of_3d {
7643 use super::super::*;
7644 pub use {
7645 crate::vk::EXT_IMAGE_SLICED_VIEW_OF_3D_EXTENSION_NAME as NAME,
7646 crate::vk::EXT_IMAGE_SLICED_VIEW_OF_3D_SPEC_VERSION as SPEC_VERSION,
7647 };
7648 }
7649 #[doc = "VK_EXT_depth_clamp_zero_one"]
7650 pub mod depth_clamp_zero_one {
7651 use super::super::*;
7652 pub use {
7653 crate::vk::EXT_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME as NAME,
7654 crate::vk::EXT_DEPTH_CLAMP_ZERO_ONE_SPEC_VERSION as SPEC_VERSION,
7655 };
7656 }
7657 #[doc = "VK_EXT_non_seamless_cube_map"]
7658 pub mod non_seamless_cube_map {
7659 use super::super::*;
7660 pub use {
7661 crate::vk::EXT_NON_SEAMLESS_CUBE_MAP_EXTENSION_NAME as NAME,
7662 crate::vk::EXT_NON_SEAMLESS_CUBE_MAP_SPEC_VERSION as SPEC_VERSION,
7663 };
7664 }
7665 #[doc = "VK_EXT_image_compression_control_swapchain"]
7666 pub mod image_compression_control_swapchain {
7667 use super::super::*;
7668 pub use {
7669 crate::vk::EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_EXTENSION_NAME as NAME,
7670 crate::vk::EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_SPEC_VERSION as SPEC_VERSION,
7671 };
7672 }
7673 #[doc = "VK_EXT_nested_command_buffer"]
7674 pub mod nested_command_buffer {
7675 use super::super::*;
7676 pub use {
7677 crate::vk::EXT_NESTED_COMMAND_BUFFER_EXTENSION_NAME as NAME,
7678 crate::vk::EXT_NESTED_COMMAND_BUFFER_SPEC_VERSION as SPEC_VERSION,
7679 };
7680 }
7681 #[doc = "VK_EXT_external_memory_acquire_unmodified"]
7682 pub mod external_memory_acquire_unmodified {
7683 use super::super::*;
7684 pub use {
7685 crate::vk::EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXTENSION_NAME as NAME,
7686 crate::vk::EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_SPEC_VERSION as SPEC_VERSION,
7687 };
7688 }
7689 #[doc = "VK_EXT_extended_dynamic_state3"]
7690 pub mod extended_dynamic_state3 {
7691 use super::super::*;
7692 pub use {
7693 crate::vk::EXT_EXTENDED_DYNAMIC_STATE_3_EXTENSION_NAME as NAME,
7694 crate::vk::EXT_EXTENDED_DYNAMIC_STATE_3_SPEC_VERSION as SPEC_VERSION,
7695 };
7696 #[doc = "VK_EXT_extended_dynamic_state3 device-level functions"]
7697 #[derive(Clone)]
7698 pub struct Device {
7699 pub(crate) fp: DeviceFn,
7700 pub(crate) handle: crate::vk::Device,
7701 }
7702 impl Device {
7703 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
7704 let handle = device.handle();
7705 let fp = DeviceFn::load(|name| unsafe {
7706 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
7707 instance.get_device_proc_addr(handle, name.as_ptr()),
7708 )
7709 });
7710 Self { handle, fp }
7711 }
7712 #[inline]
7713 pub fn fp(&self) -> &DeviceFn {
7714 &self.fp
7715 }
7716 #[inline]
7717 pub fn device(&self) -> crate::vk::Device {
7718 self.handle
7719 }
7720 }
7721 #[derive(Clone)]
7722 #[doc = "Raw VK_EXT_extended_dynamic_state3 device-level function pointers"]
7723 pub struct DeviceFn {
7724 pub cmd_set_depth_clamp_enable_ext: PFN_vkCmdSetDepthClampEnableEXT,
7725 pub cmd_set_polygon_mode_ext: PFN_vkCmdSetPolygonModeEXT,
7726 pub cmd_set_rasterization_samples_ext: PFN_vkCmdSetRasterizationSamplesEXT,
7727 pub cmd_set_sample_mask_ext: PFN_vkCmdSetSampleMaskEXT,
7728 pub cmd_set_alpha_to_coverage_enable_ext: PFN_vkCmdSetAlphaToCoverageEnableEXT,
7729 pub cmd_set_alpha_to_one_enable_ext: PFN_vkCmdSetAlphaToOneEnableEXT,
7730 pub cmd_set_logic_op_enable_ext: PFN_vkCmdSetLogicOpEnableEXT,
7731 pub cmd_set_color_blend_enable_ext: PFN_vkCmdSetColorBlendEnableEXT,
7732 pub cmd_set_color_blend_equation_ext: PFN_vkCmdSetColorBlendEquationEXT,
7733 pub cmd_set_color_write_mask_ext: PFN_vkCmdSetColorWriteMaskEXT,
7734 pub cmd_set_tessellation_domain_origin_ext: PFN_vkCmdSetTessellationDomainOriginEXT,
7735 pub cmd_set_rasterization_stream_ext: PFN_vkCmdSetRasterizationStreamEXT,
7736 pub cmd_set_conservative_rasterization_mode_ext:
7737 PFN_vkCmdSetConservativeRasterizationModeEXT,
7738 pub cmd_set_extra_primitive_overestimation_size_ext:
7739 PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT,
7740 pub cmd_set_depth_clip_enable_ext: PFN_vkCmdSetDepthClipEnableEXT,
7741 pub cmd_set_sample_locations_enable_ext: PFN_vkCmdSetSampleLocationsEnableEXT,
7742 pub cmd_set_color_blend_advanced_ext: PFN_vkCmdSetColorBlendAdvancedEXT,
7743 pub cmd_set_provoking_vertex_mode_ext: PFN_vkCmdSetProvokingVertexModeEXT,
7744 pub cmd_set_line_rasterization_mode_ext: PFN_vkCmdSetLineRasterizationModeEXT,
7745 pub cmd_set_line_stipple_enable_ext: PFN_vkCmdSetLineStippleEnableEXT,
7746 pub cmd_set_depth_clip_negative_one_to_one_ext:
7747 PFN_vkCmdSetDepthClipNegativeOneToOneEXT,
7748 pub cmd_set_viewport_w_scaling_enable_nv: PFN_vkCmdSetViewportWScalingEnableNV,
7749 pub cmd_set_viewport_swizzle_nv: PFN_vkCmdSetViewportSwizzleNV,
7750 pub cmd_set_coverage_to_color_enable_nv: PFN_vkCmdSetCoverageToColorEnableNV,
7751 pub cmd_set_coverage_to_color_location_nv: PFN_vkCmdSetCoverageToColorLocationNV,
7752 pub cmd_set_coverage_modulation_mode_nv: PFN_vkCmdSetCoverageModulationModeNV,
7753 pub cmd_set_coverage_modulation_table_enable_nv:
7754 PFN_vkCmdSetCoverageModulationTableEnableNV,
7755 pub cmd_set_coverage_modulation_table_nv: PFN_vkCmdSetCoverageModulationTableNV,
7756 pub cmd_set_shading_rate_image_enable_nv: PFN_vkCmdSetShadingRateImageEnableNV,
7757 pub cmd_set_representative_fragment_test_enable_nv:
7758 PFN_vkCmdSetRepresentativeFragmentTestEnableNV,
7759 pub cmd_set_coverage_reduction_mode_nv: PFN_vkCmdSetCoverageReductionModeNV,
7760 }
7761 unsafe impl Send for DeviceFn {}
7762 unsafe impl Sync for DeviceFn {}
7763 impl DeviceFn {
7764 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
7765 Self::load_erased(&mut f)
7766 }
7767 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
7768 Self {
7769 cmd_set_depth_clamp_enable_ext: unsafe {
7770 unsafe extern "system" fn cmd_set_depth_clamp_enable_ext(
7771 _command_buffer: CommandBuffer,
7772 _depth_clamp_enable: Bool32,
7773 ) {
7774 panic!(concat!(
7775 "Unable to load ",
7776 stringify!(cmd_set_depth_clamp_enable_ext)
7777 ))
7778 }
7779 let val = _f(c"vkCmdSetDepthClampEnableEXT");
7780 if val.is_null() {
7781 cmd_set_depth_clamp_enable_ext
7782 } else {
7783 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetDepthClampEnableEXT>(
7784 val,
7785 )
7786 }
7787 },
7788 cmd_set_polygon_mode_ext: unsafe {
7789 unsafe extern "system" fn cmd_set_polygon_mode_ext(
7790 _command_buffer: CommandBuffer,
7791 _polygon_mode: PolygonMode,
7792 ) {
7793 panic!(concat!(
7794 "Unable to load ",
7795 stringify!(cmd_set_polygon_mode_ext)
7796 ))
7797 }
7798 let val = _f(c"vkCmdSetPolygonModeEXT");
7799 if val.is_null() {
7800 cmd_set_polygon_mode_ext
7801 } else {
7802 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetPolygonModeEXT>(val)
7803 }
7804 },
7805 cmd_set_rasterization_samples_ext: unsafe {
7806 unsafe extern "system" fn cmd_set_rasterization_samples_ext(
7807 _command_buffer: CommandBuffer,
7808 _rasterization_samples: SampleCountFlags,
7809 ) {
7810 panic!(concat!(
7811 "Unable to load ",
7812 stringify!(cmd_set_rasterization_samples_ext)
7813 ))
7814 }
7815 let val = _f(c"vkCmdSetRasterizationSamplesEXT");
7816 if val.is_null() {
7817 cmd_set_rasterization_samples_ext
7818 } else {
7819 ::core::mem::transmute::<
7820 *const c_void,
7821 PFN_vkCmdSetRasterizationSamplesEXT,
7822 >(val)
7823 }
7824 },
7825 cmd_set_sample_mask_ext: unsafe {
7826 unsafe extern "system" fn cmd_set_sample_mask_ext(
7827 _command_buffer: CommandBuffer,
7828 _samples: SampleCountFlags,
7829 _p_sample_mask: *const SampleMask,
7830 ) {
7831 panic!(concat!(
7832 "Unable to load ",
7833 stringify!(cmd_set_sample_mask_ext)
7834 ))
7835 }
7836 let val = _f(c"vkCmdSetSampleMaskEXT");
7837 if val.is_null() {
7838 cmd_set_sample_mask_ext
7839 } else {
7840 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetSampleMaskEXT>(val)
7841 }
7842 },
7843 cmd_set_alpha_to_coverage_enable_ext: unsafe {
7844 unsafe extern "system" fn cmd_set_alpha_to_coverage_enable_ext(
7845 _command_buffer: CommandBuffer,
7846 _alpha_to_coverage_enable: Bool32,
7847 ) {
7848 panic!(concat!(
7849 "Unable to load ",
7850 stringify!(cmd_set_alpha_to_coverage_enable_ext)
7851 ))
7852 }
7853 let val = _f(c"vkCmdSetAlphaToCoverageEnableEXT");
7854 if val.is_null() {
7855 cmd_set_alpha_to_coverage_enable_ext
7856 } else {
7857 ::core::mem::transmute::<
7858 *const c_void,
7859 PFN_vkCmdSetAlphaToCoverageEnableEXT,
7860 >(val)
7861 }
7862 },
7863 cmd_set_alpha_to_one_enable_ext: unsafe {
7864 unsafe extern "system" fn cmd_set_alpha_to_one_enable_ext(
7865 _command_buffer: CommandBuffer,
7866 _alpha_to_one_enable: Bool32,
7867 ) {
7868 panic!(concat!(
7869 "Unable to load ",
7870 stringify!(cmd_set_alpha_to_one_enable_ext)
7871 ))
7872 }
7873 let val = _f(c"vkCmdSetAlphaToOneEnableEXT");
7874 if val.is_null() {
7875 cmd_set_alpha_to_one_enable_ext
7876 } else {
7877 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetAlphaToOneEnableEXT>(
7878 val,
7879 )
7880 }
7881 },
7882 cmd_set_logic_op_enable_ext: unsafe {
7883 unsafe extern "system" fn cmd_set_logic_op_enable_ext(
7884 _command_buffer: CommandBuffer,
7885 _logic_op_enable: Bool32,
7886 ) {
7887 panic!(concat!(
7888 "Unable to load ",
7889 stringify!(cmd_set_logic_op_enable_ext)
7890 ))
7891 }
7892 let val = _f(c"vkCmdSetLogicOpEnableEXT");
7893 if val.is_null() {
7894 cmd_set_logic_op_enable_ext
7895 } else {
7896 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetLogicOpEnableEXT>(
7897 val,
7898 )
7899 }
7900 },
7901 cmd_set_color_blend_enable_ext: unsafe {
7902 unsafe extern "system" fn cmd_set_color_blend_enable_ext(
7903 _command_buffer: CommandBuffer,
7904 _first_attachment: u32,
7905 _attachment_count: u32,
7906 _p_color_blend_enables: *const Bool32,
7907 ) {
7908 panic!(concat!(
7909 "Unable to load ",
7910 stringify!(cmd_set_color_blend_enable_ext)
7911 ))
7912 }
7913 let val = _f(c"vkCmdSetColorBlendEnableEXT");
7914 if val.is_null() {
7915 cmd_set_color_blend_enable_ext
7916 } else {
7917 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetColorBlendEnableEXT>(
7918 val,
7919 )
7920 }
7921 },
7922 cmd_set_color_blend_equation_ext: unsafe {
7923 unsafe extern "system" fn cmd_set_color_blend_equation_ext(
7924 _command_buffer: CommandBuffer,
7925 _first_attachment: u32,
7926 _attachment_count: u32,
7927 _p_color_blend_equations: *const ColorBlendEquationEXT,
7928 ) {
7929 panic!(concat!(
7930 "Unable to load ",
7931 stringify!(cmd_set_color_blend_equation_ext)
7932 ))
7933 }
7934 let val = _f(c"vkCmdSetColorBlendEquationEXT");
7935 if val.is_null() {
7936 cmd_set_color_blend_equation_ext
7937 } else {
7938 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetColorBlendEquationEXT>(
7939 val,
7940 )
7941 }
7942 },
7943 cmd_set_color_write_mask_ext: unsafe {
7944 unsafe extern "system" fn cmd_set_color_write_mask_ext(
7945 _command_buffer: CommandBuffer,
7946 _first_attachment: u32,
7947 _attachment_count: u32,
7948 _p_color_write_masks: *const ColorComponentFlags,
7949 ) {
7950 panic!(concat!(
7951 "Unable to load ",
7952 stringify!(cmd_set_color_write_mask_ext)
7953 ))
7954 }
7955 let val = _f(c"vkCmdSetColorWriteMaskEXT");
7956 if val.is_null() {
7957 cmd_set_color_write_mask_ext
7958 } else {
7959 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetColorWriteMaskEXT>(
7960 val,
7961 )
7962 }
7963 },
7964 cmd_set_tessellation_domain_origin_ext: unsafe {
7965 unsafe extern "system" fn cmd_set_tessellation_domain_origin_ext(
7966 _command_buffer: CommandBuffer,
7967 _domain_origin: TessellationDomainOrigin,
7968 ) {
7969 panic!(concat!(
7970 "Unable to load ",
7971 stringify!(cmd_set_tessellation_domain_origin_ext)
7972 ))
7973 }
7974 let val = _f(c"vkCmdSetTessellationDomainOriginEXT");
7975 if val.is_null() {
7976 cmd_set_tessellation_domain_origin_ext
7977 } else {
7978 ::core::mem::transmute::<
7979 *const c_void,
7980 PFN_vkCmdSetTessellationDomainOriginEXT,
7981 >(val)
7982 }
7983 },
7984 cmd_set_rasterization_stream_ext: unsafe {
7985 unsafe extern "system" fn cmd_set_rasterization_stream_ext(
7986 _command_buffer: CommandBuffer,
7987 _rasterization_stream: u32,
7988 ) {
7989 panic!(concat!(
7990 "Unable to load ",
7991 stringify!(cmd_set_rasterization_stream_ext)
7992 ))
7993 }
7994 let val = _f(c"vkCmdSetRasterizationStreamEXT");
7995 if val.is_null() {
7996 cmd_set_rasterization_stream_ext
7997 } else {
7998 ::core::mem::transmute::<
7999 *const c_void,
8000 PFN_vkCmdSetRasterizationStreamEXT,
8001 >(val)
8002 }
8003 },
8004 cmd_set_conservative_rasterization_mode_ext: unsafe {
8005 unsafe extern "system" fn cmd_set_conservative_rasterization_mode_ext(
8006 _command_buffer: CommandBuffer,
8007 _conservative_rasterization_mode: ConservativeRasterizationModeEXT,
8008 ) {
8009 panic!(concat!(
8010 "Unable to load ",
8011 stringify!(cmd_set_conservative_rasterization_mode_ext)
8012 ))
8013 }
8014 let val = _f(c"vkCmdSetConservativeRasterizationModeEXT");
8015 if val.is_null() {
8016 cmd_set_conservative_rasterization_mode_ext
8017 } else {
8018 ::core::mem::transmute::<
8019 *const c_void,
8020 PFN_vkCmdSetConservativeRasterizationModeEXT,
8021 >(val)
8022 }
8023 },
8024 cmd_set_extra_primitive_overestimation_size_ext: unsafe {
8025 unsafe extern "system" fn cmd_set_extra_primitive_overestimation_size_ext(
8026 _command_buffer: CommandBuffer,
8027 _extra_primitive_overestimation_size: f32,
8028 ) {
8029 panic!(concat!(
8030 "Unable to load ",
8031 stringify!(cmd_set_extra_primitive_overestimation_size_ext)
8032 ))
8033 }
8034 let val = _f(c"vkCmdSetExtraPrimitiveOverestimationSizeEXT");
8035 if val.is_null() {
8036 cmd_set_extra_primitive_overestimation_size_ext
8037 } else {
8038 ::core::mem::transmute::<
8039 *const c_void,
8040 PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT,
8041 >(val)
8042 }
8043 },
8044 cmd_set_depth_clip_enable_ext: unsafe {
8045 unsafe extern "system" fn cmd_set_depth_clip_enable_ext(
8046 _command_buffer: CommandBuffer,
8047 _depth_clip_enable: Bool32,
8048 ) {
8049 panic!(concat!(
8050 "Unable to load ",
8051 stringify!(cmd_set_depth_clip_enable_ext)
8052 ))
8053 }
8054 let val = _f(c"vkCmdSetDepthClipEnableEXT");
8055 if val.is_null() {
8056 cmd_set_depth_clip_enable_ext
8057 } else {
8058 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetDepthClipEnableEXT>(
8059 val,
8060 )
8061 }
8062 },
8063 cmd_set_sample_locations_enable_ext: unsafe {
8064 unsafe extern "system" fn cmd_set_sample_locations_enable_ext(
8065 _command_buffer: CommandBuffer,
8066 _sample_locations_enable: Bool32,
8067 ) {
8068 panic!(concat!(
8069 "Unable to load ",
8070 stringify!(cmd_set_sample_locations_enable_ext)
8071 ))
8072 }
8073 let val = _f(c"vkCmdSetSampleLocationsEnableEXT");
8074 if val.is_null() {
8075 cmd_set_sample_locations_enable_ext
8076 } else {
8077 ::core::mem::transmute::<
8078 *const c_void,
8079 PFN_vkCmdSetSampleLocationsEnableEXT,
8080 >(val)
8081 }
8082 },
8083 cmd_set_color_blend_advanced_ext: unsafe {
8084 unsafe extern "system" fn cmd_set_color_blend_advanced_ext(
8085 _command_buffer: CommandBuffer,
8086 _first_attachment: u32,
8087 _attachment_count: u32,
8088 _p_color_blend_advanced: *const ColorBlendAdvancedEXT,
8089 ) {
8090 panic!(concat!(
8091 "Unable to load ",
8092 stringify!(cmd_set_color_blend_advanced_ext)
8093 ))
8094 }
8095 let val = _f(c"vkCmdSetColorBlendAdvancedEXT");
8096 if val.is_null() {
8097 cmd_set_color_blend_advanced_ext
8098 } else {
8099 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetColorBlendAdvancedEXT>(
8100 val,
8101 )
8102 }
8103 },
8104 cmd_set_provoking_vertex_mode_ext: unsafe {
8105 unsafe extern "system" fn cmd_set_provoking_vertex_mode_ext(
8106 _command_buffer: CommandBuffer,
8107 _provoking_vertex_mode: ProvokingVertexModeEXT,
8108 ) {
8109 panic!(concat!(
8110 "Unable to load ",
8111 stringify!(cmd_set_provoking_vertex_mode_ext)
8112 ))
8113 }
8114 let val = _f(c"vkCmdSetProvokingVertexModeEXT");
8115 if val.is_null() {
8116 cmd_set_provoking_vertex_mode_ext
8117 } else {
8118 ::core::mem::transmute::<
8119 *const c_void,
8120 PFN_vkCmdSetProvokingVertexModeEXT,
8121 >(val)
8122 }
8123 },
8124 cmd_set_line_rasterization_mode_ext: unsafe {
8125 unsafe extern "system" fn cmd_set_line_rasterization_mode_ext(
8126 _command_buffer: CommandBuffer,
8127 _line_rasterization_mode: LineRasterizationModeEXT,
8128 ) {
8129 panic!(concat!(
8130 "Unable to load ",
8131 stringify!(cmd_set_line_rasterization_mode_ext)
8132 ))
8133 }
8134 let val = _f(c"vkCmdSetLineRasterizationModeEXT");
8135 if val.is_null() {
8136 cmd_set_line_rasterization_mode_ext
8137 } else {
8138 ::core::mem::transmute::<
8139 *const c_void,
8140 PFN_vkCmdSetLineRasterizationModeEXT,
8141 >(val)
8142 }
8143 },
8144 cmd_set_line_stipple_enable_ext: unsafe {
8145 unsafe extern "system" fn cmd_set_line_stipple_enable_ext(
8146 _command_buffer: CommandBuffer,
8147 _stippled_line_enable: Bool32,
8148 ) {
8149 panic!(concat!(
8150 "Unable to load ",
8151 stringify!(cmd_set_line_stipple_enable_ext)
8152 ))
8153 }
8154 let val = _f(c"vkCmdSetLineStippleEnableEXT");
8155 if val.is_null() {
8156 cmd_set_line_stipple_enable_ext
8157 } else {
8158 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetLineStippleEnableEXT>(
8159 val,
8160 )
8161 }
8162 },
8163 cmd_set_depth_clip_negative_one_to_one_ext: unsafe {
8164 unsafe extern "system" fn cmd_set_depth_clip_negative_one_to_one_ext(
8165 _command_buffer: CommandBuffer,
8166 _negative_one_to_one: Bool32,
8167 ) {
8168 panic!(concat!(
8169 "Unable to load ",
8170 stringify!(cmd_set_depth_clip_negative_one_to_one_ext)
8171 ))
8172 }
8173 let val = _f(c"vkCmdSetDepthClipNegativeOneToOneEXT");
8174 if val.is_null() {
8175 cmd_set_depth_clip_negative_one_to_one_ext
8176 } else {
8177 ::core::mem::transmute::<
8178 *const c_void,
8179 PFN_vkCmdSetDepthClipNegativeOneToOneEXT,
8180 >(val)
8181 }
8182 },
8183 cmd_set_viewport_w_scaling_enable_nv: unsafe {
8184 unsafe extern "system" fn cmd_set_viewport_w_scaling_enable_nv(
8185 _command_buffer: CommandBuffer,
8186 _viewport_w_scaling_enable: Bool32,
8187 ) {
8188 panic!(concat!(
8189 "Unable to load ",
8190 stringify!(cmd_set_viewport_w_scaling_enable_nv)
8191 ))
8192 }
8193 let val = _f(c"vkCmdSetViewportWScalingEnableNV");
8194 if val.is_null() {
8195 cmd_set_viewport_w_scaling_enable_nv
8196 } else {
8197 ::core::mem::transmute::<
8198 *const c_void,
8199 PFN_vkCmdSetViewportWScalingEnableNV,
8200 >(val)
8201 }
8202 },
8203 cmd_set_viewport_swizzle_nv: unsafe {
8204 unsafe extern "system" fn cmd_set_viewport_swizzle_nv(
8205 _command_buffer: CommandBuffer,
8206 _first_viewport: u32,
8207 _viewport_count: u32,
8208 _p_viewport_swizzles: *const ViewportSwizzleNV,
8209 ) {
8210 panic!(concat!(
8211 "Unable to load ",
8212 stringify!(cmd_set_viewport_swizzle_nv)
8213 ))
8214 }
8215 let val = _f(c"vkCmdSetViewportSwizzleNV");
8216 if val.is_null() {
8217 cmd_set_viewport_swizzle_nv
8218 } else {
8219 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetViewportSwizzleNV>(
8220 val,
8221 )
8222 }
8223 },
8224 cmd_set_coverage_to_color_enable_nv: unsafe {
8225 unsafe extern "system" fn cmd_set_coverage_to_color_enable_nv(
8226 _command_buffer: CommandBuffer,
8227 _coverage_to_color_enable: Bool32,
8228 ) {
8229 panic!(concat!(
8230 "Unable to load ",
8231 stringify!(cmd_set_coverage_to_color_enable_nv)
8232 ))
8233 }
8234 let val = _f(c"vkCmdSetCoverageToColorEnableNV");
8235 if val.is_null() {
8236 cmd_set_coverage_to_color_enable_nv
8237 } else {
8238 ::core::mem::transmute::<
8239 *const c_void,
8240 PFN_vkCmdSetCoverageToColorEnableNV,
8241 >(val)
8242 }
8243 },
8244 cmd_set_coverage_to_color_location_nv: unsafe {
8245 unsafe extern "system" fn cmd_set_coverage_to_color_location_nv(
8246 _command_buffer: CommandBuffer,
8247 _coverage_to_color_location: u32,
8248 ) {
8249 panic!(concat!(
8250 "Unable to load ",
8251 stringify!(cmd_set_coverage_to_color_location_nv)
8252 ))
8253 }
8254 let val = _f(c"vkCmdSetCoverageToColorLocationNV");
8255 if val.is_null() {
8256 cmd_set_coverage_to_color_location_nv
8257 } else {
8258 ::core::mem::transmute::<
8259 *const c_void,
8260 PFN_vkCmdSetCoverageToColorLocationNV,
8261 >(val)
8262 }
8263 },
8264 cmd_set_coverage_modulation_mode_nv: unsafe {
8265 unsafe extern "system" fn cmd_set_coverage_modulation_mode_nv(
8266 _command_buffer: CommandBuffer,
8267 _coverage_modulation_mode: CoverageModulationModeNV,
8268 ) {
8269 panic!(concat!(
8270 "Unable to load ",
8271 stringify!(cmd_set_coverage_modulation_mode_nv)
8272 ))
8273 }
8274 let val = _f(c"vkCmdSetCoverageModulationModeNV");
8275 if val.is_null() {
8276 cmd_set_coverage_modulation_mode_nv
8277 } else {
8278 ::core::mem::transmute::<
8279 *const c_void,
8280 PFN_vkCmdSetCoverageModulationModeNV,
8281 >(val)
8282 }
8283 },
8284 cmd_set_coverage_modulation_table_enable_nv: unsafe {
8285 unsafe extern "system" fn cmd_set_coverage_modulation_table_enable_nv(
8286 _command_buffer: CommandBuffer,
8287 _coverage_modulation_table_enable: Bool32,
8288 ) {
8289 panic!(concat!(
8290 "Unable to load ",
8291 stringify!(cmd_set_coverage_modulation_table_enable_nv)
8292 ))
8293 }
8294 let val = _f(c"vkCmdSetCoverageModulationTableEnableNV");
8295 if val.is_null() {
8296 cmd_set_coverage_modulation_table_enable_nv
8297 } else {
8298 ::core::mem::transmute::<
8299 *const c_void,
8300 PFN_vkCmdSetCoverageModulationTableEnableNV,
8301 >(val)
8302 }
8303 },
8304 cmd_set_coverage_modulation_table_nv: unsafe {
8305 unsafe extern "system" fn cmd_set_coverage_modulation_table_nv(
8306 _command_buffer: CommandBuffer,
8307 _coverage_modulation_table_count: u32,
8308 _p_coverage_modulation_table: *const f32,
8309 ) {
8310 panic!(concat!(
8311 "Unable to load ",
8312 stringify!(cmd_set_coverage_modulation_table_nv)
8313 ))
8314 }
8315 let val = _f(c"vkCmdSetCoverageModulationTableNV");
8316 if val.is_null() {
8317 cmd_set_coverage_modulation_table_nv
8318 } else {
8319 ::core::mem::transmute::<
8320 *const c_void,
8321 PFN_vkCmdSetCoverageModulationTableNV,
8322 >(val)
8323 }
8324 },
8325 cmd_set_shading_rate_image_enable_nv: unsafe {
8326 unsafe extern "system" fn cmd_set_shading_rate_image_enable_nv(
8327 _command_buffer: CommandBuffer,
8328 _shading_rate_image_enable: Bool32,
8329 ) {
8330 panic!(concat!(
8331 "Unable to load ",
8332 stringify!(cmd_set_shading_rate_image_enable_nv)
8333 ))
8334 }
8335 let val = _f(c"vkCmdSetShadingRateImageEnableNV");
8336 if val.is_null() {
8337 cmd_set_shading_rate_image_enable_nv
8338 } else {
8339 ::core::mem::transmute::<
8340 *const c_void,
8341 PFN_vkCmdSetShadingRateImageEnableNV,
8342 >(val)
8343 }
8344 },
8345 cmd_set_representative_fragment_test_enable_nv: unsafe {
8346 unsafe extern "system" fn cmd_set_representative_fragment_test_enable_nv(
8347 _command_buffer: CommandBuffer,
8348 _representative_fragment_test_enable: Bool32,
8349 ) {
8350 panic!(concat!(
8351 "Unable to load ",
8352 stringify!(cmd_set_representative_fragment_test_enable_nv)
8353 ))
8354 }
8355 let val = _f(c"vkCmdSetRepresentativeFragmentTestEnableNV");
8356 if val.is_null() {
8357 cmd_set_representative_fragment_test_enable_nv
8358 } else {
8359 ::core::mem::transmute::<
8360 *const c_void,
8361 PFN_vkCmdSetRepresentativeFragmentTestEnableNV,
8362 >(val)
8363 }
8364 },
8365 cmd_set_coverage_reduction_mode_nv: unsafe {
8366 unsafe extern "system" fn cmd_set_coverage_reduction_mode_nv(
8367 _command_buffer: CommandBuffer,
8368 _coverage_reduction_mode: CoverageReductionModeNV,
8369 ) {
8370 panic!(concat!(
8371 "Unable to load ",
8372 stringify!(cmd_set_coverage_reduction_mode_nv)
8373 ))
8374 }
8375 let val = _f(c"vkCmdSetCoverageReductionModeNV");
8376 if val.is_null() {
8377 cmd_set_coverage_reduction_mode_nv
8378 } else {
8379 ::core::mem::transmute::<
8380 *const c_void,
8381 PFN_vkCmdSetCoverageReductionModeNV,
8382 >(val)
8383 }
8384 },
8385 }
8386 }
8387 }
8388 }
8389 #[doc = "VK_EXT_subpass_merge_feedback"]
8390 pub mod subpass_merge_feedback {
8391 use super::super::*;
8392 pub use {
8393 crate::vk::EXT_SUBPASS_MERGE_FEEDBACK_EXTENSION_NAME as NAME,
8394 crate::vk::EXT_SUBPASS_MERGE_FEEDBACK_SPEC_VERSION as SPEC_VERSION,
8395 };
8396 }
8397 #[doc = "VK_EXT_shader_module_identifier"]
8398 pub mod shader_module_identifier {
8399 use super::super::*;
8400 pub use {
8401 crate::vk::EXT_SHADER_MODULE_IDENTIFIER_EXTENSION_NAME as NAME,
8402 crate::vk::EXT_SHADER_MODULE_IDENTIFIER_SPEC_VERSION as SPEC_VERSION,
8403 };
8404 #[doc = "VK_EXT_shader_module_identifier device-level functions"]
8405 #[derive(Clone)]
8406 pub struct Device {
8407 pub(crate) fp: DeviceFn,
8408 pub(crate) handle: crate::vk::Device,
8409 }
8410 impl Device {
8411 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
8412 let handle = device.handle();
8413 let fp = DeviceFn::load(|name| unsafe {
8414 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
8415 instance.get_device_proc_addr(handle, name.as_ptr()),
8416 )
8417 });
8418 Self { handle, fp }
8419 }
8420 #[inline]
8421 pub fn fp(&self) -> &DeviceFn {
8422 &self.fp
8423 }
8424 #[inline]
8425 pub fn device(&self) -> crate::vk::Device {
8426 self.handle
8427 }
8428 }
8429 #[derive(Clone)]
8430 #[doc = "Raw VK_EXT_shader_module_identifier device-level function pointers"]
8431 pub struct DeviceFn {
8432 pub get_shader_module_identifier_ext: PFN_vkGetShaderModuleIdentifierEXT,
8433 pub get_shader_module_create_info_identifier_ext:
8434 PFN_vkGetShaderModuleCreateInfoIdentifierEXT,
8435 }
8436 unsafe impl Send for DeviceFn {}
8437 unsafe impl Sync for DeviceFn {}
8438 impl DeviceFn {
8439 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
8440 Self::load_erased(&mut f)
8441 }
8442 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
8443 Self {
8444 get_shader_module_identifier_ext: unsafe {
8445 unsafe extern "system" fn get_shader_module_identifier_ext(
8446 _device: crate::vk::Device,
8447 _shader_module: ShaderModule,
8448 _p_identifier: *mut ShaderModuleIdentifierEXT<'_>,
8449 ) {
8450 panic!(concat!(
8451 "Unable to load ",
8452 stringify!(get_shader_module_identifier_ext)
8453 ))
8454 }
8455 let val = _f(c"vkGetShaderModuleIdentifierEXT");
8456 if val.is_null() {
8457 get_shader_module_identifier_ext
8458 } else {
8459 ::core::mem::transmute::<
8460 *const c_void,
8461 PFN_vkGetShaderModuleIdentifierEXT,
8462 >(val)
8463 }
8464 },
8465 get_shader_module_create_info_identifier_ext: unsafe {
8466 unsafe extern "system" fn get_shader_module_create_info_identifier_ext(
8467 _device: crate::vk::Device,
8468 _p_create_info: *const ShaderModuleCreateInfo<'_>,
8469 _p_identifier: *mut ShaderModuleIdentifierEXT<'_>,
8470 ) {
8471 panic!(concat!(
8472 "Unable to load ",
8473 stringify!(get_shader_module_create_info_identifier_ext)
8474 ))
8475 }
8476 let val = _f(c"vkGetShaderModuleCreateInfoIdentifierEXT");
8477 if val.is_null() {
8478 get_shader_module_create_info_identifier_ext
8479 } else {
8480 ::core::mem::transmute::<
8481 *const c_void,
8482 PFN_vkGetShaderModuleCreateInfoIdentifierEXT,
8483 >(val)
8484 }
8485 },
8486 }
8487 }
8488 }
8489 }
8490 #[doc = "VK_EXT_rasterization_order_attachment_access"]
8491 pub mod rasterization_order_attachment_access {
8492 use super::super::*;
8493 pub use {
8494 crate::vk::EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME as NAME,
8495 crate::vk::EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION as SPEC_VERSION,
8496 };
8497 }
8498 #[doc = "VK_EXT_legacy_dithering"]
8499 pub mod legacy_dithering {
8500 use super::super::*;
8501 pub use {
8502 crate::vk::EXT_LEGACY_DITHERING_EXTENSION_NAME as NAME,
8503 crate::vk::EXT_LEGACY_DITHERING_SPEC_VERSION as SPEC_VERSION,
8504 };
8505 }
8506 #[doc = "VK_EXT_pipeline_protected_access"]
8507 pub mod pipeline_protected_access {
8508 use super::super::*;
8509 pub use {
8510 crate::vk::EXT_PIPELINE_PROTECTED_ACCESS_EXTENSION_NAME as NAME,
8511 crate::vk::EXT_PIPELINE_PROTECTED_ACCESS_SPEC_VERSION as SPEC_VERSION,
8512 };
8513 }
8514 #[doc = "VK_EXT_shader_object"]
8515 pub mod shader_object {
8516 use super::super::*;
8517 pub use {
8518 crate::vk::EXT_SHADER_OBJECT_EXTENSION_NAME as NAME,
8519 crate::vk::EXT_SHADER_OBJECT_SPEC_VERSION as SPEC_VERSION,
8520 };
8521 #[doc = "VK_EXT_shader_object device-level functions"]
8522 #[derive(Clone)]
8523 pub struct Device {
8524 pub(crate) fp: DeviceFn,
8525 pub(crate) handle: crate::vk::Device,
8526 }
8527 impl Device {
8528 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
8529 let handle = device.handle();
8530 let fp = DeviceFn::load(|name| unsafe {
8531 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
8532 instance.get_device_proc_addr(handle, name.as_ptr()),
8533 )
8534 });
8535 Self { handle, fp }
8536 }
8537 #[inline]
8538 pub fn fp(&self) -> &DeviceFn {
8539 &self.fp
8540 }
8541 #[inline]
8542 pub fn device(&self) -> crate::vk::Device {
8543 self.handle
8544 }
8545 }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_shader_object device-level function pointers"]
        pub struct DeviceFn {
            // Shader-object lifetime and binding commands introduced by this
            // extension itself.
            pub create_shaders_ext: PFN_vkCreateShadersEXT,
            pub destroy_shader_ext: PFN_vkDestroyShaderEXT,
            pub get_shader_binary_data_ext: PFN_vkGetShaderBinaryDataEXT,
            pub cmd_bind_shaders_ext: PFN_vkCmdBindShadersEXT,
            // NOTE(review): the following `*_ext` fields are typed with the
            // non-suffixed core PFN aliases (e.g. `PFN_vkCmdSetCullMode`) —
            // presumably because these commands were promoted to core Vulkan
            // with identical signatures; confirm against the binding generator.
            pub cmd_set_cull_mode_ext: PFN_vkCmdSetCullMode,
            pub cmd_set_front_face_ext: PFN_vkCmdSetFrontFace,
            pub cmd_set_primitive_topology_ext: PFN_vkCmdSetPrimitiveTopology,
            pub cmd_set_viewport_with_count_ext: PFN_vkCmdSetViewportWithCount,
            pub cmd_set_scissor_with_count_ext: PFN_vkCmdSetScissorWithCount,
            pub cmd_bind_vertex_buffers2_ext: PFN_vkCmdBindVertexBuffers2,
            pub cmd_set_depth_test_enable_ext: PFN_vkCmdSetDepthTestEnable,
            pub cmd_set_depth_write_enable_ext: PFN_vkCmdSetDepthWriteEnable,
            pub cmd_set_depth_compare_op_ext: PFN_vkCmdSetDepthCompareOp,
            pub cmd_set_depth_bounds_test_enable_ext: PFN_vkCmdSetDepthBoundsTestEnable,
            pub cmd_set_stencil_test_enable_ext: PFN_vkCmdSetStencilTestEnable,
            pub cmd_set_stencil_op_ext: PFN_vkCmdSetStencilOp,
            pub cmd_set_vertex_input_ext: PFN_vkCmdSetVertexInputEXT,
            pub cmd_set_patch_control_points_ext: PFN_vkCmdSetPatchControlPointsEXT,
            pub cmd_set_rasterizer_discard_enable_ext: PFN_vkCmdSetRasterizerDiscardEnable,
            pub cmd_set_depth_bias_enable_ext: PFN_vkCmdSetDepthBiasEnable,
            pub cmd_set_logic_op_ext: PFN_vkCmdSetLogicOpEXT,
            pub cmd_set_primitive_restart_enable_ext: PFN_vkCmdSetPrimitiveRestartEnable,
            pub cmd_set_tessellation_domain_origin_ext: PFN_vkCmdSetTessellationDomainOriginEXT,
            pub cmd_set_depth_clamp_enable_ext: PFN_vkCmdSetDepthClampEnableEXT,
            pub cmd_set_polygon_mode_ext: PFN_vkCmdSetPolygonModeEXT,
            pub cmd_set_rasterization_samples_ext: PFN_vkCmdSetRasterizationSamplesEXT,
            pub cmd_set_sample_mask_ext: PFN_vkCmdSetSampleMaskEXT,
            pub cmd_set_alpha_to_coverage_enable_ext: PFN_vkCmdSetAlphaToCoverageEnableEXT,
            pub cmd_set_alpha_to_one_enable_ext: PFN_vkCmdSetAlphaToOneEnableEXT,
            pub cmd_set_logic_op_enable_ext: PFN_vkCmdSetLogicOpEnableEXT,
            pub cmd_set_color_blend_enable_ext: PFN_vkCmdSetColorBlendEnableEXT,
            pub cmd_set_color_blend_equation_ext: PFN_vkCmdSetColorBlendEquationEXT,
            pub cmd_set_color_write_mask_ext: PFN_vkCmdSetColorWriteMaskEXT,
            pub cmd_set_rasterization_stream_ext: PFN_vkCmdSetRasterizationStreamEXT,
            pub cmd_set_conservative_rasterization_mode_ext:
                PFN_vkCmdSetConservativeRasterizationModeEXT,
            pub cmd_set_extra_primitive_overestimation_size_ext:
                PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT,
            pub cmd_set_depth_clip_enable_ext: PFN_vkCmdSetDepthClipEnableEXT,
            pub cmd_set_sample_locations_enable_ext: PFN_vkCmdSetSampleLocationsEnableEXT,
            pub cmd_set_color_blend_advanced_ext: PFN_vkCmdSetColorBlendAdvancedEXT,
            pub cmd_set_provoking_vertex_mode_ext: PFN_vkCmdSetProvokingVertexModeEXT,
            pub cmd_set_line_rasterization_mode_ext: PFN_vkCmdSetLineRasterizationModeEXT,
            pub cmd_set_line_stipple_enable_ext: PFN_vkCmdSetLineStippleEnableEXT,
            pub cmd_set_depth_clip_negative_one_to_one_ext:
                PFN_vkCmdSetDepthClipNegativeOneToOneEXT,
            // NV-vendor dynamic-state commands (suffix `_nv`).
            pub cmd_set_viewport_w_scaling_enable_nv: PFN_vkCmdSetViewportWScalingEnableNV,
            pub cmd_set_viewport_swizzle_nv: PFN_vkCmdSetViewportSwizzleNV,
            pub cmd_set_coverage_to_color_enable_nv: PFN_vkCmdSetCoverageToColorEnableNV,
            pub cmd_set_coverage_to_color_location_nv: PFN_vkCmdSetCoverageToColorLocationNV,
            pub cmd_set_coverage_modulation_mode_nv: PFN_vkCmdSetCoverageModulationModeNV,
            pub cmd_set_coverage_modulation_table_enable_nv:
                PFN_vkCmdSetCoverageModulationTableEnableNV,
            pub cmd_set_coverage_modulation_table_nv: PFN_vkCmdSetCoverageModulationTableNV,
            pub cmd_set_shading_rate_image_enable_nv: PFN_vkCmdSetShadingRateImageEnableNV,
            pub cmd_set_representative_fragment_test_enable_nv:
                PFN_vkCmdSetRepresentativeFragmentTestEnableNV,
            pub cmd_set_coverage_reduction_mode_nv: PFN_vkCmdSetCoverageReductionModeNV,
            pub cmd_set_depth_clamp_range_ext: PFN_vkCmdSetDepthClampRangeEXT,
        }
        // SAFETY: `DeviceFn` holds only plain `PFN_*` function pointers (this
        // file loads them by transmuting from `*const c_void`); they carry no
        // thread-affine state, so sending/sharing the table is sound.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
8611 impl DeviceFn {
8612 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
8613 Self::load_erased(&mut f)
8614 }
8615 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
8616 Self {
8617 create_shaders_ext: unsafe {
8618 unsafe extern "system" fn create_shaders_ext(
8619 _device: crate::vk::Device,
8620 _create_info_count: u32,
8621 _p_create_infos: *const ShaderCreateInfoEXT<'_>,
8622 _p_allocator: *const AllocationCallbacks<'_>,
8623 _p_shaders: *mut ShaderEXT,
8624 ) -> Result {
8625 panic!(concat!("Unable to load ", stringify!(create_shaders_ext)))
8626 }
8627 let val = _f(c"vkCreateShadersEXT");
8628 if val.is_null() {
8629 create_shaders_ext
8630 } else {
8631 ::core::mem::transmute::<*const c_void, PFN_vkCreateShadersEXT>(val)
8632 }
8633 },
8634 destroy_shader_ext: unsafe {
8635 unsafe extern "system" fn destroy_shader_ext(
8636 _device: crate::vk::Device,
8637 _shader: ShaderEXT,
8638 _p_allocator: *const AllocationCallbacks<'_>,
8639 ) {
8640 panic!(concat!("Unable to load ", stringify!(destroy_shader_ext)))
8641 }
8642 let val = _f(c"vkDestroyShaderEXT");
8643 if val.is_null() {
8644 destroy_shader_ext
8645 } else {
8646 ::core::mem::transmute::<*const c_void, PFN_vkDestroyShaderEXT>(val)
8647 }
8648 },
8649 get_shader_binary_data_ext: unsafe {
8650 unsafe extern "system" fn get_shader_binary_data_ext(
8651 _device: crate::vk::Device,
8652 _shader: ShaderEXT,
8653 _p_data_size: *mut usize,
8654 _p_data: *mut c_void,
8655 ) -> Result {
8656 panic!(concat!(
8657 "Unable to load ",
8658 stringify!(get_shader_binary_data_ext)
8659 ))
8660 }
8661 let val = _f(c"vkGetShaderBinaryDataEXT");
8662 if val.is_null() {
8663 get_shader_binary_data_ext
8664 } else {
8665 ::core::mem::transmute::<*const c_void, PFN_vkGetShaderBinaryDataEXT>(
8666 val,
8667 )
8668 }
8669 },
8670 cmd_bind_shaders_ext: unsafe {
8671 unsafe extern "system" fn cmd_bind_shaders_ext(
8672 _command_buffer: CommandBuffer,
8673 _stage_count: u32,
8674 _p_stages: *const ShaderStageFlags,
8675 _p_shaders: *const ShaderEXT,
8676 ) {
8677 panic!(concat!("Unable to load ", stringify!(cmd_bind_shaders_ext)))
8678 }
8679 let val = _f(c"vkCmdBindShadersEXT");
8680 if val.is_null() {
8681 cmd_bind_shaders_ext
8682 } else {
8683 ::core::mem::transmute::<*const c_void, PFN_vkCmdBindShadersEXT>(val)
8684 }
8685 },
8686 cmd_set_cull_mode_ext: unsafe {
8687 unsafe extern "system" fn cmd_set_cull_mode_ext(
8688 _command_buffer: CommandBuffer,
8689 _cull_mode: CullModeFlags,
8690 ) {
8691 panic!(concat!(
8692 "Unable to load ",
8693 stringify!(cmd_set_cull_mode_ext)
8694 ))
8695 }
8696 let val = _f(c"vkCmdSetCullModeEXT");
8697 if val.is_null() {
8698 cmd_set_cull_mode_ext
8699 } else {
8700 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetCullMode>(val)
8701 }
8702 },
8703 cmd_set_front_face_ext: unsafe {
8704 unsafe extern "system" fn cmd_set_front_face_ext(
8705 _command_buffer: CommandBuffer,
8706 _front_face: FrontFace,
8707 ) {
8708 panic!(concat!(
8709 "Unable to load ",
8710 stringify!(cmd_set_front_face_ext)
8711 ))
8712 }
8713 let val = _f(c"vkCmdSetFrontFaceEXT");
8714 if val.is_null() {
8715 cmd_set_front_face_ext
8716 } else {
8717 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetFrontFace>(val)
8718 }
8719 },
8720 cmd_set_primitive_topology_ext: unsafe {
8721 unsafe extern "system" fn cmd_set_primitive_topology_ext(
8722 _command_buffer: CommandBuffer,
8723 _primitive_topology: PrimitiveTopology,
8724 ) {
8725 panic!(concat!(
8726 "Unable to load ",
8727 stringify!(cmd_set_primitive_topology_ext)
8728 ))
8729 }
8730 let val = _f(c"vkCmdSetPrimitiveTopologyEXT");
8731 if val.is_null() {
8732 cmd_set_primitive_topology_ext
8733 } else {
8734 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetPrimitiveTopology>(
8735 val,
8736 )
8737 }
8738 },
8739 cmd_set_viewport_with_count_ext: unsafe {
8740 unsafe extern "system" fn cmd_set_viewport_with_count_ext(
8741 _command_buffer: CommandBuffer,
8742 _viewport_count: u32,
8743 _p_viewports: *const Viewport,
8744 ) {
8745 panic!(concat!(
8746 "Unable to load ",
8747 stringify!(cmd_set_viewport_with_count_ext)
8748 ))
8749 }
8750 let val = _f(c"vkCmdSetViewportWithCountEXT");
8751 if val.is_null() {
8752 cmd_set_viewport_with_count_ext
8753 } else {
8754 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetViewportWithCount>(
8755 val,
8756 )
8757 }
8758 },
8759 cmd_set_scissor_with_count_ext: unsafe {
8760 unsafe extern "system" fn cmd_set_scissor_with_count_ext(
8761 _command_buffer: CommandBuffer,
8762 _scissor_count: u32,
8763 _p_scissors: *const Rect2D,
8764 ) {
8765 panic!(concat!(
8766 "Unable to load ",
8767 stringify!(cmd_set_scissor_with_count_ext)
8768 ))
8769 }
8770 let val = _f(c"vkCmdSetScissorWithCountEXT");
8771 if val.is_null() {
8772 cmd_set_scissor_with_count_ext
8773 } else {
8774 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetScissorWithCount>(
8775 val,
8776 )
8777 }
8778 },
8779 cmd_bind_vertex_buffers2_ext: unsafe {
8780 unsafe extern "system" fn cmd_bind_vertex_buffers2_ext(
8781 _command_buffer: CommandBuffer,
8782 _first_binding: u32,
8783 _binding_count: u32,
8784 _p_buffers: *const Buffer,
8785 _p_offsets: *const DeviceSize,
8786 _p_sizes: *const DeviceSize,
8787 _p_strides: *const DeviceSize,
8788 ) {
8789 panic!(concat!(
8790 "Unable to load ",
8791 stringify!(cmd_bind_vertex_buffers2_ext)
8792 ))
8793 }
8794 let val = _f(c"vkCmdBindVertexBuffers2EXT");
8795 if val.is_null() {
8796 cmd_bind_vertex_buffers2_ext
8797 } else {
8798 ::core::mem::transmute::<*const c_void, PFN_vkCmdBindVertexBuffers2>(
8799 val,
8800 )
8801 }
8802 },
8803 cmd_set_depth_test_enable_ext: unsafe {
8804 unsafe extern "system" fn cmd_set_depth_test_enable_ext(
8805 _command_buffer: CommandBuffer,
8806 _depth_test_enable: Bool32,
8807 ) {
8808 panic!(concat!(
8809 "Unable to load ",
8810 stringify!(cmd_set_depth_test_enable_ext)
8811 ))
8812 }
8813 let val = _f(c"vkCmdSetDepthTestEnableEXT");
8814 if val.is_null() {
8815 cmd_set_depth_test_enable_ext
8816 } else {
8817 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetDepthTestEnable>(
8818 val,
8819 )
8820 }
8821 },
8822 cmd_set_depth_write_enable_ext: unsafe {
8823 unsafe extern "system" fn cmd_set_depth_write_enable_ext(
8824 _command_buffer: CommandBuffer,
8825 _depth_write_enable: Bool32,
8826 ) {
8827 panic!(concat!(
8828 "Unable to load ",
8829 stringify!(cmd_set_depth_write_enable_ext)
8830 ))
8831 }
8832 let val = _f(c"vkCmdSetDepthWriteEnableEXT");
8833 if val.is_null() {
8834 cmd_set_depth_write_enable_ext
8835 } else {
8836 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetDepthWriteEnable>(
8837 val,
8838 )
8839 }
8840 },
8841 cmd_set_depth_compare_op_ext: unsafe {
8842 unsafe extern "system" fn cmd_set_depth_compare_op_ext(
8843 _command_buffer: CommandBuffer,
8844 _depth_compare_op: CompareOp,
8845 ) {
8846 panic!(concat!(
8847 "Unable to load ",
8848 stringify!(cmd_set_depth_compare_op_ext)
8849 ))
8850 }
8851 let val = _f(c"vkCmdSetDepthCompareOpEXT");
8852 if val.is_null() {
8853 cmd_set_depth_compare_op_ext
8854 } else {
8855 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetDepthCompareOp>(val)
8856 }
8857 },
8858 cmd_set_depth_bounds_test_enable_ext: unsafe {
8859 unsafe extern "system" fn cmd_set_depth_bounds_test_enable_ext(
8860 _command_buffer: CommandBuffer,
8861 _depth_bounds_test_enable: Bool32,
8862 ) {
8863 panic!(concat!(
8864 "Unable to load ",
8865 stringify!(cmd_set_depth_bounds_test_enable_ext)
8866 ))
8867 }
8868 let val = _f(c"vkCmdSetDepthBoundsTestEnableEXT");
8869 if val.is_null() {
8870 cmd_set_depth_bounds_test_enable_ext
8871 } else {
8872 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetDepthBoundsTestEnable>(
8873 val,
8874 )
8875 }
8876 },
8877 cmd_set_stencil_test_enable_ext: unsafe {
8878 unsafe extern "system" fn cmd_set_stencil_test_enable_ext(
8879 _command_buffer: CommandBuffer,
8880 _stencil_test_enable: Bool32,
8881 ) {
8882 panic!(concat!(
8883 "Unable to load ",
8884 stringify!(cmd_set_stencil_test_enable_ext)
8885 ))
8886 }
8887 let val = _f(c"vkCmdSetStencilTestEnableEXT");
8888 if val.is_null() {
8889 cmd_set_stencil_test_enable_ext
8890 } else {
8891 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetStencilTestEnable>(
8892 val,
8893 )
8894 }
8895 },
8896 cmd_set_stencil_op_ext: unsafe {
8897 unsafe extern "system" fn cmd_set_stencil_op_ext(
8898 _command_buffer: CommandBuffer,
8899 _face_mask: StencilFaceFlags,
8900 _fail_op: StencilOp,
8901 _pass_op: StencilOp,
8902 _depth_fail_op: StencilOp,
8903 _compare_op: CompareOp,
8904 ) {
8905 panic!(concat!(
8906 "Unable to load ",
8907 stringify!(cmd_set_stencil_op_ext)
8908 ))
8909 }
8910 let val = _f(c"vkCmdSetStencilOpEXT");
8911 if val.is_null() {
8912 cmd_set_stencil_op_ext
8913 } else {
8914 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetStencilOp>(val)
8915 }
8916 },
8917 cmd_set_vertex_input_ext: unsafe {
8918 unsafe extern "system" fn cmd_set_vertex_input_ext(
8919 _command_buffer: CommandBuffer,
8920 _vertex_binding_description_count: u32,
8921 _p_vertex_binding_descriptions : * const VertexInputBindingDescription2EXT < '_ >,
8922 _vertex_attribute_description_count: u32,
8923 _p_vertex_attribute_descriptions : * const VertexInputAttributeDescription2EXT < '_ >,
8924 ) {
8925 panic!(concat!(
8926 "Unable to load ",
8927 stringify!(cmd_set_vertex_input_ext)
8928 ))
8929 }
8930 let val = _f(c"vkCmdSetVertexInputEXT");
8931 if val.is_null() {
8932 cmd_set_vertex_input_ext
8933 } else {
8934 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetVertexInputEXT>(val)
8935 }
8936 },
8937 cmd_set_patch_control_points_ext: unsafe {
8938 unsafe extern "system" fn cmd_set_patch_control_points_ext(
8939 _command_buffer: CommandBuffer,
8940 _patch_control_points: u32,
8941 ) {
8942 panic!(concat!(
8943 "Unable to load ",
8944 stringify!(cmd_set_patch_control_points_ext)
8945 ))
8946 }
8947 let val = _f(c"vkCmdSetPatchControlPointsEXT");
8948 if val.is_null() {
8949 cmd_set_patch_control_points_ext
8950 } else {
8951 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetPatchControlPointsEXT>(
8952 val,
8953 )
8954 }
8955 },
8956 cmd_set_rasterizer_discard_enable_ext: unsafe {
8957 unsafe extern "system" fn cmd_set_rasterizer_discard_enable_ext(
8958 _command_buffer: CommandBuffer,
8959 _rasterizer_discard_enable: Bool32,
8960 ) {
8961 panic!(concat!(
8962 "Unable to load ",
8963 stringify!(cmd_set_rasterizer_discard_enable_ext)
8964 ))
8965 }
8966 let val = _f(c"vkCmdSetRasterizerDiscardEnableEXT");
8967 if val.is_null() {
8968 cmd_set_rasterizer_discard_enable_ext
8969 } else {
8970 ::core::mem::transmute::<
8971 *const c_void,
8972 PFN_vkCmdSetRasterizerDiscardEnable,
8973 >(val)
8974 }
8975 },
8976 cmd_set_depth_bias_enable_ext: unsafe {
8977 unsafe extern "system" fn cmd_set_depth_bias_enable_ext(
8978 _command_buffer: CommandBuffer,
8979 _depth_bias_enable: Bool32,
8980 ) {
8981 panic!(concat!(
8982 "Unable to load ",
8983 stringify!(cmd_set_depth_bias_enable_ext)
8984 ))
8985 }
8986 let val = _f(c"vkCmdSetDepthBiasEnableEXT");
8987 if val.is_null() {
8988 cmd_set_depth_bias_enable_ext
8989 } else {
8990 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetDepthBiasEnable>(
8991 val,
8992 )
8993 }
8994 },
8995 cmd_set_logic_op_ext: unsafe {
8996 unsafe extern "system" fn cmd_set_logic_op_ext(
8997 _command_buffer: CommandBuffer,
8998 _logic_op: LogicOp,
8999 ) {
9000 panic!(concat!("Unable to load ", stringify!(cmd_set_logic_op_ext)))
9001 }
9002 let val = _f(c"vkCmdSetLogicOpEXT");
9003 if val.is_null() {
9004 cmd_set_logic_op_ext
9005 } else {
9006 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetLogicOpEXT>(val)
9007 }
9008 },
9009 cmd_set_primitive_restart_enable_ext: unsafe {
9010 unsafe extern "system" fn cmd_set_primitive_restart_enable_ext(
9011 _command_buffer: CommandBuffer,
9012 _primitive_restart_enable: Bool32,
9013 ) {
9014 panic!(concat!(
9015 "Unable to load ",
9016 stringify!(cmd_set_primitive_restart_enable_ext)
9017 ))
9018 }
9019 let val = _f(c"vkCmdSetPrimitiveRestartEnableEXT");
9020 if val.is_null() {
9021 cmd_set_primitive_restart_enable_ext
9022 } else {
9023 ::core::mem::transmute::<
9024 *const c_void,
9025 PFN_vkCmdSetPrimitiveRestartEnable,
9026 >(val)
9027 }
9028 },
9029 cmd_set_tessellation_domain_origin_ext: unsafe {
9030 unsafe extern "system" fn cmd_set_tessellation_domain_origin_ext(
9031 _command_buffer: CommandBuffer,
9032 _domain_origin: TessellationDomainOrigin,
9033 ) {
9034 panic!(concat!(
9035 "Unable to load ",
9036 stringify!(cmd_set_tessellation_domain_origin_ext)
9037 ))
9038 }
9039 let val = _f(c"vkCmdSetTessellationDomainOriginEXT");
9040 if val.is_null() {
9041 cmd_set_tessellation_domain_origin_ext
9042 } else {
9043 ::core::mem::transmute::<
9044 *const c_void,
9045 PFN_vkCmdSetTessellationDomainOriginEXT,
9046 >(val)
9047 }
9048 },
9049 cmd_set_depth_clamp_enable_ext: unsafe {
9050 unsafe extern "system" fn cmd_set_depth_clamp_enable_ext(
9051 _command_buffer: CommandBuffer,
9052 _depth_clamp_enable: Bool32,
9053 ) {
9054 panic!(concat!(
9055 "Unable to load ",
9056 stringify!(cmd_set_depth_clamp_enable_ext)
9057 ))
9058 }
9059 let val = _f(c"vkCmdSetDepthClampEnableEXT");
9060 if val.is_null() {
9061 cmd_set_depth_clamp_enable_ext
9062 } else {
9063 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetDepthClampEnableEXT>(
9064 val,
9065 )
9066 }
9067 },
9068 cmd_set_polygon_mode_ext: unsafe {
9069 unsafe extern "system" fn cmd_set_polygon_mode_ext(
9070 _command_buffer: CommandBuffer,
9071 _polygon_mode: PolygonMode,
9072 ) {
9073 panic!(concat!(
9074 "Unable to load ",
9075 stringify!(cmd_set_polygon_mode_ext)
9076 ))
9077 }
9078 let val = _f(c"vkCmdSetPolygonModeEXT");
9079 if val.is_null() {
9080 cmd_set_polygon_mode_ext
9081 } else {
9082 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetPolygonModeEXT>(val)
9083 }
9084 },
9085 cmd_set_rasterization_samples_ext: unsafe {
9086 unsafe extern "system" fn cmd_set_rasterization_samples_ext(
9087 _command_buffer: CommandBuffer,
9088 _rasterization_samples: SampleCountFlags,
9089 ) {
9090 panic!(concat!(
9091 "Unable to load ",
9092 stringify!(cmd_set_rasterization_samples_ext)
9093 ))
9094 }
9095 let val = _f(c"vkCmdSetRasterizationSamplesEXT");
9096 if val.is_null() {
9097 cmd_set_rasterization_samples_ext
9098 } else {
9099 ::core::mem::transmute::<
9100 *const c_void,
9101 PFN_vkCmdSetRasterizationSamplesEXT,
9102 >(val)
9103 }
9104 },
9105 cmd_set_sample_mask_ext: unsafe {
9106 unsafe extern "system" fn cmd_set_sample_mask_ext(
9107 _command_buffer: CommandBuffer,
9108 _samples: SampleCountFlags,
9109 _p_sample_mask: *const SampleMask,
9110 ) {
9111 panic!(concat!(
9112 "Unable to load ",
9113 stringify!(cmd_set_sample_mask_ext)
9114 ))
9115 }
9116 let val = _f(c"vkCmdSetSampleMaskEXT");
9117 if val.is_null() {
9118 cmd_set_sample_mask_ext
9119 } else {
9120 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetSampleMaskEXT>(val)
9121 }
9122 },
9123 cmd_set_alpha_to_coverage_enable_ext: unsafe {
9124 unsafe extern "system" fn cmd_set_alpha_to_coverage_enable_ext(
9125 _command_buffer: CommandBuffer,
9126 _alpha_to_coverage_enable: Bool32,
9127 ) {
9128 panic!(concat!(
9129 "Unable to load ",
9130 stringify!(cmd_set_alpha_to_coverage_enable_ext)
9131 ))
9132 }
9133 let val = _f(c"vkCmdSetAlphaToCoverageEnableEXT");
9134 if val.is_null() {
9135 cmd_set_alpha_to_coverage_enable_ext
9136 } else {
9137 ::core::mem::transmute::<
9138 *const c_void,
9139 PFN_vkCmdSetAlphaToCoverageEnableEXT,
9140 >(val)
9141 }
9142 },
9143 cmd_set_alpha_to_one_enable_ext: unsafe {
9144 unsafe extern "system" fn cmd_set_alpha_to_one_enable_ext(
9145 _command_buffer: CommandBuffer,
9146 _alpha_to_one_enable: Bool32,
9147 ) {
9148 panic!(concat!(
9149 "Unable to load ",
9150 stringify!(cmd_set_alpha_to_one_enable_ext)
9151 ))
9152 }
9153 let val = _f(c"vkCmdSetAlphaToOneEnableEXT");
9154 if val.is_null() {
9155 cmd_set_alpha_to_one_enable_ext
9156 } else {
9157 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetAlphaToOneEnableEXT>(
9158 val,
9159 )
9160 }
9161 },
9162 cmd_set_logic_op_enable_ext: unsafe {
9163 unsafe extern "system" fn cmd_set_logic_op_enable_ext(
9164 _command_buffer: CommandBuffer,
9165 _logic_op_enable: Bool32,
9166 ) {
9167 panic!(concat!(
9168 "Unable to load ",
9169 stringify!(cmd_set_logic_op_enable_ext)
9170 ))
9171 }
9172 let val = _f(c"vkCmdSetLogicOpEnableEXT");
9173 if val.is_null() {
9174 cmd_set_logic_op_enable_ext
9175 } else {
9176 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetLogicOpEnableEXT>(
9177 val,
9178 )
9179 }
9180 },
9181 cmd_set_color_blend_enable_ext: unsafe {
9182 unsafe extern "system" fn cmd_set_color_blend_enable_ext(
9183 _command_buffer: CommandBuffer,
9184 _first_attachment: u32,
9185 _attachment_count: u32,
9186 _p_color_blend_enables: *const Bool32,
9187 ) {
9188 panic!(concat!(
9189 "Unable to load ",
9190 stringify!(cmd_set_color_blend_enable_ext)
9191 ))
9192 }
9193 let val = _f(c"vkCmdSetColorBlendEnableEXT");
9194 if val.is_null() {
9195 cmd_set_color_blend_enable_ext
9196 } else {
9197 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetColorBlendEnableEXT>(
9198 val,
9199 )
9200 }
9201 },
9202 cmd_set_color_blend_equation_ext: unsafe {
9203 unsafe extern "system" fn cmd_set_color_blend_equation_ext(
9204 _command_buffer: CommandBuffer,
9205 _first_attachment: u32,
9206 _attachment_count: u32,
9207 _p_color_blend_equations: *const ColorBlendEquationEXT,
9208 ) {
9209 panic!(concat!(
9210 "Unable to load ",
9211 stringify!(cmd_set_color_blend_equation_ext)
9212 ))
9213 }
9214 let val = _f(c"vkCmdSetColorBlendEquationEXT");
9215 if val.is_null() {
9216 cmd_set_color_blend_equation_ext
9217 } else {
9218 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetColorBlendEquationEXT>(
9219 val,
9220 )
9221 }
9222 },
9223 cmd_set_color_write_mask_ext: unsafe {
9224 unsafe extern "system" fn cmd_set_color_write_mask_ext(
9225 _command_buffer: CommandBuffer,
9226 _first_attachment: u32,
9227 _attachment_count: u32,
9228 _p_color_write_masks: *const ColorComponentFlags,
9229 ) {
9230 panic!(concat!(
9231 "Unable to load ",
9232 stringify!(cmd_set_color_write_mask_ext)
9233 ))
9234 }
9235 let val = _f(c"vkCmdSetColorWriteMaskEXT");
9236 if val.is_null() {
9237 cmd_set_color_write_mask_ext
9238 } else {
9239 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetColorWriteMaskEXT>(
9240 val,
9241 )
9242 }
9243 },
9244 cmd_set_rasterization_stream_ext: unsafe {
9245 unsafe extern "system" fn cmd_set_rasterization_stream_ext(
9246 _command_buffer: CommandBuffer,
9247 _rasterization_stream: u32,
9248 ) {
9249 panic!(concat!(
9250 "Unable to load ",
9251 stringify!(cmd_set_rasterization_stream_ext)
9252 ))
9253 }
9254 let val = _f(c"vkCmdSetRasterizationStreamEXT");
9255 if val.is_null() {
9256 cmd_set_rasterization_stream_ext
9257 } else {
9258 ::core::mem::transmute::<
9259 *const c_void,
9260 PFN_vkCmdSetRasterizationStreamEXT,
9261 >(val)
9262 }
9263 },
9264 cmd_set_conservative_rasterization_mode_ext: unsafe {
9265 unsafe extern "system" fn cmd_set_conservative_rasterization_mode_ext(
9266 _command_buffer: CommandBuffer,
9267 _conservative_rasterization_mode: ConservativeRasterizationModeEXT,
9268 ) {
9269 panic!(concat!(
9270 "Unable to load ",
9271 stringify!(cmd_set_conservative_rasterization_mode_ext)
9272 ))
9273 }
9274 let val = _f(c"vkCmdSetConservativeRasterizationModeEXT");
9275 if val.is_null() {
9276 cmd_set_conservative_rasterization_mode_ext
9277 } else {
9278 ::core::mem::transmute::<
9279 *const c_void,
9280 PFN_vkCmdSetConservativeRasterizationModeEXT,
9281 >(val)
9282 }
9283 },
9284 cmd_set_extra_primitive_overestimation_size_ext: unsafe {
9285 unsafe extern "system" fn cmd_set_extra_primitive_overestimation_size_ext(
9286 _command_buffer: CommandBuffer,
9287 _extra_primitive_overestimation_size: f32,
9288 ) {
9289 panic!(concat!(
9290 "Unable to load ",
9291 stringify!(cmd_set_extra_primitive_overestimation_size_ext)
9292 ))
9293 }
9294 let val = _f(c"vkCmdSetExtraPrimitiveOverestimationSizeEXT");
9295 if val.is_null() {
9296 cmd_set_extra_primitive_overestimation_size_ext
9297 } else {
9298 ::core::mem::transmute::<
9299 *const c_void,
9300 PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT,
9301 >(val)
9302 }
9303 },
9304 cmd_set_depth_clip_enable_ext: unsafe {
9305 unsafe extern "system" fn cmd_set_depth_clip_enable_ext(
9306 _command_buffer: CommandBuffer,
9307 _depth_clip_enable: Bool32,
9308 ) {
9309 panic!(concat!(
9310 "Unable to load ",
9311 stringify!(cmd_set_depth_clip_enable_ext)
9312 ))
9313 }
9314 let val = _f(c"vkCmdSetDepthClipEnableEXT");
9315 if val.is_null() {
9316 cmd_set_depth_clip_enable_ext
9317 } else {
9318 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetDepthClipEnableEXT>(
9319 val,
9320 )
9321 }
9322 },
9323 cmd_set_sample_locations_enable_ext: unsafe {
9324 unsafe extern "system" fn cmd_set_sample_locations_enable_ext(
9325 _command_buffer: CommandBuffer,
9326 _sample_locations_enable: Bool32,
9327 ) {
9328 panic!(concat!(
9329 "Unable to load ",
9330 stringify!(cmd_set_sample_locations_enable_ext)
9331 ))
9332 }
9333 let val = _f(c"vkCmdSetSampleLocationsEnableEXT");
9334 if val.is_null() {
9335 cmd_set_sample_locations_enable_ext
9336 } else {
9337 ::core::mem::transmute::<
9338 *const c_void,
9339 PFN_vkCmdSetSampleLocationsEnableEXT,
9340 >(val)
9341 }
9342 },
9343 cmd_set_color_blend_advanced_ext: unsafe {
9344 unsafe extern "system" fn cmd_set_color_blend_advanced_ext(
9345 _command_buffer: CommandBuffer,
9346 _first_attachment: u32,
9347 _attachment_count: u32,
9348 _p_color_blend_advanced: *const ColorBlendAdvancedEXT,
9349 ) {
9350 panic!(concat!(
9351 "Unable to load ",
9352 stringify!(cmd_set_color_blend_advanced_ext)
9353 ))
9354 }
9355 let val = _f(c"vkCmdSetColorBlendAdvancedEXT");
9356 if val.is_null() {
9357 cmd_set_color_blend_advanced_ext
9358 } else {
9359 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetColorBlendAdvancedEXT>(
9360 val,
9361 )
9362 }
9363 },
9364 cmd_set_provoking_vertex_mode_ext: unsafe {
9365 unsafe extern "system" fn cmd_set_provoking_vertex_mode_ext(
9366 _command_buffer: CommandBuffer,
9367 _provoking_vertex_mode: ProvokingVertexModeEXT,
9368 ) {
9369 panic!(concat!(
9370 "Unable to load ",
9371 stringify!(cmd_set_provoking_vertex_mode_ext)
9372 ))
9373 }
9374 let val = _f(c"vkCmdSetProvokingVertexModeEXT");
9375 if val.is_null() {
9376 cmd_set_provoking_vertex_mode_ext
9377 } else {
9378 ::core::mem::transmute::<
9379 *const c_void,
9380 PFN_vkCmdSetProvokingVertexModeEXT,
9381 >(val)
9382 }
9383 },
9384 cmd_set_line_rasterization_mode_ext: unsafe {
9385 unsafe extern "system" fn cmd_set_line_rasterization_mode_ext(
9386 _command_buffer: CommandBuffer,
9387 _line_rasterization_mode: LineRasterizationModeEXT,
9388 ) {
9389 panic!(concat!(
9390 "Unable to load ",
9391 stringify!(cmd_set_line_rasterization_mode_ext)
9392 ))
9393 }
9394 let val = _f(c"vkCmdSetLineRasterizationModeEXT");
9395 if val.is_null() {
9396 cmd_set_line_rasterization_mode_ext
9397 } else {
9398 ::core::mem::transmute::<
9399 *const c_void,
9400 PFN_vkCmdSetLineRasterizationModeEXT,
9401 >(val)
9402 }
9403 },
9404 cmd_set_line_stipple_enable_ext: unsafe {
9405 unsafe extern "system" fn cmd_set_line_stipple_enable_ext(
9406 _command_buffer: CommandBuffer,
9407 _stippled_line_enable: Bool32,
9408 ) {
9409 panic!(concat!(
9410 "Unable to load ",
9411 stringify!(cmd_set_line_stipple_enable_ext)
9412 ))
9413 }
9414 let val = _f(c"vkCmdSetLineStippleEnableEXT");
9415 if val.is_null() {
9416 cmd_set_line_stipple_enable_ext
9417 } else {
9418 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetLineStippleEnableEXT>(
9419 val,
9420 )
9421 }
9422 },
9423 cmd_set_depth_clip_negative_one_to_one_ext: unsafe {
9424 unsafe extern "system" fn cmd_set_depth_clip_negative_one_to_one_ext(
9425 _command_buffer: CommandBuffer,
9426 _negative_one_to_one: Bool32,
9427 ) {
9428 panic!(concat!(
9429 "Unable to load ",
9430 stringify!(cmd_set_depth_clip_negative_one_to_one_ext)
9431 ))
9432 }
9433 let val = _f(c"vkCmdSetDepthClipNegativeOneToOneEXT");
9434 if val.is_null() {
9435 cmd_set_depth_clip_negative_one_to_one_ext
9436 } else {
9437 ::core::mem::transmute::<
9438 *const c_void,
9439 PFN_vkCmdSetDepthClipNegativeOneToOneEXT,
9440 >(val)
9441 }
9442 },
9443 cmd_set_viewport_w_scaling_enable_nv: unsafe {
9444 unsafe extern "system" fn cmd_set_viewport_w_scaling_enable_nv(
9445 _command_buffer: CommandBuffer,
9446 _viewport_w_scaling_enable: Bool32,
9447 ) {
9448 panic!(concat!(
9449 "Unable to load ",
9450 stringify!(cmd_set_viewport_w_scaling_enable_nv)
9451 ))
9452 }
9453 let val = _f(c"vkCmdSetViewportWScalingEnableNV");
9454 if val.is_null() {
9455 cmd_set_viewport_w_scaling_enable_nv
9456 } else {
9457 ::core::mem::transmute::<
9458 *const c_void,
9459 PFN_vkCmdSetViewportWScalingEnableNV,
9460 >(val)
9461 }
9462 },
9463 cmd_set_viewport_swizzle_nv: unsafe {
9464 unsafe extern "system" fn cmd_set_viewport_swizzle_nv(
9465 _command_buffer: CommandBuffer,
9466 _first_viewport: u32,
9467 _viewport_count: u32,
9468 _p_viewport_swizzles: *const ViewportSwizzleNV,
9469 ) {
9470 panic!(concat!(
9471 "Unable to load ",
9472 stringify!(cmd_set_viewport_swizzle_nv)
9473 ))
9474 }
9475 let val = _f(c"vkCmdSetViewportSwizzleNV");
9476 if val.is_null() {
9477 cmd_set_viewport_swizzle_nv
9478 } else {
9479 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetViewportSwizzleNV>(
9480 val,
9481 )
9482 }
9483 },
9484 cmd_set_coverage_to_color_enable_nv: unsafe {
9485 unsafe extern "system" fn cmd_set_coverage_to_color_enable_nv(
9486 _command_buffer: CommandBuffer,
9487 _coverage_to_color_enable: Bool32,
9488 ) {
9489 panic!(concat!(
9490 "Unable to load ",
9491 stringify!(cmd_set_coverage_to_color_enable_nv)
9492 ))
9493 }
9494 let val = _f(c"vkCmdSetCoverageToColorEnableNV");
9495 if val.is_null() {
9496 cmd_set_coverage_to_color_enable_nv
9497 } else {
9498 ::core::mem::transmute::<
9499 *const c_void,
9500 PFN_vkCmdSetCoverageToColorEnableNV,
9501 >(val)
9502 }
9503 },
9504 cmd_set_coverage_to_color_location_nv: unsafe {
9505 unsafe extern "system" fn cmd_set_coverage_to_color_location_nv(
9506 _command_buffer: CommandBuffer,
9507 _coverage_to_color_location: u32,
9508 ) {
9509 panic!(concat!(
9510 "Unable to load ",
9511 stringify!(cmd_set_coverage_to_color_location_nv)
9512 ))
9513 }
9514 let val = _f(c"vkCmdSetCoverageToColorLocationNV");
9515 if val.is_null() {
9516 cmd_set_coverage_to_color_location_nv
9517 } else {
9518 ::core::mem::transmute::<
9519 *const c_void,
9520 PFN_vkCmdSetCoverageToColorLocationNV,
9521 >(val)
9522 }
9523 },
9524 cmd_set_coverage_modulation_mode_nv: unsafe {
9525 unsafe extern "system" fn cmd_set_coverage_modulation_mode_nv(
9526 _command_buffer: CommandBuffer,
9527 _coverage_modulation_mode: CoverageModulationModeNV,
9528 ) {
9529 panic!(concat!(
9530 "Unable to load ",
9531 stringify!(cmd_set_coverage_modulation_mode_nv)
9532 ))
9533 }
9534 let val = _f(c"vkCmdSetCoverageModulationModeNV");
9535 if val.is_null() {
9536 cmd_set_coverage_modulation_mode_nv
9537 } else {
9538 ::core::mem::transmute::<
9539 *const c_void,
9540 PFN_vkCmdSetCoverageModulationModeNV,
9541 >(val)
9542 }
9543 },
9544 cmd_set_coverage_modulation_table_enable_nv: unsafe {
9545 unsafe extern "system" fn cmd_set_coverage_modulation_table_enable_nv(
9546 _command_buffer: CommandBuffer,
9547 _coverage_modulation_table_enable: Bool32,
9548 ) {
9549 panic!(concat!(
9550 "Unable to load ",
9551 stringify!(cmd_set_coverage_modulation_table_enable_nv)
9552 ))
9553 }
9554 let val = _f(c"vkCmdSetCoverageModulationTableEnableNV");
9555 if val.is_null() {
9556 cmd_set_coverage_modulation_table_enable_nv
9557 } else {
9558 ::core::mem::transmute::<
9559 *const c_void,
9560 PFN_vkCmdSetCoverageModulationTableEnableNV,
9561 >(val)
9562 }
9563 },
9564 cmd_set_coverage_modulation_table_nv: unsafe {
9565 unsafe extern "system" fn cmd_set_coverage_modulation_table_nv(
9566 _command_buffer: CommandBuffer,
9567 _coverage_modulation_table_count: u32,
9568 _p_coverage_modulation_table: *const f32,
9569 ) {
9570 panic!(concat!(
9571 "Unable to load ",
9572 stringify!(cmd_set_coverage_modulation_table_nv)
9573 ))
9574 }
9575 let val = _f(c"vkCmdSetCoverageModulationTableNV");
9576 if val.is_null() {
9577 cmd_set_coverage_modulation_table_nv
9578 } else {
9579 ::core::mem::transmute::<
9580 *const c_void,
9581 PFN_vkCmdSetCoverageModulationTableNV,
9582 >(val)
9583 }
9584 },
9585 cmd_set_shading_rate_image_enable_nv: unsafe {
9586 unsafe extern "system" fn cmd_set_shading_rate_image_enable_nv(
9587 _command_buffer: CommandBuffer,
9588 _shading_rate_image_enable: Bool32,
9589 ) {
9590 panic!(concat!(
9591 "Unable to load ",
9592 stringify!(cmd_set_shading_rate_image_enable_nv)
9593 ))
9594 }
9595 let val = _f(c"vkCmdSetShadingRateImageEnableNV");
9596 if val.is_null() {
9597 cmd_set_shading_rate_image_enable_nv
9598 } else {
9599 ::core::mem::transmute::<
9600 *const c_void,
9601 PFN_vkCmdSetShadingRateImageEnableNV,
9602 >(val)
9603 }
9604 },
9605 cmd_set_representative_fragment_test_enable_nv: unsafe {
9606 unsafe extern "system" fn cmd_set_representative_fragment_test_enable_nv(
9607 _command_buffer: CommandBuffer,
9608 _representative_fragment_test_enable: Bool32,
9609 ) {
9610 panic!(concat!(
9611 "Unable to load ",
9612 stringify!(cmd_set_representative_fragment_test_enable_nv)
9613 ))
9614 }
9615 let val = _f(c"vkCmdSetRepresentativeFragmentTestEnableNV");
9616 if val.is_null() {
9617 cmd_set_representative_fragment_test_enable_nv
9618 } else {
9619 ::core::mem::transmute::<
9620 *const c_void,
9621 PFN_vkCmdSetRepresentativeFragmentTestEnableNV,
9622 >(val)
9623 }
9624 },
9625 cmd_set_coverage_reduction_mode_nv: unsafe {
9626 unsafe extern "system" fn cmd_set_coverage_reduction_mode_nv(
9627 _command_buffer: CommandBuffer,
9628 _coverage_reduction_mode: CoverageReductionModeNV,
9629 ) {
9630 panic!(concat!(
9631 "Unable to load ",
9632 stringify!(cmd_set_coverage_reduction_mode_nv)
9633 ))
9634 }
9635 let val = _f(c"vkCmdSetCoverageReductionModeNV");
9636 if val.is_null() {
9637 cmd_set_coverage_reduction_mode_nv
9638 } else {
9639 ::core::mem::transmute::<
9640 *const c_void,
9641 PFN_vkCmdSetCoverageReductionModeNV,
9642 >(val)
9643 }
9644 },
9645 cmd_set_depth_clamp_range_ext: unsafe {
9646 unsafe extern "system" fn cmd_set_depth_clamp_range_ext(
9647 _command_buffer: CommandBuffer,
9648 _depth_clamp_mode: DepthClampModeEXT,
9649 _p_depth_clamp_range: *const DepthClampRangeEXT,
9650 ) {
9651 panic!(concat!(
9652 "Unable to load ",
9653 stringify!(cmd_set_depth_clamp_range_ext)
9654 ))
9655 }
9656 let val = _f(c"vkCmdSetDepthClampRangeEXT");
9657 if val.is_null() {
9658 cmd_set_depth_clamp_range_ext
9659 } else {
9660 ::core::mem::transmute::<*const c_void, PFN_vkCmdSetDepthClampRangeEXT>(
9661 val,
9662 )
9663 }
9664 },
9665 }
9666 }
9667 }
9668 }
9669 #[doc = "VK_EXT_mutable_descriptor_type"]
9670 pub mod mutable_descriptor_type {
9671 use super::super::*;
9672 pub use {
9673 crate::vk::EXT_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME as NAME,
9674 crate::vk::EXT_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION as SPEC_VERSION,
9675 };
9676 }
9677 #[doc = "VK_EXT_legacy_vertex_attributes"]
9678 pub mod legacy_vertex_attributes {
9679 use super::super::*;
9680 pub use {
9681 crate::vk::EXT_LEGACY_VERTEX_ATTRIBUTES_EXTENSION_NAME as NAME,
9682 crate::vk::EXT_LEGACY_VERTEX_ATTRIBUTES_SPEC_VERSION as SPEC_VERSION,
9683 };
9684 }
9685 #[doc = "VK_EXT_layer_settings"]
9686 pub mod layer_settings {
9687 use super::super::*;
9688 pub use {
9689 crate::vk::EXT_LAYER_SETTINGS_EXTENSION_NAME as NAME,
9690 crate::vk::EXT_LAYER_SETTINGS_SPEC_VERSION as SPEC_VERSION,
9691 };
9692 }
9693 #[doc = "VK_EXT_pipeline_library_group_handles"]
9694 pub mod pipeline_library_group_handles {
9695 use super::super::*;
9696 pub use {
9697 crate::vk::EXT_PIPELINE_LIBRARY_GROUP_HANDLES_EXTENSION_NAME as NAME,
9698 crate::vk::EXT_PIPELINE_LIBRARY_GROUP_HANDLES_SPEC_VERSION as SPEC_VERSION,
9699 };
9700 }
9701 #[doc = "VK_EXT_dynamic_rendering_unused_attachments"]
9702 pub mod dynamic_rendering_unused_attachments {
9703 use super::super::*;
9704 pub use {
9705 crate::vk::EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_EXTENSION_NAME as NAME,
9706 crate::vk::EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_SPEC_VERSION as SPEC_VERSION,
9707 };
9708 }
    #[doc = "VK_EXT_attachment_feedback_loop_dynamic_state"]
    pub mod attachment_feedback_loop_dynamic_state {
        use super::super::*;
        // Conventional NAME / SPEC_VERSION re-exports for this extension.
        pub use {
            crate::vk::EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_EXTENSION_NAME as NAME,
            crate::vk::EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_attachment_feedback_loop_dynamic_state device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level commands through
            // vkGetDeviceProcAddr on `device`'s handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Convert the loader's PFN_vkVoidFunction (an Option-wrapped fn
                    // pointer) into the raw pointer form DeviceFn::load expects.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Access to the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // The device handle this table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_attachment_feedback_loop_dynamic_state device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_set_attachment_feedback_loop_enable_ext:
                PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT,
        }
        // SAFETY: the struct only holds plain `extern "system"` function pointers,
        // which carry no thread affinity or interior mutability.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            // Generic entry point; forwards to the dyn-erased loader so the
            // monomorphized surface stays small.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Resolves each command by name via `_f`. If the loader returns null,
            // a panicking stub with the same signature is installed instead, so a
            // call to an unavailable command fails loudly rather than via UB.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_set_attachment_feedback_loop_enable_ext: unsafe {
                        // Fallback stub used when the driver does not expose the symbol.
                        unsafe extern "system" fn cmd_set_attachment_feedback_loop_enable_ext(
                            _command_buffer: CommandBuffer,
                            _aspect_mask: ImageAspectFlags,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_attachment_feedback_loop_enable_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetAttachmentFeedbackLoopEnableEXT");
                        if val.is_null() {
                            cmd_set_attachment_feedback_loop_enable_ext
                        } else {
                            // SAFETY: non-null pointers returned by the loader for this
                            // name are the matching Vulkan entry point; reinterpret the
                            // raw pointer as the typed PFN.
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_memory_decompression"]
    pub mod memory_decompression {
        use super::super::*;
        // Conventional NAME / SPEC_VERSION re-exports for this extension.
        pub use {
            crate::vk::EXT_MEMORY_DECOMPRESSION_EXTENSION_NAME as NAME,
            crate::vk::EXT_MEMORY_DECOMPRESSION_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_memory_decompression device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level commands through
            // vkGetDeviceProcAddr on `device`'s handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Convert the loader's PFN_vkVoidFunction (an Option-wrapped fn
                    // pointer) into the raw pointer form DeviceFn::load expects.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Access to the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // The device handle this table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_memory_decompression device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_decompress_memory_ext: PFN_vkCmdDecompressMemoryEXT,
            pub cmd_decompress_memory_indirect_count_ext: PFN_vkCmdDecompressMemoryIndirectCountEXT,
        }
        // SAFETY: the struct only holds plain `extern "system"` function pointers,
        // which carry no thread affinity or interior mutability.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            // Generic entry point; forwards to the dyn-erased loader so the
            // monomorphized surface stays small.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Resolves each command by name via `_f`. If the loader returns null,
            // a panicking stub with the same signature is installed instead, so a
            // call to an unavailable command fails loudly rather than via UB.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_decompress_memory_ext: unsafe {
                        // Fallback stub used when the driver does not expose the symbol.
                        unsafe extern "system" fn cmd_decompress_memory_ext(
                            _command_buffer: CommandBuffer,
                            _p_decompress_memory_info_ext: *const DecompressMemoryInfoEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_decompress_memory_ext)
                            ))
                        }
                        let val = _f(c"vkCmdDecompressMemoryEXT");
                        if val.is_null() {
                            cmd_decompress_memory_ext
                        } else {
                            // SAFETY: a non-null pointer for this name is the matching
                            // Vulkan entry point; reinterpret it as the typed PFN.
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDecompressMemoryEXT>(
                                val,
                            )
                        }
                    },
                    cmd_decompress_memory_indirect_count_ext: unsafe {
                        // Fallback stub used when the driver does not expose the symbol.
                        unsafe extern "system" fn cmd_decompress_memory_indirect_count_ext(
                            _command_buffer: CommandBuffer,
                            _decompression_method: MemoryDecompressionMethodFlagsEXT,
                            _indirect_commands_address: DeviceAddress,
                            _indirect_commands_count_address: DeviceAddress,
                            _max_decompression_count: u32,
                            _stride: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_decompress_memory_indirect_count_ext)
                            ))
                        }
                        let val = _f(c"vkCmdDecompressMemoryIndirectCountEXT");
                        if val.is_null() {
                            cmd_decompress_memory_indirect_count_ext
                        } else {
                            // SAFETY: a non-null pointer for this name is the matching
                            // Vulkan entry point; reinterpret it as the typed PFN.
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdDecompressMemoryIndirectCountEXT,
                            >(val)
                        }
                    },
                }
            }
        }
    }
9872 #[doc = "VK_EXT_shader_replicated_composites"]
9873 pub mod shader_replicated_composites {
9874 use super::super::*;
9875 pub use {
9876 crate::vk::EXT_SHADER_REPLICATED_COMPOSITES_EXTENSION_NAME as NAME,
9877 crate::vk::EXT_SHADER_REPLICATED_COMPOSITES_SPEC_VERSION as SPEC_VERSION,
9878 };
9879 }
9880 #[doc = "VK_EXT_shader_float8"]
9881 pub mod shader_float8 {
9882 use super::super::*;
9883 pub use {
9884 crate::vk::EXT_SHADER_FLOAT8_EXTENSION_NAME as NAME,
9885 crate::vk::EXT_SHADER_FLOAT8_SPEC_VERSION as SPEC_VERSION,
9886 };
9887 }
    #[doc = "VK_EXT_device_generated_commands"]
    pub mod device_generated_commands {
        use super::super::*;
        // Conventional NAME / SPEC_VERSION re-exports for this extension.
        pub use {
            crate::vk::EXT_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME as NAME,
            crate::vk::EXT_DEVICE_GENERATED_COMMANDS_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_device_generated_commands device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level commands through
            // vkGetDeviceProcAddr on `device`'s handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Convert the loader's PFN_vkVoidFunction (an Option-wrapped fn
                    // pointer) into the raw pointer form DeviceFn::load expects.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Access to the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // The device handle this table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_device_generated_commands device-level function pointers"]
        pub struct DeviceFn {
            pub get_generated_commands_memory_requirements_ext:
                PFN_vkGetGeneratedCommandsMemoryRequirementsEXT,
            pub cmd_preprocess_generated_commands_ext: PFN_vkCmdPreprocessGeneratedCommandsEXT,
            pub cmd_execute_generated_commands_ext: PFN_vkCmdExecuteGeneratedCommandsEXT,
            pub create_indirect_commands_layout_ext: PFN_vkCreateIndirectCommandsLayoutEXT,
            pub destroy_indirect_commands_layout_ext: PFN_vkDestroyIndirectCommandsLayoutEXT,
            pub create_indirect_execution_set_ext: PFN_vkCreateIndirectExecutionSetEXT,
            pub destroy_indirect_execution_set_ext: PFN_vkDestroyIndirectExecutionSetEXT,
            pub update_indirect_execution_set_pipeline_ext:
                PFN_vkUpdateIndirectExecutionSetPipelineEXT,
            pub update_indirect_execution_set_shader_ext: PFN_vkUpdateIndirectExecutionSetShaderEXT,
        }
        // SAFETY: the struct only holds plain `extern "system"` function pointers,
        // which carry no thread affinity or interior mutability.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            // Generic entry point; forwards to the dyn-erased loader so the
            // monomorphized surface stays small.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Resolves each command by name via `_f`. If the loader returns null,
            // a panicking stub with the same signature is installed instead, so a
            // call to an unavailable command fails loudly rather than via UB.
            // Each transmute below reinterprets the non-null raw pointer as the
            // field's typed PFN; the null case keeps the stub.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_generated_commands_memory_requirements_ext: unsafe {
                        // Fallback stub used when the driver does not expose the symbol.
                        unsafe extern "system" fn get_generated_commands_memory_requirements_ext(
                            _device: crate::vk::Device,
                            _p_info: *const GeneratedCommandsMemoryRequirementsInfoEXT<'_>,
                            _p_memory_requirements: *mut MemoryRequirements2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_generated_commands_memory_requirements_ext)
                            ))
                        }
                        let val = _f(c"vkGetGeneratedCommandsMemoryRequirementsEXT");
                        if val.is_null() {
                            get_generated_commands_memory_requirements_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetGeneratedCommandsMemoryRequirementsEXT,
                            >(val)
                        }
                    },
                    cmd_preprocess_generated_commands_ext: unsafe {
                        // Fallback stub used when the driver does not expose the symbol.
                        unsafe extern "system" fn cmd_preprocess_generated_commands_ext(
                            _command_buffer: CommandBuffer,
                            _p_generated_commands_info: *const GeneratedCommandsInfoEXT<'_>,
                            _state_command_buffer: CommandBuffer,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_preprocess_generated_commands_ext)
                            ))
                        }
                        let val = _f(c"vkCmdPreprocessGeneratedCommandsEXT");
                        if val.is_null() {
                            cmd_preprocess_generated_commands_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdPreprocessGeneratedCommandsEXT,
                            >(val)
                        }
                    },
                    cmd_execute_generated_commands_ext: unsafe {
                        // Fallback stub used when the driver does not expose the symbol.
                        unsafe extern "system" fn cmd_execute_generated_commands_ext(
                            _command_buffer: CommandBuffer,
                            _is_preprocessed: Bool32,
                            _p_generated_commands_info: *const GeneratedCommandsInfoEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_execute_generated_commands_ext)
                            ))
                        }
                        let val = _f(c"vkCmdExecuteGeneratedCommandsEXT");
                        if val.is_null() {
                            cmd_execute_generated_commands_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdExecuteGeneratedCommandsEXT,
                            >(val)
                        }
                    },
                    create_indirect_commands_layout_ext: unsafe {
                        // Fallback stub used when the driver does not expose the symbol.
                        unsafe extern "system" fn create_indirect_commands_layout_ext(
                            _device: crate::vk::Device,
                            _p_create_info: *const IndirectCommandsLayoutCreateInfoEXT<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_indirect_commands_layout: *mut IndirectCommandsLayoutEXT,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_indirect_commands_layout_ext)
                            ))
                        }
                        let val = _f(c"vkCreateIndirectCommandsLayoutEXT");
                        if val.is_null() {
                            create_indirect_commands_layout_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCreateIndirectCommandsLayoutEXT,
                            >(val)
                        }
                    },
                    destroy_indirect_commands_layout_ext: unsafe {
                        // Fallback stub used when the driver does not expose the symbol.
                        unsafe extern "system" fn destroy_indirect_commands_layout_ext(
                            _device: crate::vk::Device,
                            _indirect_commands_layout: IndirectCommandsLayoutEXT,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(destroy_indirect_commands_layout_ext)
                            ))
                        }
                        let val = _f(c"vkDestroyIndirectCommandsLayoutEXT");
                        if val.is_null() {
                            destroy_indirect_commands_layout_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkDestroyIndirectCommandsLayoutEXT,
                            >(val)
                        }
                    },
                    create_indirect_execution_set_ext: unsafe {
                        // Fallback stub used when the driver does not expose the symbol.
                        unsafe extern "system" fn create_indirect_execution_set_ext(
                            _device: crate::vk::Device,
                            _p_create_info: *const IndirectExecutionSetCreateInfoEXT<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_indirect_execution_set: *mut IndirectExecutionSetEXT,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_indirect_execution_set_ext)
                            ))
                        }
                        let val = _f(c"vkCreateIndirectExecutionSetEXT");
                        if val.is_null() {
                            create_indirect_execution_set_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCreateIndirectExecutionSetEXT,
                            >(val)
                        }
                    },
                    destroy_indirect_execution_set_ext: unsafe {
                        // Fallback stub used when the driver does not expose the symbol.
                        unsafe extern "system" fn destroy_indirect_execution_set_ext(
                            _device: crate::vk::Device,
                            _indirect_execution_set: IndirectExecutionSetEXT,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(destroy_indirect_execution_set_ext)
                            ))
                        }
                        let val = _f(c"vkDestroyIndirectExecutionSetEXT");
                        if val.is_null() {
                            destroy_indirect_execution_set_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkDestroyIndirectExecutionSetEXT,
                            >(val)
                        }
                    },
                    update_indirect_execution_set_pipeline_ext: unsafe {
                        // Fallback stub used when the driver does not expose the symbol.
                        unsafe extern "system" fn update_indirect_execution_set_pipeline_ext(
                            _device: crate::vk::Device,
                            _indirect_execution_set: IndirectExecutionSetEXT,
                            _execution_set_write_count: u32,
                            _p_execution_set_writes: *const WriteIndirectExecutionSetPipelineEXT<
                                '_,
                            >,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(update_indirect_execution_set_pipeline_ext)
                            ))
                        }
                        let val = _f(c"vkUpdateIndirectExecutionSetPipelineEXT");
                        if val.is_null() {
                            update_indirect_execution_set_pipeline_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkUpdateIndirectExecutionSetPipelineEXT,
                            >(val)
                        }
                    },
                    update_indirect_execution_set_shader_ext: unsafe {
                        // Fallback stub used when the driver does not expose the symbol.
                        unsafe extern "system" fn update_indirect_execution_set_shader_ext(
                            _device: crate::vk::Device,
                            _indirect_execution_set: IndirectExecutionSetEXT,
                            _execution_set_write_count: u32,
                            _p_execution_set_writes: *const WriteIndirectExecutionSetShaderEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(update_indirect_execution_set_shader_ext)
                            ))
                        }
                        let val = _f(c"vkUpdateIndirectExecutionSetShaderEXT");
                        if val.is_null() {
                            update_indirect_execution_set_shader_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkUpdateIndirectExecutionSetShaderEXT,
                            >(val)
                        }
                    },
                }
            }
        }
    }
10142 #[doc = "VK_EXT_ray_tracing_invocation_reorder"]
10143 pub mod ray_tracing_invocation_reorder {
10144 use super::super::*;
10145 pub use {
10146 crate::vk::EXT_RAY_TRACING_INVOCATION_REORDER_EXTENSION_NAME as NAME,
10147 crate::vk::EXT_RAY_TRACING_INVOCATION_REORDER_SPEC_VERSION as SPEC_VERSION,
10148 };
10149 }
    #[doc = "VK_EXT_depth_clamp_control"]
    pub mod depth_clamp_control {
        use super::super::*;
        pub use {
            crate::vk::EXT_DEPTH_CLAMP_CONTROL_EXTENSION_NAME as NAME,
            crate::vk::EXT_DEPTH_CLAMP_CONTROL_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_depth_clamp_control device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's device-level commands through
            /// `vkGetDeviceProcAddr` on the given device.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // PFN_vkVoidFunction and *const c_void are both plain
                    // pointers; the transmute only changes the nominal type.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_depth_clamp_control device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_set_depth_clamp_range_ext: PFN_vkCmdSetDepthClampRangeEXT,
        }
        // SAFETY: the table holds only `extern "system"` function pointers,
        // which are plain code addresses with no thread affinity.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Dyn-erased worker so the generic `load` does not duplicate this
            // body per closure type.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_set_depth_clamp_range_ext: unsafe {
                        // Fallback stub: installed when the loader returns
                        // null, panics with the command name if ever called.
                        unsafe extern "system" fn cmd_set_depth_clamp_range_ext(
                            _command_buffer: CommandBuffer,
                            _depth_clamp_mode: DepthClampModeEXT,
                            _p_depth_clamp_range: *const DepthClampRangeEXT,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_depth_clamp_range_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetDepthClampRangeEXT");
                        if val.is_null() {
                            cmd_set_depth_clamp_range_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetDepthClampRangeEXT>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_EXT_external_memory_metal"]
    pub mod external_memory_metal {
        use super::super::*;
        pub use {
            crate::vk::EXT_EXTERNAL_MEMORY_METAL_EXTENSION_NAME as NAME,
            crate::vk::EXT_EXTERNAL_MEMORY_METAL_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_external_memory_metal device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's device-level commands through
            /// `vkGetDeviceProcAddr` on the given device.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Pointer-to-pointer cast only; both sides are plain addresses.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_external_memory_metal device-level function pointers"]
        pub struct DeviceFn {
            pub get_memory_metal_handle_ext: PFN_vkGetMemoryMetalHandleEXT,
            pub get_memory_metal_handle_properties_ext: PFN_vkGetMemoryMetalHandlePropertiesEXT,
        }
        // SAFETY: only raw `extern "system"` function pointers are stored.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Dyn-erased worker shared by all `load` monomorphizations.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_memory_metal_handle_ext: unsafe {
                        // Panicking fallback used when the symbol is absent.
                        unsafe extern "system" fn get_memory_metal_handle_ext(
                            _device: crate::vk::Device,
                            _p_get_metal_handle_info: *const MemoryGetMetalHandleInfoEXT<'_>,
                            _p_handle: *mut *mut c_void,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_memory_metal_handle_ext)
                            ))
                        }
                        let val = _f(c"vkGetMemoryMetalHandleEXT");
                        if val.is_null() {
                            get_memory_metal_handle_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetMemoryMetalHandleEXT>(
                                val,
                            )
                        }
                    },
                    get_memory_metal_handle_properties_ext: unsafe {
                        // Same null-fallback pattern as above.
                        unsafe extern "system" fn get_memory_metal_handle_properties_ext(
                            _device: crate::vk::Device,
                            _handle_type: ExternalMemoryHandleTypeFlags,
                            _p_handle: *const c_void,
                            _p_memory_metal_handle_properties: *mut MemoryMetalHandlePropertiesEXT<
                                '_,
                            >,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_memory_metal_handle_properties_ext)
                            ))
                        }
                        let val = _f(c"vkGetMemoryMetalHandlePropertiesEXT");
                        if val.is_null() {
                            get_memory_metal_handle_properties_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetMemoryMetalHandlePropertiesEXT,
                            >(val)
                        }
                    },
                }
            }
        }
    }
10313 #[doc = "VK_EXT_vertex_attribute_robustness"]
10314 pub mod vertex_attribute_robustness {
10315 use super::super::*;
10316 pub use {
10317 crate::vk::EXT_VERTEX_ATTRIBUTE_ROBUSTNESS_EXTENSION_NAME as NAME,
10318 crate::vk::EXT_VERTEX_ATTRIBUTE_ROBUSTNESS_SPEC_VERSION as SPEC_VERSION,
10319 };
10320 }
    #[doc = "VK_EXT_fragment_density_map_offset"]
    pub mod fragment_density_map_offset {
        use super::super::*;
        pub use {
            crate::vk::EXT_FRAGMENT_DENSITY_MAP_OFFSET_EXTENSION_NAME as NAME,
            crate::vk::EXT_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_fragment_density_map_offset device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's device-level commands through
            /// `vkGetDeviceProcAddr` on the given device.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_fragment_density_map_offset device-level function pointers"]
        pub struct DeviceFn {
            // NOTE: vkCmdEndRendering2EXT uses the PFN type of the KHR-named
            // command here — presumably the two are Vulkan aliases with an
            // identical signature (the loader string below is the EXT name).
            pub cmd_end_rendering2_ext: PFN_vkCmdEndRendering2KHR,
        }
        // SAFETY: only raw `extern "system"` function pointers are stored.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Dyn-erased worker shared by all `load` monomorphizations.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_end_rendering2_ext: unsafe {
                        // Panicking fallback used when the symbol is absent.
                        unsafe extern "system" fn cmd_end_rendering2_ext(
                            _command_buffer: CommandBuffer,
                            _p_rendering_end_info: *const RenderingEndInfoKHR<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_end_rendering2_ext)
                            ))
                        }
                        let val = _f(c"vkCmdEndRendering2EXT");
                        if val.is_null() {
                            cmd_end_rendering2_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdEndRendering2KHR>(val)
                        }
                    },
                }
            }
        }
    }
10387 #[doc = "VK_EXT_zero_initialize_device_memory"]
10388 pub mod zero_initialize_device_memory {
10389 use super::super::*;
10390 pub use {
10391 crate::vk::EXT_ZERO_INITIALIZE_DEVICE_MEMORY_EXTENSION_NAME as NAME,
10392 crate::vk::EXT_ZERO_INITIALIZE_DEVICE_MEMORY_SPEC_VERSION as SPEC_VERSION,
10393 };
10394 }
10395 #[doc = "VK_EXT_shader_64bit_indexing"]
10396 pub mod shader_64bit_indexing {
10397 use super::super::*;
10398 pub use {
10399 crate::vk::EXT_SHADER_64BIT_INDEXING_EXTENSION_NAME as NAME,
10400 crate::vk::EXT_SHADER_64BIT_INDEXING_SPEC_VERSION as SPEC_VERSION,
10401 };
10402 }
    #[doc = "VK_EXT_custom_resolve"]
    pub mod custom_resolve {
        use super::super::*;
        pub use {
            crate::vk::EXT_CUSTOM_RESOLVE_EXTENSION_NAME as NAME,
            crate::vk::EXT_CUSTOM_RESOLVE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_EXT_custom_resolve device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's device-level commands through
            /// `vkGetDeviceProcAddr` on the given device.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_EXT_custom_resolve device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_begin_custom_resolve_ext: PFN_vkCmdBeginCustomResolveEXT,
        }
        // SAFETY: only raw `extern "system"` function pointers are stored.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Dyn-erased worker shared by all `load` monomorphizations.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_begin_custom_resolve_ext: unsafe {
                        // Panicking fallback used when the symbol is absent.
                        unsafe extern "system" fn cmd_begin_custom_resolve_ext(
                            _command_buffer: CommandBuffer,
                            _p_begin_custom_resolve_info: *const BeginCustomResolveInfoEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_begin_custom_resolve_ext)
                            ))
                        }
                        let val = _f(c"vkCmdBeginCustomResolveEXT");
                        if val.is_null() {
                            cmd_begin_custom_resolve_ext
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdBeginCustomResolveEXT>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
10471 #[doc = "VK_EXT_shader_long_vector"]
10472 pub mod shader_long_vector {
10473 use super::super::*;
10474 pub use {
10475 crate::vk::EXT_SHADER_LONG_VECTOR_EXTENSION_NAME as NAME,
10476 crate::vk::EXT_SHADER_LONG_VECTOR_SPEC_VERSION as SPEC_VERSION,
10477 };
10478 }
10479 #[doc = "VK_EXT_shader_uniform_buffer_unsized_array"]
10480 pub mod shader_uniform_buffer_unsized_array {
10481 use super::super::*;
10482 pub use {
10483 crate::vk::EXT_SHADER_UNIFORM_BUFFER_UNSIZED_ARRAY_EXTENSION_NAME as NAME,
10484 crate::vk::EXT_SHADER_UNIFORM_BUFFER_UNSIZED_ARRAY_SPEC_VERSION as SPEC_VERSION,
10485 };
10486 }
10487 #[doc = "VK_EXT_shader_subgroup_partitioned"]
10488 pub mod shader_subgroup_partitioned {
10489 use super::super::*;
10490 pub use {
10491 crate::vk::EXT_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME as NAME,
10492 crate::vk::EXT_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION as SPEC_VERSION,
10493 };
10494 }
10495}
10496#[doc = "Extensions tagged FUCHSIA"]
10497pub mod fuchsia {
    #[doc = "VK_FUCHSIA_imagepipe_surface"]
    pub mod imagepipe_surface {
        use super::super::*;
        pub use {
            crate::vk::FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME as NAME,
            crate::vk::FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_FUCHSIA_imagepipe_surface instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded instance-level function-pointer table.
            pub(crate) fp: InstanceFn,
            // The raw instance handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Resolves this extension's instance-level commands through
            /// `vkGetInstanceProcAddr` on the given instance.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            /// Instance handle the table was loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_FUCHSIA_imagepipe_surface instance-level function pointers"]
        pub struct InstanceFn {
            pub create_image_pipe_surface_fuchsia: PFN_vkCreateImagePipeSurfaceFUCHSIA,
        }
        // SAFETY: only raw `extern "system"` function pointers are stored.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Dyn-erased worker shared by all `load` monomorphizations.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_image_pipe_surface_fuchsia: unsafe {
                        // Panicking fallback used when the symbol is absent.
                        unsafe extern "system" fn create_image_pipe_surface_fuchsia(
                            _instance: crate::vk::Instance,
                            _p_create_info: *const ImagePipeSurfaceCreateInfoFUCHSIA<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_surface: *mut SurfaceKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_image_pipe_surface_fuchsia)
                            ))
                        }
                        let val = _f(c"vkCreateImagePipeSurfaceFUCHSIA");
                        if val.is_null() {
                            create_image_pipe_surface_fuchsia
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCreateImagePipeSurfaceFUCHSIA,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_FUCHSIA_external_memory"]
    pub mod external_memory {
        use super::super::*;
        pub use {
            crate::vk::FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME as NAME,
            crate::vk::FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_FUCHSIA_external_memory device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's device-level commands through
            /// `vkGetDeviceProcAddr` on the given device.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_FUCHSIA_external_memory device-level function pointers"]
        pub struct DeviceFn {
            pub get_memory_zircon_handle_fuchsia: PFN_vkGetMemoryZirconHandleFUCHSIA,
            pub get_memory_zircon_handle_properties_fuchsia:
                PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA,
        }
        // SAFETY: only raw `extern "system"` function pointers are stored.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Dyn-erased worker shared by all `load` monomorphizations.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_memory_zircon_handle_fuchsia: unsafe {
                        // Panicking fallback used when the symbol is absent.
                        unsafe extern "system" fn get_memory_zircon_handle_fuchsia(
                            _device: crate::vk::Device,
                            _p_get_zircon_handle_info: *const MemoryGetZirconHandleInfoFUCHSIA<'_>,
                            _p_zircon_handle: *mut zx_handle_t,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_memory_zircon_handle_fuchsia)
                            ))
                        }
                        let val = _f(c"vkGetMemoryZirconHandleFUCHSIA");
                        if val.is_null() {
                            get_memory_zircon_handle_fuchsia
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetMemoryZirconHandleFUCHSIA,
                            >(val)
                        }
                    },
                    get_memory_zircon_handle_properties_fuchsia: unsafe {
                        // Same null-fallback pattern as above.
                        unsafe extern "system" fn get_memory_zircon_handle_properties_fuchsia(
                            _device: crate::vk::Device,
                            _handle_type: ExternalMemoryHandleTypeFlags,
                            _zircon_handle: zx_handle_t,
                            _p_memory_zircon_handle_properties : * mut MemoryZirconHandlePropertiesFUCHSIA < '_ >,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_memory_zircon_handle_properties_fuchsia)
                            ))
                        }
                        let val = _f(c"vkGetMemoryZirconHandlePropertiesFUCHSIA");
                        if val.is_null() {
                            get_memory_zircon_handle_properties_fuchsia
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_FUCHSIA_external_semaphore"]
    pub mod external_semaphore {
        use super::super::*;
        pub use {
            crate::vk::FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME as NAME,
            crate::vk::FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_FUCHSIA_external_semaphore device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's device-level commands through
            /// `vkGetDeviceProcAddr` on the given device.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_FUCHSIA_external_semaphore device-level function pointers"]
        pub struct DeviceFn {
            pub import_semaphore_zircon_handle_fuchsia: PFN_vkImportSemaphoreZirconHandleFUCHSIA,
            pub get_semaphore_zircon_handle_fuchsia: PFN_vkGetSemaphoreZirconHandleFUCHSIA,
        }
        // SAFETY: only raw `extern "system"` function pointers are stored.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Dyn-erased worker shared by all `load` monomorphizations.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    import_semaphore_zircon_handle_fuchsia: unsafe {
                        // Panicking fallback used when the symbol is absent.
                        unsafe extern "system" fn import_semaphore_zircon_handle_fuchsia(
                            _device: crate::vk::Device,
                            _p_import_semaphore_zircon_handle_info : * const ImportSemaphoreZirconHandleInfoFUCHSIA < '_ >,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(import_semaphore_zircon_handle_fuchsia)
                            ))
                        }
                        let val = _f(c"vkImportSemaphoreZirconHandleFUCHSIA");
                        if val.is_null() {
                            import_semaphore_zircon_handle_fuchsia
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkImportSemaphoreZirconHandleFUCHSIA,
                            >(val)
                        }
                    },
                    get_semaphore_zircon_handle_fuchsia: unsafe {
                        // Same null-fallback pattern as above.
                        unsafe extern "system" fn get_semaphore_zircon_handle_fuchsia(
                            _device: crate::vk::Device,
                            _p_get_zircon_handle_info: *const SemaphoreGetZirconHandleInfoFUCHSIA<
                                '_,
                            >,
                            _p_zircon_handle: *mut zx_handle_t,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_semaphore_zircon_handle_fuchsia)
                            ))
                        }
                        let val = _f(c"vkGetSemaphoreZirconHandleFUCHSIA");
                        if val.is_null() {
                            get_semaphore_zircon_handle_fuchsia
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetSemaphoreZirconHandleFUCHSIA,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_FUCHSIA_buffer_collection"]
    pub mod buffer_collection {
        use super::super::*;
        pub use {
            crate::vk::FUCHSIA_BUFFER_COLLECTION_EXTENSION_NAME as NAME,
            crate::vk::FUCHSIA_BUFFER_COLLECTION_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_FUCHSIA_buffer_collection device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's device-level commands through
            /// `vkGetDeviceProcAddr` on the given device.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_FUCHSIA_buffer_collection device-level function pointers"]
        pub struct DeviceFn {
            pub create_buffer_collection_fuchsia: PFN_vkCreateBufferCollectionFUCHSIA,
            pub set_buffer_collection_image_constraints_fuchsia:
                PFN_vkSetBufferCollectionImageConstraintsFUCHSIA,
            pub set_buffer_collection_buffer_constraints_fuchsia:
                PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA,
            pub destroy_buffer_collection_fuchsia: PFN_vkDestroyBufferCollectionFUCHSIA,
            pub get_buffer_collection_properties_fuchsia:
                PFN_vkGetBufferCollectionPropertiesFUCHSIA,
        }
        // SAFETY: only raw `extern "system"` function pointers are stored.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Dyn-erased worker shared by all `load` monomorphizations.
            // Every entry follows the same pattern: look the symbol up by its
            // Vulkan name; on null install a stub that panics with the
            // function name, otherwise transmute to the typed PFN.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_buffer_collection_fuchsia: unsafe {
                        unsafe extern "system" fn create_buffer_collection_fuchsia(
                            _device: crate::vk::Device,
                            _p_create_info: *const BufferCollectionCreateInfoFUCHSIA<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_collection: *mut BufferCollectionFUCHSIA,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_buffer_collection_fuchsia)
                            ))
                        }
                        let val = _f(c"vkCreateBufferCollectionFUCHSIA");
                        if val.is_null() {
                            create_buffer_collection_fuchsia
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCreateBufferCollectionFUCHSIA,
                            >(val)
                        }
                    },
                    set_buffer_collection_image_constraints_fuchsia: unsafe {
                        unsafe extern "system" fn set_buffer_collection_image_constraints_fuchsia(
                            _device: crate::vk::Device,
                            _collection: BufferCollectionFUCHSIA,
                            _p_image_constraints_info: *const ImageConstraintsInfoFUCHSIA<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(set_buffer_collection_image_constraints_fuchsia)
                            ))
                        }
                        let val = _f(c"vkSetBufferCollectionImageConstraintsFUCHSIA");
                        if val.is_null() {
                            set_buffer_collection_image_constraints_fuchsia
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkSetBufferCollectionImageConstraintsFUCHSIA,
                            >(val)
                        }
                    },
                    set_buffer_collection_buffer_constraints_fuchsia: unsafe {
                        unsafe extern "system" fn set_buffer_collection_buffer_constraints_fuchsia(
                            _device: crate::vk::Device,
                            _collection: BufferCollectionFUCHSIA,
                            _p_buffer_constraints_info: *const BufferConstraintsInfoFUCHSIA<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(set_buffer_collection_buffer_constraints_fuchsia)
                            ))
                        }
                        let val = _f(c"vkSetBufferCollectionBufferConstraintsFUCHSIA");
                        if val.is_null() {
                            set_buffer_collection_buffer_constraints_fuchsia
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA,
                            >(val)
                        }
                    },
                    destroy_buffer_collection_fuchsia: unsafe {
                        unsafe extern "system" fn destroy_buffer_collection_fuchsia(
                            _device: crate::vk::Device,
                            _collection: BufferCollectionFUCHSIA,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(destroy_buffer_collection_fuchsia)
                            ))
                        }
                        let val = _f(c"vkDestroyBufferCollectionFUCHSIA");
                        if val.is_null() {
                            destroy_buffer_collection_fuchsia
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkDestroyBufferCollectionFUCHSIA,
                            >(val)
                        }
                    },
                    get_buffer_collection_properties_fuchsia: unsafe {
                        unsafe extern "system" fn get_buffer_collection_properties_fuchsia(
                            _device: crate::vk::Device,
                            _collection: BufferCollectionFUCHSIA,
                            _p_properties: *mut BufferCollectionPropertiesFUCHSIA<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_buffer_collection_properties_fuchsia)
                            ))
                        }
                        let val = _f(c"vkGetBufferCollectionPropertiesFUCHSIA");
                        if val.is_null() {
                            get_buffer_collection_properties_fuchsia
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetBufferCollectionPropertiesFUCHSIA,
                            >(val)
                        }
                    },
                }
            }
        }
    }
10918}
10919#[doc = "Extensions tagged GGP"]
10920pub mod ggp {
    #[doc = "VK_GGP_stream_descriptor_surface"]
    pub mod stream_descriptor_surface {
        use super::super::*;
        pub use {
            crate::vk::GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME as NAME,
            crate::vk::GGP_STREAM_DESCRIPTOR_SURFACE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_GGP_stream_descriptor_surface instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded instance-level function-pointer table.
            pub(crate) fp: InstanceFn,
            // The raw instance handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Resolves this extension's instance-level commands through
            /// `vkGetInstanceProcAddr` on the given instance.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            /// Instance handle the table was loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_GGP_stream_descriptor_surface instance-level function pointers"]
        pub struct InstanceFn {
            pub create_stream_descriptor_surface_ggp: PFN_vkCreateStreamDescriptorSurfaceGGP,
        }
        // SAFETY: only raw `extern "system"` function pointers are stored.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Dyn-erased worker shared by all `load` monomorphizations.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_stream_descriptor_surface_ggp: unsafe {
                        // Panicking fallback used when the symbol is absent.
                        unsafe extern "system" fn create_stream_descriptor_surface_ggp(
                            _instance: crate::vk::Instance,
                            _p_create_info: *const StreamDescriptorSurfaceCreateInfoGGP<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_surface: *mut SurfaceKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_stream_descriptor_surface_ggp)
                            ))
                        }
                        let val = _f(c"vkCreateStreamDescriptorSurfaceGGP");
                        if val.is_null() {
                            create_stream_descriptor_surface_ggp
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCreateStreamDescriptorSurfaceGGP,
                            >(val)
                        }
                    },
                }
            }
        }
    }
10992 #[doc = "VK_GGP_frame_token"]
10993 pub mod frame_token {
10994 use super::super::*;
10995 pub use {
10996 crate::vk::GGP_FRAME_TOKEN_EXTENSION_NAME as NAME,
10997 crate::vk::GGP_FRAME_TOKEN_SPEC_VERSION as SPEC_VERSION,
10998 };
10999 }
11000}
11001#[doc = "Extensions tagged GOOGLE"]
11002pub mod google {
    #[doc = "VK_GOOGLE_display_timing"]
    pub mod display_timing {
        use super::super::*;
        pub use {
            crate::vk::GOOGLE_DISPLAY_TIMING_EXTENSION_NAME as NAME,
            crate::vk::GOOGLE_DISPLAY_TIMING_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_GOOGLE_display_timing device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's device-level commands through
            /// `vkGetDeviceProcAddr` on the given device.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_GOOGLE_display_timing device-level function pointers"]
        pub struct DeviceFn {
            pub get_refresh_cycle_duration_google: PFN_vkGetRefreshCycleDurationGOOGLE,
            pub get_past_presentation_timing_google: PFN_vkGetPastPresentationTimingGOOGLE,
        }
        // SAFETY: only raw `extern "system"` function pointers are stored.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Dyn-erased worker shared by all `load` monomorphizations.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_refresh_cycle_duration_google: unsafe {
                        // Panicking fallback used when the symbol is absent.
                        unsafe extern "system" fn get_refresh_cycle_duration_google(
                            _device: crate::vk::Device,
                            _swapchain: SwapchainKHR,
                            _p_display_timing_properties: *mut RefreshCycleDurationGOOGLE,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_refresh_cycle_duration_google)
                            ))
                        }
                        let val = _f(c"vkGetRefreshCycleDurationGOOGLE");
                        if val.is_null() {
                            get_refresh_cycle_duration_google
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetRefreshCycleDurationGOOGLE,
                            >(val)
                        }
                    },
                    get_past_presentation_timing_google: unsafe {
                        // Same null-fallback pattern as above.
                        unsafe extern "system" fn get_past_presentation_timing_google(
                            _device: crate::vk::Device,
                            _swapchain: SwapchainKHR,
                            _p_presentation_timing_count: *mut u32,
                            _p_presentation_timings: *mut PastPresentationTimingGOOGLE,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_past_presentation_timing_google)
                            ))
                        }
                        let val = _f(c"vkGetPastPresentationTimingGOOGLE");
                        if val.is_null() {
                            get_past_presentation_timing_google
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPastPresentationTimingGOOGLE,
                            >(val)
                        }
                    },
                }
            }
        }
    }
11096 #[doc = "VK_GOOGLE_hlsl_functionality1"]
11097 pub mod hlsl_functionality1 {
11098 use super::super::*;
11099 pub use {
11100 crate::vk::GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME as NAME,
11101 crate::vk::GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION as SPEC_VERSION,
11102 };
11103 }
11104 #[doc = "VK_GOOGLE_decorate_string"]
11105 pub mod decorate_string {
11106 use super::super::*;
11107 pub use {
11108 crate::vk::GOOGLE_DECORATE_STRING_EXTENSION_NAME as NAME,
11109 crate::vk::GOOGLE_DECORATE_STRING_SPEC_VERSION as SPEC_VERSION,
11110 };
11111 }
11112 #[doc = "VK_GOOGLE_user_type"]
11113 pub mod user_type {
11114 use super::super::*;
11115 pub use {
11116 crate::vk::GOOGLE_USER_TYPE_EXTENSION_NAME as NAME,
11117 crate::vk::GOOGLE_USER_TYPE_SPEC_VERSION as SPEC_VERSION,
11118 };
11119 }
11120 #[doc = "VK_GOOGLE_surfaceless_query"]
11121 pub mod surfaceless_query {
11122 use super::super::*;
11123 pub use {
11124 crate::vk::GOOGLE_SURFACELESS_QUERY_EXTENSION_NAME as NAME,
11125 crate::vk::GOOGLE_SURFACELESS_QUERY_SPEC_VERSION as SPEC_VERSION,
11126 };
11127 }
11128}
11129#[doc = "Extensions tagged HUAWEI"]
11130pub mod huawei {
    #[doc = "VK_HUAWEI_subpass_shading"]
    pub mod subpass_shading {
        use super::super::*;
        pub use {
            crate::vk::HUAWEI_SUBPASS_SHADING_EXTENSION_NAME as NAME,
            crate::vk::HUAWEI_SUBPASS_SHADING_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_HUAWEI_subpass_shading device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Function-pointer table resolved for `handle`.
            pub(crate) fp: DeviceFn,
            // Device handle the pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's device-level entry points through
            /// `vkGetDeviceProcAddr` and bundles them with the device handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_HUAWEI_subpass_shading device-level function pointers"]
        pub struct DeviceFn {
            pub get_device_subpass_shading_max_workgroup_size_huawei:
                PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI,
            pub cmd_subpass_shading_huawei: PFN_vkCmdSubpassShadingHUAWEI,
        }
        // SAFETY: the table holds only `extern "system"` function pointers,
        // which carry no thread-local state of their own.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads every entry point by querying `f` with the C symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased (`dyn`) loader body so it is not duplicated for
            // every closure type passed to `load`.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                // For each command: ask the loader for the `vk*` symbol; if it
                // is absent (null), install a stub that panics when called, so
                // using an unloaded function fails loudly instead of crashing.
                Self {
                    get_device_subpass_shading_max_workgroup_size_huawei: unsafe {
                        unsafe extern "system" fn get_device_subpass_shading_max_workgroup_size_huawei(
                            _device: crate::vk::Device,
                            _renderpass: RenderPass,
                            _p_max_workgroup_size: *mut Extent2D,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_device_subpass_shading_max_workgroup_size_huawei)
                            ))
                        }
                        let val = _f(c"vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI");
                        if val.is_null() {
                            get_device_subpass_shading_max_workgroup_size_huawei
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI,
                            >(val)
                        }
                    },
                    cmd_subpass_shading_huawei: unsafe {
                        unsafe extern "system" fn cmd_subpass_shading_huawei(
                            _command_buffer: CommandBuffer,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_subpass_shading_huawei)
                            ))
                        }
                        let val = _f(c"vkCmdSubpassShadingHUAWEI");
                        if val.is_null() {
                            cmd_subpass_shading_huawei
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSubpassShadingHUAWEI>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_HUAWEI_invocation_mask"]
    pub mod invocation_mask {
        use super::super::*;
        pub use {
            crate::vk::HUAWEI_INVOCATION_MASK_EXTENSION_NAME as NAME,
            crate::vk::HUAWEI_INVOCATION_MASK_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_HUAWEI_invocation_mask device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Function-pointer table resolved for `handle`.
            pub(crate) fp: DeviceFn,
            // Device handle the pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's device-level entry points through
            /// `vkGetDeviceProcAddr` and bundles them with the device handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_HUAWEI_invocation_mask device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_bind_invocation_mask_huawei: PFN_vkCmdBindInvocationMaskHUAWEI,
        }
        // SAFETY: the table holds only `extern "system"` function pointers,
        // which carry no thread-local state of their own.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads every entry point by querying `f` with the C symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased (`dyn`) loader body so it is not duplicated for
            // every closure type passed to `load`.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                // If the loader returns null, install a stub that panics when
                // called, so using an unloaded function fails loudly.
                Self {
                    cmd_bind_invocation_mask_huawei: unsafe {
                        unsafe extern "system" fn cmd_bind_invocation_mask_huawei(
                            _command_buffer: CommandBuffer,
                            _image_view: ImageView,
                            _image_layout: ImageLayout,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_bind_invocation_mask_huawei)
                            ))
                        }
                        let val = _f(c"vkCmdBindInvocationMaskHUAWEI");
                        if val.is_null() {
                            cmd_bind_invocation_mask_huawei
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdBindInvocationMaskHUAWEI>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_HUAWEI_cluster_culling_shader"]
    pub mod cluster_culling_shader {
        use super::super::*;
        pub use {
            crate::vk::HUAWEI_CLUSTER_CULLING_SHADER_EXTENSION_NAME as NAME,
            crate::vk::HUAWEI_CLUSTER_CULLING_SHADER_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_HUAWEI_cluster_culling_shader device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Function-pointer table resolved for `handle`.
            pub(crate) fp: DeviceFn,
            // Device handle the pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's device-level entry points through
            /// `vkGetDeviceProcAddr` and bundles them with the device handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_HUAWEI_cluster_culling_shader device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_draw_cluster_huawei: PFN_vkCmdDrawClusterHUAWEI,
            pub cmd_draw_cluster_indirect_huawei: PFN_vkCmdDrawClusterIndirectHUAWEI,
        }
        // SAFETY: the table holds only `extern "system"` function pointers,
        // which carry no thread-local state of their own.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads every entry point by querying `f` with the C symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased (`dyn`) loader body so it is not duplicated for
            // every closure type passed to `load`.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                // For each command: ask the loader for the `vk*` symbol; if it
                // is absent (null), install a stub that panics when called.
                Self {
                    cmd_draw_cluster_huawei: unsafe {
                        unsafe extern "system" fn cmd_draw_cluster_huawei(
                            _command_buffer: CommandBuffer,
                            _group_count_x: u32,
                            _group_count_y: u32,
                            _group_count_z: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_draw_cluster_huawei)
                            ))
                        }
                        let val = _f(c"vkCmdDrawClusterHUAWEI");
                        if val.is_null() {
                            cmd_draw_cluster_huawei
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDrawClusterHUAWEI>(val)
                        }
                    },
                    cmd_draw_cluster_indirect_huawei: unsafe {
                        unsafe extern "system" fn cmd_draw_cluster_indirect_huawei(
                            _command_buffer: CommandBuffer,
                            _buffer: Buffer,
                            _offset: DeviceSize,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_draw_cluster_indirect_huawei)
                            ))
                        }
                        let val = _f(c"vkCmdDrawClusterIndirectHUAWEI");
                        if val.is_null() {
                            cmd_draw_cluster_indirect_huawei
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdDrawClusterIndirectHUAWEI,
                            >(val)
                        }
                    },
                }
            }
        }
    }
11380 #[doc = "VK_HUAWEI_hdr_vivid"]
11381 pub mod hdr_vivid {
11382 use super::super::*;
11383 pub use {
11384 crate::vk::HUAWEI_HDR_VIVID_EXTENSION_NAME as NAME,
11385 crate::vk::HUAWEI_HDR_VIVID_SPEC_VERSION as SPEC_VERSION,
11386 };
11387 }
11388}
11389#[doc = "Extensions tagged IMG"]
11390pub mod img {
11391 #[doc = "VK_IMG_filter_cubic"]
11392 pub mod filter_cubic {
11393 use super::super::*;
11394 pub use {
11395 crate::vk::IMG_FILTER_CUBIC_EXTENSION_NAME as NAME,
11396 crate::vk::IMG_FILTER_CUBIC_SPEC_VERSION as SPEC_VERSION,
11397 };
11398 }
11399 #[doc = "VK_IMG_format_pvrtc"]
11400 pub mod format_pvrtc {
11401 use super::super::*;
11402 pub use {
11403 crate::vk::IMG_FORMAT_PVRTC_EXTENSION_NAME as NAME,
11404 crate::vk::IMG_FORMAT_PVRTC_SPEC_VERSION as SPEC_VERSION,
11405 };
11406 }
11407 #[doc = "VK_IMG_relaxed_line_rasterization"]
11408 pub mod relaxed_line_rasterization {
11409 use super::super::*;
11410 pub use {
11411 crate::vk::IMG_RELAXED_LINE_RASTERIZATION_EXTENSION_NAME as NAME,
11412 crate::vk::IMG_RELAXED_LINE_RASTERIZATION_SPEC_VERSION as SPEC_VERSION,
11413 };
11414 }
11415}
11416#[doc = "Extensions tagged INTEL"]
11417pub mod intel {
11418 #[doc = "VK_INTEL_shader_integer_functions2"]
11419 pub mod shader_integer_functions2 {
11420 use super::super::*;
11421 pub use {
11422 crate::vk::INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME as NAME,
11423 crate::vk::INTEL_SHADER_INTEGER_FUNCTIONS_2_SPEC_VERSION as SPEC_VERSION,
11424 };
11425 }
    #[doc = "VK_INTEL_performance_query"]
    pub mod performance_query {
        use super::super::*;
        pub use {
            crate::vk::INTEL_PERFORMANCE_QUERY_EXTENSION_NAME as NAME,
            crate::vk::INTEL_PERFORMANCE_QUERY_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_INTEL_performance_query device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Function-pointer table resolved for `handle`.
            pub(crate) fp: DeviceFn,
            // Device handle the pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's device-level entry points through
            /// `vkGetDeviceProcAddr` and bundles them with the device handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_INTEL_performance_query device-level function pointers"]
        pub struct DeviceFn {
            pub initialize_performance_api_intel: PFN_vkInitializePerformanceApiINTEL,
            pub uninitialize_performance_api_intel: PFN_vkUninitializePerformanceApiINTEL,
            pub cmd_set_performance_marker_intel: PFN_vkCmdSetPerformanceMarkerINTEL,
            pub cmd_set_performance_stream_marker_intel: PFN_vkCmdSetPerformanceStreamMarkerINTEL,
            pub cmd_set_performance_override_intel: PFN_vkCmdSetPerformanceOverrideINTEL,
            pub acquire_performance_configuration_intel: PFN_vkAcquirePerformanceConfigurationINTEL,
            pub release_performance_configuration_intel: PFN_vkReleasePerformanceConfigurationINTEL,
            pub queue_set_performance_configuration_intel:
                PFN_vkQueueSetPerformanceConfigurationINTEL,
            pub get_performance_parameter_intel: PFN_vkGetPerformanceParameterINTEL,
        }
        // SAFETY: the table holds only `extern "system"` function pointers,
        // which carry no thread-local state of their own.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads every entry point by querying `f` with the C symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased (`dyn`) loader body so it is not duplicated for
            // every closure type passed to `load`.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                // For each command: ask the loader for the `vk*` symbol; if it
                // is absent (null), install a stub that panics when called, so
                // using an unloaded function fails loudly instead of crashing.
                Self {
                    initialize_performance_api_intel: unsafe {
                        unsafe extern "system" fn initialize_performance_api_intel(
                            _device: crate::vk::Device,
                            _p_initialize_info: *const InitializePerformanceApiInfoINTEL<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(initialize_performance_api_intel)
                            ))
                        }
                        let val = _f(c"vkInitializePerformanceApiINTEL");
                        if val.is_null() {
                            initialize_performance_api_intel
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkInitializePerformanceApiINTEL,
                            >(val)
                        }
                    },
                    uninitialize_performance_api_intel: unsafe {
                        unsafe extern "system" fn uninitialize_performance_api_intel(
                            _device: crate::vk::Device,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(uninitialize_performance_api_intel)
                            ))
                        }
                        let val = _f(c"vkUninitializePerformanceApiINTEL");
                        if val.is_null() {
                            uninitialize_performance_api_intel
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkUninitializePerformanceApiINTEL,
                            >(val)
                        }
                    },
                    cmd_set_performance_marker_intel: unsafe {
                        unsafe extern "system" fn cmd_set_performance_marker_intel(
                            _command_buffer: CommandBuffer,
                            _p_marker_info: *const PerformanceMarkerInfoINTEL<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_performance_marker_intel)
                            ))
                        }
                        let val = _f(c"vkCmdSetPerformanceMarkerINTEL");
                        if val.is_null() {
                            cmd_set_performance_marker_intel
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdSetPerformanceMarkerINTEL,
                            >(val)
                        }
                    },
                    cmd_set_performance_stream_marker_intel: unsafe {
                        unsafe extern "system" fn cmd_set_performance_stream_marker_intel(
                            _command_buffer: CommandBuffer,
                            _p_marker_info: *const PerformanceStreamMarkerInfoINTEL<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_performance_stream_marker_intel)
                            ))
                        }
                        let val = _f(c"vkCmdSetPerformanceStreamMarkerINTEL");
                        if val.is_null() {
                            cmd_set_performance_stream_marker_intel
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdSetPerformanceStreamMarkerINTEL,
                            >(val)
                        }
                    },
                    cmd_set_performance_override_intel: unsafe {
                        unsafe extern "system" fn cmd_set_performance_override_intel(
                            _command_buffer: CommandBuffer,
                            _p_override_info: *const PerformanceOverrideInfoINTEL<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_performance_override_intel)
                            ))
                        }
                        let val = _f(c"vkCmdSetPerformanceOverrideINTEL");
                        if val.is_null() {
                            cmd_set_performance_override_intel
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdSetPerformanceOverrideINTEL,
                            >(val)
                        }
                    },
                    acquire_performance_configuration_intel: unsafe {
                        unsafe extern "system" fn acquire_performance_configuration_intel(
                            _device: crate::vk::Device,
                            _p_acquire_info: *const PerformanceConfigurationAcquireInfoINTEL<'_>,
                            _p_configuration: *mut PerformanceConfigurationINTEL,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(acquire_performance_configuration_intel)
                            ))
                        }
                        let val = _f(c"vkAcquirePerformanceConfigurationINTEL");
                        if val.is_null() {
                            acquire_performance_configuration_intel
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkAcquirePerformanceConfigurationINTEL,
                            >(val)
                        }
                    },
                    release_performance_configuration_intel: unsafe {
                        unsafe extern "system" fn release_performance_configuration_intel(
                            _device: crate::vk::Device,
                            _configuration: PerformanceConfigurationINTEL,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(release_performance_configuration_intel)
                            ))
                        }
                        let val = _f(c"vkReleasePerformanceConfigurationINTEL");
                        if val.is_null() {
                            release_performance_configuration_intel
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkReleasePerformanceConfigurationINTEL,
                            >(val)
                        }
                    },
                    queue_set_performance_configuration_intel: unsafe {
                        unsafe extern "system" fn queue_set_performance_configuration_intel(
                            _queue: Queue,
                            _configuration: PerformanceConfigurationINTEL,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(queue_set_performance_configuration_intel)
                            ))
                        }
                        let val = _f(c"vkQueueSetPerformanceConfigurationINTEL");
                        if val.is_null() {
                            queue_set_performance_configuration_intel
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkQueueSetPerformanceConfigurationINTEL,
                            >(val)
                        }
                    },
                    get_performance_parameter_intel: unsafe {
                        unsafe extern "system" fn get_performance_parameter_intel(
                            _device: crate::vk::Device,
                            _parameter: PerformanceParameterTypeINTEL,
                            _p_value: *mut PerformanceValueINTEL,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_performance_parameter_intel)
                            ))
                        }
                        let val = _f(c"vkGetPerformanceParameterINTEL");
                        if val.is_null() {
                            get_performance_parameter_intel
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPerformanceParameterINTEL,
                            >(val)
                        }
                    },
                }
            }
        }
    }
11665}
11666#[doc = "Extensions tagged KHR"]
11667pub mod khr {
    #[doc = "VK_KHR_surface"]
    pub mod surface {
        use super::super::*;
        pub use {
            crate::vk::KHR_SURFACE_EXTENSION_NAME as NAME,
            crate::vk::KHR_SURFACE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_surface instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Function-pointer table resolved for `handle`.
            pub(crate) fp: InstanceFn,
            // Instance handle the pointers were loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Resolves this extension's instance-level entry points through
            /// `vkGetInstanceProcAddr` and bundles them with the instance handle.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            /// Instance handle the table was loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_surface instance-level function pointers"]
        pub struct InstanceFn {
            pub destroy_surface_khr: PFN_vkDestroySurfaceKHR,
            pub get_physical_device_surface_support_khr: PFN_vkGetPhysicalDeviceSurfaceSupportKHR,
            pub get_physical_device_surface_capabilities_khr:
                PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR,
            pub get_physical_device_surface_formats_khr: PFN_vkGetPhysicalDeviceSurfaceFormatsKHR,
            pub get_physical_device_surface_present_modes_khr:
                PFN_vkGetPhysicalDeviceSurfacePresentModesKHR,
        }
        // SAFETY: the table holds only `extern "system"` function pointers,
        // which carry no thread-local state of their own.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Loads every entry point by querying `f` with the C symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased (`dyn`) loader body so it is not duplicated for
            // every closure type passed to `load`.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                // For each command: ask the loader for the `vk*` symbol; if it
                // is absent (null), install a stub that panics when called, so
                // using an unloaded function fails loudly instead of crashing.
                Self {
                    destroy_surface_khr: unsafe {
                        unsafe extern "system" fn destroy_surface_khr(
                            _instance: crate::vk::Instance,
                            _surface: SurfaceKHR,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(destroy_surface_khr)))
                        }
                        let val = _f(c"vkDestroySurfaceKHR");
                        if val.is_null() {
                            destroy_surface_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkDestroySurfaceKHR>(val)
                        }
                    },
                    get_physical_device_surface_support_khr: unsafe {
                        unsafe extern "system" fn get_physical_device_surface_support_khr(
                            _physical_device: PhysicalDevice,
                            _queue_family_index: u32,
                            _surface: SurfaceKHR,
                            _p_supported: *mut Bool32,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_surface_support_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceSurfaceSupportKHR");
                        if val.is_null() {
                            get_physical_device_surface_support_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceSurfaceSupportKHR,
                            >(val)
                        }
                    },
                    get_physical_device_surface_capabilities_khr: unsafe {
                        unsafe extern "system" fn get_physical_device_surface_capabilities_khr(
                            _physical_device: PhysicalDevice,
                            _surface: SurfaceKHR,
                            _p_surface_capabilities: *mut SurfaceCapabilitiesKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_surface_capabilities_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceSurfaceCapabilitiesKHR");
                        if val.is_null() {
                            get_physical_device_surface_capabilities_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR,
                            >(val)
                        }
                    },
                    get_physical_device_surface_formats_khr: unsafe {
                        unsafe extern "system" fn get_physical_device_surface_formats_khr(
                            _physical_device: PhysicalDevice,
                            _surface: SurfaceKHR,
                            _p_surface_format_count: *mut u32,
                            _p_surface_formats: *mut SurfaceFormatKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_surface_formats_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceSurfaceFormatsKHR");
                        if val.is_null() {
                            get_physical_device_surface_formats_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceSurfaceFormatsKHR,
                            >(val)
                        }
                    },
                    get_physical_device_surface_present_modes_khr: unsafe {
                        unsafe extern "system" fn get_physical_device_surface_present_modes_khr(
                            _physical_device: PhysicalDevice,
                            _surface: SurfaceKHR,
                            _p_present_mode_count: *mut u32,
                            _p_present_modes: *mut PresentModeKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_surface_present_modes_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceSurfacePresentModesKHR");
                        if val.is_null() {
                            get_physical_device_surface_present_modes_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceSurfacePresentModesKHR,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_KHR_swapchain"]
    pub mod swapchain {
        use super::super::*;
        pub use {
            crate::vk::KHR_SWAPCHAIN_EXTENSION_NAME as NAME,
            crate::vk::KHR_SWAPCHAIN_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_swapchain instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Function-pointer table resolved for `handle`.
            pub(crate) fp: InstanceFn,
            // Instance handle the pointers were loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Resolves this extension's instance-level entry points through
            /// `vkGetInstanceProcAddr` and bundles them with the instance handle.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            /// Instance handle the table was loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_swapchain instance-level function pointers"]
        pub struct InstanceFn {
            pub get_physical_device_present_rectangles_khr:
                PFN_vkGetPhysicalDevicePresentRectanglesKHR,
        }
        // SAFETY: the table holds only `extern "system"` function pointers,
        // which carry no thread-local state of their own.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Loads every entry point by querying `f` with the C symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased (`dyn`) loader body so it is not duplicated for
            // every closure type passed to `load`.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                // If the loader returns null, install a stub that panics when
                // called, so using an unloaded function fails loudly.
                Self {
                    get_physical_device_present_rectangles_khr: unsafe {
                        unsafe extern "system" fn get_physical_device_present_rectangles_khr(
                            _physical_device: PhysicalDevice,
                            _surface: SurfaceKHR,
                            _p_rect_count: *mut u32,
                            _p_rects: *mut Rect2D,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_present_rectangles_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDevicePresentRectanglesKHR");
                        if val.is_null() {
                            get_physical_device_present_rectangles_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDevicePresentRectanglesKHR,
                            >(val)
                        }
                    },
                }
            }
        }
        #[doc = "VK_KHR_swapchain device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Function-pointer table resolved for `handle`.
            pub(crate) fp: DeviceFn,
            // Device handle the pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's device-level entry points through
            /// `vkGetDeviceProcAddr` and bundles them with the device handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_swapchain device-level function pointers"]
        pub struct DeviceFn {
            pub create_swapchain_khr: PFN_vkCreateSwapchainKHR,
            pub destroy_swapchain_khr: PFN_vkDestroySwapchainKHR,
            pub get_swapchain_images_khr: PFN_vkGetSwapchainImagesKHR,
            pub acquire_next_image_khr: PFN_vkAcquireNextImageKHR,
            pub queue_present_khr: PFN_vkQueuePresentKHR,
            pub get_device_group_present_capabilities_khr:
                PFN_vkGetDeviceGroupPresentCapabilitiesKHR,
            pub get_device_group_surface_present_modes_khr:
                PFN_vkGetDeviceGroupSurfacePresentModesKHR,
            pub acquire_next_image2_khr: PFN_vkAcquireNextImage2KHR,
        }
        // SAFETY: the table holds only `extern "system"` function pointers,
        // which carry no thread-local state of their own.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads every entry point by querying `f` with the C symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased (`dyn`) loader body so it is not duplicated for
            // every closure type passed to `load`.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                // For each command: ask the loader for the `vk*` symbol; if it
                // is absent (null), install a stub that panics when called, so
                // using an unloaded function fails loudly instead of crashing.
                Self {
                    create_swapchain_khr: unsafe {
                        unsafe extern "system" fn create_swapchain_khr(
                            _device: crate::vk::Device,
                            _p_create_info: *const SwapchainCreateInfoKHR<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_swapchain: *mut SwapchainKHR,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(create_swapchain_khr)))
                        }
                        let val = _f(c"vkCreateSwapchainKHR");
                        if val.is_null() {
                            create_swapchain_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateSwapchainKHR>(val)
                        }
                    },
                    destroy_swapchain_khr: unsafe {
                        unsafe extern "system" fn destroy_swapchain_khr(
                            _device: crate::vk::Device,
                            _swapchain: SwapchainKHR,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(destroy_swapchain_khr)
                            ))
                        }
                        let val = _f(c"vkDestroySwapchainKHR");
                        if val.is_null() {
                            destroy_swapchain_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkDestroySwapchainKHR>(val)
                        }
                    },
                    get_swapchain_images_khr: unsafe {
                        unsafe extern "system" fn get_swapchain_images_khr(
                            _device: crate::vk::Device,
                            _swapchain: SwapchainKHR,
                            _p_swapchain_image_count: *mut u32,
                            _p_swapchain_images: *mut Image,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_swapchain_images_khr)
                            ))
                        }
                        let val = _f(c"vkGetSwapchainImagesKHR");
                        if val.is_null() {
                            get_swapchain_images_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetSwapchainImagesKHR>(
                                val,
                            )
                        }
                    },
                    acquire_next_image_khr: unsafe {
                        unsafe extern "system" fn acquire_next_image_khr(
                            _device: crate::vk::Device,
                            _swapchain: SwapchainKHR,
                            _timeout: u64,
                            _semaphore: Semaphore,
                            _fence: Fence,
                            _p_image_index: *mut u32,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(acquire_next_image_khr)
                            ))
                        }
                        let val = _f(c"vkAcquireNextImageKHR");
                        if val.is_null() {
                            acquire_next_image_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkAcquireNextImageKHR>(val)
                        }
                    },
                    queue_present_khr: unsafe {
                        unsafe extern "system" fn queue_present_khr(
                            _queue: Queue,
                            _p_present_info: *const PresentInfoKHR<'_>,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(queue_present_khr)))
                        }
                        let val = _f(c"vkQueuePresentKHR");
                        if val.is_null() {
                            queue_present_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkQueuePresentKHR>(val)
                        }
                    },
                    get_device_group_present_capabilities_khr: unsafe {
                        unsafe extern "system" fn get_device_group_present_capabilities_khr(
                            _device: crate::vk::Device,
                            _p_device_group_present_capabilities : * mut DeviceGroupPresentCapabilitiesKHR < '_ >,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_device_group_present_capabilities_khr)
                            ))
                        }
                        let val = _f(c"vkGetDeviceGroupPresentCapabilitiesKHR");
                        if val.is_null() {
                            get_device_group_present_capabilities_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDeviceGroupPresentCapabilitiesKHR,
                            >(val)
                        }
                    },
                    get_device_group_surface_present_modes_khr: unsafe {
                        unsafe extern "system" fn get_device_group_surface_present_modes_khr(
                            _device: crate::vk::Device,
                            _surface: SurfaceKHR,
                            _p_modes: *mut DeviceGroupPresentModeFlagsKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_device_group_surface_present_modes_khr)
                            ))
                        }
                        let val = _f(c"vkGetDeviceGroupSurfacePresentModesKHR");
                        if val.is_null() {
                            get_device_group_surface_present_modes_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDeviceGroupSurfacePresentModesKHR,
                            >(val)
                        }
                    },
                    acquire_next_image2_khr: unsafe {
                        unsafe extern "system" fn acquire_next_image2_khr(
                            _device: crate::vk::Device,
                            _p_acquire_info: *const AcquireNextImageInfoKHR<'_>,
                            _p_image_index: *mut u32,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(acquire_next_image2_khr)
                            ))
                        }
                        let val = _f(c"vkAcquireNextImage2KHR");
                        if val.is_null() {
                            acquire_next_image2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkAcquireNextImage2KHR>(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_KHR_display"]
    pub mod display {
        use super::super::*;
        // Re-export the extension's name and spec version under generic aliases
        // so callers can refer to `display::NAME` / `display::SPEC_VERSION`.
        pub use {
            crate::vk::KHR_DISPLAY_EXTENSION_NAME as NAME,
            crate::vk::KHR_DISPLAY_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_display instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: InstanceFn,
            // The VkInstance the pointers were resolved against.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            // Resolves every VK_KHR_display entry point for `instance` via
            // `vkGetInstanceProcAddr`; missing entry points become panicking stubs.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // PFN_vkVoidFunction is an Option<fn>; reinterpret it as a raw
                    // pointer (null == not found) for the generic loader closure.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Access the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            // The VkInstance these function pointers belong to.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_display instance-level function pointers"]
        pub struct InstanceFn {
            pub get_physical_device_display_properties_khr:
                PFN_vkGetPhysicalDeviceDisplayPropertiesKHR,
            pub get_physical_device_display_plane_properties_khr:
                PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR,
            pub get_display_plane_supported_displays_khr: PFN_vkGetDisplayPlaneSupportedDisplaysKHR,
            pub get_display_mode_properties_khr: PFN_vkGetDisplayModePropertiesKHR,
            pub create_display_mode_khr: PFN_vkCreateDisplayModeKHR,
            pub get_display_plane_capabilities_khr: PFN_vkGetDisplayPlaneCapabilitiesKHR,
            pub create_display_plane_surface_khr: PFN_vkCreateDisplayPlaneSurfaceKHR,
        }
        // SAFETY: the table holds only plain `extern "system"` function pointers,
        // which have no thread affinity or interior mutability.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            // Generic front-end: adapts any closure-based loader to the
            // type-erased worker below (keeps monomorphized code small).
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // For each command: ask the loader for the address; if it returns
            // null, install a stub that panics with the command's name when
            // called, otherwise transmute the raw pointer to the typed PFN.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_physical_device_display_properties_khr: unsafe {
                        unsafe extern "system" fn get_physical_device_display_properties_khr(
                            _physical_device: PhysicalDevice,
                            _p_property_count: *mut u32,
                            _p_properties: *mut DisplayPropertiesKHR<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_display_properties_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceDisplayPropertiesKHR");
                        if val.is_null() {
                            get_physical_device_display_properties_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceDisplayPropertiesKHR,
                            >(val)
                        }
                    },
                    get_physical_device_display_plane_properties_khr: unsafe {
                        unsafe extern "system" fn get_physical_device_display_plane_properties_khr(
                            _physical_device: PhysicalDevice,
                            _p_property_count: *mut u32,
                            _p_properties: *mut DisplayPlanePropertiesKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_display_plane_properties_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceDisplayPlanePropertiesKHR");
                        if val.is_null() {
                            get_physical_device_display_plane_properties_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR,
                            >(val)
                        }
                    },
                    get_display_plane_supported_displays_khr: unsafe {
                        unsafe extern "system" fn get_display_plane_supported_displays_khr(
                            _physical_device: PhysicalDevice,
                            _plane_index: u32,
                            _p_display_count: *mut u32,
                            _p_displays: *mut DisplayKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_display_plane_supported_displays_khr)
                            ))
                        }
                        let val = _f(c"vkGetDisplayPlaneSupportedDisplaysKHR");
                        if val.is_null() {
                            get_display_plane_supported_displays_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDisplayPlaneSupportedDisplaysKHR,
                            >(val)
                        }
                    },
                    get_display_mode_properties_khr: unsafe {
                        unsafe extern "system" fn get_display_mode_properties_khr(
                            _physical_device: PhysicalDevice,
                            _display: DisplayKHR,
                            _p_property_count: *mut u32,
                            _p_properties: *mut DisplayModePropertiesKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_display_mode_properties_khr)
                            ))
                        }
                        let val = _f(c"vkGetDisplayModePropertiesKHR");
                        if val.is_null() {
                            get_display_mode_properties_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetDisplayModePropertiesKHR>(
                                val,
                            )
                        }
                    },
                    create_display_mode_khr: unsafe {
                        unsafe extern "system" fn create_display_mode_khr(
                            _physical_device: PhysicalDevice,
                            _display: DisplayKHR,
                            _p_create_info: *const DisplayModeCreateInfoKHR<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_mode: *mut DisplayModeKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_display_mode_khr)
                            ))
                        }
                        let val = _f(c"vkCreateDisplayModeKHR");
                        if val.is_null() {
                            create_display_mode_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateDisplayModeKHR>(val)
                        }
                    },
                    get_display_plane_capabilities_khr: unsafe {
                        unsafe extern "system" fn get_display_plane_capabilities_khr(
                            _physical_device: PhysicalDevice,
                            _mode: DisplayModeKHR,
                            _plane_index: u32,
                            _p_capabilities: *mut DisplayPlaneCapabilitiesKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_display_plane_capabilities_khr)
                            ))
                        }
                        let val = _f(c"vkGetDisplayPlaneCapabilitiesKHR");
                        if val.is_null() {
                            get_display_plane_capabilities_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDisplayPlaneCapabilitiesKHR,
                            >(val)
                        }
                    },
                    create_display_plane_surface_khr: unsafe {
                        unsafe extern "system" fn create_display_plane_surface_khr(
                            _instance: crate::vk::Instance,
                            _p_create_info: *const DisplaySurfaceCreateInfoKHR<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_surface: *mut SurfaceKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_display_plane_surface_khr)
                            ))
                        }
                        let val = _f(c"vkCreateDisplayPlaneSurfaceKHR");
                        if val.is_null() {
                            create_display_plane_surface_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCreateDisplayPlaneSurfaceKHR,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_KHR_display_swapchain"]
    pub mod display_swapchain {
        use super::super::*;
        // Extension name and version aliases for generic access.
        pub use {
            crate::vk::KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME as NAME,
            crate::vk::KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_display_swapchain device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The VkDevice the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves the extension's device-level entry points via
            // `vkGetDeviceProcAddr`; missing commands become panicking stubs.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the Option<fn> result as a raw pointer
                    // (null == command not found).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Access the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // The VkDevice these function pointers belong to.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_display_swapchain device-level function pointers"]
        pub struct DeviceFn {
            pub create_shared_swapchains_khr: PFN_vkCreateSharedSwapchainsKHR,
        }
        // SAFETY: only plain `extern "system"` function pointers are stored.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            // Generic front-end over the type-erased loader below.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Null lookup result → install a stub that panics with the
            // command name; otherwise transmute to the typed PFN.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_shared_swapchains_khr: unsafe {
                        unsafe extern "system" fn create_shared_swapchains_khr(
                            _device: crate::vk::Device,
                            _swapchain_count: u32,
                            _p_create_infos: *const SwapchainCreateInfoKHR<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_swapchains: *mut SwapchainKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_shared_swapchains_khr)
                            ))
                        }
                        let val = _f(c"vkCreateSharedSwapchainsKHR");
                        if val.is_null() {
                            create_shared_swapchains_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateSharedSwapchainsKHR>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_KHR_xlib_surface"]
    pub mod xlib_surface {
        use super::super::*;
        // Extension name and version aliases for generic access.
        pub use {
            crate::vk::KHR_XLIB_SURFACE_EXTENSION_NAME as NAME,
            crate::vk::KHR_XLIB_SURFACE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_xlib_surface instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: InstanceFn,
            // The VkInstance the pointers were resolved against.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            // Resolves the extension's entry points via `vkGetInstanceProcAddr`;
            // missing commands become panicking stubs.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // Reinterpret the Option<fn> result as a raw pointer
                    // (null == command not found).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Access the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            // The VkInstance these function pointers belong to.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_xlib_surface instance-level function pointers"]
        pub struct InstanceFn {
            pub create_xlib_surface_khr: PFN_vkCreateXlibSurfaceKHR,
            pub get_physical_device_xlib_presentation_support_khr:
                PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR,
        }
        // SAFETY: only plain `extern "system"` function pointers are stored.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            // Generic front-end over the type-erased loader below.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Null lookup result → install a stub that panics with the
            // command name; otherwise transmute to the typed PFN.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_xlib_surface_khr: unsafe {
                        unsafe extern "system" fn create_xlib_surface_khr(
                            _instance: crate::vk::Instance,
                            _p_create_info: *const XlibSurfaceCreateInfoKHR<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_surface: *mut SurfaceKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_xlib_surface_khr)
                            ))
                        }
                        let val = _f(c"vkCreateXlibSurfaceKHR");
                        if val.is_null() {
                            create_xlib_surface_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateXlibSurfaceKHR>(val)
                        }
                    },
                    get_physical_device_xlib_presentation_support_khr: unsafe {
                        unsafe extern "system" fn get_physical_device_xlib_presentation_support_khr(
                            _physical_device: PhysicalDevice,
                            _queue_family_index: u32,
                            _dpy: *mut Display,
                            _visual_id: VisualID,
                        ) -> Bool32 {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_xlib_presentation_support_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceXlibPresentationSupportKHR");
                        if val.is_null() {
                            get_physical_device_xlib_presentation_support_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_KHR_xcb_surface"]
    pub mod xcb_surface {
        use super::super::*;
        // Extension name and version aliases for generic access.
        pub use {
            crate::vk::KHR_XCB_SURFACE_EXTENSION_NAME as NAME,
            crate::vk::KHR_XCB_SURFACE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_xcb_surface instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: InstanceFn,
            // The VkInstance the pointers were resolved against.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            // Resolves the extension's entry points via `vkGetInstanceProcAddr`;
            // missing commands become panicking stubs.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // Reinterpret the Option<fn> result as a raw pointer
                    // (null == command not found).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Access the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            // The VkInstance these function pointers belong to.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_xcb_surface instance-level function pointers"]
        pub struct InstanceFn {
            pub create_xcb_surface_khr: PFN_vkCreateXcbSurfaceKHR,
            pub get_physical_device_xcb_presentation_support_khr:
                PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR,
        }
        // SAFETY: only plain `extern "system"` function pointers are stored.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            // Generic front-end over the type-erased loader below.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Null lookup result → install a stub that panics with the
            // command name; otherwise transmute to the typed PFN.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_xcb_surface_khr: unsafe {
                        unsafe extern "system" fn create_xcb_surface_khr(
                            _instance: crate::vk::Instance,
                            _p_create_info: *const XcbSurfaceCreateInfoKHR<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_surface: *mut SurfaceKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_xcb_surface_khr)
                            ))
                        }
                        let val = _f(c"vkCreateXcbSurfaceKHR");
                        if val.is_null() {
                            create_xcb_surface_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateXcbSurfaceKHR>(val)
                        }
                    },
                    get_physical_device_xcb_presentation_support_khr: unsafe {
                        unsafe extern "system" fn get_physical_device_xcb_presentation_support_khr(
                            _physical_device: PhysicalDevice,
                            _queue_family_index: u32,
                            _connection: *mut xcb_connection_t,
                            _visual_id: xcb_visualid_t,
                        ) -> Bool32 {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_xcb_presentation_support_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceXcbPresentationSupportKHR");
                        if val.is_null() {
                            get_physical_device_xcb_presentation_support_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_KHR_wayland_surface"]
    pub mod wayland_surface {
        use super::super::*;
        // Extension name and version aliases for generic access.
        pub use {
            crate::vk::KHR_WAYLAND_SURFACE_EXTENSION_NAME as NAME,
            crate::vk::KHR_WAYLAND_SURFACE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_wayland_surface instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: InstanceFn,
            // The VkInstance the pointers were resolved against.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            // Resolves the extension's entry points via `vkGetInstanceProcAddr`;
            // missing commands become panicking stubs.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // Reinterpret the Option<fn> result as a raw pointer
                    // (null == command not found).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Access the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            // The VkInstance these function pointers belong to.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_wayland_surface instance-level function pointers"]
        pub struct InstanceFn {
            pub create_wayland_surface_khr: PFN_vkCreateWaylandSurfaceKHR,
            pub get_physical_device_wayland_presentation_support_khr:
                PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR,
        }
        // SAFETY: only plain `extern "system"` function pointers are stored.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            // Generic front-end over the type-erased loader below.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Null lookup result → install a stub that panics with the
            // command name; otherwise transmute to the typed PFN.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_wayland_surface_khr: unsafe {
                        unsafe extern "system" fn create_wayland_surface_khr(
                            _instance: crate::vk::Instance,
                            _p_create_info: *const WaylandSurfaceCreateInfoKHR<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_surface: *mut SurfaceKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_wayland_surface_khr)
                            ))
                        }
                        let val = _f(c"vkCreateWaylandSurfaceKHR");
                        if val.is_null() {
                            create_wayland_surface_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateWaylandSurfaceKHR>(
                                val,
                            )
                        }
                    },
                    get_physical_device_wayland_presentation_support_khr: unsafe {
                        unsafe extern "system" fn get_physical_device_wayland_presentation_support_khr(
                            _physical_device: PhysicalDevice,
                            _queue_family_index: u32,
                            _display: *mut wl_display,
                        ) -> Bool32 {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_wayland_presentation_support_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceWaylandPresentationSupportKHR");
                        if val.is_null() {
                            get_physical_device_wayland_presentation_support_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR,
                            >(val)
                        }
                    },
                }
            }
        }
    }
12650 #[doc = "VK_KHR_android_surface"]
12651 pub mod android_surface {
12652 use super::super::*;
12653 pub use {
12654 crate::vk::KHR_ANDROID_SURFACE_EXTENSION_NAME as NAME,
12655 crate::vk::KHR_ANDROID_SURFACE_SPEC_VERSION as SPEC_VERSION,
12656 };
12657 #[doc = "VK_KHR_android_surface instance-level functions"]
12658 #[derive(Clone)]
12659 pub struct Instance {
12660 pub(crate) fp: InstanceFn,
12661 pub(crate) handle: crate::vk::Instance,
12662 }
12663 impl Instance {
12664 pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
12665 let handle = instance.handle();
12666 let fp = InstanceFn::load(|name| unsafe {
12667 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
12668 entry.get_instance_proc_addr(handle, name.as_ptr()),
12669 )
12670 });
12671 Self { handle, fp }
12672 }
12673 #[inline]
12674 pub fn fp(&self) -> &InstanceFn {
12675 &self.fp
12676 }
12677 #[inline]
12678 pub fn instance(&self) -> crate::vk::Instance {
12679 self.handle
12680 }
12681 }
12682 #[derive(Clone)]
12683 #[doc = "Raw VK_KHR_android_surface instance-level function pointers"]
12684 pub struct InstanceFn {
12685 pub create_android_surface_khr: PFN_vkCreateAndroidSurfaceKHR,
12686 }
12687 unsafe impl Send for InstanceFn {}
12688 unsafe impl Sync for InstanceFn {}
12689 impl InstanceFn {
12690 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
12691 Self::load_erased(&mut f)
12692 }
12693 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
12694 Self {
12695 create_android_surface_khr: unsafe {
12696 unsafe extern "system" fn create_android_surface_khr(
12697 _instance: crate::vk::Instance,
12698 _p_create_info: *const AndroidSurfaceCreateInfoKHR<'_>,
12699 _p_allocator: *const AllocationCallbacks<'_>,
12700 _p_surface: *mut SurfaceKHR,
12701 ) -> Result {
12702 panic!(concat!(
12703 "Unable to load ",
12704 stringify!(create_android_surface_khr)
12705 ))
12706 }
12707 let val = _f(c"vkCreateAndroidSurfaceKHR");
12708 if val.is_null() {
12709 create_android_surface_khr
12710 } else {
12711 ::core::mem::transmute::<*const c_void, PFN_vkCreateAndroidSurfaceKHR>(
12712 val,
12713 )
12714 }
12715 },
12716 }
12717 }
12718 }
12719 }
    #[doc = "VK_KHR_win32_surface"]
    pub mod win32_surface {
        use super::super::*;
        // Extension name and version aliases for generic access.
        pub use {
            crate::vk::KHR_WIN32_SURFACE_EXTENSION_NAME as NAME,
            crate::vk::KHR_WIN32_SURFACE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_win32_surface instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: InstanceFn,
            // The VkInstance the pointers were resolved against.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            // Resolves the extension's entry points via `vkGetInstanceProcAddr`;
            // missing commands become panicking stubs.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // Reinterpret the Option<fn> result as a raw pointer
                    // (null == command not found).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Access the raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            // The VkInstance these function pointers belong to.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_win32_surface instance-level function pointers"]
        pub struct InstanceFn {
            pub create_win32_surface_khr: PFN_vkCreateWin32SurfaceKHR,
            pub get_physical_device_win32_presentation_support_khr:
                PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR,
        }
        // SAFETY: only plain `extern "system"` function pointers are stored.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            // Generic front-end over the type-erased loader below.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Null lookup result → install a stub that panics with the
            // command name; otherwise transmute to the typed PFN.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_win32_surface_khr: unsafe {
                        unsafe extern "system" fn create_win32_surface_khr(
                            _instance: crate::vk::Instance,
                            _p_create_info: *const Win32SurfaceCreateInfoKHR<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_surface: *mut SurfaceKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_win32_surface_khr)
                            ))
                        }
                        let val = _f(c"vkCreateWin32SurfaceKHR");
                        if val.is_null() {
                            create_win32_surface_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateWin32SurfaceKHR>(
                                val,
                            )
                        }
                    },
                    get_physical_device_win32_presentation_support_khr: unsafe {
                        unsafe extern "system" fn get_physical_device_win32_presentation_support_khr(
                            _physical_device: PhysicalDevice,
                            _queue_family_index: u32,
                        ) -> Bool32 {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_win32_presentation_support_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceWin32PresentationSupportKHR");
                        if val.is_null() {
                            get_physical_device_win32_presentation_support_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR,
                            >(val)
                        }
                    },
                }
            }
        }
    }
12812 #[doc = "VK_KHR_sampler_mirror_clamp_to_edge"]
12813 pub mod sampler_mirror_clamp_to_edge {
12814 use super::super::*;
12815 pub use {
12816 crate::vk::KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME as NAME,
12817 crate::vk::KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION as SPEC_VERSION,
12818 };
12819 }
12820 #[doc = "VK_KHR_video_queue"]
12821 pub mod video_queue {
12822 use super::super::*;
12823 pub use {
12824 crate::vk::KHR_VIDEO_QUEUE_EXTENSION_NAME as NAME,
12825 crate::vk::KHR_VIDEO_QUEUE_SPEC_VERSION as SPEC_VERSION,
12826 };
        #[doc = "VK_KHR_video_queue instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded instance-level function-pointer table for VK_KHR_video_queue.
            pub(crate) fp: InstanceFn,
            // The VkInstance the pointers were resolved against.
            pub(crate) handle: crate::vk::Instance,
        }
12833 impl Instance {
12834 pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
12835 let handle = instance.handle();
12836 let fp = InstanceFn::load(|name| unsafe {
12837 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
12838 entry.get_instance_proc_addr(handle, name.as_ptr()),
12839 )
12840 });
12841 Self { handle, fp }
12842 }
12843 #[inline]
12844 pub fn fp(&self) -> &InstanceFn {
12845 &self.fp
12846 }
12847 #[inline]
12848 pub fn instance(&self) -> crate::vk::Instance {
12849 self.handle
12850 }
12851 }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_video_queue instance-level function pointers"]
        pub struct InstanceFn {
            pub get_physical_device_video_capabilities_khr:
                PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR,
            pub get_physical_device_video_format_properties_khr:
                PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR,
        }
        // SAFETY: only plain `extern "system"` function pointers are stored.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            // Generic front-end over the type-erased loader below.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Null lookup result → install a stub that panics with the
            // command name; otherwise transmute to the typed PFN.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_physical_device_video_capabilities_khr: unsafe {
                        unsafe extern "system" fn get_physical_device_video_capabilities_khr(
                            _physical_device: PhysicalDevice,
                            _p_video_profile: *const VideoProfileInfoKHR<'_>,
                            _p_capabilities: *mut VideoCapabilitiesKHR<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_video_capabilities_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceVideoCapabilitiesKHR");
                        if val.is_null() {
                            get_physical_device_video_capabilities_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR,
                            >(val)
                        }
                    },
                    get_physical_device_video_format_properties_khr: unsafe {
                        unsafe extern "system" fn get_physical_device_video_format_properties_khr(
                            _physical_device: PhysicalDevice,
                            _p_video_format_info: *const PhysicalDeviceVideoFormatInfoKHR<'_>,
                            _p_video_format_property_count: *mut u32,
                            _p_video_format_properties: *mut VideoFormatPropertiesKHR<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_video_format_properties_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceVideoFormatPropertiesKHR");
                        if val.is_null() {
                            get_physical_device_video_format_properties_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR,
                            >(val)
                        }
                    },
                }
            }
        }
        #[doc = "VK_KHR_video_queue device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded device-level function-pointer table for VK_KHR_video_queue.
            pub(crate) fp: DeviceFn,
            // The VkDevice the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
12920 impl Device {
12921 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
12922 let handle = device.handle();
12923 let fp = DeviceFn::load(|name| unsafe {
12924 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
12925 instance.get_device_proc_addr(handle, name.as_ptr()),
12926 )
12927 });
12928 Self { handle, fp }
12929 }
12930 #[inline]
12931 pub fn fp(&self) -> &DeviceFn {
12932 &self.fp
12933 }
12934 #[inline]
12935 pub fn device(&self) -> crate::vk::Device {
12936 self.handle
12937 }
12938 }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_video_queue device-level function pointers"]
        pub struct DeviceFn {
            pub create_video_session_khr: PFN_vkCreateVideoSessionKHR,
            pub destroy_video_session_khr: PFN_vkDestroyVideoSessionKHR,
            pub get_video_session_memory_requirements_khr:
                PFN_vkGetVideoSessionMemoryRequirementsKHR,
            pub bind_video_session_memory_khr: PFN_vkBindVideoSessionMemoryKHR,
            pub create_video_session_parameters_khr: PFN_vkCreateVideoSessionParametersKHR,
            pub update_video_session_parameters_khr: PFN_vkUpdateVideoSessionParametersKHR,
            pub destroy_video_session_parameters_khr: PFN_vkDestroyVideoSessionParametersKHR,
            pub cmd_begin_video_coding_khr: PFN_vkCmdBeginVideoCodingKHR,
            pub cmd_end_video_coding_khr: PFN_vkCmdEndVideoCodingKHR,
            pub cmd_control_video_coding_khr: PFN_vkCmdControlVideoCodingKHR,
        }
        // SAFETY: only plain `extern "system"` function pointers are stored.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
12956 impl DeviceFn {
12957 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
12958 Self::load_erased(&mut f)
12959 }
12960 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
12961 Self {
12962 create_video_session_khr: unsafe {
12963 unsafe extern "system" fn create_video_session_khr(
12964 _device: crate::vk::Device,
12965 _p_create_info: *const VideoSessionCreateInfoKHR<'_>,
12966 _p_allocator: *const AllocationCallbacks<'_>,
12967 _p_video_session: *mut VideoSessionKHR,
12968 ) -> Result {
12969 panic!(concat!(
12970 "Unable to load ",
12971 stringify!(create_video_session_khr)
12972 ))
12973 }
12974 let val = _f(c"vkCreateVideoSessionKHR");
12975 if val.is_null() {
12976 create_video_session_khr
12977 } else {
12978 ::core::mem::transmute::<*const c_void, PFN_vkCreateVideoSessionKHR>(
12979 val,
12980 )
12981 }
12982 },
12983 destroy_video_session_khr: unsafe {
12984 unsafe extern "system" fn destroy_video_session_khr(
12985 _device: crate::vk::Device,
12986 _video_session: VideoSessionKHR,
12987 _p_allocator: *const AllocationCallbacks<'_>,
12988 ) {
12989 panic!(concat!(
12990 "Unable to load ",
12991 stringify!(destroy_video_session_khr)
12992 ))
12993 }
12994 let val = _f(c"vkDestroyVideoSessionKHR");
12995 if val.is_null() {
12996 destroy_video_session_khr
12997 } else {
12998 ::core::mem::transmute::<*const c_void, PFN_vkDestroyVideoSessionKHR>(
12999 val,
13000 )
13001 }
13002 },
13003 get_video_session_memory_requirements_khr: unsafe {
13004 unsafe extern "system" fn get_video_session_memory_requirements_khr(
13005 _device: crate::vk::Device,
13006 _video_session: VideoSessionKHR,
13007 _p_memory_requirements_count: *mut u32,
13008 _p_memory_requirements: *mut VideoSessionMemoryRequirementsKHR<'_>,
13009 ) -> Result {
13010 panic!(concat!(
13011 "Unable to load ",
13012 stringify!(get_video_session_memory_requirements_khr)
13013 ))
13014 }
13015 let val = _f(c"vkGetVideoSessionMemoryRequirementsKHR");
13016 if val.is_null() {
13017 get_video_session_memory_requirements_khr
13018 } else {
13019 ::core::mem::transmute::<
13020 *const c_void,
13021 PFN_vkGetVideoSessionMemoryRequirementsKHR,
13022 >(val)
13023 }
13024 },
13025 bind_video_session_memory_khr: unsafe {
13026 unsafe extern "system" fn bind_video_session_memory_khr(
13027 _device: crate::vk::Device,
13028 _video_session: VideoSessionKHR,
13029 _bind_session_memory_info_count: u32,
13030 _p_bind_session_memory_infos: *const BindVideoSessionMemoryInfoKHR<'_>,
13031 ) -> Result {
13032 panic!(concat!(
13033 "Unable to load ",
13034 stringify!(bind_video_session_memory_khr)
13035 ))
13036 }
13037 let val = _f(c"vkBindVideoSessionMemoryKHR");
13038 if val.is_null() {
13039 bind_video_session_memory_khr
13040 } else {
13041 ::core::mem::transmute::<*const c_void, PFN_vkBindVideoSessionMemoryKHR>(
13042 val,
13043 )
13044 }
13045 },
13046 create_video_session_parameters_khr: unsafe {
13047 unsafe extern "system" fn create_video_session_parameters_khr(
13048 _device: crate::vk::Device,
13049 _p_create_info: *const VideoSessionParametersCreateInfoKHR<'_>,
13050 _p_allocator: *const AllocationCallbacks<'_>,
13051 _p_video_session_parameters: *mut VideoSessionParametersKHR,
13052 ) -> Result {
13053 panic!(concat!(
13054 "Unable to load ",
13055 stringify!(create_video_session_parameters_khr)
13056 ))
13057 }
13058 let val = _f(c"vkCreateVideoSessionParametersKHR");
13059 if val.is_null() {
13060 create_video_session_parameters_khr
13061 } else {
13062 ::core::mem::transmute::<
13063 *const c_void,
13064 PFN_vkCreateVideoSessionParametersKHR,
13065 >(val)
13066 }
13067 },
13068 update_video_session_parameters_khr: unsafe {
13069 unsafe extern "system" fn update_video_session_parameters_khr(
13070 _device: crate::vk::Device,
13071 _video_session_parameters: VideoSessionParametersKHR,
13072 _p_update_info: *const VideoSessionParametersUpdateInfoKHR<'_>,
13073 ) -> Result {
13074 panic!(concat!(
13075 "Unable to load ",
13076 stringify!(update_video_session_parameters_khr)
13077 ))
13078 }
13079 let val = _f(c"vkUpdateVideoSessionParametersKHR");
13080 if val.is_null() {
13081 update_video_session_parameters_khr
13082 } else {
13083 ::core::mem::transmute::<
13084 *const c_void,
13085 PFN_vkUpdateVideoSessionParametersKHR,
13086 >(val)
13087 }
13088 },
13089 destroy_video_session_parameters_khr: unsafe {
13090 unsafe extern "system" fn destroy_video_session_parameters_khr(
13091 _device: crate::vk::Device,
13092 _video_session_parameters: VideoSessionParametersKHR,
13093 _p_allocator: *const AllocationCallbacks<'_>,
13094 ) {
13095 panic!(concat!(
13096 "Unable to load ",
13097 stringify!(destroy_video_session_parameters_khr)
13098 ))
13099 }
13100 let val = _f(c"vkDestroyVideoSessionParametersKHR");
13101 if val.is_null() {
13102 destroy_video_session_parameters_khr
13103 } else {
13104 ::core::mem::transmute::<
13105 *const c_void,
13106 PFN_vkDestroyVideoSessionParametersKHR,
13107 >(val)
13108 }
13109 },
13110 cmd_begin_video_coding_khr: unsafe {
13111 unsafe extern "system" fn cmd_begin_video_coding_khr(
13112 _command_buffer: CommandBuffer,
13113 _p_begin_info: *const VideoBeginCodingInfoKHR<'_>,
13114 ) {
13115 panic!(concat!(
13116 "Unable to load ",
13117 stringify!(cmd_begin_video_coding_khr)
13118 ))
13119 }
13120 let val = _f(c"vkCmdBeginVideoCodingKHR");
13121 if val.is_null() {
13122 cmd_begin_video_coding_khr
13123 } else {
13124 ::core::mem::transmute::<*const c_void, PFN_vkCmdBeginVideoCodingKHR>(
13125 val,
13126 )
13127 }
13128 },
13129 cmd_end_video_coding_khr: unsafe {
13130 unsafe extern "system" fn cmd_end_video_coding_khr(
13131 _command_buffer: CommandBuffer,
13132 _p_end_coding_info: *const VideoEndCodingInfoKHR<'_>,
13133 ) {
13134 panic!(concat!(
13135 "Unable to load ",
13136 stringify!(cmd_end_video_coding_khr)
13137 ))
13138 }
13139 let val = _f(c"vkCmdEndVideoCodingKHR");
13140 if val.is_null() {
13141 cmd_end_video_coding_khr
13142 } else {
13143 ::core::mem::transmute::<*const c_void, PFN_vkCmdEndVideoCodingKHR>(val)
13144 }
13145 },
13146 cmd_control_video_coding_khr: unsafe {
13147 unsafe extern "system" fn cmd_control_video_coding_khr(
13148 _command_buffer: CommandBuffer,
13149 _p_coding_control_info: *const VideoCodingControlInfoKHR<'_>,
13150 ) {
13151 panic!(concat!(
13152 "Unable to load ",
13153 stringify!(cmd_control_video_coding_khr)
13154 ))
13155 }
13156 let val = _f(c"vkCmdControlVideoCodingKHR");
13157 if val.is_null() {
13158 cmd_control_video_coding_khr
13159 } else {
13160 ::core::mem::transmute::<*const c_void, PFN_vkCmdControlVideoCodingKHR>(
13161 val,
13162 )
13163 }
13164 },
13165 }
13166 }
13167 }
13168 }
    #[doc = "VK_KHR_video_decode_queue"]
    pub mod video_decode_queue {
        use super::super::*;
        pub use {
            crate::vk::KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME as NAME,
            crate::vk::KHR_VIDEO_DECODE_QUEUE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_video_decode_queue device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Resolved function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Device handle the pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's commands through `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                // PFN_vkVoidFunction is an optional function pointer; viewing it
                // as *const c_void lets the loader below test for null.
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_video_decode_queue device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_decode_video_khr: PFN_vkCmdDecodeVideoKHR,
        }
        // NOTE(review): presumed sound to share — the table only holds plain
        // function pointers assigned once at load; confirm against generator intent.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased body shared by all `load` instantiations: each command
            // is looked up by name, and a missing (null) command is replaced by a
            // stub that panics if ever called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_decode_video_khr: unsafe {
                        // Panicking stub used when the driver does not expose the command.
                        unsafe extern "system" fn cmd_decode_video_khr(
                            _command_buffer: CommandBuffer,
                            _p_decode_info: *const VideoDecodeInfoKHR<'_>,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(cmd_decode_video_khr)))
                        }
                        let val = _f(c"vkCmdDecodeVideoKHR");
                        if val.is_null() {
                            cmd_decode_video_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDecodeVideoKHR>(val)
                        }
                    },
                }
            }
        }
    }
13232 #[doc = "VK_KHR_video_encode_h264"]
13233 pub mod video_encode_h264 {
13234 use super::super::*;
13235 pub use {
13236 crate::vk::KHR_VIDEO_ENCODE_H264_EXTENSION_NAME as NAME,
13237 crate::vk::KHR_VIDEO_ENCODE_H264_SPEC_VERSION as SPEC_VERSION,
13238 };
13239 }
13240 #[doc = "VK_KHR_video_encode_h265"]
13241 pub mod video_encode_h265 {
13242 use super::super::*;
13243 pub use {
13244 crate::vk::KHR_VIDEO_ENCODE_H265_EXTENSION_NAME as NAME,
13245 crate::vk::KHR_VIDEO_ENCODE_H265_SPEC_VERSION as SPEC_VERSION,
13246 };
13247 }
13248 #[doc = "VK_KHR_video_decode_h264"]
13249 pub mod video_decode_h264 {
13250 use super::super::*;
13251 pub use {
13252 crate::vk::KHR_VIDEO_DECODE_H264_EXTENSION_NAME as NAME,
13253 crate::vk::KHR_VIDEO_DECODE_H264_SPEC_VERSION as SPEC_VERSION,
13254 };
13255 }
    #[doc = "VK_KHR_dynamic_rendering"]
    pub mod dynamic_rendering {
        use super::super::*;
        pub use {
            crate::vk::KHR_DYNAMIC_RENDERING_EXTENSION_NAME as NAME,
            crate::vk::KHR_DYNAMIC_RENDERING_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_dynamic_rendering device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Resolved function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Device handle the pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's commands through `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                // PFN_vkVoidFunction is an optional function pointer; viewing it
                // as *const c_void lets the loader below test for null.
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_dynamic_rendering device-level function pointers"]
        pub struct DeviceFn {
            // The KHR aliases share the core 1.3 pointer types (the commands were
            // promoted), hence the non-KHR PFN types on KHR-named fields.
            pub cmd_begin_rendering_khr: PFN_vkCmdBeginRendering,
            pub cmd_end_rendering_khr: PFN_vkCmdEndRendering,
        }
        // NOTE(review): presumed sound to share — the table only holds plain
        // function pointers assigned once at load; confirm against generator intent.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased body shared by all `load` instantiations: each command
            // is looked up by name, and a missing (null) command is replaced by a
            // stub that panics if ever called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_begin_rendering_khr: unsafe {
                        // Panicking stub used when the driver does not expose the command.
                        unsafe extern "system" fn cmd_begin_rendering_khr(
                            _command_buffer: CommandBuffer,
                            _p_rendering_info: *const RenderingInfo<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_begin_rendering_khr)
                            ))
                        }
                        let val = _f(c"vkCmdBeginRenderingKHR");
                        if val.is_null() {
                            cmd_begin_rendering_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdBeginRendering>(val)
                        }
                    },
                    cmd_end_rendering_khr: unsafe {
                        unsafe extern "system" fn cmd_end_rendering_khr(
                            _command_buffer: CommandBuffer,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_end_rendering_khr)
                            ))
                        }
                        let val = _f(c"vkCmdEndRenderingKHR");
                        if val.is_null() {
                            cmd_end_rendering_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdEndRendering>(val)
                        }
                    },
                }
            }
        }
    }
13339 #[doc = "VK_KHR_multiview"]
13340 pub mod multiview {
13341 use super::super::*;
13342 pub use {
13343 crate::vk::KHR_MULTIVIEW_EXTENSION_NAME as NAME,
13344 crate::vk::KHR_MULTIVIEW_SPEC_VERSION as SPEC_VERSION,
13345 };
13346 }
    #[doc = "VK_KHR_get_physical_device_properties2"]
    pub mod get_physical_device_properties2 {
        use super::super::*;
        pub use {
            crate::vk::KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME as NAME,
            crate::vk::KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_get_physical_device_properties2 instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Resolved function-pointer table for this extension.
            pub(crate) fp: InstanceFn,
            // Instance handle the pointers were loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Resolves this extension's commands through `vkGetInstanceProcAddr`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                // PFN_vkVoidFunction is an optional function pointer; viewing it
                // as *const c_void lets the loader below test for null.
                let fp = InstanceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            /// Instance handle the table was loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_get_physical_device_properties2 instance-level function pointers"]
        pub struct InstanceFn {
            // Fields use the promoted core (non-KHR) PFN types under KHR names.
            pub get_physical_device_features2_khr: PFN_vkGetPhysicalDeviceFeatures2,
            pub get_physical_device_properties2_khr: PFN_vkGetPhysicalDeviceProperties2,
            pub get_physical_device_format_properties2_khr:
                PFN_vkGetPhysicalDeviceFormatProperties2,
            pub get_physical_device_image_format_properties2_khr:
                PFN_vkGetPhysicalDeviceImageFormatProperties2,
            pub get_physical_device_queue_family_properties2_khr:
                PFN_vkGetPhysicalDeviceQueueFamilyProperties2,
            pub get_physical_device_memory_properties2_khr:
                PFN_vkGetPhysicalDeviceMemoryProperties2,
            pub get_physical_device_sparse_image_format_properties2_khr:
                PFN_vkGetPhysicalDeviceSparseImageFormatProperties2,
        }
        // NOTE(review): presumed sound to share — the table only holds plain
        // function pointers assigned once at load; confirm against generator intent.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased body shared by all `load` instantiations: each command
            // is looked up by name, and a missing (null) command is replaced by a
            // stub that panics if ever called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_physical_device_features2_khr: unsafe {
                        // Panicking stub used when the driver does not expose the command.
                        unsafe extern "system" fn get_physical_device_features2_khr(
                            _physical_device: PhysicalDevice,
                            _p_features: *mut PhysicalDeviceFeatures2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_features2_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceFeatures2KHR");
                        if val.is_null() {
                            get_physical_device_features2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetPhysicalDeviceFeatures2>(
                                val,
                            )
                        }
                    },
                    get_physical_device_properties2_khr: unsafe {
                        unsafe extern "system" fn get_physical_device_properties2_khr(
                            _physical_device: PhysicalDevice,
                            _p_properties: *mut PhysicalDeviceProperties2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_properties2_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceProperties2KHR");
                        if val.is_null() {
                            get_physical_device_properties2_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceProperties2,
                            >(val)
                        }
                    },
                    get_physical_device_format_properties2_khr: unsafe {
                        unsafe extern "system" fn get_physical_device_format_properties2_khr(
                            _physical_device: PhysicalDevice,
                            _format: Format,
                            _p_format_properties: *mut FormatProperties2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_format_properties2_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceFormatProperties2KHR");
                        if val.is_null() {
                            get_physical_device_format_properties2_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceFormatProperties2,
                            >(val)
                        }
                    },
                    get_physical_device_image_format_properties2_khr: unsafe {
                        unsafe extern "system" fn get_physical_device_image_format_properties2_khr(
                            _physical_device: PhysicalDevice,
                            _p_image_format_info: *const PhysicalDeviceImageFormatInfo2<'_>,
                            _p_image_format_properties: *mut ImageFormatProperties2<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_image_format_properties2_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceImageFormatProperties2KHR");
                        if val.is_null() {
                            get_physical_device_image_format_properties2_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceImageFormatProperties2,
                            >(val)
                        }
                    },
                    get_physical_device_queue_family_properties2_khr: unsafe {
                        unsafe extern "system" fn get_physical_device_queue_family_properties2_khr(
                            _physical_device: PhysicalDevice,
                            _p_queue_family_property_count: *mut u32,
                            _p_queue_family_properties: *mut QueueFamilyProperties2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_queue_family_properties2_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceQueueFamilyProperties2KHR");
                        if val.is_null() {
                            get_physical_device_queue_family_properties2_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceQueueFamilyProperties2,
                            >(val)
                        }
                    },
                    get_physical_device_memory_properties2_khr: unsafe {
                        unsafe extern "system" fn get_physical_device_memory_properties2_khr(
                            _physical_device: PhysicalDevice,
                            _p_memory_properties: *mut PhysicalDeviceMemoryProperties2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_memory_properties2_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceMemoryProperties2KHR");
                        if val.is_null() {
                            get_physical_device_memory_properties2_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceMemoryProperties2,
                            >(val)
                        }
                    },
                    get_physical_device_sparse_image_format_properties2_khr: unsafe {
                        unsafe extern "system" fn get_physical_device_sparse_image_format_properties2_khr(
                            _physical_device: PhysicalDevice,
                            _p_format_info: *const PhysicalDeviceSparseImageFormatInfo2<'_>,
                            _p_property_count: *mut u32,
                            _p_properties: *mut SparseImageFormatProperties2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_sparse_image_format_properties2_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceSparseImageFormatProperties2KHR");
                        if val.is_null() {
                            get_physical_device_sparse_image_format_properties2_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceSparseImageFormatProperties2,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_KHR_device_group"]
    pub mod device_group {
        use super::super::*;
        pub use {
            crate::vk::KHR_DEVICE_GROUP_EXTENSION_NAME as NAME,
            crate::vk::KHR_DEVICE_GROUP_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_device_group instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Resolved function-pointer table for this extension.
            pub(crate) fp: InstanceFn,
            // Instance handle the pointers were loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Resolves this extension's commands through `vkGetInstanceProcAddr`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                // PFN_vkVoidFunction is an optional function pointer; viewing it
                // as *const c_void lets the loader below test for null.
                let fp = InstanceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            /// Instance handle the table was loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_device_group instance-level function pointers"]
        pub struct InstanceFn {
            pub get_physical_device_present_rectangles_khr:
                PFN_vkGetPhysicalDevicePresentRectanglesKHR,
        }
        // NOTE(review): presumed sound to share — the table only holds plain
        // function pointers assigned once at load; confirm against generator intent.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased body shared by all `load` instantiations: each command
            // is looked up by name, and a missing (null) command is replaced by a
            // stub that panics if ever called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_physical_device_present_rectangles_khr: unsafe {
                        // Panicking stub used when the driver does not expose the command.
                        unsafe extern "system" fn get_physical_device_present_rectangles_khr(
                            _physical_device: PhysicalDevice,
                            _surface: SurfaceKHR,
                            _p_rect_count: *mut u32,
                            _p_rects: *mut Rect2D,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_present_rectangles_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDevicePresentRectanglesKHR");
                        if val.is_null() {
                            get_physical_device_present_rectangles_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDevicePresentRectanglesKHR,
                            >(val)
                        }
                    },
                }
            }
        }
        #[doc = "VK_KHR_device_group device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Resolved function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Device handle the pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's commands through `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_device_group device-level function pointers"]
        pub struct DeviceFn {
            // Promoted commands use core (non-KHR) PFN types; the swapchain-
            // interop commands below keep their KHR PFN types.
            pub get_device_group_peer_memory_features_khr: PFN_vkGetDeviceGroupPeerMemoryFeatures,
            pub cmd_set_device_mask_khr: PFN_vkCmdSetDeviceMask,
            pub cmd_dispatch_base_khr: PFN_vkCmdDispatchBase,
            pub get_device_group_present_capabilities_khr:
                PFN_vkGetDeviceGroupPresentCapabilitiesKHR,
            pub get_device_group_surface_present_modes_khr:
                PFN_vkGetDeviceGroupSurfacePresentModesKHR,
            pub acquire_next_image2_khr: PFN_vkAcquireNextImage2KHR,
        }
        // NOTE(review): presumed sound to share — the table only holds plain
        // function pointers assigned once at load; confirm against generator intent.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased body shared by all `load` instantiations: each command
            // is looked up by name, and a missing (null) command is replaced by a
            // stub that panics if ever called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_device_group_peer_memory_features_khr: unsafe {
                        // Panicking stub used when the driver does not expose the command.
                        unsafe extern "system" fn get_device_group_peer_memory_features_khr(
                            _device: crate::vk::Device,
                            _heap_index: u32,
                            _local_device_index: u32,
                            _remote_device_index: u32,
                            _p_peer_memory_features: *mut PeerMemoryFeatureFlags,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_device_group_peer_memory_features_khr)
                            ))
                        }
                        let val = _f(c"vkGetDeviceGroupPeerMemoryFeaturesKHR");
                        if val.is_null() {
                            get_device_group_peer_memory_features_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDeviceGroupPeerMemoryFeatures,
                            >(val)
                        }
                    },
                    cmd_set_device_mask_khr: unsafe {
                        unsafe extern "system" fn cmd_set_device_mask_khr(
                            _command_buffer: CommandBuffer,
                            _device_mask: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_device_mask_khr)
                            ))
                        }
                        let val = _f(c"vkCmdSetDeviceMaskKHR");
                        if val.is_null() {
                            cmd_set_device_mask_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetDeviceMask>(val)
                        }
                    },
                    cmd_dispatch_base_khr: unsafe {
                        unsafe extern "system" fn cmd_dispatch_base_khr(
                            _command_buffer: CommandBuffer,
                            _base_group_x: u32,
                            _base_group_y: u32,
                            _base_group_z: u32,
                            _group_count_x: u32,
                            _group_count_y: u32,
                            _group_count_z: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_dispatch_base_khr)
                            ))
                        }
                        let val = _f(c"vkCmdDispatchBaseKHR");
                        if val.is_null() {
                            cmd_dispatch_base_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDispatchBase>(val)
                        }
                    },
                    get_device_group_present_capabilities_khr: unsafe {
                        unsafe extern "system" fn get_device_group_present_capabilities_khr(
                            _device: crate::vk::Device,
                            _p_device_group_present_capabilities : * mut DeviceGroupPresentCapabilitiesKHR < '_ >,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_device_group_present_capabilities_khr)
                            ))
                        }
                        let val = _f(c"vkGetDeviceGroupPresentCapabilitiesKHR");
                        if val.is_null() {
                            get_device_group_present_capabilities_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDeviceGroupPresentCapabilitiesKHR,
                            >(val)
                        }
                    },
                    get_device_group_surface_present_modes_khr: unsafe {
                        unsafe extern "system" fn get_device_group_surface_present_modes_khr(
                            _device: crate::vk::Device,
                            _surface: SurfaceKHR,
                            _p_modes: *mut DeviceGroupPresentModeFlagsKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_device_group_surface_present_modes_khr)
                            ))
                        }
                        let val = _f(c"vkGetDeviceGroupSurfacePresentModesKHR");
                        if val.is_null() {
                            get_device_group_surface_present_modes_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDeviceGroupSurfacePresentModesKHR,
                            >(val)
                        }
                    },
                    acquire_next_image2_khr: unsafe {
                        unsafe extern "system" fn acquire_next_image2_khr(
                            _device: crate::vk::Device,
                            _p_acquire_info: *const AcquireNextImageInfoKHR<'_>,
                            _p_image_index: *mut u32,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(acquire_next_image2_khr)
                            ))
                        }
                        let val = _f(c"vkAcquireNextImage2KHR");
                        if val.is_null() {
                            acquire_next_image2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkAcquireNextImage2KHR>(val)
                        }
                    },
                }
            }
        }
    }
13792 #[doc = "VK_KHR_shader_draw_parameters"]
13793 pub mod shader_draw_parameters {
13794 use super::super::*;
13795 pub use {
13796 crate::vk::KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME as NAME,
13797 crate::vk::KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION as SPEC_VERSION,
13798 };
13799 }
13800 #[doc = "VK_KHR_maintenance1"]
13801 pub mod maintenance1 {
13802 use super::super::*;
13803 pub use {
13804 crate::vk::KHR_MAINTENANCE_1_EXTENSION_NAME as NAME,
13805 crate::vk::KHR_MAINTENANCE_1_SPEC_VERSION as SPEC_VERSION,
13806 };
13807 #[doc = "VK_KHR_maintenance1 device-level functions"]
13808 #[derive(Clone)]
13809 pub struct Device {
13810 pub(crate) fp: DeviceFn,
13811 pub(crate) handle: crate::vk::Device,
13812 }
13813 impl Device {
13814 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
13815 let handle = device.handle();
13816 let fp = DeviceFn::load(|name| unsafe {
13817 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
13818 instance.get_device_proc_addr(handle, name.as_ptr()),
13819 )
13820 });
13821 Self { handle, fp }
13822 }
13823 #[inline]
13824 pub fn fp(&self) -> &DeviceFn {
13825 &self.fp
13826 }
13827 #[inline]
13828 pub fn device(&self) -> crate::vk::Device {
13829 self.handle
13830 }
13831 }
13832 #[derive(Clone)]
13833 #[doc = "Raw VK_KHR_maintenance1 device-level function pointers"]
13834 pub struct DeviceFn {
13835 pub trim_command_pool_khr: PFN_vkTrimCommandPool,
13836 }
13837 unsafe impl Send for DeviceFn {}
13838 unsafe impl Sync for DeviceFn {}
13839 impl DeviceFn {
13840 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
13841 Self::load_erased(&mut f)
13842 }
13843 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
13844 Self {
13845 trim_command_pool_khr: unsafe {
13846 unsafe extern "system" fn trim_command_pool_khr(
13847 _device: crate::vk::Device,
13848 _command_pool: CommandPool,
13849 _flags: CommandPoolTrimFlags,
13850 ) {
13851 panic!(concat!(
13852 "Unable to load ",
13853 stringify!(trim_command_pool_khr)
13854 ))
13855 }
13856 let val = _f(c"vkTrimCommandPoolKHR");
13857 if val.is_null() {
13858 trim_command_pool_khr
13859 } else {
13860 ::core::mem::transmute::<*const c_void, PFN_vkTrimCommandPool>(val)
13861 }
13862 },
13863 }
13864 }
13865 }
13866 }
    #[doc = "VK_KHR_device_group_creation"]
    pub mod device_group_creation {
        use super::super::*;
        pub use {
            crate::vk::KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME as NAME,
            crate::vk::KHR_DEVICE_GROUP_CREATION_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_device_group_creation instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Resolved function-pointer table for this extension.
            pub(crate) fp: InstanceFn,
            // Instance handle the pointers were loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Resolves this extension's commands through `vkGetInstanceProcAddr`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                // PFN_vkVoidFunction is an optional function pointer; viewing it
                // as *const c_void lets the loader below test for null.
                let fp = InstanceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            /// Instance handle the table was loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_device_group_creation instance-level function pointers"]
        pub struct InstanceFn {
            pub enumerate_physical_device_groups_khr: PFN_vkEnumeratePhysicalDeviceGroups,
        }
        // NOTE(review): presumed sound to share — the table only holds plain
        // function pointers assigned once at load; confirm against generator intent.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased body shared by all `load` instantiations: each command
            // is looked up by name, and a missing (null) command is replaced by a
            // stub that panics if ever called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    enumerate_physical_device_groups_khr: unsafe {
                        // Panicking stub used when the driver does not expose the command.
                        unsafe extern "system" fn enumerate_physical_device_groups_khr(
                            _instance: crate::vk::Instance,
                            _p_physical_device_group_count: *mut u32,
                            _p_physical_device_group_properties: *mut PhysicalDeviceGroupProperties<
                                '_,
                            >,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(enumerate_physical_device_groups_khr)
                            ))
                        }
                        let val = _f(c"vkEnumeratePhysicalDeviceGroupsKHR");
                        if val.is_null() {
                            enumerate_physical_device_groups_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkEnumeratePhysicalDeviceGroups,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_KHR_external_memory_capabilities"]
    pub mod external_memory_capabilities {
        use super::super::*;
        pub use {
            crate::vk::KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME as NAME,
            crate::vk::KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_external_memory_capabilities instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Resolved function-pointer table for this extension.
            pub(crate) fp: InstanceFn,
            // Instance handle the pointers were loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Resolves this extension's commands through `vkGetInstanceProcAddr`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                // PFN_vkVoidFunction is an optional function pointer; viewing it
                // as *const c_void lets the loader below test for null.
                let fp = InstanceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            /// Instance handle the table was loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_external_memory_capabilities instance-level function pointers"]
        pub struct InstanceFn {
            pub get_physical_device_external_buffer_properties_khr:
                PFN_vkGetPhysicalDeviceExternalBufferProperties,
        }
        // NOTE(review): presumed sound to share — the table only holds plain
        // function pointers assigned once at load; confirm against generator intent.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased body shared by all `load` instantiations: each command
            // is looked up by name, and a missing (null) command is replaced by a
            // stub that panics if ever called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_physical_device_external_buffer_properties_khr: unsafe {
                        // Panicking stub used when the driver does not expose the command.
                        unsafe extern "system" fn get_physical_device_external_buffer_properties_khr(
                            _physical_device: PhysicalDevice,
                            _p_external_buffer_info: *const PhysicalDeviceExternalBufferInfo<'_>,
                            _p_external_buffer_properties: *mut ExternalBufferProperties<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_external_buffer_properties_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceExternalBufferPropertiesKHR");
                        if val.is_null() {
                            get_physical_device_external_buffer_properties_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceExternalBufferProperties,
                            >(val)
                        }
                    },
                }
            }
        }
    }
14010 #[doc = "VK_KHR_external_memory"]
14011 pub mod external_memory {
14012 use super::super::*;
14013 pub use {
14014 crate::vk::KHR_EXTERNAL_MEMORY_EXTENSION_NAME as NAME,
14015 crate::vk::KHR_EXTERNAL_MEMORY_SPEC_VERSION as SPEC_VERSION,
14016 };
14017 }
    #[doc = "VK_KHR_external_memory_win32"]
    pub mod external_memory_win32 {
        use super::super::*;
        pub use {
            crate::vk::KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME as NAME,
            crate::vk::KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_external_memory_win32 device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Resolved function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Device handle the pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's commands through `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                // PFN_vkVoidFunction is an optional function pointer; viewing it
                // as *const c_void lets the loader below test for null.
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle the table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_external_memory_win32 device-level function pointers"]
        pub struct DeviceFn {
            pub get_memory_win32_handle_khr: PFN_vkGetMemoryWin32HandleKHR,
            pub get_memory_win32_handle_properties_khr: PFN_vkGetMemoryWin32HandlePropertiesKHR,
        }
        // NOTE(review): presumed sound to share — the table only holds plain
        // function pointers assigned once at load; confirm against generator intent.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased body shared by all `load` instantiations: each command
            // is looked up by name, and a missing (null) command is replaced by a
            // stub that panics if ever called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_memory_win32_handle_khr: unsafe {
                        // Panicking stub used when the driver does not expose the command.
                        unsafe extern "system" fn get_memory_win32_handle_khr(
                            _device: crate::vk::Device,
                            _p_get_win32_handle_info: *const MemoryGetWin32HandleInfoKHR<'_>,
                            _p_handle: *mut HANDLE,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_memory_win32_handle_khr)
                            ))
                        }
                        let val = _f(c"vkGetMemoryWin32HandleKHR");
                        if val.is_null() {
                            get_memory_win32_handle_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetMemoryWin32HandleKHR>(
                                val,
                            )
                        }
                    },
                    get_memory_win32_handle_properties_khr: unsafe {
                        unsafe extern "system" fn get_memory_win32_handle_properties_khr(
                            _device: crate::vk::Device,
                            _handle_type: ExternalMemoryHandleTypeFlags,
                            _handle: HANDLE,
                            _p_memory_win32_handle_properties: *mut MemoryWin32HandlePropertiesKHR<
                                '_,
                            >,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_memory_win32_handle_properties_khr)
                            ))
                        }
                        let val = _f(c"vkGetMemoryWin32HandlePropertiesKHR");
                        if val.is_null() {
                            get_memory_win32_handle_properties_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetMemoryWin32HandlePropertiesKHR,
                            >(val)
                        }
                    },
                }
            }
        }
    }
#[doc = "VK_KHR_external_memory_fd"]
pub mod external_memory_fd {
    use super::super::*;
    // Conventional re-exports of this extension's name/version constants.
    pub use {
        crate::vk::KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME as NAME,
        crate::vk::KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION as SPEC_VERSION,
    };
    #[doc = "VK_KHR_external_memory_fd device-level functions"]
    #[derive(Clone)]
    pub struct Device {
        // Function-pointer table resolved for `handle`.
        pub(crate) fp: DeviceFn,
        // Device handle the pointers were loaded against.
        pub(crate) handle: crate::vk::Device,
    }
    impl Device {
        // Loads this extension's device-level entry points for `device`
        // via `vkGetDeviceProcAddr` obtained from `instance`.
        pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
            let handle = device.handle();
            let fp = DeviceFn::load(|name| unsafe {
                // SAFETY: optional function pointer -> raw pointer; failed
                // lookups become null and are handled by `load`.
                core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                    instance.get_device_proc_addr(handle, name.as_ptr()),
                )
            });
            Self { handle, fp }
        }
        // Access the raw function-pointer table.
        #[inline]
        pub fn fp(&self) -> &DeviceFn {
            &self.fp
        }
        // Device handle this table was loaded for.
        #[inline]
        pub fn device(&self) -> crate::vk::Device {
            self.handle
        }
    }
    #[derive(Clone)]
    #[doc = "Raw VK_KHR_external_memory_fd device-level function pointers"]
    pub struct DeviceFn {
        pub get_memory_fd_khr: PFN_vkGetMemoryFdKHR,
        pub get_memory_fd_properties_khr: PFN_vkGetMemoryFdPropertiesKHR,
    }
    // SAFETY: plain function pointers are safe to send/share across threads.
    unsafe impl Send for DeviceFn {}
    unsafe impl Sync for DeviceFn {}
    impl DeviceFn {
        // Resolves each entry point by name through `f`.
        pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
            Self::load_erased(&mut f)
        }
        // Type-erased loader shared by all monomorphizations of `load`.
        fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
            Self {
                get_memory_fd_khr: unsafe {
                    // Panicking stub installed when the entry point is missing.
                    unsafe extern "system" fn get_memory_fd_khr(
                        _device: crate::vk::Device,
                        _p_get_fd_info: *const MemoryGetFdInfoKHR<'_>,
                        _p_fd: *mut c_int,
                    ) -> Result {
                        panic!(concat!("Unable to load ", stringify!(get_memory_fd_khr)))
                    }
                    let val = _f(c"vkGetMemoryFdKHR");
                    if val.is_null() {
                        get_memory_fd_khr
                    } else {
                        ::core::mem::transmute::<*const c_void, PFN_vkGetMemoryFdKHR>(val)
                    }
                },
                // Same lookup-or-panicking-stub pattern as above.
                get_memory_fd_properties_khr: unsafe {
                    unsafe extern "system" fn get_memory_fd_properties_khr(
                        _device: crate::vk::Device,
                        _handle_type: ExternalMemoryHandleTypeFlags,
                        _fd: c_int,
                        _p_memory_fd_properties: *mut MemoryFdPropertiesKHR<'_>,
                    ) -> Result {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(get_memory_fd_properties_khr)
                        ))
                    }
                    let val = _f(c"vkGetMemoryFdPropertiesKHR");
                    if val.is_null() {
                        get_memory_fd_properties_khr
                    } else {
                        ::core::mem::transmute::<*const c_void, PFN_vkGetMemoryFdPropertiesKHR>(
                            val,
                        )
                    }
                },
            }
        }
    }
}
14198 #[doc = "VK_KHR_win32_keyed_mutex"]
14199 pub mod win32_keyed_mutex {
14200 use super::super::*;
14201 pub use {
14202 crate::vk::KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME as NAME,
14203 crate::vk::KHR_WIN32_KEYED_MUTEX_SPEC_VERSION as SPEC_VERSION,
14204 };
14205 }
#[doc = "VK_KHR_external_semaphore_capabilities"]
pub mod external_semaphore_capabilities {
    use super::super::*;
    // Conventional re-exports of this extension's name/version constants.
    pub use {
        crate::vk::KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME as NAME,
        crate::vk::KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION as SPEC_VERSION,
    };
    #[doc = "VK_KHR_external_semaphore_capabilities instance-level functions"]
    #[derive(Clone)]
    pub struct Instance {
        // Function-pointer table resolved for `handle`.
        pub(crate) fp: InstanceFn,
        // Instance handle the pointers were loaded against.
        pub(crate) handle: crate::vk::Instance,
    }
    impl Instance {
        // Loads this extension's instance-level entry points for `instance`
        // via `vkGetInstanceProcAddr` obtained from `entry`.
        pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
            let handle = instance.handle();
            let fp = InstanceFn::load(|name| unsafe {
                // SAFETY: optional function pointer -> raw pointer; failed
                // lookups become null and are handled by `load`.
                core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                    entry.get_instance_proc_addr(handle, name.as_ptr()),
                )
            });
            Self { handle, fp }
        }
        // Access the raw function-pointer table.
        #[inline]
        pub fn fp(&self) -> &InstanceFn {
            &self.fp
        }
        // Instance handle this table was loaded for.
        #[inline]
        pub fn instance(&self) -> crate::vk::Instance {
            self.handle
        }
    }
    #[derive(Clone)]
    #[doc = "Raw VK_KHR_external_semaphore_capabilities instance-level function pointers"]
    pub struct InstanceFn {
        // Promoted-to-core alias type: extension was folded into Vulkan 1.1.
        pub get_physical_device_external_semaphore_properties_khr:
            PFN_vkGetPhysicalDeviceExternalSemaphoreProperties,
    }
    // SAFETY: plain function pointers are safe to send/share across threads.
    unsafe impl Send for InstanceFn {}
    unsafe impl Sync for InstanceFn {}
    impl InstanceFn {
        // Resolves each entry point by name through `f`.
        pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
            Self::load_erased(&mut f)
        }
        // Type-erased loader shared by all monomorphizations of `load`.
        fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
            Self {
                get_physical_device_external_semaphore_properties_khr: unsafe {
                    // Panicking stub installed when the entry point is missing.
                    unsafe extern "system" fn get_physical_device_external_semaphore_properties_khr(
                        _physical_device: PhysicalDevice,
                        _p_external_semaphore_info: *const PhysicalDeviceExternalSemaphoreInfo<
                            '_,
                        >,
                        _p_external_semaphore_properties: *mut ExternalSemaphoreProperties<'_>,
                    ) {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(get_physical_device_external_semaphore_properties_khr)
                        ))
                    }
                    let val = _f(c"vkGetPhysicalDeviceExternalSemaphorePropertiesKHR");
                    if val.is_null() {
                        get_physical_device_external_semaphore_properties_khr
                    } else {
                        ::core::mem::transmute::<
                            *const c_void,
                            PFN_vkGetPhysicalDeviceExternalSemaphoreProperties,
                        >(val)
                    }
                },
            }
        }
    }
}
14279 #[doc = "VK_KHR_external_semaphore"]
14280 pub mod external_semaphore {
14281 use super::super::*;
14282 pub use {
14283 crate::vk::KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME as NAME,
14284 crate::vk::KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION as SPEC_VERSION,
14285 };
14286 }
14287 #[doc = "VK_KHR_external_semaphore_win32"]
14288 pub mod external_semaphore_win32 {
14289 use super::super::*;
14290 pub use {
14291 crate::vk::KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME as NAME,
14292 crate::vk::KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION as SPEC_VERSION,
14293 };
14294 #[doc = "VK_KHR_external_semaphore_win32 device-level functions"]
14295 #[derive(Clone)]
14296 pub struct Device {
14297 pub(crate) fp: DeviceFn,
14298 pub(crate) handle: crate::vk::Device,
14299 }
14300 impl Device {
14301 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
14302 let handle = device.handle();
14303 let fp = DeviceFn::load(|name| unsafe {
14304 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
14305 instance.get_device_proc_addr(handle, name.as_ptr()),
14306 )
14307 });
14308 Self { handle, fp }
14309 }
14310 #[inline]
14311 pub fn fp(&self) -> &DeviceFn {
14312 &self.fp
14313 }
14314 #[inline]
14315 pub fn device(&self) -> crate::vk::Device {
14316 self.handle
14317 }
14318 }
14319 #[derive(Clone)]
14320 #[doc = "Raw VK_KHR_external_semaphore_win32 device-level function pointers"]
14321 pub struct DeviceFn {
14322 pub import_semaphore_win32_handle_khr: PFN_vkImportSemaphoreWin32HandleKHR,
14323 pub get_semaphore_win32_handle_khr: PFN_vkGetSemaphoreWin32HandleKHR,
14324 }
14325 unsafe impl Send for DeviceFn {}
14326 unsafe impl Sync for DeviceFn {}
14327 impl DeviceFn {
14328 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
14329 Self::load_erased(&mut f)
14330 }
14331 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
14332 Self {
14333 import_semaphore_win32_handle_khr: unsafe {
14334 unsafe extern "system" fn import_semaphore_win32_handle_khr(
14335 _device: crate::vk::Device,
14336 _p_import_semaphore_win32_handle_info : * const ImportSemaphoreWin32HandleInfoKHR < '_ >,
14337 ) -> Result {
14338 panic!(concat!(
14339 "Unable to load ",
14340 stringify!(import_semaphore_win32_handle_khr)
14341 ))
14342 }
14343 let val = _f(c"vkImportSemaphoreWin32HandleKHR");
14344 if val.is_null() {
14345 import_semaphore_win32_handle_khr
14346 } else {
14347 ::core::mem::transmute::<
14348 *const c_void,
14349 PFN_vkImportSemaphoreWin32HandleKHR,
14350 >(val)
14351 }
14352 },
14353 get_semaphore_win32_handle_khr: unsafe {
14354 unsafe extern "system" fn get_semaphore_win32_handle_khr(
14355 _device: crate::vk::Device,
14356 _p_get_win32_handle_info: *const SemaphoreGetWin32HandleInfoKHR<'_>,
14357 _p_handle: *mut HANDLE,
14358 ) -> Result {
14359 panic!(concat!(
14360 "Unable to load ",
14361 stringify!(get_semaphore_win32_handle_khr)
14362 ))
14363 }
14364 let val = _f(c"vkGetSemaphoreWin32HandleKHR");
14365 if val.is_null() {
14366 get_semaphore_win32_handle_khr
14367 } else {
14368 ::core::mem::transmute::<*const c_void, PFN_vkGetSemaphoreWin32HandleKHR>(
14369 val,
14370 )
14371 }
14372 },
14373 }
14374 }
14375 }
14376 }
#[doc = "VK_KHR_external_semaphore_fd"]
pub mod external_semaphore_fd {
    use super::super::*;
    // Conventional re-exports of this extension's name/version constants.
    pub use {
        crate::vk::KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME as NAME,
        crate::vk::KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION as SPEC_VERSION,
    };
    #[doc = "VK_KHR_external_semaphore_fd device-level functions"]
    #[derive(Clone)]
    pub struct Device {
        // Function-pointer table resolved for `handle`.
        pub(crate) fp: DeviceFn,
        // Device handle the pointers were loaded against.
        pub(crate) handle: crate::vk::Device,
    }
    impl Device {
        // Loads this extension's device-level entry points for `device`
        // via `vkGetDeviceProcAddr` obtained from `instance`.
        pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
            let handle = device.handle();
            let fp = DeviceFn::load(|name| unsafe {
                // SAFETY: optional function pointer -> raw pointer; failed
                // lookups become null and are handled by `load`.
                core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                    instance.get_device_proc_addr(handle, name.as_ptr()),
                )
            });
            Self { handle, fp }
        }
        // Access the raw function-pointer table.
        #[inline]
        pub fn fp(&self) -> &DeviceFn {
            &self.fp
        }
        // Device handle this table was loaded for.
        #[inline]
        pub fn device(&self) -> crate::vk::Device {
            self.handle
        }
    }
    #[derive(Clone)]
    #[doc = "Raw VK_KHR_external_semaphore_fd device-level function pointers"]
    pub struct DeviceFn {
        pub import_semaphore_fd_khr: PFN_vkImportSemaphoreFdKHR,
        pub get_semaphore_fd_khr: PFN_vkGetSemaphoreFdKHR,
    }
    // SAFETY: plain function pointers are safe to send/share across threads.
    unsafe impl Send for DeviceFn {}
    unsafe impl Sync for DeviceFn {}
    impl DeviceFn {
        // Resolves each entry point by name through `f`.
        pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
            Self::load_erased(&mut f)
        }
        // Type-erased loader shared by all monomorphizations of `load`.
        fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
            Self {
                import_semaphore_fd_khr: unsafe {
                    // Panicking stub installed when the entry point is missing.
                    unsafe extern "system" fn import_semaphore_fd_khr(
                        _device: crate::vk::Device,
                        _p_import_semaphore_fd_info: *const ImportSemaphoreFdInfoKHR<'_>,
                    ) -> Result {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(import_semaphore_fd_khr)
                        ))
                    }
                    let val = _f(c"vkImportSemaphoreFdKHR");
                    if val.is_null() {
                        import_semaphore_fd_khr
                    } else {
                        ::core::mem::transmute::<*const c_void, PFN_vkImportSemaphoreFdKHR>(val)
                    }
                },
                // Same lookup-or-panicking-stub pattern as above.
                get_semaphore_fd_khr: unsafe {
                    unsafe extern "system" fn get_semaphore_fd_khr(
                        _device: crate::vk::Device,
                        _p_get_fd_info: *const SemaphoreGetFdInfoKHR<'_>,
                        _p_fd: *mut c_int,
                    ) -> Result {
                        panic!(concat!("Unable to load ", stringify!(get_semaphore_fd_khr)))
                    }
                    let val = _f(c"vkGetSemaphoreFdKHR");
                    if val.is_null() {
                        get_semaphore_fd_khr
                    } else {
                        ::core::mem::transmute::<*const c_void, PFN_vkGetSemaphoreFdKHR>(val)
                    }
                },
            }
        }
    }
}
#[doc = "VK_KHR_push_descriptor"]
pub mod push_descriptor {
    use super::super::*;
    // Conventional re-exports of this extension's name/version constants.
    pub use {
        crate::vk::KHR_PUSH_DESCRIPTOR_EXTENSION_NAME as NAME,
        crate::vk::KHR_PUSH_DESCRIPTOR_SPEC_VERSION as SPEC_VERSION,
    };
    #[doc = "VK_KHR_push_descriptor device-level functions"]
    #[derive(Clone)]
    pub struct Device {
        // Function-pointer table resolved for `handle`.
        pub(crate) fp: DeviceFn,
        // Device handle the pointers were loaded against.
        pub(crate) handle: crate::vk::Device,
    }
    impl Device {
        // Loads this extension's device-level entry points for `device`
        // via `vkGetDeviceProcAddr` obtained from `instance`.
        pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
            let handle = device.handle();
            let fp = DeviceFn::load(|name| unsafe {
                // SAFETY: optional function pointer -> raw pointer; failed
                // lookups become null and are handled by `load`.
                core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                    instance.get_device_proc_addr(handle, name.as_ptr()),
                )
            });
            Self { handle, fp }
        }
        // Access the raw function-pointer table.
        #[inline]
        pub fn fp(&self) -> &DeviceFn {
            &self.fp
        }
        // Device handle this table was loaded for.
        #[inline]
        pub fn device(&self) -> crate::vk::Device {
            self.handle
        }
    }
    #[derive(Clone)]
    #[doc = "Raw VK_KHR_push_descriptor device-level function pointers"]
    pub struct DeviceFn {
        // Promoted-to-core alias types (no KHR suffix) for the KHR commands.
        pub cmd_push_descriptor_set_khr: PFN_vkCmdPushDescriptorSet,
        pub cmd_push_descriptor_set_with_template_khr: PFN_vkCmdPushDescriptorSetWithTemplate,
    }
    // SAFETY: plain function pointers are safe to send/share across threads.
    unsafe impl Send for DeviceFn {}
    unsafe impl Sync for DeviceFn {}
    impl DeviceFn {
        // Resolves each entry point by name through `f`.
        pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
            Self::load_erased(&mut f)
        }
        // Type-erased loader shared by all monomorphizations of `load`.
        fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
            Self {
                cmd_push_descriptor_set_khr: unsafe {
                    // Panicking stub installed when the entry point is missing.
                    unsafe extern "system" fn cmd_push_descriptor_set_khr(
                        _command_buffer: CommandBuffer,
                        _pipeline_bind_point: PipelineBindPoint,
                        _layout: PipelineLayout,
                        _set: u32,
                        _descriptor_write_count: u32,
                        _p_descriptor_writes: *const WriteDescriptorSet<'_>,
                    ) {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(cmd_push_descriptor_set_khr)
                        ))
                    }
                    let val = _f(c"vkCmdPushDescriptorSetKHR");
                    if val.is_null() {
                        cmd_push_descriptor_set_khr
                    } else {
                        ::core::mem::transmute::<*const c_void, PFN_vkCmdPushDescriptorSet>(val)
                    }
                },
                // Same lookup-or-panicking-stub pattern as above.
                cmd_push_descriptor_set_with_template_khr: unsafe {
                    unsafe extern "system" fn cmd_push_descriptor_set_with_template_khr(
                        _command_buffer: CommandBuffer,
                        _descriptor_update_template: DescriptorUpdateTemplate,
                        _layout: PipelineLayout,
                        _set: u32,
                        _p_data: *const c_void,
                    ) {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(cmd_push_descriptor_set_with_template_khr)
                        ))
                    }
                    let val = _f(c"vkCmdPushDescriptorSetWithTemplateKHR");
                    if val.is_null() {
                        cmd_push_descriptor_set_with_template_khr
                    } else {
                        ::core::mem::transmute::<
                            *const c_void,
                            PFN_vkCmdPushDescriptorSetWithTemplate,
                        >(val)
                    }
                },
            }
        }
    }
}
14553 #[doc = "VK_KHR_shader_float16_int8"]
14554 pub mod shader_float16_int8 {
14555 use super::super::*;
14556 pub use {
14557 crate::vk::KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME as NAME,
14558 crate::vk::KHR_SHADER_FLOAT16_INT8_SPEC_VERSION as SPEC_VERSION,
14559 };
14560 }
14561 #[doc = "VK_KHR_16bit_storage"]
14562 pub mod _16bit_storage {
14563 use super::super::*;
14564 pub use {
14565 crate::vk::KHR_16BIT_STORAGE_EXTENSION_NAME as NAME,
14566 crate::vk::KHR_16BIT_STORAGE_SPEC_VERSION as SPEC_VERSION,
14567 };
14568 }
14569 #[doc = "VK_KHR_incremental_present"]
14570 pub mod incremental_present {
14571 use super::super::*;
14572 pub use {
14573 crate::vk::KHR_INCREMENTAL_PRESENT_EXTENSION_NAME as NAME,
14574 crate::vk::KHR_INCREMENTAL_PRESENT_SPEC_VERSION as SPEC_VERSION,
14575 };
14576 }
#[doc = "VK_KHR_descriptor_update_template"]
pub mod descriptor_update_template {
    use super::super::*;
    // Conventional re-exports of this extension's name/version constants.
    pub use {
        crate::vk::KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME as NAME,
        crate::vk::KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION as SPEC_VERSION,
    };
    #[doc = "VK_KHR_descriptor_update_template device-level functions"]
    #[derive(Clone)]
    pub struct Device {
        // Function-pointer table resolved for `handle`.
        pub(crate) fp: DeviceFn,
        // Device handle the pointers were loaded against.
        pub(crate) handle: crate::vk::Device,
    }
    impl Device {
        // Loads this extension's device-level entry points for `device`
        // via `vkGetDeviceProcAddr` obtained from `instance`.
        pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
            let handle = device.handle();
            let fp = DeviceFn::load(|name| unsafe {
                // SAFETY: optional function pointer -> raw pointer; failed
                // lookups become null and are handled by `load`.
                core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                    instance.get_device_proc_addr(handle, name.as_ptr()),
                )
            });
            Self { handle, fp }
        }
        // Access the raw function-pointer table.
        #[inline]
        pub fn fp(&self) -> &DeviceFn {
            &self.fp
        }
        // Device handle this table was loaded for.
        #[inline]
        pub fn device(&self) -> crate::vk::Device {
            self.handle
        }
    }
    #[derive(Clone)]
    #[doc = "Raw VK_KHR_descriptor_update_template device-level function pointers"]
    pub struct DeviceFn {
        // Promoted-to-core alias types (no KHR suffix) for the KHR commands.
        pub create_descriptor_update_template_khr: PFN_vkCreateDescriptorUpdateTemplate,
        pub destroy_descriptor_update_template_khr: PFN_vkDestroyDescriptorUpdateTemplate,
        pub update_descriptor_set_with_template_khr: PFN_vkUpdateDescriptorSetWithTemplate,
        pub cmd_push_descriptor_set_with_template_khr: PFN_vkCmdPushDescriptorSetWithTemplate,
    }
    // SAFETY: plain function pointers are safe to send/share across threads.
    unsafe impl Send for DeviceFn {}
    unsafe impl Sync for DeviceFn {}
    impl DeviceFn {
        // Resolves each entry point by name through `f`.
        pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
            Self::load_erased(&mut f)
        }
        // Type-erased loader shared by all monomorphizations of `load`.
        fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
            Self {
                create_descriptor_update_template_khr: unsafe {
                    // Panicking stub installed when the entry point is missing.
                    unsafe extern "system" fn create_descriptor_update_template_khr(
                        _device: crate::vk::Device,
                        _p_create_info: *const DescriptorUpdateTemplateCreateInfo<'_>,
                        _p_allocator: *const AllocationCallbacks<'_>,
                        _p_descriptor_update_template: *mut DescriptorUpdateTemplate,
                    ) -> Result {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(create_descriptor_update_template_khr)
                        ))
                    }
                    let val = _f(c"vkCreateDescriptorUpdateTemplateKHR");
                    if val.is_null() {
                        create_descriptor_update_template_khr
                    } else {
                        ::core::mem::transmute::<
                            *const c_void,
                            PFN_vkCreateDescriptorUpdateTemplate,
                        >(val)
                    }
                },
                // Same lookup-or-panicking-stub pattern for the remaining entries.
                destroy_descriptor_update_template_khr: unsafe {
                    unsafe extern "system" fn destroy_descriptor_update_template_khr(
                        _device: crate::vk::Device,
                        _descriptor_update_template: DescriptorUpdateTemplate,
                        _p_allocator: *const AllocationCallbacks<'_>,
                    ) {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(destroy_descriptor_update_template_khr)
                        ))
                    }
                    let val = _f(c"vkDestroyDescriptorUpdateTemplateKHR");
                    if val.is_null() {
                        destroy_descriptor_update_template_khr
                    } else {
                        ::core::mem::transmute::<
                            *const c_void,
                            PFN_vkDestroyDescriptorUpdateTemplate,
                        >(val)
                    }
                },
                update_descriptor_set_with_template_khr: unsafe {
                    unsafe extern "system" fn update_descriptor_set_with_template_khr(
                        _device: crate::vk::Device,
                        _descriptor_set: DescriptorSet,
                        _descriptor_update_template: DescriptorUpdateTemplate,
                        _p_data: *const c_void,
                    ) {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(update_descriptor_set_with_template_khr)
                        ))
                    }
                    let val = _f(c"vkUpdateDescriptorSetWithTemplateKHR");
                    if val.is_null() {
                        update_descriptor_set_with_template_khr
                    } else {
                        ::core::mem::transmute::<
                            *const c_void,
                            PFN_vkUpdateDescriptorSetWithTemplate,
                        >(val)
                    }
                },
                cmd_push_descriptor_set_with_template_khr: unsafe {
                    unsafe extern "system" fn cmd_push_descriptor_set_with_template_khr(
                        _command_buffer: CommandBuffer,
                        _descriptor_update_template: DescriptorUpdateTemplate,
                        _layout: PipelineLayout,
                        _set: u32,
                        _p_data: *const c_void,
                    ) {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(cmd_push_descriptor_set_with_template_khr)
                        ))
                    }
                    let val = _f(c"vkCmdPushDescriptorSetWithTemplateKHR");
                    if val.is_null() {
                        cmd_push_descriptor_set_with_template_khr
                    } else {
                        ::core::mem::transmute::<
                            *const c_void,
                            PFN_vkCmdPushDescriptorSetWithTemplate,
                        >(val)
                    }
                },
            }
        }
    }
}
14717 #[doc = "VK_KHR_imageless_framebuffer"]
14718 pub mod imageless_framebuffer {
14719 use super::super::*;
14720 pub use {
14721 crate::vk::KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME as NAME,
14722 crate::vk::KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION as SPEC_VERSION,
14723 };
14724 }
#[doc = "VK_KHR_create_renderpass2"]
pub mod create_renderpass2 {
    use super::super::*;
    // Conventional re-exports of this extension's name/version constants.
    pub use {
        crate::vk::KHR_CREATE_RENDERPASS_2_EXTENSION_NAME as NAME,
        crate::vk::KHR_CREATE_RENDERPASS_2_SPEC_VERSION as SPEC_VERSION,
    };
    #[doc = "VK_KHR_create_renderpass2 device-level functions"]
    #[derive(Clone)]
    pub struct Device {
        // Function-pointer table resolved for `handle`.
        pub(crate) fp: DeviceFn,
        // Device handle the pointers were loaded against.
        pub(crate) handle: crate::vk::Device,
    }
    impl Device {
        // Loads this extension's device-level entry points for `device`
        // via `vkGetDeviceProcAddr` obtained from `instance`.
        pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
            let handle = device.handle();
            let fp = DeviceFn::load(|name| unsafe {
                // SAFETY: optional function pointer -> raw pointer; failed
                // lookups become null and are handled by `load`.
                core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                    instance.get_device_proc_addr(handle, name.as_ptr()),
                )
            });
            Self { handle, fp }
        }
        // Access the raw function-pointer table.
        #[inline]
        pub fn fp(&self) -> &DeviceFn {
            &self.fp
        }
        // Device handle this table was loaded for.
        #[inline]
        pub fn device(&self) -> crate::vk::Device {
            self.handle
        }
    }
    #[derive(Clone)]
    #[doc = "Raw VK_KHR_create_renderpass2 device-level function pointers"]
    pub struct DeviceFn {
        // Promoted-to-core alias types (no KHR suffix) for the KHR commands.
        pub create_render_pass2_khr: PFN_vkCreateRenderPass2,
        pub cmd_begin_render_pass2_khr: PFN_vkCmdBeginRenderPass2,
        pub cmd_next_subpass2_khr: PFN_vkCmdNextSubpass2,
        pub cmd_end_render_pass2_khr: PFN_vkCmdEndRenderPass2,
    }
    // SAFETY: plain function pointers are safe to send/share across threads.
    unsafe impl Send for DeviceFn {}
    unsafe impl Sync for DeviceFn {}
    impl DeviceFn {
        // Resolves each entry point by name through `f`.
        pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
            Self::load_erased(&mut f)
        }
        // Type-erased loader shared by all monomorphizations of `load`.
        fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
            Self {
                create_render_pass2_khr: unsafe {
                    // Panicking stub installed when the entry point is missing.
                    unsafe extern "system" fn create_render_pass2_khr(
                        _device: crate::vk::Device,
                        _p_create_info: *const RenderPassCreateInfo2<'_>,
                        _p_allocator: *const AllocationCallbacks<'_>,
                        _p_render_pass: *mut RenderPass,
                    ) -> Result {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(create_render_pass2_khr)
                        ))
                    }
                    let val = _f(c"vkCreateRenderPass2KHR");
                    if val.is_null() {
                        create_render_pass2_khr
                    } else {
                        ::core::mem::transmute::<*const c_void, PFN_vkCreateRenderPass2>(val)
                    }
                },
                // Same lookup-or-panicking-stub pattern for the remaining entries.
                cmd_begin_render_pass2_khr: unsafe {
                    unsafe extern "system" fn cmd_begin_render_pass2_khr(
                        _command_buffer: CommandBuffer,
                        _p_render_pass_begin: *const RenderPassBeginInfo<'_>,
                        _p_subpass_begin_info: *const SubpassBeginInfo<'_>,
                    ) {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(cmd_begin_render_pass2_khr)
                        ))
                    }
                    let val = _f(c"vkCmdBeginRenderPass2KHR");
                    if val.is_null() {
                        cmd_begin_render_pass2_khr
                    } else {
                        ::core::mem::transmute::<*const c_void, PFN_vkCmdBeginRenderPass2>(val)
                    }
                },
                cmd_next_subpass2_khr: unsafe {
                    unsafe extern "system" fn cmd_next_subpass2_khr(
                        _command_buffer: CommandBuffer,
                        _p_subpass_begin_info: *const SubpassBeginInfo<'_>,
                        _p_subpass_end_info: *const SubpassEndInfo<'_>,
                    ) {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(cmd_next_subpass2_khr)
                        ))
                    }
                    let val = _f(c"vkCmdNextSubpass2KHR");
                    if val.is_null() {
                        cmd_next_subpass2_khr
                    } else {
                        ::core::mem::transmute::<*const c_void, PFN_vkCmdNextSubpass2>(val)
                    }
                },
                cmd_end_render_pass2_khr: unsafe {
                    unsafe extern "system" fn cmd_end_render_pass2_khr(
                        _command_buffer: CommandBuffer,
                        _p_subpass_end_info: *const SubpassEndInfo<'_>,
                    ) {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(cmd_end_render_pass2_khr)
                        ))
                    }
                    let val = _f(c"vkCmdEndRenderPass2KHR");
                    if val.is_null() {
                        cmd_end_render_pass2_khr
                    } else {
                        ::core::mem::transmute::<*const c_void, PFN_vkCmdEndRenderPass2>(val)
                    }
                },
            }
        }
    }
}
#[doc = "VK_KHR_shared_presentable_image"]
pub mod shared_presentable_image {
    use super::super::*;
    // Conventional re-exports of this extension's name/version constants.
    pub use {
        crate::vk::KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME as NAME,
        crate::vk::KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION as SPEC_VERSION,
    };
    #[doc = "VK_KHR_shared_presentable_image device-level functions"]
    #[derive(Clone)]
    pub struct Device {
        // Function-pointer table resolved for `handle`.
        pub(crate) fp: DeviceFn,
        // Device handle the pointers were loaded against.
        pub(crate) handle: crate::vk::Device,
    }
    impl Device {
        // Loads this extension's device-level entry points for `device`
        // via `vkGetDeviceProcAddr` obtained from `instance`.
        pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
            let handle = device.handle();
            let fp = DeviceFn::load(|name| unsafe {
                // SAFETY: optional function pointer -> raw pointer; failed
                // lookups become null and are handled by `load`.
                core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                    instance.get_device_proc_addr(handle, name.as_ptr()),
                )
            });
            Self { handle, fp }
        }
        // Access the raw function-pointer table.
        #[inline]
        pub fn fp(&self) -> &DeviceFn {
            &self.fp
        }
        // Device handle this table was loaded for.
        #[inline]
        pub fn device(&self) -> crate::vk::Device {
            self.handle
        }
    }
    #[derive(Clone)]
    #[doc = "Raw VK_KHR_shared_presentable_image device-level function pointers"]
    pub struct DeviceFn {
        pub get_swapchain_status_khr: PFN_vkGetSwapchainStatusKHR,
    }
    // SAFETY: plain function pointers are safe to send/share across threads.
    unsafe impl Send for DeviceFn {}
    unsafe impl Sync for DeviceFn {}
    impl DeviceFn {
        // Resolves each entry point by name through `f`.
        pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
            Self::load_erased(&mut f)
        }
        // Type-erased loader shared by all monomorphizations of `load`.
        fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
            Self {
                get_swapchain_status_khr: unsafe {
                    // Panicking stub installed when the entry point is missing.
                    unsafe extern "system" fn get_swapchain_status_khr(
                        _device: crate::vk::Device,
                        _swapchain: SwapchainKHR,
                    ) -> Result {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(get_swapchain_status_khr)
                        ))
                    }
                    let val = _f(c"vkGetSwapchainStatusKHR");
                    if val.is_null() {
                        get_swapchain_status_khr
                    } else {
                        ::core::mem::transmute::<*const c_void, PFN_vkGetSwapchainStatusKHR>(
                            val,
                        )
                    }
                },
            }
        }
    }
}
#[doc = "VK_KHR_external_fence_capabilities"]
pub mod external_fence_capabilities {
    use super::super::*;
    // Conventional re-exports of this extension's name/version constants.
    pub use {
        crate::vk::KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME as NAME,
        crate::vk::KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION as SPEC_VERSION,
    };
    #[doc = "VK_KHR_external_fence_capabilities instance-level functions"]
    #[derive(Clone)]
    pub struct Instance {
        // Function-pointer table resolved for `handle`.
        pub(crate) fp: InstanceFn,
        // Instance handle the pointers were loaded against.
        pub(crate) handle: crate::vk::Instance,
    }
    impl Instance {
        // Loads this extension's instance-level entry points for `instance`
        // via `vkGetInstanceProcAddr` obtained from `entry`.
        pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
            let handle = instance.handle();
            let fp = InstanceFn::load(|name| unsafe {
                // SAFETY: optional function pointer -> raw pointer; failed
                // lookups become null and are handled by `load`.
                core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                    entry.get_instance_proc_addr(handle, name.as_ptr()),
                )
            });
            Self { handle, fp }
        }
        // Access the raw function-pointer table.
        #[inline]
        pub fn fp(&self) -> &InstanceFn {
            &self.fp
        }
        // Instance handle this table was loaded for.
        #[inline]
        pub fn instance(&self) -> crate::vk::Instance {
            self.handle
        }
    }
    #[derive(Clone)]
    #[doc = "Raw VK_KHR_external_fence_capabilities instance-level function pointers"]
    pub struct InstanceFn {
        // Promoted-to-core alias type: extension was folded into Vulkan 1.1.
        pub get_physical_device_external_fence_properties_khr:
            PFN_vkGetPhysicalDeviceExternalFenceProperties,
    }
    // SAFETY: plain function pointers are safe to send/share across threads.
    unsafe impl Send for InstanceFn {}
    unsafe impl Sync for InstanceFn {}
    impl InstanceFn {
        // Resolves each entry point by name through `f`.
        pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
            Self::load_erased(&mut f)
        }
        // Type-erased loader shared by all monomorphizations of `load`.
        fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
            Self {
                get_physical_device_external_fence_properties_khr: unsafe {
                    // Panicking stub installed when the entry point is missing.
                    unsafe extern "system" fn get_physical_device_external_fence_properties_khr(
                        _physical_device: PhysicalDevice,
                        _p_external_fence_info: *const PhysicalDeviceExternalFenceInfo<'_>,
                        _p_external_fence_properties: *mut ExternalFenceProperties<'_>,
                    ) {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(get_physical_device_external_fence_properties_khr)
                        ))
                    }
                    let val = _f(c"vkGetPhysicalDeviceExternalFencePropertiesKHR");
                    if val.is_null() {
                        get_physical_device_external_fence_properties_khr
                    } else {
                        ::core::mem::transmute::<
                            *const c_void,
                            PFN_vkGetPhysicalDeviceExternalFenceProperties,
                        >(val)
                    }
                },
            }
        }
    }
}
14988 #[doc = "VK_KHR_external_fence"]
14989 pub mod external_fence {
14990 use super::super::*;
14991 pub use {
14992 crate::vk::KHR_EXTERNAL_FENCE_EXTENSION_NAME as NAME,
14993 crate::vk::KHR_EXTERNAL_FENCE_SPEC_VERSION as SPEC_VERSION,
14994 };
14995 }
#[doc = "VK_KHR_external_fence_win32"]
pub mod external_fence_win32 {
    use super::super::*;
    // Conventional re-exports of this extension's name/version constants.
    pub use {
        crate::vk::KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME as NAME,
        crate::vk::KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION as SPEC_VERSION,
    };
    #[doc = "VK_KHR_external_fence_win32 device-level functions"]
    #[derive(Clone)]
    pub struct Device {
        // Function-pointer table resolved for `handle`.
        pub(crate) fp: DeviceFn,
        // Device handle the pointers were loaded against.
        pub(crate) handle: crate::vk::Device,
    }
    impl Device {
        // Loads this extension's device-level entry points for `device`
        // via `vkGetDeviceProcAddr` obtained from `instance`.
        pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
            let handle = device.handle();
            let fp = DeviceFn::load(|name| unsafe {
                // SAFETY: optional function pointer -> raw pointer; failed
                // lookups become null and are handled by `load`.
                core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                    instance.get_device_proc_addr(handle, name.as_ptr()),
                )
            });
            Self { handle, fp }
        }
        // Access the raw function-pointer table.
        #[inline]
        pub fn fp(&self) -> &DeviceFn {
            &self.fp
        }
        // Device handle this table was loaded for.
        #[inline]
        pub fn device(&self) -> crate::vk::Device {
            self.handle
        }
    }
    #[derive(Clone)]
    #[doc = "Raw VK_KHR_external_fence_win32 device-level function pointers"]
    pub struct DeviceFn {
        pub import_fence_win32_handle_khr: PFN_vkImportFenceWin32HandleKHR,
        pub get_fence_win32_handle_khr: PFN_vkGetFenceWin32HandleKHR,
    }
    // SAFETY: plain function pointers are safe to send/share across threads.
    unsafe impl Send for DeviceFn {}
    unsafe impl Sync for DeviceFn {}
    impl DeviceFn {
        // Resolves each entry point by name through `f`.
        pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
            Self::load_erased(&mut f)
        }
        // Type-erased loader shared by all monomorphizations of `load`.
        fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
            Self {
                import_fence_win32_handle_khr: unsafe {
                    // Panicking stub installed when the entry point is missing.
                    unsafe extern "system" fn import_fence_win32_handle_khr(
                        _device: crate::vk::Device,
                        _p_import_fence_win32_handle_info: *const ImportFenceWin32HandleInfoKHR<
                            '_,
                        >,
                    ) -> Result {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(import_fence_win32_handle_khr)
                        ))
                    }
                    let val = _f(c"vkImportFenceWin32HandleKHR");
                    if val.is_null() {
                        import_fence_win32_handle_khr
                    } else {
                        ::core::mem::transmute::<*const c_void, PFN_vkImportFenceWin32HandleKHR>(
                            val,
                        )
                    }
                },
                // Same lookup-or-panicking-stub pattern as above.
                get_fence_win32_handle_khr: unsafe {
                    unsafe extern "system" fn get_fence_win32_handle_khr(
                        _device: crate::vk::Device,
                        _p_get_win32_handle_info: *const FenceGetWin32HandleInfoKHR<'_>,
                        _p_handle: *mut HANDLE,
                    ) -> Result {
                        panic!(concat!(
                            "Unable to load ",
                            stringify!(get_fence_win32_handle_khr)
                        ))
                    }
                    let val = _f(c"vkGetFenceWin32HandleKHR");
                    if val.is_null() {
                        get_fence_win32_handle_khr
                    } else {
                        ::core::mem::transmute::<*const c_void, PFN_vkGetFenceWin32HandleKHR>(
                            val,
                        )
                    }
                },
            }
        }
    }
}
15087 #[doc = "VK_KHR_external_fence_fd"]
15088 pub mod external_fence_fd {
15089 use super::super::*;
15090 pub use {
15091 crate::vk::KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME as NAME,
15092 crate::vk::KHR_EXTERNAL_FENCE_FD_SPEC_VERSION as SPEC_VERSION,
15093 };
15094 #[doc = "VK_KHR_external_fence_fd device-level functions"]
15095 #[derive(Clone)]
15096 pub struct Device {
15097 pub(crate) fp: DeviceFn,
15098 pub(crate) handle: crate::vk::Device,
15099 }
15100 impl Device {
15101 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
15102 let handle = device.handle();
15103 let fp = DeviceFn::load(|name| unsafe {
15104 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
15105 instance.get_device_proc_addr(handle, name.as_ptr()),
15106 )
15107 });
15108 Self { handle, fp }
15109 }
15110 #[inline]
15111 pub fn fp(&self) -> &DeviceFn {
15112 &self.fp
15113 }
15114 #[inline]
15115 pub fn device(&self) -> crate::vk::Device {
15116 self.handle
15117 }
15118 }
15119 #[derive(Clone)]
15120 #[doc = "Raw VK_KHR_external_fence_fd device-level function pointers"]
15121 pub struct DeviceFn {
15122 pub import_fence_fd_khr: PFN_vkImportFenceFdKHR,
15123 pub get_fence_fd_khr: PFN_vkGetFenceFdKHR,
15124 }
15125 unsafe impl Send for DeviceFn {}
15126 unsafe impl Sync for DeviceFn {}
15127 impl DeviceFn {
15128 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
15129 Self::load_erased(&mut f)
15130 }
15131 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
15132 Self {
15133 import_fence_fd_khr: unsafe {
15134 unsafe extern "system" fn import_fence_fd_khr(
15135 _device: crate::vk::Device,
15136 _p_import_fence_fd_info: *const ImportFenceFdInfoKHR<'_>,
15137 ) -> Result {
15138 panic!(concat!("Unable to load ", stringify!(import_fence_fd_khr)))
15139 }
15140 let val = _f(c"vkImportFenceFdKHR");
15141 if val.is_null() {
15142 import_fence_fd_khr
15143 } else {
15144 ::core::mem::transmute::<*const c_void, PFN_vkImportFenceFdKHR>(val)
15145 }
15146 },
15147 get_fence_fd_khr: unsafe {
15148 unsafe extern "system" fn get_fence_fd_khr(
15149 _device: crate::vk::Device,
15150 _p_get_fd_info: *const FenceGetFdInfoKHR<'_>,
15151 _p_fd: *mut c_int,
15152 ) -> Result {
15153 panic!(concat!("Unable to load ", stringify!(get_fence_fd_khr)))
15154 }
15155 let val = _f(c"vkGetFenceFdKHR");
15156 if val.is_null() {
15157 get_fence_fd_khr
15158 } else {
15159 ::core::mem::transmute::<*const c_void, PFN_vkGetFenceFdKHR>(val)
15160 }
15161 },
15162 }
15163 }
15164 }
15165 }
15166 #[doc = "VK_KHR_performance_query"]
15167 pub mod performance_query {
15168 use super::super::*;
15169 pub use {
15170 crate::vk::KHR_PERFORMANCE_QUERY_EXTENSION_NAME as NAME,
15171 crate::vk::KHR_PERFORMANCE_QUERY_SPEC_VERSION as SPEC_VERSION,
15172 };
15173 #[doc = "VK_KHR_performance_query instance-level functions"]
15174 #[derive(Clone)]
15175 pub struct Instance {
15176 pub(crate) fp: InstanceFn,
15177 pub(crate) handle: crate::vk::Instance,
15178 }
15179 impl Instance {
15180 pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
15181 let handle = instance.handle();
15182 let fp = InstanceFn::load(|name| unsafe {
15183 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
15184 entry.get_instance_proc_addr(handle, name.as_ptr()),
15185 )
15186 });
15187 Self { handle, fp }
15188 }
15189 #[inline]
15190 pub fn fp(&self) -> &InstanceFn {
15191 &self.fp
15192 }
15193 #[inline]
15194 pub fn instance(&self) -> crate::vk::Instance {
15195 self.handle
15196 }
15197 }
15198 #[derive(Clone)]
15199 #[doc = "Raw VK_KHR_performance_query instance-level function pointers"]
15200 pub struct InstanceFn {
15201 pub enumerate_physical_device_queue_family_performance_query_counters_khr:
15202 PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR,
15203 pub get_physical_device_queue_family_performance_query_passes_khr:
15204 PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR,
15205 }
15206 unsafe impl Send for InstanceFn {}
15207 unsafe impl Sync for InstanceFn {}
15208 impl InstanceFn {
15209 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
15210 Self::load_erased(&mut f)
15211 }
15212 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
15213 Self {
15214 enumerate_physical_device_queue_family_performance_query_counters_khr: unsafe {
15215 unsafe extern "system" fn enumerate_physical_device_queue_family_performance_query_counters_khr(
15216 _physical_device: PhysicalDevice,
15217 _queue_family_index: u32,
15218 _p_counter_count: *mut u32,
15219 _p_counters: *mut PerformanceCounterKHR<'_>,
15220 _p_counter_descriptions: *mut PerformanceCounterDescriptionKHR<'_>,
15221 ) -> Result {
15222 panic ! (concat ! ("Unable to load " , stringify ! (enumerate_physical_device_queue_family_performance_query_counters_khr)))
15223 }
15224 let val =
15225 _f(c"vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR");
15226 if val.is_null() {
15227 enumerate_physical_device_queue_family_performance_query_counters_khr
15228 } else {
15229 ::core::mem::transmute::<
15230 *const c_void,
15231 PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR,
15232 >(val)
15233 }
15234 },
15235 get_physical_device_queue_family_performance_query_passes_khr: unsafe {
15236 unsafe extern "system" fn get_physical_device_queue_family_performance_query_passes_khr(
15237 _physical_device: PhysicalDevice,
15238 _p_performance_query_create_info : * const QueryPoolPerformanceCreateInfoKHR < '_ >,
15239 _p_num_passes: *mut u32,
15240 ) {
15241 panic!(concat!(
15242 "Unable to load ",
15243 stringify!(
15244 get_physical_device_queue_family_performance_query_passes_khr
15245 )
15246 ))
15247 }
15248 let val = _f(c"vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR");
15249 if val.is_null() {
15250 get_physical_device_queue_family_performance_query_passes_khr
15251 } else {
15252 ::core::mem::transmute::<
15253 *const c_void,
15254 PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR,
15255 >(val)
15256 }
15257 },
15258 }
15259 }
15260 }
15261 #[doc = "VK_KHR_performance_query device-level functions"]
15262 #[derive(Clone)]
15263 pub struct Device {
15264 pub(crate) fp: DeviceFn,
15265 pub(crate) handle: crate::vk::Device,
15266 }
15267 impl Device {
15268 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
15269 let handle = device.handle();
15270 let fp = DeviceFn::load(|name| unsafe {
15271 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
15272 instance.get_device_proc_addr(handle, name.as_ptr()),
15273 )
15274 });
15275 Self { handle, fp }
15276 }
15277 #[inline]
15278 pub fn fp(&self) -> &DeviceFn {
15279 &self.fp
15280 }
15281 #[inline]
15282 pub fn device(&self) -> crate::vk::Device {
15283 self.handle
15284 }
15285 }
15286 #[derive(Clone)]
15287 #[doc = "Raw VK_KHR_performance_query device-level function pointers"]
15288 pub struct DeviceFn {
15289 pub acquire_profiling_lock_khr: PFN_vkAcquireProfilingLockKHR,
15290 pub release_profiling_lock_khr: PFN_vkReleaseProfilingLockKHR,
15291 }
15292 unsafe impl Send for DeviceFn {}
15293 unsafe impl Sync for DeviceFn {}
15294 impl DeviceFn {
15295 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
15296 Self::load_erased(&mut f)
15297 }
15298 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
15299 Self {
15300 acquire_profiling_lock_khr: unsafe {
15301 unsafe extern "system" fn acquire_profiling_lock_khr(
15302 _device: crate::vk::Device,
15303 _p_info: *const AcquireProfilingLockInfoKHR<'_>,
15304 ) -> Result {
15305 panic!(concat!(
15306 "Unable to load ",
15307 stringify!(acquire_profiling_lock_khr)
15308 ))
15309 }
15310 let val = _f(c"vkAcquireProfilingLockKHR");
15311 if val.is_null() {
15312 acquire_profiling_lock_khr
15313 } else {
15314 ::core::mem::transmute::<*const c_void, PFN_vkAcquireProfilingLockKHR>(
15315 val,
15316 )
15317 }
15318 },
15319 release_profiling_lock_khr: unsafe {
15320 unsafe extern "system" fn release_profiling_lock_khr(
15321 _device: crate::vk::Device,
15322 ) {
15323 panic!(concat!(
15324 "Unable to load ",
15325 stringify!(release_profiling_lock_khr)
15326 ))
15327 }
15328 let val = _f(c"vkReleaseProfilingLockKHR");
15329 if val.is_null() {
15330 release_profiling_lock_khr
15331 } else {
15332 ::core::mem::transmute::<*const c_void, PFN_vkReleaseProfilingLockKHR>(
15333 val,
15334 )
15335 }
15336 },
15337 }
15338 }
15339 }
15340 }
15341 #[doc = "VK_KHR_maintenance2"]
15342 pub mod maintenance2 {
15343 use super::super::*;
15344 pub use {
15345 crate::vk::KHR_MAINTENANCE_2_EXTENSION_NAME as NAME,
15346 crate::vk::KHR_MAINTENANCE_2_SPEC_VERSION as SPEC_VERSION,
15347 };
15348 }
15349 #[doc = "VK_KHR_get_surface_capabilities2"]
15350 pub mod get_surface_capabilities2 {
15351 use super::super::*;
15352 pub use {
15353 crate::vk::KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME as NAME,
15354 crate::vk::KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION as SPEC_VERSION,
15355 };
15356 #[doc = "VK_KHR_get_surface_capabilities2 instance-level functions"]
15357 #[derive(Clone)]
15358 pub struct Instance {
15359 pub(crate) fp: InstanceFn,
15360 pub(crate) handle: crate::vk::Instance,
15361 }
15362 impl Instance {
15363 pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
15364 let handle = instance.handle();
15365 let fp = InstanceFn::load(|name| unsafe {
15366 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
15367 entry.get_instance_proc_addr(handle, name.as_ptr()),
15368 )
15369 });
15370 Self { handle, fp }
15371 }
15372 #[inline]
15373 pub fn fp(&self) -> &InstanceFn {
15374 &self.fp
15375 }
15376 #[inline]
15377 pub fn instance(&self) -> crate::vk::Instance {
15378 self.handle
15379 }
15380 }
15381 #[derive(Clone)]
15382 #[doc = "Raw VK_KHR_get_surface_capabilities2 instance-level function pointers"]
15383 pub struct InstanceFn {
15384 pub get_physical_device_surface_capabilities2_khr:
15385 PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR,
15386 pub get_physical_device_surface_formats2_khr: PFN_vkGetPhysicalDeviceSurfaceFormats2KHR,
15387 }
15388 unsafe impl Send for InstanceFn {}
15389 unsafe impl Sync for InstanceFn {}
15390 impl InstanceFn {
15391 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
15392 Self::load_erased(&mut f)
15393 }
15394 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
15395 Self {
15396 get_physical_device_surface_capabilities2_khr: unsafe {
15397 unsafe extern "system" fn get_physical_device_surface_capabilities2_khr(
15398 _physical_device: PhysicalDevice,
15399 _p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR<'_>,
15400 _p_surface_capabilities: *mut SurfaceCapabilities2KHR<'_>,
15401 ) -> Result {
15402 panic!(concat!(
15403 "Unable to load ",
15404 stringify!(get_physical_device_surface_capabilities2_khr)
15405 ))
15406 }
15407 let val = _f(c"vkGetPhysicalDeviceSurfaceCapabilities2KHR");
15408 if val.is_null() {
15409 get_physical_device_surface_capabilities2_khr
15410 } else {
15411 ::core::mem::transmute::<
15412 *const c_void,
15413 PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR,
15414 >(val)
15415 }
15416 },
15417 get_physical_device_surface_formats2_khr: unsafe {
15418 unsafe extern "system" fn get_physical_device_surface_formats2_khr(
15419 _physical_device: PhysicalDevice,
15420 _p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR<'_>,
15421 _p_surface_format_count: *mut u32,
15422 _p_surface_formats: *mut SurfaceFormat2KHR<'_>,
15423 ) -> Result {
15424 panic!(concat!(
15425 "Unable to load ",
15426 stringify!(get_physical_device_surface_formats2_khr)
15427 ))
15428 }
15429 let val = _f(c"vkGetPhysicalDeviceSurfaceFormats2KHR");
15430 if val.is_null() {
15431 get_physical_device_surface_formats2_khr
15432 } else {
15433 ::core::mem::transmute::<
15434 *const c_void,
15435 PFN_vkGetPhysicalDeviceSurfaceFormats2KHR,
15436 >(val)
15437 }
15438 },
15439 }
15440 }
15441 }
15442 }
15443 #[doc = "VK_KHR_variable_pointers"]
15444 pub mod variable_pointers {
15445 use super::super::*;
15446 pub use {
15447 crate::vk::KHR_VARIABLE_POINTERS_EXTENSION_NAME as NAME,
15448 crate::vk::KHR_VARIABLE_POINTERS_SPEC_VERSION as SPEC_VERSION,
15449 };
15450 }
15451 #[doc = "VK_KHR_get_display_properties2"]
15452 pub mod get_display_properties2 {
15453 use super::super::*;
15454 pub use {
15455 crate::vk::KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME as NAME,
15456 crate::vk::KHR_GET_DISPLAY_PROPERTIES_2_SPEC_VERSION as SPEC_VERSION,
15457 };
15458 #[doc = "VK_KHR_get_display_properties2 instance-level functions"]
15459 #[derive(Clone)]
15460 pub struct Instance {
15461 pub(crate) fp: InstanceFn,
15462 pub(crate) handle: crate::vk::Instance,
15463 }
15464 impl Instance {
15465 pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
15466 let handle = instance.handle();
15467 let fp = InstanceFn::load(|name| unsafe {
15468 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
15469 entry.get_instance_proc_addr(handle, name.as_ptr()),
15470 )
15471 });
15472 Self { handle, fp }
15473 }
15474 #[inline]
15475 pub fn fp(&self) -> &InstanceFn {
15476 &self.fp
15477 }
15478 #[inline]
15479 pub fn instance(&self) -> crate::vk::Instance {
15480 self.handle
15481 }
15482 }
15483 #[derive(Clone)]
15484 #[doc = "Raw VK_KHR_get_display_properties2 instance-level function pointers"]
15485 pub struct InstanceFn {
15486 pub get_physical_device_display_properties2_khr:
15487 PFN_vkGetPhysicalDeviceDisplayProperties2KHR,
15488 pub get_physical_device_display_plane_properties2_khr:
15489 PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR,
15490 pub get_display_mode_properties2_khr: PFN_vkGetDisplayModeProperties2KHR,
15491 pub get_display_plane_capabilities2_khr: PFN_vkGetDisplayPlaneCapabilities2KHR,
15492 }
15493 unsafe impl Send for InstanceFn {}
15494 unsafe impl Sync for InstanceFn {}
15495 impl InstanceFn {
15496 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
15497 Self::load_erased(&mut f)
15498 }
15499 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
15500 Self {
15501 get_physical_device_display_properties2_khr: unsafe {
15502 unsafe extern "system" fn get_physical_device_display_properties2_khr(
15503 _physical_device: PhysicalDevice,
15504 _p_property_count: *mut u32,
15505 _p_properties: *mut DisplayProperties2KHR<'_>,
15506 ) -> Result {
15507 panic!(concat!(
15508 "Unable to load ",
15509 stringify!(get_physical_device_display_properties2_khr)
15510 ))
15511 }
15512 let val = _f(c"vkGetPhysicalDeviceDisplayProperties2KHR");
15513 if val.is_null() {
15514 get_physical_device_display_properties2_khr
15515 } else {
15516 ::core::mem::transmute::<
15517 *const c_void,
15518 PFN_vkGetPhysicalDeviceDisplayProperties2KHR,
15519 >(val)
15520 }
15521 },
15522 get_physical_device_display_plane_properties2_khr: unsafe {
15523 unsafe extern "system" fn get_physical_device_display_plane_properties2_khr(
15524 _physical_device: PhysicalDevice,
15525 _p_property_count: *mut u32,
15526 _p_properties: *mut DisplayPlaneProperties2KHR<'_>,
15527 ) -> Result {
15528 panic!(concat!(
15529 "Unable to load ",
15530 stringify!(get_physical_device_display_plane_properties2_khr)
15531 ))
15532 }
15533 let val = _f(c"vkGetPhysicalDeviceDisplayPlaneProperties2KHR");
15534 if val.is_null() {
15535 get_physical_device_display_plane_properties2_khr
15536 } else {
15537 ::core::mem::transmute::<
15538 *const c_void,
15539 PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR,
15540 >(val)
15541 }
15542 },
15543 get_display_mode_properties2_khr: unsafe {
15544 unsafe extern "system" fn get_display_mode_properties2_khr(
15545 _physical_device: PhysicalDevice,
15546 _display: DisplayKHR,
15547 _p_property_count: *mut u32,
15548 _p_properties: *mut DisplayModeProperties2KHR<'_>,
15549 ) -> Result {
15550 panic!(concat!(
15551 "Unable to load ",
15552 stringify!(get_display_mode_properties2_khr)
15553 ))
15554 }
15555 let val = _f(c"vkGetDisplayModeProperties2KHR");
15556 if val.is_null() {
15557 get_display_mode_properties2_khr
15558 } else {
15559 ::core::mem::transmute::<
15560 *const c_void,
15561 PFN_vkGetDisplayModeProperties2KHR,
15562 >(val)
15563 }
15564 },
15565 get_display_plane_capabilities2_khr: unsafe {
15566 unsafe extern "system" fn get_display_plane_capabilities2_khr(
15567 _physical_device: PhysicalDevice,
15568 _p_display_plane_info: *const DisplayPlaneInfo2KHR<'_>,
15569 _p_capabilities: *mut DisplayPlaneCapabilities2KHR<'_>,
15570 ) -> Result {
15571 panic!(concat!(
15572 "Unable to load ",
15573 stringify!(get_display_plane_capabilities2_khr)
15574 ))
15575 }
15576 let val = _f(c"vkGetDisplayPlaneCapabilities2KHR");
15577 if val.is_null() {
15578 get_display_plane_capabilities2_khr
15579 } else {
15580 ::core::mem::transmute::<
15581 *const c_void,
15582 PFN_vkGetDisplayPlaneCapabilities2KHR,
15583 >(val)
15584 }
15585 },
15586 }
15587 }
15588 }
15589 }
15590 #[doc = "VK_KHR_dedicated_allocation"]
15591 pub mod dedicated_allocation {
15592 use super::super::*;
15593 pub use {
15594 crate::vk::KHR_DEDICATED_ALLOCATION_EXTENSION_NAME as NAME,
15595 crate::vk::KHR_DEDICATED_ALLOCATION_SPEC_VERSION as SPEC_VERSION,
15596 };
15597 }
15598 #[doc = "VK_KHR_storage_buffer_storage_class"]
15599 pub mod storage_buffer_storage_class {
15600 use super::super::*;
15601 pub use {
15602 crate::vk::KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME as NAME,
15603 crate::vk::KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION as SPEC_VERSION,
15604 };
15605 }
15606 #[doc = "VK_KHR_shader_bfloat16"]
15607 pub mod shader_bfloat16 {
15608 use super::super::*;
15609 pub use {
15610 crate::vk::KHR_SHADER_BFLOAT16_EXTENSION_NAME as NAME,
15611 crate::vk::KHR_SHADER_BFLOAT16_SPEC_VERSION as SPEC_VERSION,
15612 };
15613 }
15614 #[doc = "VK_KHR_relaxed_block_layout"]
15615 pub mod relaxed_block_layout {
15616 use super::super::*;
15617 pub use {
15618 crate::vk::KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME as NAME,
15619 crate::vk::KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION as SPEC_VERSION,
15620 };
15621 }
15622 #[doc = "VK_KHR_get_memory_requirements2"]
15623 pub mod get_memory_requirements2 {
15624 use super::super::*;
15625 pub use {
15626 crate::vk::KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME as NAME,
15627 crate::vk::KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION as SPEC_VERSION,
15628 };
15629 #[doc = "VK_KHR_get_memory_requirements2 device-level functions"]
15630 #[derive(Clone)]
15631 pub struct Device {
15632 pub(crate) fp: DeviceFn,
15633 pub(crate) handle: crate::vk::Device,
15634 }
15635 impl Device {
15636 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
15637 let handle = device.handle();
15638 let fp = DeviceFn::load(|name| unsafe {
15639 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
15640 instance.get_device_proc_addr(handle, name.as_ptr()),
15641 )
15642 });
15643 Self { handle, fp }
15644 }
15645 #[inline]
15646 pub fn fp(&self) -> &DeviceFn {
15647 &self.fp
15648 }
15649 #[inline]
15650 pub fn device(&self) -> crate::vk::Device {
15651 self.handle
15652 }
15653 }
15654 #[derive(Clone)]
15655 #[doc = "Raw VK_KHR_get_memory_requirements2 device-level function pointers"]
15656 pub struct DeviceFn {
15657 pub get_image_memory_requirements2_khr: PFN_vkGetImageMemoryRequirements2,
15658 pub get_buffer_memory_requirements2_khr: PFN_vkGetBufferMemoryRequirements2,
15659 pub get_image_sparse_memory_requirements2_khr: PFN_vkGetImageSparseMemoryRequirements2,
15660 }
15661 unsafe impl Send for DeviceFn {}
15662 unsafe impl Sync for DeviceFn {}
15663 impl DeviceFn {
15664 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
15665 Self::load_erased(&mut f)
15666 }
15667 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
15668 Self {
15669 get_image_memory_requirements2_khr: unsafe {
15670 unsafe extern "system" fn get_image_memory_requirements2_khr(
15671 _device: crate::vk::Device,
15672 _p_info: *const ImageMemoryRequirementsInfo2<'_>,
15673 _p_memory_requirements: *mut MemoryRequirements2<'_>,
15674 ) {
15675 panic!(concat!(
15676 "Unable to load ",
15677 stringify!(get_image_memory_requirements2_khr)
15678 ))
15679 }
15680 let val = _f(c"vkGetImageMemoryRequirements2KHR");
15681 if val.is_null() {
15682 get_image_memory_requirements2_khr
15683 } else {
15684 ::core::mem::transmute::<*const c_void, PFN_vkGetImageMemoryRequirements2>(
15685 val,
15686 )
15687 }
15688 },
15689 get_buffer_memory_requirements2_khr: unsafe {
15690 unsafe extern "system" fn get_buffer_memory_requirements2_khr(
15691 _device: crate::vk::Device,
15692 _p_info: *const BufferMemoryRequirementsInfo2<'_>,
15693 _p_memory_requirements: *mut MemoryRequirements2<'_>,
15694 ) {
15695 panic!(concat!(
15696 "Unable to load ",
15697 stringify!(get_buffer_memory_requirements2_khr)
15698 ))
15699 }
15700 let val = _f(c"vkGetBufferMemoryRequirements2KHR");
15701 if val.is_null() {
15702 get_buffer_memory_requirements2_khr
15703 } else {
15704 ::core::mem::transmute::<
15705 *const c_void,
15706 PFN_vkGetBufferMemoryRequirements2,
15707 >(val)
15708 }
15709 },
15710 get_image_sparse_memory_requirements2_khr: unsafe {
15711 unsafe extern "system" fn get_image_sparse_memory_requirements2_khr(
15712 _device: crate::vk::Device,
15713 _p_info: *const ImageSparseMemoryRequirementsInfo2<'_>,
15714 _p_sparse_memory_requirement_count: *mut u32,
15715 _p_sparse_memory_requirements: *mut SparseImageMemoryRequirements2<'_>,
15716 ) {
15717 panic!(concat!(
15718 "Unable to load ",
15719 stringify!(get_image_sparse_memory_requirements2_khr)
15720 ))
15721 }
15722 let val = _f(c"vkGetImageSparseMemoryRequirements2KHR");
15723 if val.is_null() {
15724 get_image_sparse_memory_requirements2_khr
15725 } else {
15726 ::core::mem::transmute::<
15727 *const c_void,
15728 PFN_vkGetImageSparseMemoryRequirements2,
15729 >(val)
15730 }
15731 },
15732 }
15733 }
15734 }
15735 }
15736 #[doc = "VK_KHR_image_format_list"]
15737 pub mod image_format_list {
15738 use super::super::*;
15739 pub use {
15740 crate::vk::KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME as NAME,
15741 crate::vk::KHR_IMAGE_FORMAT_LIST_SPEC_VERSION as SPEC_VERSION,
15742 };
15743 }
15744 #[doc = "VK_KHR_acceleration_structure"]
15745 pub mod acceleration_structure {
15746 use super::super::*;
15747 pub use {
15748 crate::vk::KHR_ACCELERATION_STRUCTURE_EXTENSION_NAME as NAME,
15749 crate::vk::KHR_ACCELERATION_STRUCTURE_SPEC_VERSION as SPEC_VERSION,
15750 };
15751 #[doc = "VK_KHR_acceleration_structure device-level functions"]
15752 #[derive(Clone)]
15753 pub struct Device {
15754 pub(crate) fp: DeviceFn,
15755 pub(crate) handle: crate::vk::Device,
15756 }
15757 impl Device {
15758 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
15759 let handle = device.handle();
15760 let fp = DeviceFn::load(|name| unsafe {
15761 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
15762 instance.get_device_proc_addr(handle, name.as_ptr()),
15763 )
15764 });
15765 Self { handle, fp }
15766 }
15767 #[inline]
15768 pub fn fp(&self) -> &DeviceFn {
15769 &self.fp
15770 }
15771 #[inline]
15772 pub fn device(&self) -> crate::vk::Device {
15773 self.handle
15774 }
15775 }
15776 #[derive(Clone)]
15777 #[doc = "Raw VK_KHR_acceleration_structure device-level function pointers"]
15778 pub struct DeviceFn {
15779 pub create_acceleration_structure_khr: PFN_vkCreateAccelerationStructureKHR,
15780 pub destroy_acceleration_structure_khr: PFN_vkDestroyAccelerationStructureKHR,
15781 pub cmd_build_acceleration_structures_khr: PFN_vkCmdBuildAccelerationStructuresKHR,
15782 pub cmd_build_acceleration_structures_indirect_khr:
15783 PFN_vkCmdBuildAccelerationStructuresIndirectKHR,
15784 pub build_acceleration_structures_khr: PFN_vkBuildAccelerationStructuresKHR,
15785 pub copy_acceleration_structure_khr: PFN_vkCopyAccelerationStructureKHR,
15786 pub copy_acceleration_structure_to_memory_khr:
15787 PFN_vkCopyAccelerationStructureToMemoryKHR,
15788 pub copy_memory_to_acceleration_structure_khr:
15789 PFN_vkCopyMemoryToAccelerationStructureKHR,
15790 pub write_acceleration_structures_properties_khr:
15791 PFN_vkWriteAccelerationStructuresPropertiesKHR,
15792 pub cmd_copy_acceleration_structure_khr: PFN_vkCmdCopyAccelerationStructureKHR,
15793 pub cmd_copy_acceleration_structure_to_memory_khr:
15794 PFN_vkCmdCopyAccelerationStructureToMemoryKHR,
15795 pub cmd_copy_memory_to_acceleration_structure_khr:
15796 PFN_vkCmdCopyMemoryToAccelerationStructureKHR,
15797 pub get_acceleration_structure_device_address_khr:
15798 PFN_vkGetAccelerationStructureDeviceAddressKHR,
15799 pub cmd_write_acceleration_structures_properties_khr:
15800 PFN_vkCmdWriteAccelerationStructuresPropertiesKHR,
15801 pub get_device_acceleration_structure_compatibility_khr:
15802 PFN_vkGetDeviceAccelerationStructureCompatibilityKHR,
15803 pub get_acceleration_structure_build_sizes_khr:
15804 PFN_vkGetAccelerationStructureBuildSizesKHR,
15805 }
15806 unsafe impl Send for DeviceFn {}
15807 unsafe impl Sync for DeviceFn {}
15808 impl DeviceFn {
15809 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
15810 Self::load_erased(&mut f)
15811 }
15812 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
15813 Self {
15814 create_acceleration_structure_khr: unsafe {
15815 unsafe extern "system" fn create_acceleration_structure_khr(
15816 _device: crate::vk::Device,
15817 _p_create_info: *const AccelerationStructureCreateInfoKHR<'_>,
15818 _p_allocator: *const AllocationCallbacks<'_>,
15819 _p_acceleration_structure: *mut AccelerationStructureKHR,
15820 ) -> Result {
15821 panic!(concat!(
15822 "Unable to load ",
15823 stringify!(create_acceleration_structure_khr)
15824 ))
15825 }
15826 let val = _f(c"vkCreateAccelerationStructureKHR");
15827 if val.is_null() {
15828 create_acceleration_structure_khr
15829 } else {
15830 ::core::mem::transmute::<
15831 *const c_void,
15832 PFN_vkCreateAccelerationStructureKHR,
15833 >(val)
15834 }
15835 },
15836 destroy_acceleration_structure_khr: unsafe {
15837 unsafe extern "system" fn destroy_acceleration_structure_khr(
15838 _device: crate::vk::Device,
15839 _acceleration_structure: AccelerationStructureKHR,
15840 _p_allocator: *const AllocationCallbacks<'_>,
15841 ) {
15842 panic!(concat!(
15843 "Unable to load ",
15844 stringify!(destroy_acceleration_structure_khr)
15845 ))
15846 }
15847 let val = _f(c"vkDestroyAccelerationStructureKHR");
15848 if val.is_null() {
15849 destroy_acceleration_structure_khr
15850 } else {
15851 ::core::mem::transmute::<
15852 *const c_void,
15853 PFN_vkDestroyAccelerationStructureKHR,
15854 >(val)
15855 }
15856 },
15857 cmd_build_acceleration_structures_khr: unsafe {
15858 unsafe extern "system" fn cmd_build_acceleration_structures_khr(
15859 _command_buffer: CommandBuffer,
15860 _info_count: u32,
15861 _p_infos: *const AccelerationStructureBuildGeometryInfoKHR<'_>,
15862 _pp_build_range_infos : * const * const AccelerationStructureBuildRangeInfoKHR,
15863 ) {
15864 panic!(concat!(
15865 "Unable to load ",
15866 stringify!(cmd_build_acceleration_structures_khr)
15867 ))
15868 }
15869 let val = _f(c"vkCmdBuildAccelerationStructuresKHR");
15870 if val.is_null() {
15871 cmd_build_acceleration_structures_khr
15872 } else {
15873 ::core::mem::transmute::<
15874 *const c_void,
15875 PFN_vkCmdBuildAccelerationStructuresKHR,
15876 >(val)
15877 }
15878 },
15879 cmd_build_acceleration_structures_indirect_khr: unsafe {
15880 unsafe extern "system" fn cmd_build_acceleration_structures_indirect_khr(
15881 _command_buffer: CommandBuffer,
15882 _info_count: u32,
15883 _p_infos: *const AccelerationStructureBuildGeometryInfoKHR<'_>,
15884 _p_indirect_device_addresses: *const DeviceAddress,
15885 _p_indirect_strides: *const u32,
15886 _pp_max_primitive_counts: *const *const u32,
15887 ) {
15888 panic!(concat!(
15889 "Unable to load ",
15890 stringify!(cmd_build_acceleration_structures_indirect_khr)
15891 ))
15892 }
15893 let val = _f(c"vkCmdBuildAccelerationStructuresIndirectKHR");
15894 if val.is_null() {
15895 cmd_build_acceleration_structures_indirect_khr
15896 } else {
15897 ::core::mem::transmute::<
15898 *const c_void,
15899 PFN_vkCmdBuildAccelerationStructuresIndirectKHR,
15900 >(val)
15901 }
15902 },
15903 build_acceleration_structures_khr: unsafe {
15904 unsafe extern "system" fn build_acceleration_structures_khr(
15905 _device: crate::vk::Device,
15906 _deferred_operation: DeferredOperationKHR,
15907 _info_count: u32,
15908 _p_infos: *const AccelerationStructureBuildGeometryInfoKHR<'_>,
15909 _pp_build_range_infos : * const * const AccelerationStructureBuildRangeInfoKHR,
15910 ) -> Result {
15911 panic!(concat!(
15912 "Unable to load ",
15913 stringify!(build_acceleration_structures_khr)
15914 ))
15915 }
15916 let val = _f(c"vkBuildAccelerationStructuresKHR");
15917 if val.is_null() {
15918 build_acceleration_structures_khr
15919 } else {
15920 ::core::mem::transmute::<
15921 *const c_void,
15922 PFN_vkBuildAccelerationStructuresKHR,
15923 >(val)
15924 }
15925 },
15926 copy_acceleration_structure_khr: unsafe {
15927 unsafe extern "system" fn copy_acceleration_structure_khr(
15928 _device: crate::vk::Device,
15929 _deferred_operation: DeferredOperationKHR,
15930 _p_info: *const CopyAccelerationStructureInfoKHR<'_>,
15931 ) -> Result {
15932 panic!(concat!(
15933 "Unable to load ",
15934 stringify!(copy_acceleration_structure_khr)
15935 ))
15936 }
15937 let val = _f(c"vkCopyAccelerationStructureKHR");
15938 if val.is_null() {
15939 copy_acceleration_structure_khr
15940 } else {
15941 ::core::mem::transmute::<
15942 *const c_void,
15943 PFN_vkCopyAccelerationStructureKHR,
15944 >(val)
15945 }
15946 },
15947 copy_acceleration_structure_to_memory_khr: unsafe {
15948 unsafe extern "system" fn copy_acceleration_structure_to_memory_khr(
15949 _device: crate::vk::Device,
15950 _deferred_operation: DeferredOperationKHR,
15951 _p_info: *const CopyAccelerationStructureToMemoryInfoKHR<'_>,
15952 ) -> Result {
15953 panic!(concat!(
15954 "Unable to load ",
15955 stringify!(copy_acceleration_structure_to_memory_khr)
15956 ))
15957 }
15958 let val = _f(c"vkCopyAccelerationStructureToMemoryKHR");
15959 if val.is_null() {
15960 copy_acceleration_structure_to_memory_khr
15961 } else {
15962 ::core::mem::transmute::<
15963 *const c_void,
15964 PFN_vkCopyAccelerationStructureToMemoryKHR,
15965 >(val)
15966 }
15967 },
15968 copy_memory_to_acceleration_structure_khr: unsafe {
15969 unsafe extern "system" fn copy_memory_to_acceleration_structure_khr(
15970 _device: crate::vk::Device,
15971 _deferred_operation: DeferredOperationKHR,
15972 _p_info: *const CopyMemoryToAccelerationStructureInfoKHR<'_>,
15973 ) -> Result {
15974 panic!(concat!(
15975 "Unable to load ",
15976 stringify!(copy_memory_to_acceleration_structure_khr)
15977 ))
15978 }
15979 let val = _f(c"vkCopyMemoryToAccelerationStructureKHR");
15980 if val.is_null() {
15981 copy_memory_to_acceleration_structure_khr
15982 } else {
15983 ::core::mem::transmute::<
15984 *const c_void,
15985 PFN_vkCopyMemoryToAccelerationStructureKHR,
15986 >(val)
15987 }
15988 },
15989 write_acceleration_structures_properties_khr: unsafe {
15990 unsafe extern "system" fn write_acceleration_structures_properties_khr(
15991 _device: crate::vk::Device,
15992 _acceleration_structure_count: u32,
15993 _p_acceleration_structures: *const AccelerationStructureKHR,
15994 _query_type: QueryType,
15995 _data_size: usize,
15996 _p_data: *mut c_void,
15997 _stride: usize,
15998 ) -> Result {
15999 panic!(concat!(
16000 "Unable to load ",
16001 stringify!(write_acceleration_structures_properties_khr)
16002 ))
16003 }
16004 let val = _f(c"vkWriteAccelerationStructuresPropertiesKHR");
16005 if val.is_null() {
16006 write_acceleration_structures_properties_khr
16007 } else {
16008 ::core::mem::transmute::<
16009 *const c_void,
16010 PFN_vkWriteAccelerationStructuresPropertiesKHR,
16011 >(val)
16012 }
16013 },
16014 cmd_copy_acceleration_structure_khr: unsafe {
16015 unsafe extern "system" fn cmd_copy_acceleration_structure_khr(
16016 _command_buffer: CommandBuffer,
16017 _p_info: *const CopyAccelerationStructureInfoKHR<'_>,
16018 ) {
16019 panic!(concat!(
16020 "Unable to load ",
16021 stringify!(cmd_copy_acceleration_structure_khr)
16022 ))
16023 }
16024 let val = _f(c"vkCmdCopyAccelerationStructureKHR");
16025 if val.is_null() {
16026 cmd_copy_acceleration_structure_khr
16027 } else {
16028 ::core::mem::transmute::<
16029 *const c_void,
16030 PFN_vkCmdCopyAccelerationStructureKHR,
16031 >(val)
16032 }
16033 },
16034 cmd_copy_acceleration_structure_to_memory_khr: unsafe {
16035 unsafe extern "system" fn cmd_copy_acceleration_structure_to_memory_khr(
16036 _command_buffer: CommandBuffer,
16037 _p_info: *const CopyAccelerationStructureToMemoryInfoKHR<'_>,
16038 ) {
16039 panic!(concat!(
16040 "Unable to load ",
16041 stringify!(cmd_copy_acceleration_structure_to_memory_khr)
16042 ))
16043 }
16044 let val = _f(c"vkCmdCopyAccelerationStructureToMemoryKHR");
16045 if val.is_null() {
16046 cmd_copy_acceleration_structure_to_memory_khr
16047 } else {
16048 ::core::mem::transmute::<
16049 *const c_void,
16050 PFN_vkCmdCopyAccelerationStructureToMemoryKHR,
16051 >(val)
16052 }
16053 },
16054 cmd_copy_memory_to_acceleration_structure_khr: unsafe {
16055 unsafe extern "system" fn cmd_copy_memory_to_acceleration_structure_khr(
16056 _command_buffer: CommandBuffer,
16057 _p_info: *const CopyMemoryToAccelerationStructureInfoKHR<'_>,
16058 ) {
16059 panic!(concat!(
16060 "Unable to load ",
16061 stringify!(cmd_copy_memory_to_acceleration_structure_khr)
16062 ))
16063 }
16064 let val = _f(c"vkCmdCopyMemoryToAccelerationStructureKHR");
16065 if val.is_null() {
16066 cmd_copy_memory_to_acceleration_structure_khr
16067 } else {
16068 ::core::mem::transmute::<
16069 *const c_void,
16070 PFN_vkCmdCopyMemoryToAccelerationStructureKHR,
16071 >(val)
16072 }
16073 },
16074 get_acceleration_structure_device_address_khr: unsafe {
16075 unsafe extern "system" fn get_acceleration_structure_device_address_khr(
16076 _device: crate::vk::Device,
16077 _p_info: *const AccelerationStructureDeviceAddressInfoKHR<'_>,
16078 ) -> DeviceAddress {
16079 panic!(concat!(
16080 "Unable to load ",
16081 stringify!(get_acceleration_structure_device_address_khr)
16082 ))
16083 }
16084 let val = _f(c"vkGetAccelerationStructureDeviceAddressKHR");
16085 if val.is_null() {
16086 get_acceleration_structure_device_address_khr
16087 } else {
16088 ::core::mem::transmute::<
16089 *const c_void,
16090 PFN_vkGetAccelerationStructureDeviceAddressKHR,
16091 >(val)
16092 }
16093 },
16094 cmd_write_acceleration_structures_properties_khr: unsafe {
16095 unsafe extern "system" fn cmd_write_acceleration_structures_properties_khr(
16096 _command_buffer: CommandBuffer,
16097 _acceleration_structure_count: u32,
16098 _p_acceleration_structures: *const AccelerationStructureKHR,
16099 _query_type: QueryType,
16100 _query_pool: QueryPool,
16101 _first_query: u32,
16102 ) {
16103 panic!(concat!(
16104 "Unable to load ",
16105 stringify!(cmd_write_acceleration_structures_properties_khr)
16106 ))
16107 }
16108 let val = _f(c"vkCmdWriteAccelerationStructuresPropertiesKHR");
16109 if val.is_null() {
16110 cmd_write_acceleration_structures_properties_khr
16111 } else {
16112 ::core::mem::transmute::<
16113 *const c_void,
16114 PFN_vkCmdWriteAccelerationStructuresPropertiesKHR,
16115 >(val)
16116 }
16117 },
16118 get_device_acceleration_structure_compatibility_khr: unsafe {
16119 unsafe extern "system" fn get_device_acceleration_structure_compatibility_khr(
16120 _device: crate::vk::Device,
16121 _p_version_info: *const AccelerationStructureVersionInfoKHR<'_>,
16122 _p_compatibility: *mut AccelerationStructureCompatibilityKHR,
16123 ) {
16124 panic!(concat!(
16125 "Unable to load ",
16126 stringify!(get_device_acceleration_structure_compatibility_khr)
16127 ))
16128 }
16129 let val = _f(c"vkGetDeviceAccelerationStructureCompatibilityKHR");
16130 if val.is_null() {
16131 get_device_acceleration_structure_compatibility_khr
16132 } else {
16133 ::core::mem::transmute::<
16134 *const c_void,
16135 PFN_vkGetDeviceAccelerationStructureCompatibilityKHR,
16136 >(val)
16137 }
16138 },
16139 get_acceleration_structure_build_sizes_khr: unsafe {
16140 unsafe extern "system" fn get_acceleration_structure_build_sizes_khr(
16141 _device: crate::vk::Device,
16142 _build_type: AccelerationStructureBuildTypeKHR,
16143 _p_build_info: *const AccelerationStructureBuildGeometryInfoKHR<'_>,
16144 _p_max_primitive_counts: *const u32,
16145 _p_size_info: *mut AccelerationStructureBuildSizesInfoKHR<'_>,
16146 ) {
16147 panic!(concat!(
16148 "Unable to load ",
16149 stringify!(get_acceleration_structure_build_sizes_khr)
16150 ))
16151 }
16152 let val = _f(c"vkGetAccelerationStructureBuildSizesKHR");
16153 if val.is_null() {
16154 get_acceleration_structure_build_sizes_khr
16155 } else {
16156 ::core::mem::transmute::<
16157 *const c_void,
16158 PFN_vkGetAccelerationStructureBuildSizesKHR,
16159 >(val)
16160 }
16161 },
16162 }
16163 }
16164 }
16165 }
    #[doc = "VK_KHR_ray_tracing_pipeline"]
    pub mod ray_tracing_pipeline {
        use super::super::*;
        pub use {
            crate::vk::KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME as NAME,
            crate::vk::KHR_RAY_TRACING_PIPELINE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_ray_tracing_pipeline device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's device-level entry points through
            /// `vkGetDeviceProcAddr` for the given device handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // PFN_vkVoidFunction is reinterpreted as a raw pointer so the
                    // loader below can null-check each entry point uniformly.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_ray_tracing_pipeline device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_trace_rays_khr: PFN_vkCmdTraceRaysKHR,
            pub create_ray_tracing_pipelines_khr: PFN_vkCreateRayTracingPipelinesKHR,
            pub get_ray_tracing_shader_group_handles_khr: PFN_vkGetRayTracingShaderGroupHandlesKHR,
            pub get_ray_tracing_capture_replay_shader_group_handles_khr:
                PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR,
            pub cmd_trace_rays_indirect_khr: PFN_vkCmdTraceRaysIndirectKHR,
            pub get_ray_tracing_shader_group_stack_size_khr:
                PFN_vkGetRayTracingShaderGroupStackSizeKHR,
            pub cmd_set_ray_tracing_pipeline_stack_size_khr:
                PFN_vkCmdSetRayTracingPipelineStackSizeKHR,
        }
        // SAFETY: the table only contains plain `extern "system"` function
        // pointers, which carry no thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader: each command is queried by its Vulkan
            // name; when the loader returns null, a stub that panics with the
            // command's name is stored instead, so calling an unavailable
            // function fails loudly rather than dereferencing null.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_trace_rays_khr: unsafe {
                        unsafe extern "system" fn cmd_trace_rays_khr(
                            _command_buffer: CommandBuffer,
                            _p_raygen_shader_binding_table: *const StridedDeviceAddressRegionKHR,
                            _p_miss_shader_binding_table: *const StridedDeviceAddressRegionKHR,
                            _p_hit_shader_binding_table: *const StridedDeviceAddressRegionKHR,
                            _p_callable_shader_binding_table: *const StridedDeviceAddressRegionKHR,
                            _width: u32,
                            _height: u32,
                            _depth: u32,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(cmd_trace_rays_khr)))
                        }
                        let val = _f(c"vkCmdTraceRaysKHR");
                        if val.is_null() {
                            cmd_trace_rays_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdTraceRaysKHR>(val)
                        }
                    },
                    create_ray_tracing_pipelines_khr: unsafe {
                        unsafe extern "system" fn create_ray_tracing_pipelines_khr(
                            _device: crate::vk::Device,
                            _deferred_operation: DeferredOperationKHR,
                            _pipeline_cache: PipelineCache,
                            _create_info_count: u32,
                            _p_create_infos: *const RayTracingPipelineCreateInfoKHR<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_pipelines: *mut Pipeline,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_ray_tracing_pipelines_khr)
                            ))
                        }
                        let val = _f(c"vkCreateRayTracingPipelinesKHR");
                        if val.is_null() {
                            create_ray_tracing_pipelines_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCreateRayTracingPipelinesKHR,
                            >(val)
                        }
                    },
                    get_ray_tracing_shader_group_handles_khr: unsafe {
                        unsafe extern "system" fn get_ray_tracing_shader_group_handles_khr(
                            _device: crate::vk::Device,
                            _pipeline: Pipeline,
                            _first_group: u32,
                            _group_count: u32,
                            _data_size: usize,
                            _p_data: *mut c_void,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_ray_tracing_shader_group_handles_khr)
                            ))
                        }
                        let val = _f(c"vkGetRayTracingShaderGroupHandlesKHR");
                        if val.is_null() {
                            get_ray_tracing_shader_group_handles_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetRayTracingShaderGroupHandlesKHR,
                            >(val)
                        }
                    },
                    get_ray_tracing_capture_replay_shader_group_handles_khr: unsafe {
                        unsafe extern "system" fn get_ray_tracing_capture_replay_shader_group_handles_khr(
                            _device: crate::vk::Device,
                            _pipeline: Pipeline,
                            _first_group: u32,
                            _group_count: u32,
                            _data_size: usize,
                            _p_data: *mut c_void,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_ray_tracing_capture_replay_shader_group_handles_khr)
                            ))
                        }
                        let val = _f(c"vkGetRayTracingCaptureReplayShaderGroupHandlesKHR");
                        if val.is_null() {
                            get_ray_tracing_capture_replay_shader_group_handles_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR,
                            >(val)
                        }
                    },
                    cmd_trace_rays_indirect_khr: unsafe {
                        unsafe extern "system" fn cmd_trace_rays_indirect_khr(
                            _command_buffer: CommandBuffer,
                            _p_raygen_shader_binding_table: *const StridedDeviceAddressRegionKHR,
                            _p_miss_shader_binding_table: *const StridedDeviceAddressRegionKHR,
                            _p_hit_shader_binding_table: *const StridedDeviceAddressRegionKHR,
                            _p_callable_shader_binding_table: *const StridedDeviceAddressRegionKHR,
                            _indirect_device_address: DeviceAddress,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_trace_rays_indirect_khr)
                            ))
                        }
                        let val = _f(c"vkCmdTraceRaysIndirectKHR");
                        if val.is_null() {
                            cmd_trace_rays_indirect_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdTraceRaysIndirectKHR>(
                                val,
                            )
                        }
                    },
                    get_ray_tracing_shader_group_stack_size_khr: unsafe {
                        unsafe extern "system" fn get_ray_tracing_shader_group_stack_size_khr(
                            _device: crate::vk::Device,
                            _pipeline: Pipeline,
                            _group: u32,
                            _group_shader: ShaderGroupShaderKHR,
                        ) -> DeviceSize {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_ray_tracing_shader_group_stack_size_khr)
                            ))
                        }
                        let val = _f(c"vkGetRayTracingShaderGroupStackSizeKHR");
                        if val.is_null() {
                            get_ray_tracing_shader_group_stack_size_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetRayTracingShaderGroupStackSizeKHR,
                            >(val)
                        }
                    },
                    cmd_set_ray_tracing_pipeline_stack_size_khr: unsafe {
                        unsafe extern "system" fn cmd_set_ray_tracing_pipeline_stack_size_khr(
                            _command_buffer: CommandBuffer,
                            _pipeline_stack_size: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_ray_tracing_pipeline_stack_size_khr)
                            ))
                        }
                        let val = _f(c"vkCmdSetRayTracingPipelineStackSizeKHR");
                        if val.is_null() {
                            cmd_set_ray_tracing_pipeline_stack_size_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdSetRayTracingPipelineStackSizeKHR,
                            >(val)
                        }
                    },
                }
            }
        }
    }
16382 #[doc = "VK_KHR_ray_query"]
16383 pub mod ray_query {
16384 use super::super::*;
16385 pub use {
16386 crate::vk::KHR_RAY_QUERY_EXTENSION_NAME as NAME,
16387 crate::vk::KHR_RAY_QUERY_SPEC_VERSION as SPEC_VERSION,
16388 };
16389 }
    #[doc = "VK_KHR_sampler_ycbcr_conversion"]
    pub mod sampler_ycbcr_conversion {
        use super::super::*;
        pub use {
            crate::vk::KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME as NAME,
            crate::vk::KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_sampler_ycbcr_conversion device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's device-level entry points through
            /// `vkGetDeviceProcAddr` for the given device handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_sampler_ycbcr_conversion device-level function pointers"]
        pub struct DeviceFn {
            // Entry points are loaded under their KHR-suffixed names but stored
            // as the unsuffixed core PFN aliases.
            pub create_sampler_ycbcr_conversion_khr: PFN_vkCreateSamplerYcbcrConversion,
            pub destroy_sampler_ycbcr_conversion_khr: PFN_vkDestroySamplerYcbcrConversion,
        }
        // SAFETY: the table only contains plain `extern "system"` function
        // pointers, which carry no thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader: null results from `_f` are replaced by
            // stubs that panic with the command's name when called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_sampler_ycbcr_conversion_khr: unsafe {
                        unsafe extern "system" fn create_sampler_ycbcr_conversion_khr(
                            _device: crate::vk::Device,
                            _p_create_info: *const SamplerYcbcrConversionCreateInfo<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_ycbcr_conversion: *mut SamplerYcbcrConversion,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_sampler_ycbcr_conversion_khr)
                            ))
                        }
                        let val = _f(c"vkCreateSamplerYcbcrConversionKHR");
                        if val.is_null() {
                            create_sampler_ycbcr_conversion_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCreateSamplerYcbcrConversion,
                            >(val)
                        }
                    },
                    destroy_sampler_ycbcr_conversion_khr: unsafe {
                        unsafe extern "system" fn destroy_sampler_ycbcr_conversion_khr(
                            _device: crate::vk::Device,
                            _ycbcr_conversion: SamplerYcbcrConversion,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(destroy_sampler_ycbcr_conversion_khr)
                            ))
                        }
                        let val = _f(c"vkDestroySamplerYcbcrConversionKHR");
                        if val.is_null() {
                            destroy_sampler_ycbcr_conversion_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkDestroySamplerYcbcrConversion,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_KHR_bind_memory2"]
    pub mod bind_memory2 {
        use super::super::*;
        pub use {
            crate::vk::KHR_BIND_MEMORY_2_EXTENSION_NAME as NAME,
            crate::vk::KHR_BIND_MEMORY_2_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_bind_memory2 device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's device-level entry points through
            /// `vkGetDeviceProcAddr` for the given device handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_bind_memory2 device-level function pointers"]
        pub struct DeviceFn {
            // Entry points are loaded under their KHR-suffixed names but stored
            // as the unsuffixed core PFN aliases.
            pub bind_buffer_memory2_khr: PFN_vkBindBufferMemory2,
            pub bind_image_memory2_khr: PFN_vkBindImageMemory2,
        }
        // SAFETY: the table only contains plain `extern "system"` function
        // pointers, which carry no thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader: null results from `_f` are replaced by
            // stubs that panic with the command's name when called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    bind_buffer_memory2_khr: unsafe {
                        unsafe extern "system" fn bind_buffer_memory2_khr(
                            _device: crate::vk::Device,
                            _bind_info_count: u32,
                            _p_bind_infos: *const BindBufferMemoryInfo<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(bind_buffer_memory2_khr)
                            ))
                        }
                        let val = _f(c"vkBindBufferMemory2KHR");
                        if val.is_null() {
                            bind_buffer_memory2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkBindBufferMemory2>(val)
                        }
                    },
                    bind_image_memory2_khr: unsafe {
                        unsafe extern "system" fn bind_image_memory2_khr(
                            _device: crate::vk::Device,
                            _bind_info_count: u32,
                            _p_bind_infos: *const BindImageMemoryInfo<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(bind_image_memory2_khr)
                            ))
                        }
                        let val = _f(c"vkBindImageMemory2KHR");
                        if val.is_null() {
                            bind_image_memory2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkBindImageMemory2>(val)
                        }
                    },
                }
            }
        }
    }
16569 #[doc = "VK_KHR_portability_subset"]
16570 pub mod portability_subset {
16571 use super::super::*;
16572 pub use {
16573 crate::vk::KHR_PORTABILITY_SUBSET_EXTENSION_NAME as NAME,
16574 crate::vk::KHR_PORTABILITY_SUBSET_SPEC_VERSION as SPEC_VERSION,
16575 };
16576 }
    #[doc = "VK_KHR_maintenance3"]
    pub mod maintenance3 {
        use super::super::*;
        pub use {
            crate::vk::KHR_MAINTENANCE_3_EXTENSION_NAME as NAME,
            crate::vk::KHR_MAINTENANCE_3_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_maintenance3 device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's device-level entry points through
            /// `vkGetDeviceProcAddr` for the given device handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_maintenance3 device-level function pointers"]
        pub struct DeviceFn {
            // Loaded under the KHR-suffixed name but stored as the unsuffixed
            // core PFN alias.
            pub get_descriptor_set_layout_support_khr: PFN_vkGetDescriptorSetLayoutSupport,
        }
        // SAFETY: the table only contains plain `extern "system"` function
        // pointers, which carry no thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader: a null result from `_f` is replaced by
            // a stub that panics with the command's name when called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_descriptor_set_layout_support_khr: unsafe {
                        unsafe extern "system" fn get_descriptor_set_layout_support_khr(
                            _device: crate::vk::Device,
                            _p_create_info: *const DescriptorSetLayoutCreateInfo<'_>,
                            _p_support: *mut DescriptorSetLayoutSupport<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_descriptor_set_layout_support_khr)
                            ))
                        }
                        let val = _f(c"vkGetDescriptorSetLayoutSupportKHR");
                        if val.is_null() {
                            get_descriptor_set_layout_support_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDescriptorSetLayoutSupport,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_KHR_draw_indirect_count"]
    pub mod draw_indirect_count {
        use super::super::*;
        pub use {
            crate::vk::KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME as NAME,
            crate::vk::KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_draw_indirect_count device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's device-level entry points through
            /// `vkGetDeviceProcAddr` for the given device handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_draw_indirect_count device-level function pointers"]
        pub struct DeviceFn {
            // Entry points are loaded under their KHR-suffixed names but stored
            // as the unsuffixed core PFN aliases.
            pub cmd_draw_indirect_count_khr: PFN_vkCmdDrawIndirectCount,
            pub cmd_draw_indexed_indirect_count_khr: PFN_vkCmdDrawIndexedIndirectCount,
        }
        // SAFETY: the table only contains plain `extern "system"` function
        // pointers, which carry no thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader: null results from `_f` are replaced by
            // stubs that panic with the command's name when called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_draw_indirect_count_khr: unsafe {
                        unsafe extern "system" fn cmd_draw_indirect_count_khr(
                            _command_buffer: CommandBuffer,
                            _buffer: Buffer,
                            _offset: DeviceSize,
                            _count_buffer: Buffer,
                            _count_buffer_offset: DeviceSize,
                            _max_draw_count: u32,
                            _stride: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_draw_indirect_count_khr)
                            ))
                        }
                        let val = _f(c"vkCmdDrawIndirectCountKHR");
                        if val.is_null() {
                            cmd_draw_indirect_count_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDrawIndirectCount>(val)
                        }
                    },
                    cmd_draw_indexed_indirect_count_khr: unsafe {
                        unsafe extern "system" fn cmd_draw_indexed_indirect_count_khr(
                            _command_buffer: CommandBuffer,
                            _buffer: Buffer,
                            _offset: DeviceSize,
                            _count_buffer: Buffer,
                            _count_buffer_offset: DeviceSize,
                            _max_draw_count: u32,
                            _stride: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_draw_indexed_indirect_count_khr)
                            ))
                        }
                        let val = _f(c"vkCmdDrawIndexedIndirectCountKHR");
                        if val.is_null() {
                            cmd_draw_indexed_indirect_count_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDrawIndexedIndirectCount>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
16743 #[doc = "VK_KHR_shader_subgroup_extended_types"]
16744 pub mod shader_subgroup_extended_types {
16745 use super::super::*;
16746 pub use {
16747 crate::vk::KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME as NAME,
16748 crate::vk::KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION as SPEC_VERSION,
16749 };
16750 }
16751 #[doc = "VK_KHR_8bit_storage"]
16752 pub mod _8bit_storage {
16753 use super::super::*;
16754 pub use {
16755 crate::vk::KHR_8BIT_STORAGE_EXTENSION_NAME as NAME,
16756 crate::vk::KHR_8BIT_STORAGE_SPEC_VERSION as SPEC_VERSION,
16757 };
16758 }
16759 #[doc = "VK_KHR_shader_atomic_int64"]
16760 pub mod shader_atomic_int64 {
16761 use super::super::*;
16762 pub use {
16763 crate::vk::KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME as NAME,
16764 crate::vk::KHR_SHADER_ATOMIC_INT64_SPEC_VERSION as SPEC_VERSION,
16765 };
16766 }
16767 #[doc = "VK_KHR_shader_clock"]
16768 pub mod shader_clock {
16769 use super::super::*;
16770 pub use {
16771 crate::vk::KHR_SHADER_CLOCK_EXTENSION_NAME as NAME,
16772 crate::vk::KHR_SHADER_CLOCK_SPEC_VERSION as SPEC_VERSION,
16773 };
16774 }
16775 #[doc = "VK_KHR_video_decode_h265"]
16776 pub mod video_decode_h265 {
16777 use super::super::*;
16778 pub use {
16779 crate::vk::KHR_VIDEO_DECODE_H265_EXTENSION_NAME as NAME,
16780 crate::vk::KHR_VIDEO_DECODE_H265_SPEC_VERSION as SPEC_VERSION,
16781 };
16782 }
16783 #[doc = "VK_KHR_global_priority"]
16784 pub mod global_priority {
16785 use super::super::*;
16786 pub use {
16787 crate::vk::KHR_GLOBAL_PRIORITY_EXTENSION_NAME as NAME,
16788 crate::vk::KHR_GLOBAL_PRIORITY_SPEC_VERSION as SPEC_VERSION,
16789 };
16790 }
16791 #[doc = "VK_KHR_driver_properties"]
16792 pub mod driver_properties {
16793 use super::super::*;
16794 pub use {
16795 crate::vk::KHR_DRIVER_PROPERTIES_EXTENSION_NAME as NAME,
16796 crate::vk::KHR_DRIVER_PROPERTIES_SPEC_VERSION as SPEC_VERSION,
16797 };
16798 }
16799 #[doc = "VK_KHR_shader_float_controls"]
16800 pub mod shader_float_controls {
16801 use super::super::*;
16802 pub use {
16803 crate::vk::KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME as NAME,
16804 crate::vk::KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION as SPEC_VERSION,
16805 };
16806 }
16807 #[doc = "VK_KHR_depth_stencil_resolve"]
16808 pub mod depth_stencil_resolve {
16809 use super::super::*;
16810 pub use {
16811 crate::vk::KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME as NAME,
16812 crate::vk::KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION as SPEC_VERSION,
16813 };
16814 }
16815 #[doc = "VK_KHR_swapchain_mutable_format"]
16816 pub mod swapchain_mutable_format {
16817 use super::super::*;
16818 pub use {
16819 crate::vk::KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME as NAME,
16820 crate::vk::KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION as SPEC_VERSION,
16821 };
16822 }
    #[doc = "VK_KHR_timeline_semaphore"]
    pub mod timeline_semaphore {
        use super::super::*;
        pub use {
            crate::vk::KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME as NAME,
            crate::vk::KHR_TIMELINE_SEMAPHORE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_timeline_semaphore device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Raw device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Resolves this extension's device-level entry points through
            /// `vkGetDeviceProcAddr` for the given device handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_timeline_semaphore device-level function pointers"]
        pub struct DeviceFn {
            // Entry points are loaded under their KHR-suffixed names but stored
            // as the unsuffixed core PFN aliases.
            pub get_semaphore_counter_value_khr: PFN_vkGetSemaphoreCounterValue,
            pub wait_semaphores_khr: PFN_vkWaitSemaphores,
            pub signal_semaphore_khr: PFN_vkSignalSemaphore,
        }
        // SAFETY: the table only contains plain `extern "system"` function
        // pointers, which carry no thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader: null results from `_f` are replaced by
            // stubs that panic with the command's name when called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_semaphore_counter_value_khr: unsafe {
                        unsafe extern "system" fn get_semaphore_counter_value_khr(
                            _device: crate::vk::Device,
                            _semaphore: Semaphore,
                            _p_value: *mut u64,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_semaphore_counter_value_khr)
                            ))
                        }
                        let val = _f(c"vkGetSemaphoreCounterValueKHR");
                        if val.is_null() {
                            get_semaphore_counter_value_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetSemaphoreCounterValue>(
                                val,
                            )
                        }
                    },
                    wait_semaphores_khr: unsafe {
                        unsafe extern "system" fn wait_semaphores_khr(
                            _device: crate::vk::Device,
                            _p_wait_info: *const SemaphoreWaitInfo<'_>,
                            _timeout: u64,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(wait_semaphores_khr)))
                        }
                        let val = _f(c"vkWaitSemaphoresKHR");
                        if val.is_null() {
                            wait_semaphores_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkWaitSemaphores>(val)
                        }
                    },
                    signal_semaphore_khr: unsafe {
                        unsafe extern "system" fn signal_semaphore_khr(
                            _device: crate::vk::Device,
                            _p_signal_info: *const SemaphoreSignalInfo<'_>,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(signal_semaphore_khr)))
                        }
                        let val = _f(c"vkSignalSemaphoreKHR");
                        if val.is_null() {
                            signal_semaphore_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkSignalSemaphore>(val)
                        }
                    },
                }
            }
        }
    }
16923 #[doc = "VK_KHR_vulkan_memory_model"]
16924 pub mod vulkan_memory_model {
16925 use super::super::*;
16926 pub use {
16927 crate::vk::KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME as NAME,
16928 crate::vk::KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION as SPEC_VERSION,
16929 };
16930 }
16931 #[doc = "VK_KHR_shader_terminate_invocation"]
16932 pub mod shader_terminate_invocation {
16933 use super::super::*;
16934 pub use {
16935 crate::vk::KHR_SHADER_TERMINATE_INVOCATION_EXTENSION_NAME as NAME,
16936 crate::vk::KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION as SPEC_VERSION,
16937 };
16938 }
16939 #[doc = "VK_KHR_fragment_shading_rate"]
16940 pub mod fragment_shading_rate {
16941 use super::super::*;
16942 pub use {
16943 crate::vk::KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME as NAME,
16944 crate::vk::KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION as SPEC_VERSION,
16945 };
        #[doc = "VK_KHR_fragment_shading_rate instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: InstanceFn,
            // Raw instance handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Resolves this extension's instance-level entry points through
            /// `vkGetInstanceProcAddr` for the given instance handle.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // PFN_vkVoidFunction is reinterpreted as a raw pointer so
                    // the loader can null-check each entry point uniformly.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
16971 #[derive(Clone)]
16972 #[doc = "Raw VK_KHR_fragment_shading_rate instance-level function pointers"]
16973 pub struct InstanceFn {
16974 pub get_physical_device_fragment_shading_rates_khr:
16975 PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR,
16976 }
16977 unsafe impl Send for InstanceFn {}
16978 unsafe impl Sync for InstanceFn {}
16979 impl InstanceFn {
16980 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
16981 Self::load_erased(&mut f)
16982 }
16983 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
16984 Self {
16985 get_physical_device_fragment_shading_rates_khr: unsafe {
16986 unsafe extern "system" fn get_physical_device_fragment_shading_rates_khr(
16987 _physical_device: PhysicalDevice,
16988 _p_fragment_shading_rate_count: *mut u32,
16989 _p_fragment_shading_rates: *mut PhysicalDeviceFragmentShadingRateKHR<
16990 '_,
16991 >,
16992 ) -> Result {
16993 panic!(concat!(
16994 "Unable to load ",
16995 stringify!(get_physical_device_fragment_shading_rates_khr)
16996 ))
16997 }
16998 let val = _f(c"vkGetPhysicalDeviceFragmentShadingRatesKHR");
16999 if val.is_null() {
17000 get_physical_device_fragment_shading_rates_khr
17001 } else {
17002 ::core::mem::transmute::<
17003 *const c_void,
17004 PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR,
17005 >(val)
17006 }
17007 },
17008 }
17009 }
17010 }
17011 #[doc = "VK_KHR_fragment_shading_rate device-level functions"]
17012 #[derive(Clone)]
17013 pub struct Device {
17014 pub(crate) fp: DeviceFn,
17015 pub(crate) handle: crate::vk::Device,
17016 }
17017 impl Device {
17018 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
17019 let handle = device.handle();
17020 let fp = DeviceFn::load(|name| unsafe {
17021 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
17022 instance.get_device_proc_addr(handle, name.as_ptr()),
17023 )
17024 });
17025 Self { handle, fp }
17026 }
17027 #[inline]
17028 pub fn fp(&self) -> &DeviceFn {
17029 &self.fp
17030 }
17031 #[inline]
17032 pub fn device(&self) -> crate::vk::Device {
17033 self.handle
17034 }
17035 }
17036 #[derive(Clone)]
17037 #[doc = "Raw VK_KHR_fragment_shading_rate device-level function pointers"]
17038 pub struct DeviceFn {
17039 pub cmd_set_fragment_shading_rate_khr: PFN_vkCmdSetFragmentShadingRateKHR,
17040 }
17041 unsafe impl Send for DeviceFn {}
17042 unsafe impl Sync for DeviceFn {}
17043 impl DeviceFn {
17044 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
17045 Self::load_erased(&mut f)
17046 }
17047 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
17048 Self {
17049 cmd_set_fragment_shading_rate_khr: unsafe {
17050 unsafe extern "system" fn cmd_set_fragment_shading_rate_khr(
17051 _command_buffer: CommandBuffer,
17052 _p_fragment_size: *const Extent2D,
17053 _combiner_ops: *const [FragmentShadingRateCombinerOpKHR; 2usize],
17054 ) {
17055 panic!(concat!(
17056 "Unable to load ",
17057 stringify!(cmd_set_fragment_shading_rate_khr)
17058 ))
17059 }
17060 let val = _f(c"vkCmdSetFragmentShadingRateKHR");
17061 if val.is_null() {
17062 cmd_set_fragment_shading_rate_khr
17063 } else {
17064 ::core::mem::transmute::<
17065 *const c_void,
17066 PFN_vkCmdSetFragmentShadingRateKHR,
17067 >(val)
17068 }
17069 },
17070 }
17071 }
17072 }
17073 }
    #[doc = "VK_KHR_dynamic_rendering_local_read"]
    pub mod dynamic_rendering_local_read {
        use super::super::*;
        pub use {
            crate::vk::KHR_DYNAMIC_RENDERING_LOCAL_READ_EXTENSION_NAME as NAME,
            crate::vk::KHR_DYNAMIC_RENDERING_LOCAL_READ_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_dynamic_rendering_local_read device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for the extension's device-level commands.
            pub(crate) fp: DeviceFn,
            // Device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level entry points through
            // `vkGetDeviceProcAddr` for `device`'s handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Raw function-pointer table accessor.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // Device handle these pointers were loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_dynamic_rendering_local_read device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_set_rendering_attachment_locations_khr:
                PFN_vkCmdSetRenderingAttachmentLocations,
            pub cmd_set_rendering_input_attachment_indices_khr:
                PFN_vkCmdSetRenderingInputAttachmentIndices,
        }
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // `_f` maps a symbol name to its address (or null). Any symbol the
            // loader cannot resolve falls back to a stub that panics with the
            // function's name when called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_set_rendering_attachment_locations_khr: unsafe {
                        unsafe extern "system" fn cmd_set_rendering_attachment_locations_khr(
                            _command_buffer: CommandBuffer,
                            _p_location_info: *const RenderingAttachmentLocationInfo<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_rendering_attachment_locations_khr)
                            ))
                        }
                        let val = _f(c"vkCmdSetRenderingAttachmentLocationsKHR");
                        if val.is_null() {
                            cmd_set_rendering_attachment_locations_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdSetRenderingAttachmentLocations,
                            >(val)
                        }
                    },
                    cmd_set_rendering_input_attachment_indices_khr: unsafe {
                        unsafe extern "system" fn cmd_set_rendering_input_attachment_indices_khr(
                            _command_buffer: CommandBuffer,
                            _p_input_attachment_index_info : * const RenderingInputAttachmentIndexInfo < '_ >,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_rendering_input_attachment_indices_khr)
                            ))
                        }
                        let val = _f(c"vkCmdSetRenderingInputAttachmentIndicesKHR");
                        if val.is_null() {
                            cmd_set_rendering_input_attachment_indices_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdSetRenderingInputAttachmentIndices,
                            >(val)
                        }
                    },
                }
            }
        }
    }
17166 #[doc = "VK_KHR_shader_quad_control"]
17167 pub mod shader_quad_control {
17168 use super::super::*;
17169 pub use {
17170 crate::vk::KHR_SHADER_QUAD_CONTROL_EXTENSION_NAME as NAME,
17171 crate::vk::KHR_SHADER_QUAD_CONTROL_SPEC_VERSION as SPEC_VERSION,
17172 };
17173 }
17174 #[doc = "VK_KHR_spirv_1_4"]
17175 pub mod spirv_1_4 {
17176 use super::super::*;
17177 pub use {
17178 crate::vk::KHR_SPIRV_1_4_EXTENSION_NAME as NAME,
17179 crate::vk::KHR_SPIRV_1_4_SPEC_VERSION as SPEC_VERSION,
17180 };
17181 }
17182 #[doc = "VK_KHR_surface_protected_capabilities"]
17183 pub mod surface_protected_capabilities {
17184 use super::super::*;
17185 pub use {
17186 crate::vk::KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME as NAME,
17187 crate::vk::KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION as SPEC_VERSION,
17188 };
17189 }
17190 #[doc = "VK_KHR_separate_depth_stencil_layouts"]
17191 pub mod separate_depth_stencil_layouts {
17192 use super::super::*;
17193 pub use {
17194 crate::vk::KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME as NAME,
17195 crate::vk::KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION as SPEC_VERSION,
17196 };
17197 }
    #[doc = "VK_KHR_present_wait"]
    pub mod present_wait {
        use super::super::*;
        pub use {
            crate::vk::KHR_PRESENT_WAIT_EXTENSION_NAME as NAME,
            crate::vk::KHR_PRESENT_WAIT_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_present_wait device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for the extension's device-level commands.
            pub(crate) fp: DeviceFn,
            // Device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level entry points through
            // `vkGetDeviceProcAddr` for `device`'s handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Raw function-pointer table accessor.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // Device handle these pointers were loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_present_wait device-level function pointers"]
        pub struct DeviceFn {
            pub wait_for_present_khr: PFN_vkWaitForPresentKHR,
        }
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // `_f` maps a symbol name to its address (or null). Any symbol the
            // loader cannot resolve falls back to a stub that panics with the
            // function's name when called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    wait_for_present_khr: unsafe {
                        unsafe extern "system" fn wait_for_present_khr(
                            _device: crate::vk::Device,
                            _swapchain: SwapchainKHR,
                            _present_id: u64,
                            _timeout: u64,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(wait_for_present_khr)))
                        }
                        let val = _f(c"vkWaitForPresentKHR");
                        if val.is_null() {
                            wait_for_present_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkWaitForPresentKHR>(val)
                        }
                    },
                }
            }
        }
    }
17263 #[doc = "VK_KHR_uniform_buffer_standard_layout"]
17264 pub mod uniform_buffer_standard_layout {
17265 use super::super::*;
17266 pub use {
17267 crate::vk::KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME as NAME,
17268 crate::vk::KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION as SPEC_VERSION,
17269 };
17270 }
    #[doc = "VK_KHR_buffer_device_address"]
    pub mod buffer_device_address {
        use super::super::*;
        pub use {
            crate::vk::KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME as NAME,
            crate::vk::KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_buffer_device_address device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for the extension's device-level commands.
            pub(crate) fp: DeviceFn,
            // Device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level entry points through
            // `vkGetDeviceProcAddr` for `device`'s handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Raw function-pointer table accessor.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // Device handle these pointers were loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_buffer_device_address device-level function pointers"]
        pub struct DeviceFn {
            pub get_buffer_device_address_khr: PFN_vkGetBufferDeviceAddress,
            pub get_buffer_opaque_capture_address_khr: PFN_vkGetBufferOpaqueCaptureAddress,
            pub get_device_memory_opaque_capture_address_khr:
                PFN_vkGetDeviceMemoryOpaqueCaptureAddress,
        }
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // `_f` maps a symbol name to its address (or null). Any symbol the
            // loader cannot resolve falls back to a stub that panics with the
            // function's name when called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_buffer_device_address_khr: unsafe {
                        unsafe extern "system" fn get_buffer_device_address_khr(
                            _device: crate::vk::Device,
                            _p_info: *const BufferDeviceAddressInfo<'_>,
                        ) -> DeviceAddress {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_buffer_device_address_khr)
                            ))
                        }
                        let val = _f(c"vkGetBufferDeviceAddressKHR");
                        if val.is_null() {
                            get_buffer_device_address_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetBufferDeviceAddress>(
                                val,
                            )
                        }
                    },
                    get_buffer_opaque_capture_address_khr: unsafe {
                        unsafe extern "system" fn get_buffer_opaque_capture_address_khr(
                            _device: crate::vk::Device,
                            _p_info: *const BufferDeviceAddressInfo<'_>,
                        ) -> u64 {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_buffer_opaque_capture_address_khr)
                            ))
                        }
                        let val = _f(c"vkGetBufferOpaqueCaptureAddressKHR");
                        if val.is_null() {
                            get_buffer_opaque_capture_address_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetBufferOpaqueCaptureAddress,
                            >(val)
                        }
                    },
                    get_device_memory_opaque_capture_address_khr: unsafe {
                        unsafe extern "system" fn get_device_memory_opaque_capture_address_khr(
                            _device: crate::vk::Device,
                            _p_info: *const DeviceMemoryOpaqueCaptureAddressInfo<'_>,
                        ) -> u64 {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_device_memory_opaque_capture_address_khr)
                            ))
                        }
                        let val = _f(c"vkGetDeviceMemoryOpaqueCaptureAddressKHR");
                        if val.is_null() {
                            get_device_memory_opaque_capture_address_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDeviceMemoryOpaqueCaptureAddress,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_KHR_deferred_host_operations"]
    pub mod deferred_host_operations {
        use super::super::*;
        pub use {
            crate::vk::KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME as NAME,
            crate::vk::KHR_DEFERRED_HOST_OPERATIONS_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_deferred_host_operations device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for the extension's device-level commands.
            pub(crate) fp: DeviceFn,
            // Device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level entry points through
            // `vkGetDeviceProcAddr` for `device`'s handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Raw function-pointer table accessor.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // Device handle these pointers were loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_deferred_host_operations device-level function pointers"]
        pub struct DeviceFn {
            pub create_deferred_operation_khr: PFN_vkCreateDeferredOperationKHR,
            pub destroy_deferred_operation_khr: PFN_vkDestroyDeferredOperationKHR,
            pub get_deferred_operation_max_concurrency_khr:
                PFN_vkGetDeferredOperationMaxConcurrencyKHR,
            pub get_deferred_operation_result_khr: PFN_vkGetDeferredOperationResultKHR,
            pub deferred_operation_join_khr: PFN_vkDeferredOperationJoinKHR,
        }
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // `_f` maps a symbol name to its address (or null). Any symbol the
            // loader cannot resolve falls back to a stub that panics with the
            // function's name when called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_deferred_operation_khr: unsafe {
                        unsafe extern "system" fn create_deferred_operation_khr(
                            _device: crate::vk::Device,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_deferred_operation: *mut DeferredOperationKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_deferred_operation_khr)
                            ))
                        }
                        let val = _f(c"vkCreateDeferredOperationKHR");
                        if val.is_null() {
                            create_deferred_operation_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateDeferredOperationKHR>(
                                val,
                            )
                        }
                    },
                    destroy_deferred_operation_khr: unsafe {
                        unsafe extern "system" fn destroy_deferred_operation_khr(
                            _device: crate::vk::Device,
                            _operation: DeferredOperationKHR,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(destroy_deferred_operation_khr)
                            ))
                        }
                        let val = _f(c"vkDestroyDeferredOperationKHR");
                        if val.is_null() {
                            destroy_deferred_operation_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkDestroyDeferredOperationKHR>(
                                val,
                            )
                        }
                    },
                    get_deferred_operation_max_concurrency_khr: unsafe {
                        unsafe extern "system" fn get_deferred_operation_max_concurrency_khr(
                            _device: crate::vk::Device,
                            _operation: DeferredOperationKHR,
                        ) -> u32 {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_deferred_operation_max_concurrency_khr)
                            ))
                        }
                        let val = _f(c"vkGetDeferredOperationMaxConcurrencyKHR");
                        if val.is_null() {
                            get_deferred_operation_max_concurrency_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDeferredOperationMaxConcurrencyKHR,
                            >(val)
                        }
                    },
                    get_deferred_operation_result_khr: unsafe {
                        unsafe extern "system" fn get_deferred_operation_result_khr(
                            _device: crate::vk::Device,
                            _operation: DeferredOperationKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_deferred_operation_result_khr)
                            ))
                        }
                        let val = _f(c"vkGetDeferredOperationResultKHR");
                        if val.is_null() {
                            get_deferred_operation_result_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDeferredOperationResultKHR,
                            >(val)
                        }
                    },
                    deferred_operation_join_khr: unsafe {
                        unsafe extern "system" fn deferred_operation_join_khr(
                            _device: crate::vk::Device,
                            _operation: DeferredOperationKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(deferred_operation_join_khr)
                            ))
                        }
                        let val = _f(c"vkDeferredOperationJoinKHR");
                        if val.is_null() {
                            deferred_operation_join_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkDeferredOperationJoinKHR>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_KHR_pipeline_executable_properties"]
    pub mod pipeline_executable_properties {
        use super::super::*;
        pub use {
            crate::vk::KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME as NAME,
            crate::vk::KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_pipeline_executable_properties device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for the extension's device-level commands.
            pub(crate) fp: DeviceFn,
            // Device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level entry points through
            // `vkGetDeviceProcAddr` for `device`'s handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Raw function-pointer table accessor.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // Device handle these pointers were loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_pipeline_executable_properties device-level function pointers"]
        pub struct DeviceFn {
            pub get_pipeline_executable_properties_khr: PFN_vkGetPipelineExecutablePropertiesKHR,
            pub get_pipeline_executable_statistics_khr: PFN_vkGetPipelineExecutableStatisticsKHR,
            pub get_pipeline_executable_internal_representations_khr:
                PFN_vkGetPipelineExecutableInternalRepresentationsKHR,
        }
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // `_f` maps a symbol name to its address (or null). Any symbol the
            // loader cannot resolve falls back to a stub that panics with the
            // function's name when called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_pipeline_executable_properties_khr: unsafe {
                        unsafe extern "system" fn get_pipeline_executable_properties_khr(
                            _device: crate::vk::Device,
                            _p_pipeline_info: *const PipelineInfoKHR<'_>,
                            _p_executable_count: *mut u32,
                            _p_properties: *mut PipelineExecutablePropertiesKHR<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_pipeline_executable_properties_khr)
                            ))
                        }
                        let val = _f(c"vkGetPipelineExecutablePropertiesKHR");
                        if val.is_null() {
                            get_pipeline_executable_properties_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPipelineExecutablePropertiesKHR,
                            >(val)
                        }
                    },
                    get_pipeline_executable_statistics_khr: unsafe {
                        unsafe extern "system" fn get_pipeline_executable_statistics_khr(
                            _device: crate::vk::Device,
                            _p_executable_info: *const PipelineExecutableInfoKHR<'_>,
                            _p_statistic_count: *mut u32,
                            _p_statistics: *mut PipelineExecutableStatisticKHR<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_pipeline_executable_statistics_khr)
                            ))
                        }
                        let val = _f(c"vkGetPipelineExecutableStatisticsKHR");
                        if val.is_null() {
                            get_pipeline_executable_statistics_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPipelineExecutableStatisticsKHR,
                            >(val)
                        }
                    },
                    get_pipeline_executable_internal_representations_khr: unsafe {
                        unsafe extern "system" fn get_pipeline_executable_internal_representations_khr(
                            _device: crate::vk::Device,
                            _p_executable_info: *const PipelineExecutableInfoKHR<'_>,
                            _p_internal_representation_count: *mut u32,
                            _p_internal_representations : * mut PipelineExecutableInternalRepresentationKHR < '_ >,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_pipeline_executable_internal_representations_khr)
                            ))
                        }
                        let val = _f(c"vkGetPipelineExecutableInternalRepresentationsKHR");
                        if val.is_null() {
                            get_pipeline_executable_internal_representations_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPipelineExecutableInternalRepresentationsKHR,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_KHR_map_memory2"]
    pub mod map_memory2 {
        use super::super::*;
        pub use {
            crate::vk::KHR_MAP_MEMORY_2_EXTENSION_NAME as NAME,
            crate::vk::KHR_MAP_MEMORY_2_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_map_memory2 device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for the extension's device-level commands.
            pub(crate) fp: DeviceFn,
            // Device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level entry points through
            // `vkGetDeviceProcAddr` for `device`'s handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Raw function-pointer table accessor.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // Device handle these pointers were loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_map_memory2 device-level function pointers"]
        pub struct DeviceFn {
            // NOTE: the KHR symbols alias the core (promoted) PFN types.
            pub map_memory2_khr: PFN_vkMapMemory2,
            pub unmap_memory2_khr: PFN_vkUnmapMemory2,
        }
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // `_f` maps a symbol name to its address (or null). Any symbol the
            // loader cannot resolve falls back to a stub that panics with the
            // function's name when called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    map_memory2_khr: unsafe {
                        unsafe extern "system" fn map_memory2_khr(
                            _device: crate::vk::Device,
                            _p_memory_map_info: *const MemoryMapInfo<'_>,
                            _pp_data: *mut *mut c_void,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(map_memory2_khr)))
                        }
                        let val = _f(c"vkMapMemory2KHR");
                        if val.is_null() {
                            map_memory2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkMapMemory2>(val)
                        }
                    },
                    unmap_memory2_khr: unsafe {
                        unsafe extern "system" fn unmap_memory2_khr(
                            _device: crate::vk::Device,
                            _p_memory_unmap_info: *const MemoryUnmapInfo<'_>,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(unmap_memory2_khr)))
                        }
                        let val = _f(c"vkUnmapMemory2KHR");
                        if val.is_null() {
                            unmap_memory2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkUnmapMemory2>(val)
                        }
                    },
                }
            }
        }
    }
17732 #[doc = "VK_KHR_shader_integer_dot_product"]
17733 pub mod shader_integer_dot_product {
17734 use super::super::*;
17735 pub use {
17736 crate::vk::KHR_SHADER_INTEGER_DOT_PRODUCT_EXTENSION_NAME as NAME,
17737 crate::vk::KHR_SHADER_INTEGER_DOT_PRODUCT_SPEC_VERSION as SPEC_VERSION,
17738 };
17739 }
17740 #[doc = "VK_KHR_pipeline_library"]
17741 pub mod pipeline_library {
17742 use super::super::*;
17743 pub use {
17744 crate::vk::KHR_PIPELINE_LIBRARY_EXTENSION_NAME as NAME,
17745 crate::vk::KHR_PIPELINE_LIBRARY_SPEC_VERSION as SPEC_VERSION,
17746 };
17747 }
17748 #[doc = "VK_KHR_shader_non_semantic_info"]
17749 pub mod shader_non_semantic_info {
17750 use super::super::*;
17751 pub use {
17752 crate::vk::KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME as NAME,
17753 crate::vk::KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION as SPEC_VERSION,
17754 };
17755 }
17756 #[doc = "VK_KHR_present_id"]
17757 pub mod present_id {
17758 use super::super::*;
17759 pub use {
17760 crate::vk::KHR_PRESENT_ID_EXTENSION_NAME as NAME,
17761 crate::vk::KHR_PRESENT_ID_SPEC_VERSION as SPEC_VERSION,
17762 };
17763 }
    #[doc = "VK_KHR_video_encode_queue"]
    pub mod video_encode_queue {
        use super::super::*;
        pub use {
            crate::vk::KHR_VIDEO_ENCODE_QUEUE_EXTENSION_NAME as NAME,
            crate::vk::KHR_VIDEO_ENCODE_QUEUE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_video_encode_queue instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded function-pointer table for the extension's instance-level commands.
            pub(crate) fp: InstanceFn,
            // Instance handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            // Resolves this extension's instance-level entry points through
            // `vkGetInstanceProcAddr` for `instance`'s handle.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Raw function-pointer table accessor.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            // Instance handle these pointers were loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_video_encode_queue instance-level function pointers"]
        pub struct InstanceFn {
            pub get_physical_device_video_encode_quality_level_properties_khr:
                PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR,
        }
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // `_f` maps a symbol name to its address (or null). Any symbol the
            // loader cannot resolve falls back to a stub that panics with the
            // function's name when called.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_physical_device_video_encode_quality_level_properties_khr: unsafe {
                        unsafe extern "system" fn get_physical_device_video_encode_quality_level_properties_khr(
                            _physical_device: PhysicalDevice,
                            _p_quality_level_info : * const PhysicalDeviceVideoEncodeQualityLevelInfoKHR < '_ >,
                            _p_quality_level_properties: *mut VideoEncodeQualityLevelPropertiesKHR<
                                '_,
                            >,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(
                                    get_physical_device_video_encode_quality_level_properties_khr
                                )
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR");
                        if val.is_null() {
                            get_physical_device_video_encode_quality_level_properties_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR,
                            >(val)
                        }
                    },
                }
            }
        }
        #[doc = "VK_KHR_video_encode_queue device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for the extension's device-level commands.
            pub(crate) fp: DeviceFn,
            // Device handle the pointers were resolved against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level entry points through
            // `vkGetDeviceProcAddr` for `device`'s handle.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            // Raw function-pointer table accessor.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            // Device handle these pointers were loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_video_encode_queue device-level function pointers"]
        pub struct DeviceFn {
            pub get_encoded_video_session_parameters_khr: PFN_vkGetEncodedVideoSessionParametersKHR,
            pub cmd_encode_video_khr: PFN_vkCmdEncodeVideoKHR,
        }
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Unresolved symbols fall back to panicking stubs (see InstanceFn above).
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_encoded_video_session_parameters_khr: unsafe {
                        unsafe extern "system" fn get_encoded_video_session_parameters_khr(
                            _device: crate::vk::Device,
                            _p_video_session_parameters_info : * const VideoEncodeSessionParametersGetInfoKHR < '_ >,
                            _p_feedback_info: *mut VideoEncodeSessionParametersFeedbackInfoKHR<'_>,
                            _p_data_size: *mut usize,
                            _p_data: *mut c_void,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_encoded_video_session_parameters_khr)
                            ))
                        }
                        let val = _f(c"vkGetEncodedVideoSessionParametersKHR");
                        if val.is_null() {
                            get_encoded_video_session_parameters_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetEncodedVideoSessionParametersKHR,
                            >(val)
                        }
                    },
                    cmd_encode_video_khr: unsafe {
                        unsafe extern "system" fn cmd_encode_video_khr(
                            _command_buffer: CommandBuffer,
                            _p_encode_info: *const VideoEncodeInfoKHR<'_>,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(cmd_encode_video_khr)))
                        }
                        let val = _f(c"vkCmdEncodeVideoKHR");
                        if val.is_null() {
                            cmd_encode_video_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdEncodeVideoKHR>(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_KHR_synchronization2"]
    pub mod synchronization2 {
        use super::super::*;
        pub use {
            crate::vk::KHR_SYNCHRONIZATION_2_EXTENSION_NAME as NAME,
            crate::vk::KHR_SYNCHRONIZATION_2_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_synchronization2 device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Device the table was loaded against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Loads the extension's device-level entry points for `device`,
            // resolving each symbol via `instance`'s vkGetDeviceProcAddr.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                // Raw Vulkan device handle these function pointers were loaded for.
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_synchronization2 device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_set_event2_khr: PFN_vkCmdSetEvent2,
            pub cmd_reset_event2_khr: PFN_vkCmdResetEvent2,
            pub cmd_wait_events2_khr: PFN_vkCmdWaitEvents2,
            pub cmd_pipeline_barrier2_khr: PFN_vkCmdPipelineBarrier2,
            pub cmd_write_timestamp2_khr: PFN_vkCmdWriteTimestamp2,
            pub queue_submit2_khr: PFN_vkQueueSubmit2,
        }
        // SAFETY: the struct only holds `PFN_*` function pointers, which carry
        // no thread-bound state and are safe to send/share across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Resolves every entry point via `_f`; a symbol the driver does not
            // provide is replaced by a stub that panics on first call.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_set_event2_khr: unsafe {
                        unsafe extern "system" fn cmd_set_event2_khr(
                            _command_buffer: CommandBuffer,
                            _event: Event,
                            _p_dependency_info: *const DependencyInfo<'_>,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(cmd_set_event2_khr)))
                        }
                        let val = _f(c"vkCmdSetEvent2KHR");
                        if val.is_null() {
                            cmd_set_event2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetEvent2>(val)
                        }
                    },
                    cmd_reset_event2_khr: unsafe {
                        unsafe extern "system" fn cmd_reset_event2_khr(
                            _command_buffer: CommandBuffer,
                            _event: Event,
                            _stage_mask: PipelineStageFlags2,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(cmd_reset_event2_khr)))
                        }
                        let val = _f(c"vkCmdResetEvent2KHR");
                        if val.is_null() {
                            cmd_reset_event2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdResetEvent2>(val)
                        }
                    },
                    cmd_wait_events2_khr: unsafe {
                        unsafe extern "system" fn cmd_wait_events2_khr(
                            _command_buffer: CommandBuffer,
                            _event_count: u32,
                            _p_events: *const Event,
                            _p_dependency_infos: *const DependencyInfo<'_>,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(cmd_wait_events2_khr)))
                        }
                        let val = _f(c"vkCmdWaitEvents2KHR");
                        if val.is_null() {
                            cmd_wait_events2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdWaitEvents2>(val)
                        }
                    },
                    cmd_pipeline_barrier2_khr: unsafe {
                        unsafe extern "system" fn cmd_pipeline_barrier2_khr(
                            _command_buffer: CommandBuffer,
                            _p_dependency_info: *const DependencyInfo<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_pipeline_barrier2_khr)
                            ))
                        }
                        let val = _f(c"vkCmdPipelineBarrier2KHR");
                        if val.is_null() {
                            cmd_pipeline_barrier2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdPipelineBarrier2>(val)
                        }
                    },
                    cmd_write_timestamp2_khr: unsafe {
                        unsafe extern "system" fn cmd_write_timestamp2_khr(
                            _command_buffer: CommandBuffer,
                            _stage: PipelineStageFlags2,
                            _query_pool: QueryPool,
                            _query: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_write_timestamp2_khr)
                            ))
                        }
                        let val = _f(c"vkCmdWriteTimestamp2KHR");
                        if val.is_null() {
                            cmd_write_timestamp2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdWriteTimestamp2>(val)
                        }
                    },
                    queue_submit2_khr: unsafe {
                        unsafe extern "system" fn queue_submit2_khr(
                            _queue: Queue,
                            _submit_count: u32,
                            _p_submits: *const SubmitInfo2<'_>,
                            _fence: Fence,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(queue_submit2_khr)))
                        }
                        let val = _f(c"vkQueueSubmit2KHR");
                        if val.is_null() {
                            queue_submit2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkQueueSubmit2>(val)
                        }
                    },
                }
            }
        }
    }
18070 #[doc = "VK_KHR_fragment_shader_barycentric"]
18071 pub mod fragment_shader_barycentric {
18072 use super::super::*;
18073 pub use {
18074 crate::vk::KHR_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME as NAME,
18075 crate::vk::KHR_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION as SPEC_VERSION,
18076 };
18077 }
18078 #[doc = "VK_KHR_shader_subgroup_uniform_control_flow"]
18079 pub mod shader_subgroup_uniform_control_flow {
18080 use super::super::*;
18081 pub use {
18082 crate::vk::KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_EXTENSION_NAME as NAME,
18083 crate::vk::KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_SPEC_VERSION as SPEC_VERSION,
18084 };
18085 }
18086 #[doc = "VK_KHR_zero_initialize_workgroup_memory"]
18087 pub mod zero_initialize_workgroup_memory {
18088 use super::super::*;
18089 pub use {
18090 crate::vk::KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME as NAME,
18091 crate::vk::KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION as SPEC_VERSION,
18092 };
18093 }
18094 #[doc = "VK_KHR_workgroup_memory_explicit_layout"]
18095 pub mod workgroup_memory_explicit_layout {
18096 use super::super::*;
18097 pub use {
18098 crate::vk::KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME as NAME,
18099 crate::vk::KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_SPEC_VERSION as SPEC_VERSION,
18100 };
18101 }
    #[doc = "VK_KHR_copy_commands2"]
    pub mod copy_commands2 {
        use super::super::*;
        pub use {
            crate::vk::KHR_COPY_COMMANDS_2_EXTENSION_NAME as NAME,
            crate::vk::KHR_COPY_COMMANDS_2_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_copy_commands2 device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Device the table was loaded against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Loads the extension's device-level entry points for `device`,
            // resolving each symbol via `instance`'s vkGetDeviceProcAddr.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                // Raw Vulkan device handle these function pointers were loaded for.
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_copy_commands2 device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_copy_buffer2_khr: PFN_vkCmdCopyBuffer2,
            pub cmd_copy_image2_khr: PFN_vkCmdCopyImage2,
            pub cmd_copy_buffer_to_image2_khr: PFN_vkCmdCopyBufferToImage2,
            pub cmd_copy_image_to_buffer2_khr: PFN_vkCmdCopyImageToBuffer2,
            pub cmd_blit_image2_khr: PFN_vkCmdBlitImage2,
            pub cmd_resolve_image2_khr: PFN_vkCmdResolveImage2,
        }
        // SAFETY: the struct only holds `PFN_*` function pointers, which carry
        // no thread-bound state and are safe to send/share across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Resolves every entry point via `_f`; a symbol the driver does not
            // provide is replaced by a stub that panics on first call.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_copy_buffer2_khr: unsafe {
                        unsafe extern "system" fn cmd_copy_buffer2_khr(
                            _command_buffer: CommandBuffer,
                            _p_copy_buffer_info: *const CopyBufferInfo2<'_>,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(cmd_copy_buffer2_khr)))
                        }
                        let val = _f(c"vkCmdCopyBuffer2KHR");
                        if val.is_null() {
                            cmd_copy_buffer2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdCopyBuffer2>(val)
                        }
                    },
                    cmd_copy_image2_khr: unsafe {
                        unsafe extern "system" fn cmd_copy_image2_khr(
                            _command_buffer: CommandBuffer,
                            _p_copy_image_info: *const CopyImageInfo2<'_>,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(cmd_copy_image2_khr)))
                        }
                        let val = _f(c"vkCmdCopyImage2KHR");
                        if val.is_null() {
                            cmd_copy_image2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdCopyImage2>(val)
                        }
                    },
                    cmd_copy_buffer_to_image2_khr: unsafe {
                        unsafe extern "system" fn cmd_copy_buffer_to_image2_khr(
                            _command_buffer: CommandBuffer,
                            _p_copy_buffer_to_image_info: *const CopyBufferToImageInfo2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_copy_buffer_to_image2_khr)
                            ))
                        }
                        let val = _f(c"vkCmdCopyBufferToImage2KHR");
                        if val.is_null() {
                            cmd_copy_buffer_to_image2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdCopyBufferToImage2>(
                                val,
                            )
                        }
                    },
                    cmd_copy_image_to_buffer2_khr: unsafe {
                        unsafe extern "system" fn cmd_copy_image_to_buffer2_khr(
                            _command_buffer: CommandBuffer,
                            _p_copy_image_to_buffer_info: *const CopyImageToBufferInfo2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_copy_image_to_buffer2_khr)
                            ))
                        }
                        let val = _f(c"vkCmdCopyImageToBuffer2KHR");
                        if val.is_null() {
                            cmd_copy_image_to_buffer2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdCopyImageToBuffer2>(
                                val,
                            )
                        }
                    },
                    cmd_blit_image2_khr: unsafe {
                        unsafe extern "system" fn cmd_blit_image2_khr(
                            _command_buffer: CommandBuffer,
                            _p_blit_image_info: *const BlitImageInfo2<'_>,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(cmd_blit_image2_khr)))
                        }
                        let val = _f(c"vkCmdBlitImage2KHR");
                        if val.is_null() {
                            cmd_blit_image2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdBlitImage2>(val)
                        }
                    },
                    cmd_resolve_image2_khr: unsafe {
                        unsafe extern "system" fn cmd_resolve_image2_khr(
                            _command_buffer: CommandBuffer,
                            _p_resolve_image_info: *const ResolveImageInfo2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_resolve_image2_khr)
                            ))
                        }
                        let val = _f(c"vkCmdResolveImage2KHR");
                        if val.is_null() {
                            cmd_resolve_image2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdResolveImage2>(val)
                        }
                    },
                }
            }
        }
    }
18253 #[doc = "VK_KHR_format_feature_flags2"]
18254 pub mod format_feature_flags2 {
18255 use super::super::*;
18256 pub use {
18257 crate::vk::KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME as NAME,
18258 crate::vk::KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION as SPEC_VERSION,
18259 };
18260 }
    #[doc = "VK_KHR_ray_tracing_maintenance1"]
    pub mod ray_tracing_maintenance1 {
        use super::super::*;
        pub use {
            crate::vk::KHR_RAY_TRACING_MAINTENANCE_1_EXTENSION_NAME as NAME,
            crate::vk::KHR_RAY_TRACING_MAINTENANCE_1_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_ray_tracing_maintenance1 device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Device the table was loaded against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Loads the extension's device-level entry points for `device`,
            // resolving each symbol via `instance`'s vkGetDeviceProcAddr.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                // Raw Vulkan device handle these function pointers were loaded for.
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_ray_tracing_maintenance1 device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_trace_rays_indirect2_khr: PFN_vkCmdTraceRaysIndirect2KHR,
        }
        // SAFETY: the struct only holds `PFN_*` function pointers, which carry
        // no thread-bound state and are safe to send/share across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Resolves every entry point via `_f`; a symbol the driver does not
            // provide is replaced by a stub that panics on first call.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_trace_rays_indirect2_khr: unsafe {
                        unsafe extern "system" fn cmd_trace_rays_indirect2_khr(
                            _command_buffer: CommandBuffer,
                            _indirect_device_address: DeviceAddress,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_trace_rays_indirect2_khr)
                            ))
                        }
                        let val = _f(c"vkCmdTraceRaysIndirect2KHR");
                        if val.is_null() {
                            cmd_trace_rays_indirect2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdTraceRaysIndirect2KHR>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
18329 #[doc = "VK_KHR_shader_untyped_pointers"]
18330 pub mod shader_untyped_pointers {
18331 use super::super::*;
18332 pub use {
18333 crate::vk::KHR_SHADER_UNTYPED_POINTERS_EXTENSION_NAME as NAME,
18334 crate::vk::KHR_SHADER_UNTYPED_POINTERS_SPEC_VERSION as SPEC_VERSION,
18335 };
18336 }
18337 #[doc = "VK_KHR_portability_enumeration"]
18338 pub mod portability_enumeration {
18339 use super::super::*;
18340 pub use {
18341 crate::vk::KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME as NAME,
18342 crate::vk::KHR_PORTABILITY_ENUMERATION_SPEC_VERSION as SPEC_VERSION,
18343 };
18344 }
    #[doc = "VK_KHR_maintenance4"]
    pub mod maintenance4 {
        use super::super::*;
        pub use {
            crate::vk::KHR_MAINTENANCE_4_EXTENSION_NAME as NAME,
            crate::vk::KHR_MAINTENANCE_4_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_maintenance4 device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Device the table was loaded against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Loads the extension's device-level entry points for `device`,
            // resolving each symbol via `instance`'s vkGetDeviceProcAddr.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                // Raw Vulkan device handle these function pointers were loaded for.
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_maintenance4 device-level function pointers"]
        pub struct DeviceFn {
            pub get_device_buffer_memory_requirements_khr: PFN_vkGetDeviceBufferMemoryRequirements,
            pub get_device_image_memory_requirements_khr: PFN_vkGetDeviceImageMemoryRequirements,
            pub get_device_image_sparse_memory_requirements_khr:
                PFN_vkGetDeviceImageSparseMemoryRequirements,
        }
        // SAFETY: the struct only holds `PFN_*` function pointers, which carry
        // no thread-bound state and are safe to send/share across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Resolves every entry point via `_f`; a symbol the driver does not
            // provide is replaced by a stub that panics on first call.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_device_buffer_memory_requirements_khr: unsafe {
                        unsafe extern "system" fn get_device_buffer_memory_requirements_khr(
                            _device: crate::vk::Device,
                            _p_info: *const DeviceBufferMemoryRequirements<'_>,
                            _p_memory_requirements: *mut MemoryRequirements2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_device_buffer_memory_requirements_khr)
                            ))
                        }
                        let val = _f(c"vkGetDeviceBufferMemoryRequirementsKHR");
                        if val.is_null() {
                            get_device_buffer_memory_requirements_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDeviceBufferMemoryRequirements,
                            >(val)
                        }
                    },
                    get_device_image_memory_requirements_khr: unsafe {
                        unsafe extern "system" fn get_device_image_memory_requirements_khr(
                            _device: crate::vk::Device,
                            _p_info: *const DeviceImageMemoryRequirements<'_>,
                            _p_memory_requirements: *mut MemoryRequirements2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_device_image_memory_requirements_khr)
                            ))
                        }
                        let val = _f(c"vkGetDeviceImageMemoryRequirementsKHR");
                        if val.is_null() {
                            get_device_image_memory_requirements_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDeviceImageMemoryRequirements,
                            >(val)
                        }
                    },
                    get_device_image_sparse_memory_requirements_khr: unsafe {
                        unsafe extern "system" fn get_device_image_sparse_memory_requirements_khr(
                            _device: crate::vk::Device,
                            _p_info: *const DeviceImageMemoryRequirements<'_>,
                            _p_sparse_memory_requirement_count: *mut u32,
                            _p_sparse_memory_requirements: *mut SparseImageMemoryRequirements2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_device_image_sparse_memory_requirements_khr)
                            ))
                        }
                        let val = _f(c"vkGetDeviceImageSparseMemoryRequirementsKHR");
                        if val.is_null() {
                            get_device_image_sparse_memory_requirements_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDeviceImageSparseMemoryRequirements,
                            >(val)
                        }
                    },
                }
            }
        }
    }
18461 #[doc = "VK_KHR_shader_subgroup_rotate"]
18462 pub mod shader_subgroup_rotate {
18463 use super::super::*;
18464 pub use {
18465 crate::vk::KHR_SHADER_SUBGROUP_ROTATE_EXTENSION_NAME as NAME,
18466 crate::vk::KHR_SHADER_SUBGROUP_ROTATE_SPEC_VERSION as SPEC_VERSION,
18467 };
18468 }
18469 #[doc = "VK_KHR_shader_maximal_reconvergence"]
18470 pub mod shader_maximal_reconvergence {
18471 use super::super::*;
18472 pub use {
18473 crate::vk::KHR_SHADER_MAXIMAL_RECONVERGENCE_EXTENSION_NAME as NAME,
18474 crate::vk::KHR_SHADER_MAXIMAL_RECONVERGENCE_SPEC_VERSION as SPEC_VERSION,
18475 };
18476 }
    #[doc = "VK_KHR_maintenance5"]
    pub mod maintenance5 {
        use super::super::*;
        pub use {
            crate::vk::KHR_MAINTENANCE_5_EXTENSION_NAME as NAME,
            crate::vk::KHR_MAINTENANCE_5_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_maintenance5 device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Device the table was loaded against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Loads the extension's device-level entry points for `device`,
            // resolving each symbol via `instance`'s vkGetDeviceProcAddr.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                // Raw Vulkan device handle these function pointers were loaded for.
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_maintenance5 device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_bind_index_buffer2_khr: PFN_vkCmdBindIndexBuffer2,
            pub get_rendering_area_granularity_khr: PFN_vkGetRenderingAreaGranularity,
            pub get_device_image_subresource_layout_khr: PFN_vkGetDeviceImageSubresourceLayout,
            pub get_image_subresource_layout2_khr: PFN_vkGetImageSubresourceLayout2,
        }
        // SAFETY: the struct only holds `PFN_*` function pointers, which carry
        // no thread-bound state and are safe to send/share across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Resolves every entry point via `_f`; a symbol the driver does not
            // provide is replaced by a stub that panics on first call.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_bind_index_buffer2_khr: unsafe {
                        unsafe extern "system" fn cmd_bind_index_buffer2_khr(
                            _command_buffer: CommandBuffer,
                            _buffer: Buffer,
                            _offset: DeviceSize,
                            _size: DeviceSize,
                            _index_type: IndexType,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_bind_index_buffer2_khr)
                            ))
                        }
                        let val = _f(c"vkCmdBindIndexBuffer2KHR");
                        if val.is_null() {
                            cmd_bind_index_buffer2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdBindIndexBuffer2>(val)
                        }
                    },
                    get_rendering_area_granularity_khr: unsafe {
                        unsafe extern "system" fn get_rendering_area_granularity_khr(
                            _device: crate::vk::Device,
                            _p_rendering_area_info: *const RenderingAreaInfo<'_>,
                            _p_granularity: *mut Extent2D,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_rendering_area_granularity_khr)
                            ))
                        }
                        let val = _f(c"vkGetRenderingAreaGranularityKHR");
                        if val.is_null() {
                            get_rendering_area_granularity_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetRenderingAreaGranularity>(
                                val,
                            )
                        }
                    },
                    get_device_image_subresource_layout_khr: unsafe {
                        unsafe extern "system" fn get_device_image_subresource_layout_khr(
                            _device: crate::vk::Device,
                            _p_info: *const DeviceImageSubresourceInfo<'_>,
                            _p_layout: *mut SubresourceLayout2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_device_image_subresource_layout_khr)
                            ))
                        }
                        let val = _f(c"vkGetDeviceImageSubresourceLayoutKHR");
                        if val.is_null() {
                            get_device_image_subresource_layout_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetDeviceImageSubresourceLayout,
                            >(val)
                        }
                    },
                    get_image_subresource_layout2_khr: unsafe {
                        unsafe extern "system" fn get_image_subresource_layout2_khr(
                            _device: crate::vk::Device,
                            _image: Image,
                            _p_subresource: *const ImageSubresource2<'_>,
                            _p_layout: *mut SubresourceLayout2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_image_subresource_layout2_khr)
                            ))
                        }
                        let val = _f(c"vkGetImageSubresourceLayout2KHR");
                        if val.is_null() {
                            get_image_subresource_layout2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetImageSubresourceLayout2>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
18611 #[doc = "VK_KHR_present_id2"]
18612 pub mod present_id2 {
18613 use super::super::*;
18614 pub use {
18615 crate::vk::KHR_PRESENT_ID_2_EXTENSION_NAME as NAME,
18616 crate::vk::KHR_PRESENT_ID_2_SPEC_VERSION as SPEC_VERSION,
18617 };
18618 }
    #[doc = "VK_KHR_present_wait2"]
    pub mod present_wait2 {
        use super::super::*;
        pub use {
            crate::vk::KHR_PRESENT_WAIT_2_EXTENSION_NAME as NAME,
            crate::vk::KHR_PRESENT_WAIT_2_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_present_wait2 device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Device the table was loaded against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Loads the extension's device-level entry points for `device`,
            // resolving each symbol via `instance`'s vkGetDeviceProcAddr.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                // Raw Vulkan device handle these function pointers were loaded for.
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_present_wait2 device-level function pointers"]
        pub struct DeviceFn {
            pub wait_for_present2_khr: PFN_vkWaitForPresent2KHR,
        }
        // SAFETY: the struct only holds `PFN_*` function pointers, which carry
        // no thread-bound state and are safe to send/share across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Resolves every entry point via `_f`; a symbol the driver does not
            // provide is replaced by a stub that panics on first call.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    wait_for_present2_khr: unsafe {
                        unsafe extern "system" fn wait_for_present2_khr(
                            _device: crate::vk::Device,
                            _swapchain: SwapchainKHR,
                            _p_present_wait2_info: *const PresentWait2InfoKHR<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(wait_for_present2_khr)
                            ))
                        }
                        let val = _f(c"vkWaitForPresent2KHR");
                        if val.is_null() {
                            wait_for_present2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkWaitForPresent2KHR>(val)
                        }
                    },
                }
            }
        }
    }
18686 #[doc = "VK_KHR_ray_tracing_position_fetch"]
18687 pub mod ray_tracing_position_fetch {
18688 use super::super::*;
18689 pub use {
18690 crate::vk::KHR_RAY_TRACING_POSITION_FETCH_EXTENSION_NAME as NAME,
18691 crate::vk::KHR_RAY_TRACING_POSITION_FETCH_SPEC_VERSION as SPEC_VERSION,
18692 };
18693 }
    #[doc = "VK_KHR_pipeline_binary"]
    pub mod pipeline_binary {
        use super::super::*;
        pub use {
            crate::vk::KHR_PIPELINE_BINARY_EXTENSION_NAME as NAME,
            crate::vk::KHR_PIPELINE_BINARY_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_pipeline_binary device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Device the table was loaded against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Loads the extension's device-level entry points for `device`,
            // resolving each symbol via `instance`'s vkGetDeviceProcAddr.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                // Raw Vulkan device handle these function pointers were loaded for.
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_pipeline_binary device-level function pointers"]
        pub struct DeviceFn {
            pub create_pipeline_binaries_khr: PFN_vkCreatePipelineBinariesKHR,
            pub destroy_pipeline_binary_khr: PFN_vkDestroyPipelineBinaryKHR,
            pub get_pipeline_key_khr: PFN_vkGetPipelineKeyKHR,
            pub get_pipeline_binary_data_khr: PFN_vkGetPipelineBinaryDataKHR,
            pub release_captured_pipeline_data_khr: PFN_vkReleaseCapturedPipelineDataKHR,
        }
        // SAFETY: the struct only holds `PFN_*` function pointers, which carry
        // no thread-bound state and are safe to send/share across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Resolves every entry point via `_f`; a symbol the driver does not
            // provide is replaced by a stub that panics on first call.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_pipeline_binaries_khr: unsafe {
                        unsafe extern "system" fn create_pipeline_binaries_khr(
                            _device: crate::vk::Device,
                            _p_create_info: *const PipelineBinaryCreateInfoKHR<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_binaries: *mut PipelineBinaryHandlesInfoKHR<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_pipeline_binaries_khr)
                            ))
                        }
                        let val = _f(c"vkCreatePipelineBinariesKHR");
                        if val.is_null() {
                            create_pipeline_binaries_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCreatePipelineBinariesKHR>(
                                val,
                            )
                        }
                    },
                    destroy_pipeline_binary_khr: unsafe {
                        unsafe extern "system" fn destroy_pipeline_binary_khr(
                            _device: crate::vk::Device,
                            _pipeline_binary: PipelineBinaryKHR,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(destroy_pipeline_binary_khr)
                            ))
                        }
                        let val = _f(c"vkDestroyPipelineBinaryKHR");
                        if val.is_null() {
                            destroy_pipeline_binary_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkDestroyPipelineBinaryKHR>(
                                val,
                            )
                        }
                    },
                    get_pipeline_key_khr: unsafe {
                        unsafe extern "system" fn get_pipeline_key_khr(
                            _device: crate::vk::Device,
                            _p_pipeline_create_info: *const PipelineCreateInfoKHR<'_>,
                            _p_pipeline_key: *mut PipelineBinaryKeyKHR<'_>,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(get_pipeline_key_khr)))
                        }
                        let val = _f(c"vkGetPipelineKeyKHR");
                        if val.is_null() {
                            get_pipeline_key_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetPipelineKeyKHR>(val)
                        }
                    },
                    get_pipeline_binary_data_khr: unsafe {
                        unsafe extern "system" fn get_pipeline_binary_data_khr(
                            _device: crate::vk::Device,
                            _p_info: *const PipelineBinaryDataInfoKHR<'_>,
                            _p_pipeline_binary_key: *mut PipelineBinaryKeyKHR<'_>,
                            _p_pipeline_binary_data_size: *mut usize,
                            _p_pipeline_binary_data: *mut c_void,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_pipeline_binary_data_khr)
                            ))
                        }
                        let val = _f(c"vkGetPipelineBinaryDataKHR");
                        if val.is_null() {
                            get_pipeline_binary_data_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetPipelineBinaryDataKHR>(
                                val,
                            )
                        }
                    },
                    release_captured_pipeline_data_khr: unsafe {
                        unsafe extern "system" fn release_captured_pipeline_data_khr(
                            _device: crate::vk::Device,
                            _p_info: *const ReleaseCapturedPipelineDataInfoKHR<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(release_captured_pipeline_data_khr)
                            ))
                        }
                        let val = _f(c"vkReleaseCapturedPipelineDataKHR");
                        if val.is_null() {
                            release_captured_pipeline_data_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkReleaseCapturedPipelineDataKHR,
                            >(val)
                        }
                    },
                }
            }
        }
    }
18846 #[doc = "VK_KHR_surface_maintenance1"]
18847 pub mod surface_maintenance1 {
18848 use super::super::*;
18849 pub use {
18850 crate::vk::KHR_SURFACE_MAINTENANCE_1_EXTENSION_NAME as NAME,
18851 crate::vk::KHR_SURFACE_MAINTENANCE_1_SPEC_VERSION as SPEC_VERSION,
18852 };
18853 }
    #[doc = "VK_KHR_swapchain_maintenance1"]
    pub mod swapchain_maintenance1 {
        use super::super::*;
        pub use {
            crate::vk::KHR_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME as NAME,
            crate::vk::KHR_SWAPCHAIN_MAINTENANCE_1_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_swapchain_maintenance1 device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // Device the table was loaded against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Loads the extension's device-level entry points for `device`,
            // resolving each symbol via `instance`'s vkGetDeviceProcAddr.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                // Raw Vulkan device handle these function pointers were loaded for.
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_swapchain_maintenance1 device-level function pointers"]
        pub struct DeviceFn {
            pub release_swapchain_images_khr: PFN_vkReleaseSwapchainImagesKHR,
        }
        // SAFETY: the struct only holds `PFN_*` function pointers, which carry
        // no thread-bound state and are safe to send/share across threads.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Resolves every entry point via `_f`; a symbol the driver does not
            // provide is replaced by a stub that panics on first call.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    release_swapchain_images_khr: unsafe {
                        unsafe extern "system" fn release_swapchain_images_khr(
                            _device: crate::vk::Device,
                            _p_release_info: *const ReleaseSwapchainImagesInfoKHR<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(release_swapchain_images_khr)
                            ))
                        }
                        let val = _f(c"vkReleaseSwapchainImagesKHR");
                        if val.is_null() {
                            release_swapchain_images_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkReleaseSwapchainImagesKHR>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
18922 #[doc = "VK_KHR_internally_synchronized_queues"]
18923 pub mod internally_synchronized_queues {
18924 use super::super::*;
18925 pub use {
18926 crate::vk::KHR_INTERNALLY_SYNCHRONIZED_QUEUES_EXTENSION_NAME as NAME,
18927 crate::vk::KHR_INTERNALLY_SYNCHRONIZED_QUEUES_SPEC_VERSION as SPEC_VERSION,
18928 };
18929 }
    #[doc = "VK_KHR_cooperative_matrix"]
    pub mod cooperative_matrix {
        use super::super::*;
        pub use {
            crate::vk::KHR_COOPERATIVE_MATRIX_EXTENSION_NAME as NAME,
            crate::vk::KHR_COOPERATIVE_MATRIX_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_cooperative_matrix instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Function-pointer table resolved for `handle`.
            pub(crate) fp: InstanceFn,
            // The VkInstance these pointers were loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            // Resolves this extension's instance-level commands through
            // vkGetInstanceProcAddr obtained from `entry`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // Reinterpret the typed void-function pointer as an untyped
                    // pointer for the generic loader closure.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_cooperative_matrix instance-level function pointers"]
        pub struct InstanceFn {
            pub get_physical_device_cooperative_matrix_properties_khr:
                PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR,
        }
        // SAFETY: the table holds only plain `extern "system"` function
        // pointers, which have no thread affinity.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            // Resolves every entry point by invoking `f` with its Vulkan name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader; compiled once regardless of the
            // closure type passed to `load`.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_physical_device_cooperative_matrix_properties_khr: unsafe {
                        // Panicking stub used when the loader returns null.
                        unsafe extern "system" fn get_physical_device_cooperative_matrix_properties_khr(
                            _physical_device: PhysicalDevice,
                            _p_property_count: *mut u32,
                            _p_properties: *mut CooperativeMatrixPropertiesKHR<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_cooperative_matrix_properties_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR");
                        if val.is_null() {
                            get_physical_device_cooperative_matrix_properties_khr
                        } else {
                            // SAFETY: non-null proc address reinterpreted as its
                            // typed PFN with the matching signature.
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR,
                            >(val)
                        }
                    },
                }
            }
        }
    }
19001 #[doc = "VK_KHR_compute_shader_derivatives"]
19002 pub mod compute_shader_derivatives {
19003 use super::super::*;
19004 pub use {
19005 crate::vk::KHR_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME as NAME,
19006 crate::vk::KHR_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION as SPEC_VERSION,
19007 };
19008 }
19009 #[doc = "VK_KHR_video_decode_av1"]
19010 pub mod video_decode_av1 {
19011 use super::super::*;
19012 pub use {
19013 crate::vk::KHR_VIDEO_DECODE_AV1_EXTENSION_NAME as NAME,
19014 crate::vk::KHR_VIDEO_DECODE_AV1_SPEC_VERSION as SPEC_VERSION,
19015 };
19016 }
19017 #[doc = "VK_KHR_video_encode_av1"]
19018 pub mod video_encode_av1 {
19019 use super::super::*;
19020 pub use {
19021 crate::vk::KHR_VIDEO_ENCODE_AV1_EXTENSION_NAME as NAME,
19022 crate::vk::KHR_VIDEO_ENCODE_AV1_SPEC_VERSION as SPEC_VERSION,
19023 };
19024 }
19025 #[doc = "VK_KHR_video_decode_vp9"]
19026 pub mod video_decode_vp9 {
19027 use super::super::*;
19028 pub use {
19029 crate::vk::KHR_VIDEO_DECODE_VP9_EXTENSION_NAME as NAME,
19030 crate::vk::KHR_VIDEO_DECODE_VP9_SPEC_VERSION as SPEC_VERSION,
19031 };
19032 }
19033 #[doc = "VK_KHR_video_maintenance1"]
19034 pub mod video_maintenance1 {
19035 use super::super::*;
19036 pub use {
19037 crate::vk::KHR_VIDEO_MAINTENANCE_1_EXTENSION_NAME as NAME,
19038 crate::vk::KHR_VIDEO_MAINTENANCE_1_SPEC_VERSION as SPEC_VERSION,
19039 };
19040 }
19041 #[doc = "VK_KHR_vertex_attribute_divisor"]
19042 pub mod vertex_attribute_divisor {
19043 use super::super::*;
19044 pub use {
19045 crate::vk::KHR_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME as NAME,
19046 crate::vk::KHR_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION as SPEC_VERSION,
19047 };
19048 }
19049 #[doc = "VK_KHR_load_store_op_none"]
19050 pub mod load_store_op_none {
19051 use super::super::*;
19052 pub use {
19053 crate::vk::KHR_LOAD_STORE_OP_NONE_EXTENSION_NAME as NAME,
19054 crate::vk::KHR_LOAD_STORE_OP_NONE_SPEC_VERSION as SPEC_VERSION,
19055 };
19056 }
19057 #[doc = "VK_KHR_unified_image_layouts"]
19058 pub mod unified_image_layouts {
19059 use super::super::*;
19060 pub use {
19061 crate::vk::KHR_UNIFIED_IMAGE_LAYOUTS_EXTENSION_NAME as NAME,
19062 crate::vk::KHR_UNIFIED_IMAGE_LAYOUTS_SPEC_VERSION as SPEC_VERSION,
19063 };
19064 }
19065 #[doc = "VK_KHR_shader_float_controls2"]
19066 pub mod shader_float_controls2 {
19067 use super::super::*;
19068 pub use {
19069 crate::vk::KHR_SHADER_FLOAT_CONTROLS_2_EXTENSION_NAME as NAME,
19070 crate::vk::KHR_SHADER_FLOAT_CONTROLS_2_SPEC_VERSION as SPEC_VERSION,
19071 };
19072 }
19073 #[doc = "VK_KHR_index_type_uint8"]
19074 pub mod index_type_uint8 {
19075 use super::super::*;
19076 pub use {
19077 crate::vk::KHR_INDEX_TYPE_UINT8_EXTENSION_NAME as NAME,
19078 crate::vk::KHR_INDEX_TYPE_UINT8_SPEC_VERSION as SPEC_VERSION,
19079 };
19080 }
    #[doc = "VK_KHR_line_rasterization"]
    pub mod line_rasterization {
        use super::super::*;
        pub use {
            crate::vk::KHR_LINE_RASTERIZATION_EXTENSION_NAME as NAME,
            crate::vk::KHR_LINE_RASTERIZATION_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_line_rasterization device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Function-pointer table resolved for `handle`.
            pub(crate) fp: DeviceFn,
            // The VkDevice these pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level commands through
            // vkGetDeviceProcAddr obtained from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the typed void-function pointer as an untyped
                    // pointer for the generic loader closure.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_line_rasterization device-level function pointers"]
        pub struct DeviceFn {
            // Resolved from the "KHR"-suffixed name but stored with the
            // core PFN alias (same signature as the promoted command).
            pub cmd_set_line_stipple_khr: PFN_vkCmdSetLineStipple,
        }
        // SAFETY: the table holds only plain `extern "system"` function
        // pointers, which have no thread affinity.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            // Resolves every entry point by invoking `f` with its Vulkan name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader; compiled once regardless of the
            // closure type passed to `load`.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_set_line_stipple_khr: unsafe {
                        // Panicking stub used when the loader returns null.
                        unsafe extern "system" fn cmd_set_line_stipple_khr(
                            _command_buffer: CommandBuffer,
                            _line_stipple_factor: u32,
                            _line_stipple_pattern: u16,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_line_stipple_khr)
                            ))
                        }
                        let val = _f(c"vkCmdSetLineStippleKHR");
                        if val.is_null() {
                            cmd_set_line_stipple_khr
                        } else {
                            // SAFETY: non-null proc address reinterpreted as its
                            // typed PFN with the matching signature.
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetLineStipple>(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_KHR_calibrated_timestamps"]
    pub mod calibrated_timestamps {
        use super::super::*;
        pub use {
            crate::vk::KHR_CALIBRATED_TIMESTAMPS_EXTENSION_NAME as NAME,
            crate::vk::KHR_CALIBRATED_TIMESTAMPS_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_calibrated_timestamps instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Function-pointer table resolved for `handle`.
            pub(crate) fp: InstanceFn,
            // The VkInstance these pointers were loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            // Resolves this extension's instance-level commands through
            // vkGetInstanceProcAddr obtained from `entry`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // Reinterpret the typed void-function pointer as an untyped
                    // pointer for the generic loader closure.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_calibrated_timestamps instance-level function pointers"]
        pub struct InstanceFn {
            pub get_physical_device_calibrateable_time_domains_khr:
                PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR,
        }
        // SAFETY: the table holds only plain `extern "system"` function
        // pointers, which have no thread affinity.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            // Resolves every entry point by invoking `f` with its Vulkan name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader; compiled once regardless of the
            // closure type passed to `load`.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_physical_device_calibrateable_time_domains_khr: unsafe {
                        // Panicking stub used when the loader returns null.
                        unsafe extern "system" fn get_physical_device_calibrateable_time_domains_khr(
                            _physical_device: PhysicalDevice,
                            _p_time_domain_count: *mut u32,
                            _p_time_domains: *mut TimeDomainKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_calibrateable_time_domains_khr)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceCalibrateableTimeDomainsKHR");
                        if val.is_null() {
                            get_physical_device_calibrateable_time_domains_khr
                        } else {
                            // SAFETY: non-null proc address reinterpreted as its
                            // typed PFN with the matching signature.
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR,
                            >(val)
                        }
                    },
                }
            }
        }
        #[doc = "VK_KHR_calibrated_timestamps device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Function-pointer table resolved for `handle`.
            pub(crate) fp: DeviceFn,
            // The VkDevice these pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level commands through
            // vkGetDeviceProcAddr obtained from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_calibrated_timestamps device-level function pointers"]
        pub struct DeviceFn {
            pub get_calibrated_timestamps_khr: PFN_vkGetCalibratedTimestampsKHR,
        }
        // SAFETY: plain function pointers only; no thread affinity.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            // Resolves every entry point by invoking `f` with its Vulkan name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader; compiled once regardless of the
            // closure type passed to `load`.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_calibrated_timestamps_khr: unsafe {
                        // Panicking stub used when the loader returns null.
                        unsafe extern "system" fn get_calibrated_timestamps_khr(
                            _device: crate::vk::Device,
                            _timestamp_count: u32,
                            _p_timestamp_infos: *const CalibratedTimestampInfoKHR<'_>,
                            _p_timestamps: *mut u64,
                            _p_max_deviation: *mut u64,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_calibrated_timestamps_khr)
                            ))
                        }
                        let val = _f(c"vkGetCalibratedTimestampsKHR");
                        if val.is_null() {
                            get_calibrated_timestamps_khr
                        } else {
                            // SAFETY: non-null proc address reinterpreted as its
                            // typed PFN with the matching signature.
                            ::core::mem::transmute::<*const c_void, PFN_vkGetCalibratedTimestampsKHR>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
19282 #[doc = "VK_KHR_shader_expect_assume"]
19283 pub mod shader_expect_assume {
19284 use super::super::*;
19285 pub use {
19286 crate::vk::KHR_SHADER_EXPECT_ASSUME_EXTENSION_NAME as NAME,
19287 crate::vk::KHR_SHADER_EXPECT_ASSUME_SPEC_VERSION as SPEC_VERSION,
19288 };
19289 }
    #[doc = "VK_KHR_maintenance6"]
    pub mod maintenance6 {
        use super::super::*;
        pub use {
            crate::vk::KHR_MAINTENANCE_6_EXTENSION_NAME as NAME,
            crate::vk::KHR_MAINTENANCE_6_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_maintenance6 device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Function-pointer table resolved for `handle`.
            pub(crate) fp: DeviceFn,
            // The VkDevice these pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level commands through
            // vkGetDeviceProcAddr obtained from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the typed void-function pointer as an untyped
                    // pointer for the generic loader closure.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_maintenance6 device-level function pointers"]
        pub struct DeviceFn {
            // KHR-suffixed commands promoted to core keep the core PFN alias;
            // EXT-interaction commands keep their EXT-typed aliases.
            pub cmd_bind_descriptor_sets2_khr: PFN_vkCmdBindDescriptorSets2,
            pub cmd_push_constants2_khr: PFN_vkCmdPushConstants2,
            pub cmd_push_descriptor_set2_khr: PFN_vkCmdPushDescriptorSet2,
            pub cmd_push_descriptor_set_with_template2_khr: PFN_vkCmdPushDescriptorSetWithTemplate2,
            pub cmd_set_descriptor_buffer_offsets2_ext: PFN_vkCmdSetDescriptorBufferOffsets2EXT,
            pub cmd_bind_descriptor_buffer_embedded_samplers2_ext:
                PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT,
        }
        // SAFETY: the table holds only plain `extern "system"` function
        // pointers, which have no thread affinity.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            // Resolves every entry point by invoking `f` with its Vulkan name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader; compiled once regardless of the
            // closure type passed to `load`. Each field gets either the
            // resolved pointer or a panicking stub when resolution fails.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_bind_descriptor_sets2_khr: unsafe {
                        // Panicking stub used when the loader returns null.
                        unsafe extern "system" fn cmd_bind_descriptor_sets2_khr(
                            _command_buffer: CommandBuffer,
                            _p_bind_descriptor_sets_info: *const BindDescriptorSetsInfo<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_bind_descriptor_sets2_khr)
                            ))
                        }
                        let val = _f(c"vkCmdBindDescriptorSets2KHR");
                        if val.is_null() {
                            cmd_bind_descriptor_sets2_khr
                        } else {
                            // SAFETY: non-null proc address reinterpreted as its
                            // typed PFN with the matching signature.
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdBindDescriptorSets2>(
                                val,
                            )
                        }
                    },
                    cmd_push_constants2_khr: unsafe {
                        unsafe extern "system" fn cmd_push_constants2_khr(
                            _command_buffer: CommandBuffer,
                            _p_push_constants_info: *const PushConstantsInfo<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_push_constants2_khr)
                            ))
                        }
                        let val = _f(c"vkCmdPushConstants2KHR");
                        if val.is_null() {
                            cmd_push_constants2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdPushConstants2>(val)
                        }
                    },
                    cmd_push_descriptor_set2_khr: unsafe {
                        unsafe extern "system" fn cmd_push_descriptor_set2_khr(
                            _command_buffer: CommandBuffer,
                            _p_push_descriptor_set_info: *const PushDescriptorSetInfo<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_push_descriptor_set2_khr)
                            ))
                        }
                        let val = _f(c"vkCmdPushDescriptorSet2KHR");
                        if val.is_null() {
                            cmd_push_descriptor_set2_khr
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdPushDescriptorSet2>(
                                val,
                            )
                        }
                    },
                    cmd_push_descriptor_set_with_template2_khr: unsafe {
                        unsafe extern "system" fn cmd_push_descriptor_set_with_template2_khr(
                            _command_buffer: CommandBuffer,
                            _p_push_descriptor_set_with_template_info: *const PushDescriptorSetWithTemplateInfo<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_push_descriptor_set_with_template2_khr)
                            ))
                        }
                        let val = _f(c"vkCmdPushDescriptorSetWithTemplate2KHR");
                        if val.is_null() {
                            cmd_push_descriptor_set_with_template2_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdPushDescriptorSetWithTemplate2,
                            >(val)
                        }
                    },
                    cmd_set_descriptor_buffer_offsets2_ext: unsafe {
                        unsafe extern "system" fn cmd_set_descriptor_buffer_offsets2_ext(
                            _command_buffer: CommandBuffer,
                            _p_set_descriptor_buffer_offsets_info: *const SetDescriptorBufferOffsetsInfoEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_descriptor_buffer_offsets2_ext)
                            ))
                        }
                        let val = _f(c"vkCmdSetDescriptorBufferOffsets2EXT");
                        if val.is_null() {
                            cmd_set_descriptor_buffer_offsets2_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdSetDescriptorBufferOffsets2EXT,
                            >(val)
                        }
                    },
                    cmd_bind_descriptor_buffer_embedded_samplers2_ext: unsafe {
                        unsafe extern "system" fn cmd_bind_descriptor_buffer_embedded_samplers2_ext(
                            _command_buffer: CommandBuffer,
                            _p_bind_descriptor_buffer_embedded_samplers_info: *const BindDescriptorBufferEmbeddedSamplersInfoEXT<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_bind_descriptor_buffer_embedded_samplers2_ext)
                            ))
                        }
                        let val = _f(c"vkCmdBindDescriptorBufferEmbeddedSamplers2EXT");
                        if val.is_null() {
                            cmd_bind_descriptor_buffer_embedded_samplers2_ext
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_KHR_copy_memory_indirect"]
    pub mod copy_memory_indirect {
        use super::super::*;
        pub use {
            crate::vk::KHR_COPY_MEMORY_INDIRECT_EXTENSION_NAME as NAME,
            crate::vk::KHR_COPY_MEMORY_INDIRECT_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_copy_memory_indirect device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Function-pointer table resolved for `handle`.
            pub(crate) fp: DeviceFn,
            // The VkDevice these pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level commands through
            // vkGetDeviceProcAddr obtained from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the typed void-function pointer as an untyped
                    // pointer for the generic loader closure.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_copy_memory_indirect device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_copy_memory_indirect_khr: PFN_vkCmdCopyMemoryIndirectKHR,
            pub cmd_copy_memory_to_image_indirect_khr: PFN_vkCmdCopyMemoryToImageIndirectKHR,
        }
        // SAFETY: the table holds only plain `extern "system"` function
        // pointers, which have no thread affinity.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            // Resolves every entry point by invoking `f` with its Vulkan name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader; compiled once regardless of the
            // closure type passed to `load`.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_copy_memory_indirect_khr: unsafe {
                        // Panicking stub used when the loader returns null.
                        unsafe extern "system" fn cmd_copy_memory_indirect_khr(
                            _command_buffer: CommandBuffer,
                            _p_copy_memory_indirect_info: *const CopyMemoryIndirectInfoKHR<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_copy_memory_indirect_khr)
                            ))
                        }
                        let val = _f(c"vkCmdCopyMemoryIndirectKHR");
                        if val.is_null() {
                            cmd_copy_memory_indirect_khr
                        } else {
                            // SAFETY: non-null proc address reinterpreted as its
                            // typed PFN with the matching signature.
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdCopyMemoryIndirectKHR>(
                                val,
                            )
                        }
                    },
                    cmd_copy_memory_to_image_indirect_khr: unsafe {
                        unsafe extern "system" fn cmd_copy_memory_to_image_indirect_khr(
                            _command_buffer: CommandBuffer,
                            _p_copy_memory_to_image_indirect_info: *const CopyMemoryToImageIndirectInfoKHR<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_copy_memory_to_image_indirect_khr)
                            ))
                        }
                        let val = _f(c"vkCmdCopyMemoryToImageIndirectKHR");
                        if val.is_null() {
                            cmd_copy_memory_to_image_indirect_khr
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdCopyMemoryToImageIndirectKHR,
                            >(val)
                        }
                    },
                }
            }
        }
    }
19549 #[doc = "VK_KHR_video_encode_intra_refresh"]
19550 pub mod video_encode_intra_refresh {
19551 use super::super::*;
19552 pub use {
19553 crate::vk::KHR_VIDEO_ENCODE_INTRA_REFRESH_EXTENSION_NAME as NAME,
19554 crate::vk::KHR_VIDEO_ENCODE_INTRA_REFRESH_SPEC_VERSION as SPEC_VERSION,
19555 };
19556 }
19557 #[doc = "VK_KHR_video_encode_quantization_map"]
19558 pub mod video_encode_quantization_map {
19559 use super::super::*;
19560 pub use {
19561 crate::vk::KHR_VIDEO_ENCODE_QUANTIZATION_MAP_EXTENSION_NAME as NAME,
19562 crate::vk::KHR_VIDEO_ENCODE_QUANTIZATION_MAP_SPEC_VERSION as SPEC_VERSION,
19563 };
19564 }
19565 #[doc = "VK_KHR_shader_relaxed_extended_instruction"]
19566 pub mod shader_relaxed_extended_instruction {
19567 use super::super::*;
19568 pub use {
19569 crate::vk::KHR_SHADER_RELAXED_EXTENDED_INSTRUCTION_EXTENSION_NAME as NAME,
19570 crate::vk::KHR_SHADER_RELAXED_EXTENDED_INSTRUCTION_SPEC_VERSION as SPEC_VERSION,
19571 };
19572 }
19573 #[doc = "VK_KHR_maintenance7"]
19574 pub mod maintenance7 {
19575 use super::super::*;
19576 pub use {
19577 crate::vk::KHR_MAINTENANCE_7_EXTENSION_NAME as NAME,
19578 crate::vk::KHR_MAINTENANCE_7_SPEC_VERSION as SPEC_VERSION,
19579 };
19580 }
19581 #[doc = "VK_KHR_maintenance8"]
19582 pub mod maintenance8 {
19583 use super::super::*;
19584 pub use {
19585 crate::vk::KHR_MAINTENANCE_8_EXTENSION_NAME as NAME,
19586 crate::vk::KHR_MAINTENANCE_8_SPEC_VERSION as SPEC_VERSION,
19587 };
19588 }
19589 #[doc = "VK_KHR_shader_fma"]
19590 pub mod shader_fma {
19591 use super::super::*;
19592 pub use {
19593 crate::vk::KHR_SHADER_FMA_EXTENSION_NAME as NAME,
19594 crate::vk::KHR_SHADER_FMA_SPEC_VERSION as SPEC_VERSION,
19595 };
19596 }
19597 #[doc = "VK_KHR_maintenance9"]
19598 pub mod maintenance9 {
19599 use super::super::*;
19600 pub use {
19601 crate::vk::KHR_MAINTENANCE_9_EXTENSION_NAME as NAME,
19602 crate::vk::KHR_MAINTENANCE_9_SPEC_VERSION as SPEC_VERSION,
19603 };
19604 }
19605 #[doc = "VK_KHR_video_maintenance2"]
19606 pub mod video_maintenance2 {
19607 use super::super::*;
19608 pub use {
19609 crate::vk::KHR_VIDEO_MAINTENANCE_2_EXTENSION_NAME as NAME,
19610 crate::vk::KHR_VIDEO_MAINTENANCE_2_SPEC_VERSION as SPEC_VERSION,
19611 };
19612 }
19613 #[doc = "VK_KHR_depth_clamp_zero_one"]
19614 pub mod depth_clamp_zero_one {
19615 use super::super::*;
19616 pub use {
19617 crate::vk::KHR_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME as NAME,
19618 crate::vk::KHR_DEPTH_CLAMP_ZERO_ONE_SPEC_VERSION as SPEC_VERSION,
19619 };
19620 }
19621 #[doc = "VK_KHR_robustness2"]
19622 pub mod robustness2 {
19623 use super::super::*;
19624 pub use {
19625 crate::vk::KHR_ROBUSTNESS_2_EXTENSION_NAME as NAME,
19626 crate::vk::KHR_ROBUSTNESS_2_SPEC_VERSION as SPEC_VERSION,
19627 };
19628 }
19629 #[doc = "VK_KHR_present_mode_fifo_latest_ready"]
19630 pub mod present_mode_fifo_latest_ready {
19631 use super::super::*;
19632 pub use {
19633 crate::vk::KHR_PRESENT_MODE_FIFO_LATEST_READY_EXTENSION_NAME as NAME,
19634 crate::vk::KHR_PRESENT_MODE_FIFO_LATEST_READY_SPEC_VERSION as SPEC_VERSION,
19635 };
19636 }
    #[doc = "VK_KHR_maintenance10"]
    pub mod maintenance10 {
        use super::super::*;
        pub use {
            crate::vk::KHR_MAINTENANCE_10_EXTENSION_NAME as NAME,
            crate::vk::KHR_MAINTENANCE_10_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_KHR_maintenance10 device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Function-pointer table resolved for `handle`.
            pub(crate) fp: DeviceFn,
            // The VkDevice these pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            // Resolves this extension's device-level commands through
            // vkGetDeviceProcAddr obtained from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the typed void-function pointer as an untyped
                    // pointer for the generic loader closure.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_KHR_maintenance10 device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_end_rendering2_khr: PFN_vkCmdEndRendering2KHR,
        }
        // SAFETY: the table holds only plain `extern "system"` function
        // pointers, which have no thread affinity.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            // Resolves every entry point by invoking `f` with its Vulkan name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader; compiled once regardless of the
            // closure type passed to `load`.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_end_rendering2_khr: unsafe {
                        // Panicking stub used when the loader returns null.
                        unsafe extern "system" fn cmd_end_rendering2_khr(
                            _command_buffer: CommandBuffer,
                            _p_rendering_end_info: *const RenderingEndInfoKHR<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_end_rendering2_khr)
                            ))
                        }
                        let val = _f(c"vkCmdEndRendering2KHR");
                        if val.is_null() {
                            cmd_end_rendering2_khr
                        } else {
                            // SAFETY: non-null proc address reinterpreted as its
                            // typed PFN with the matching signature.
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdEndRendering2KHR>(val)
                        }
                    },
                }
            }
        }
    }
19703}
19704#[doc = "Extensions tagged LUNARG"]
19705pub mod lunarg {
19706 #[doc = "VK_LUNARG_direct_driver_loading"]
19707 pub mod direct_driver_loading {
19708 use super::super::*;
19709 pub use {
19710 crate::vk::LUNARG_DIRECT_DRIVER_LOADING_EXTENSION_NAME as NAME,
19711 crate::vk::LUNARG_DIRECT_DRIVER_LOADING_SPEC_VERSION as SPEC_VERSION,
19712 };
19713 }
19714}
19715#[doc = "Extensions tagged MESA"]
19716pub mod mesa {
19717 #[doc = "VK_MESA_image_alignment_control"]
19718 pub mod image_alignment_control {
19719 use super::super::*;
19720 pub use {
19721 crate::vk::MESA_IMAGE_ALIGNMENT_CONTROL_EXTENSION_NAME as NAME,
19722 crate::vk::MESA_IMAGE_ALIGNMENT_CONTROL_SPEC_VERSION as SPEC_VERSION,
19723 };
19724 }
19725}
19726#[doc = "Extensions tagged MSFT"]
19727pub mod msft {
19728 #[doc = "VK_MSFT_layered_driver"]
19729 pub mod layered_driver {
19730 use super::super::*;
19731 pub use {
19732 crate::vk::MSFT_LAYERED_DRIVER_EXTENSION_NAME as NAME,
19733 crate::vk::MSFT_LAYERED_DRIVER_SPEC_VERSION as SPEC_VERSION,
19734 };
19735 }
19736}
#[doc = "Extensions tagged MVK"]
pub mod mvk {
    #[doc = "VK_MVK_ios_surface"]
    pub mod ios_surface {
        use super::super::*;
        pub use {
            crate::vk::MVK_IOS_SURFACE_EXTENSION_NAME as NAME,
            crate::vk::MVK_IOS_SURFACE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_MVK_ios_surface instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Function-pointer table resolved for `handle`.
            pub(crate) fp: InstanceFn,
            // The VkInstance these pointers were loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            // Resolves this extension's instance-level commands through
            // vkGetInstanceProcAddr obtained from `entry`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // Reinterpret the typed void-function pointer as an untyped
                    // pointer for the generic loader closure.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_MVK_ios_surface instance-level function pointers"]
        pub struct InstanceFn {
            pub create_ios_surface_mvk: PFN_vkCreateIOSSurfaceMVK,
        }
        // SAFETY: the table holds only plain `extern "system"` function
        // pointers, which have no thread affinity.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            // Resolves every entry point by invoking `f` with its Vulkan name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader; compiled once regardless of the
            // closure type passed to `load`.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_ios_surface_mvk: unsafe {
                        // Panicking stub used when the loader returns null.
                        unsafe extern "system" fn create_ios_surface_mvk(
                            _instance: crate::vk::Instance,
                            _p_create_info: *const IOSSurfaceCreateInfoMVK<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_surface: *mut SurfaceKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_ios_surface_mvk)
                            ))
                        }
                        let val = _f(c"vkCreateIOSSurfaceMVK");
                        if val.is_null() {
                            create_ios_surface_mvk
                        } else {
                            // SAFETY: non-null proc address reinterpreted as its
                            // typed PFN with the matching signature.
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateIOSSurfaceMVK>(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_MVK_macos_surface"]
    pub mod macos_surface {
        use super::super::*;
        pub use {
            crate::vk::MVK_MACOS_SURFACE_EXTENSION_NAME as NAME,
            crate::vk::MVK_MACOS_SURFACE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_MVK_macos_surface instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Function-pointer table resolved for `handle`.
            pub(crate) fp: InstanceFn,
            // The VkInstance these pointers were loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            // Resolves this extension's instance-level commands through
            // vkGetInstanceProcAddr obtained from `entry`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_MVK_macos_surface instance-level function pointers"]
        pub struct InstanceFn {
            pub create_mac_os_surface_mvk: PFN_vkCreateMacOSSurfaceMVK,
        }
        // SAFETY: plain function pointers only; no thread affinity.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            // Resolves every entry point by invoking `f` with its Vulkan name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader; compiled once regardless of the
            // closure type passed to `load`.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_mac_os_surface_mvk: unsafe {
                        // Panicking stub used when the loader returns null.
                        unsafe extern "system" fn create_mac_os_surface_mvk(
                            _instance: crate::vk::Instance,
                            _p_create_info: *const MacOSSurfaceCreateInfoMVK<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_surface: *mut SurfaceKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_mac_os_surface_mvk)
                            ))
                        }
                        let val = _f(c"vkCreateMacOSSurfaceMVK");
                        if val.is_null() {
                            create_mac_os_surface_mvk
                        } else {
                            // SAFETY: non-null proc address reinterpreted as its
                            // typed PFN with the matching signature.
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateMacOSSurfaceMVK>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
}
#[doc = "Extensions tagged NN"]
pub mod nn {
    #[doc = "VK_NN_vi_surface"]
    pub mod vi_surface {
        use super::super::*;
        pub use {
            crate::vk::NN_VI_SURFACE_EXTENSION_NAME as NAME,
            crate::vk::NN_VI_SURFACE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NN_vi_surface instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: InstanceFn,
            // The VkInstance the pointers were loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Loads this extension's entry points via `vkGetInstanceProcAddr`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // Reinterpret the returned PFN as a raw pointer so the
                    // loader can null-check it.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            /// The `VkInstance` this table was loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NN_vi_surface instance-level function pointers"]
        pub struct InstanceFn {
            pub create_vi_surface_nn: PFN_vkCreateViSurfaceNN,
        }
        // SAFETY: only plain `extern "system"` function pointers are stored.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Loads each entry point by name, falling back to a panicking
            /// stub when the loader returns null.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader; compiled once regardless of `F`.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_vi_surface_nn: unsafe {
                        // Panicking stub installed when the function is absent.
                        unsafe extern "system" fn create_vi_surface_nn(
                            _instance: crate::vk::Instance,
                            _p_create_info: *const ViSurfaceCreateInfoNN<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_surface: *mut SurfaceKHR,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(create_vi_surface_nn)))
                        }
                        let val = _f(c"vkCreateViSurfaceNN");
                        if val.is_null() {
                            create_vi_surface_nn
                        } else {
                            // SAFETY: a non-null pointer returned for this
                            // name has this PFN signature.
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateViSurfaceNN>(val)
                        }
                    },
                }
            }
        }
    }
}
19946#[doc = "Extensions tagged NV"]
19947pub mod nv {
19948 #[doc = "VK_NV_glsl_shader"]
19949 pub mod glsl_shader {
19950 use super::super::*;
19951 pub use {
19952 crate::vk::NV_GLSL_SHADER_EXTENSION_NAME as NAME,
19953 crate::vk::NV_GLSL_SHADER_SPEC_VERSION as SPEC_VERSION,
19954 };
19955 }
19956 #[doc = "VK_NV_dedicated_allocation"]
19957 pub mod dedicated_allocation {
19958 use super::super::*;
19959 pub use {
19960 crate::vk::NV_DEDICATED_ALLOCATION_EXTENSION_NAME as NAME,
19961 crate::vk::NV_DEDICATED_ALLOCATION_SPEC_VERSION as SPEC_VERSION,
19962 };
19963 }
19964 #[doc = "VK_NV_corner_sampled_image"]
19965 pub mod corner_sampled_image {
19966 use super::super::*;
19967 pub use {
19968 crate::vk::NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME as NAME,
19969 crate::vk::NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION as SPEC_VERSION,
19970 };
19971 }
    #[doc = "VK_NV_external_memory_capabilities"]
    pub mod external_memory_capabilities {
        use super::super::*;
        pub use {
            crate::vk::NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME as NAME,
            crate::vk::NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_external_memory_capabilities instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: InstanceFn,
            // The VkInstance the pointers were loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Loads this extension's entry points via `vkGetInstanceProcAddr`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // Reinterpret the returned PFN as a raw pointer so the
                    // loader can null-check it.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            /// The `VkInstance` this table was loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_external_memory_capabilities instance-level function pointers"]
        pub struct InstanceFn {
            pub get_physical_device_external_image_format_properties_nv:
                PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV,
        }
        // SAFETY: only plain `extern "system"` function pointers are stored.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Loads each entry point by name, falling back to a panicking
            /// stub when the loader returns null.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader; compiled once regardless of `F`.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_physical_device_external_image_format_properties_nv: unsafe {
                        // Panicking stub installed when the function is absent.
                        unsafe extern "system" fn get_physical_device_external_image_format_properties_nv(
                            _physical_device: PhysicalDevice,
                            _format: Format,
                            _ty: ImageType,
                            _tiling: ImageTiling,
                            _usage: ImageUsageFlags,
                            _flags: ImageCreateFlags,
                            _external_handle_type: ExternalMemoryHandleTypeFlagsNV,
                            _p_external_image_format_properties : * mut ExternalImageFormatPropertiesNV,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_external_image_format_properties_nv)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceExternalImageFormatPropertiesNV");
                        if val.is_null() {
                            get_physical_device_external_image_format_properties_nv
                        } else {
                            // SAFETY: a non-null pointer returned for this
                            // name has this PFN signature.
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV,
                            >(val)
                        }
                    },
                }
            }
        }
    }
20048 #[doc = "VK_NV_external_memory"]
20049 pub mod external_memory {
20050 use super::super::*;
20051 pub use {
20052 crate::vk::NV_EXTERNAL_MEMORY_EXTENSION_NAME as NAME,
20053 crate::vk::NV_EXTERNAL_MEMORY_SPEC_VERSION as SPEC_VERSION,
20054 };
20055 }
    #[doc = "VK_NV_external_memory_win32"]
    pub mod external_memory_win32 {
        use super::super::*;
        pub use {
            crate::vk::NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME as NAME,
            crate::vk::NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_external_memory_win32 device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The VkDevice the pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's entry points via `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the returned PFN as a raw pointer so the
                    // loader can null-check it.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// The `VkDevice` this table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_external_memory_win32 device-level function pointers"]
        pub struct DeviceFn {
            pub get_memory_win32_handle_nv: PFN_vkGetMemoryWin32HandleNV,
        }
        // SAFETY: only plain `extern "system"` function pointers are stored.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads each entry point by name, falling back to a panicking
            /// stub when the loader returns null.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader; compiled once regardless of `F`.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_memory_win32_handle_nv: unsafe {
                        // Panicking stub installed when the function is absent.
                        unsafe extern "system" fn get_memory_win32_handle_nv(
                            _device: crate::vk::Device,
                            _memory: DeviceMemory,
                            _handle_type: ExternalMemoryHandleTypeFlagsNV,
                            _p_handle: *mut HANDLE,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_memory_win32_handle_nv)
                            ))
                        }
                        let val = _f(c"vkGetMemoryWin32HandleNV");
                        if val.is_null() {
                            get_memory_win32_handle_nv
                        } else {
                            // SAFETY: a non-null pointer returned for this
                            // name has this PFN signature.
                            ::core::mem::transmute::<*const c_void, PFN_vkGetMemoryWin32HandleNV>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
20126 #[doc = "VK_NV_win32_keyed_mutex"]
20127 pub mod win32_keyed_mutex {
20128 use super::super::*;
20129 pub use {
20130 crate::vk::NV_WIN32_KEYED_MUTEX_EXTENSION_NAME as NAME,
20131 crate::vk::NV_WIN32_KEYED_MUTEX_SPEC_VERSION as SPEC_VERSION,
20132 };
20133 }
    #[doc = "VK_NV_clip_space_w_scaling"]
    pub mod clip_space_w_scaling {
        use super::super::*;
        pub use {
            crate::vk::NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME as NAME,
            crate::vk::NV_CLIP_SPACE_W_SCALING_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_clip_space_w_scaling device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The VkDevice the pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's entry points via `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the returned PFN as a raw pointer so the
                    // loader can null-check it.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// The `VkDevice` this table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_clip_space_w_scaling device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_set_viewport_w_scaling_nv: PFN_vkCmdSetViewportWScalingNV,
        }
        // SAFETY: only plain `extern "system"` function pointers are stored.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads each entry point by name, falling back to a panicking
            /// stub when the loader returns null.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader; compiled once regardless of `F`.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_set_viewport_w_scaling_nv: unsafe {
                        // Panicking stub installed when the function is absent.
                        unsafe extern "system" fn cmd_set_viewport_w_scaling_nv(
                            _command_buffer: CommandBuffer,
                            _first_viewport: u32,
                            _viewport_count: u32,
                            _p_viewport_w_scalings: *const ViewportWScalingNV,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_viewport_w_scaling_nv)
                            ))
                        }
                        let val = _f(c"vkCmdSetViewportWScalingNV");
                        if val.is_null() {
                            cmd_set_viewport_w_scaling_nv
                        } else {
                            // SAFETY: a non-null pointer returned for this
                            // name has this PFN signature.
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetViewportWScalingNV>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
20204 #[doc = "VK_NV_sample_mask_override_coverage"]
20205 pub mod sample_mask_override_coverage {
20206 use super::super::*;
20207 pub use {
20208 crate::vk::NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME as NAME,
20209 crate::vk::NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION as SPEC_VERSION,
20210 };
20211 }
20212 #[doc = "VK_NV_geometry_shader_passthrough"]
20213 pub mod geometry_shader_passthrough {
20214 use super::super::*;
20215 pub use {
20216 crate::vk::NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME as NAME,
20217 crate::vk::NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION as SPEC_VERSION,
20218 };
20219 }
20220 #[doc = "VK_NV_viewport_array2"]
20221 pub mod viewport_array2 {
20222 use super::super::*;
20223 pub use {
20224 crate::vk::NV_VIEWPORT_ARRAY_2_EXTENSION_NAME as NAME,
20225 crate::vk::NV_VIEWPORT_ARRAY_2_SPEC_VERSION as SPEC_VERSION,
20226 };
20227 }
20228 #[doc = "VK_NV_viewport_swizzle"]
20229 pub mod viewport_swizzle {
20230 use super::super::*;
20231 pub use {
20232 crate::vk::NV_VIEWPORT_SWIZZLE_EXTENSION_NAME as NAME,
20233 crate::vk::NV_VIEWPORT_SWIZZLE_SPEC_VERSION as SPEC_VERSION,
20234 };
20235 }
20236 #[doc = "VK_NV_fragment_coverage_to_color"]
20237 pub mod fragment_coverage_to_color {
20238 use super::super::*;
20239 pub use {
20240 crate::vk::NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME as NAME,
20241 crate::vk::NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION as SPEC_VERSION,
20242 };
20243 }
20244 #[doc = "VK_NV_framebuffer_mixed_samples"]
20245 pub mod framebuffer_mixed_samples {
20246 use super::super::*;
20247 pub use {
20248 crate::vk::NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME as NAME,
20249 crate::vk::NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION as SPEC_VERSION,
20250 };
20251 }
20252 #[doc = "VK_NV_fill_rectangle"]
20253 pub mod fill_rectangle {
20254 use super::super::*;
20255 pub use {
20256 crate::vk::NV_FILL_RECTANGLE_EXTENSION_NAME as NAME,
20257 crate::vk::NV_FILL_RECTANGLE_SPEC_VERSION as SPEC_VERSION,
20258 };
20259 }
20260 #[doc = "VK_NV_shader_sm_builtins"]
20261 pub mod shader_sm_builtins {
20262 use super::super::*;
20263 pub use {
20264 crate::vk::NV_SHADER_SM_BUILTINS_EXTENSION_NAME as NAME,
20265 crate::vk::NV_SHADER_SM_BUILTINS_SPEC_VERSION as SPEC_VERSION,
20266 };
20267 }
    #[doc = "VK_NV_shading_rate_image"]
    pub mod shading_rate_image {
        use super::super::*;
        pub use {
            crate::vk::NV_SHADING_RATE_IMAGE_EXTENSION_NAME as NAME,
            crate::vk::NV_SHADING_RATE_IMAGE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_shading_rate_image device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The VkDevice the pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's entry points via `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the returned PFN as a raw pointer so the
                    // loader can null-check it.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// The `VkDevice` this table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_shading_rate_image device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_bind_shading_rate_image_nv: PFN_vkCmdBindShadingRateImageNV,
            pub cmd_set_viewport_shading_rate_palette_nv: PFN_vkCmdSetViewportShadingRatePaletteNV,
            pub cmd_set_coarse_sample_order_nv: PFN_vkCmdSetCoarseSampleOrderNV,
        }
        // SAFETY: only plain `extern "system"` function pointers are stored.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads each entry point by name, falling back to a panicking
            /// stub when the loader returns null.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader; compiled once regardless of `F`.
            // Each field follows the same pattern: a panicking stub is used
            // when the loader returns null, otherwise the non-null pointer is
            // transmuted to the matching PFN type (SAFETY: the loader
            // guarantees the signature for that name).
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_bind_shading_rate_image_nv: unsafe {
                        unsafe extern "system" fn cmd_bind_shading_rate_image_nv(
                            _command_buffer: CommandBuffer,
                            _image_view: ImageView,
                            _image_layout: ImageLayout,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_bind_shading_rate_image_nv)
                            ))
                        }
                        let val = _f(c"vkCmdBindShadingRateImageNV");
                        if val.is_null() {
                            cmd_bind_shading_rate_image_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdBindShadingRateImageNV>(
                                val,
                            )
                        }
                    },
                    cmd_set_viewport_shading_rate_palette_nv: unsafe {
                        unsafe extern "system" fn cmd_set_viewport_shading_rate_palette_nv(
                            _command_buffer: CommandBuffer,
                            _first_viewport: u32,
                            _viewport_count: u32,
                            _p_shading_rate_palettes: *const ShadingRatePaletteNV<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_viewport_shading_rate_palette_nv)
                            ))
                        }
                        let val = _f(c"vkCmdSetViewportShadingRatePaletteNV");
                        if val.is_null() {
                            cmd_set_viewport_shading_rate_palette_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdSetViewportShadingRatePaletteNV,
                            >(val)
                        }
                    },
                    cmd_set_coarse_sample_order_nv: unsafe {
                        unsafe extern "system" fn cmd_set_coarse_sample_order_nv(
                            _command_buffer: CommandBuffer,
                            _sample_order_type: CoarseSampleOrderTypeNV,
                            _custom_sample_order_count: u32,
                            _p_custom_sample_orders: *const CoarseSampleOrderCustomNV<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_coarse_sample_order_nv)
                            ))
                        }
                        let val = _f(c"vkCmdSetCoarseSampleOrderNV");
                        if val.is_null() {
                            cmd_set_coarse_sample_order_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetCoarseSampleOrderNV>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_NV_ray_tracing"]
    pub mod ray_tracing {
        use super::super::*;
        pub use {
            crate::vk::NV_RAY_TRACING_EXTENSION_NAME as NAME,
            crate::vk::NV_RAY_TRACING_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_ray_tracing device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The VkDevice the pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's entry points via `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the returned PFN as a raw pointer so the
                    // loader can null-check it.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// The `VkDevice` this table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_ray_tracing device-level function pointers"]
        pub struct DeviceFn {
            pub create_acceleration_structure_nv: PFN_vkCreateAccelerationStructureNV,
            pub destroy_acceleration_structure_nv: PFN_vkDestroyAccelerationStructureNV,
            pub get_acceleration_structure_memory_requirements_nv:
                PFN_vkGetAccelerationStructureMemoryRequirementsNV,
            pub bind_acceleration_structure_memory_nv: PFN_vkBindAccelerationStructureMemoryNV,
            pub cmd_build_acceleration_structure_nv: PFN_vkCmdBuildAccelerationStructureNV,
            pub cmd_copy_acceleration_structure_nv: PFN_vkCmdCopyAccelerationStructureNV,
            pub cmd_trace_rays_nv: PFN_vkCmdTraceRaysNV,
            pub create_ray_tracing_pipelines_nv: PFN_vkCreateRayTracingPipelinesNV,
            // NOTE: the NV entry point shares the KHR PFN type here — this
            // alias is as generated; verify against the Vulkan registry.
            pub get_ray_tracing_shader_group_handles_nv: PFN_vkGetRayTracingShaderGroupHandlesKHR,
            pub get_acceleration_structure_handle_nv: PFN_vkGetAccelerationStructureHandleNV,
            pub cmd_write_acceleration_structures_properties_nv:
                PFN_vkCmdWriteAccelerationStructuresPropertiesNV,
            pub compile_deferred_nv: PFN_vkCompileDeferredNV,
        }
        // SAFETY: only plain `extern "system"` function pointers are stored.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads each entry point by name, falling back to a panicking
            /// stub when the loader returns null.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader; compiled once regardless of `F`.
            // Every field follows the same pattern: query the symbol by name;
            // if absent install a panicking stub, otherwise transmute the
            // non-null pointer to the matching PFN type (SAFETY: the loader
            // guarantees the signature for that name).
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_acceleration_structure_nv: unsafe {
                        unsafe extern "system" fn create_acceleration_structure_nv(
                            _device: crate::vk::Device,
                            _p_create_info: *const AccelerationStructureCreateInfoNV<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_acceleration_structure: *mut AccelerationStructureNV,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_acceleration_structure_nv)
                            ))
                        }
                        let val = _f(c"vkCreateAccelerationStructureNV");
                        if val.is_null() {
                            create_acceleration_structure_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCreateAccelerationStructureNV,
                            >(val)
                        }
                    },
                    destroy_acceleration_structure_nv: unsafe {
                        unsafe extern "system" fn destroy_acceleration_structure_nv(
                            _device: crate::vk::Device,
                            _acceleration_structure: AccelerationStructureNV,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(destroy_acceleration_structure_nv)
                            ))
                        }
                        let val = _f(c"vkDestroyAccelerationStructureNV");
                        if val.is_null() {
                            destroy_acceleration_structure_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkDestroyAccelerationStructureNV,
                            >(val)
                        }
                    },
                    get_acceleration_structure_memory_requirements_nv: unsafe {
                        unsafe extern "system" fn get_acceleration_structure_memory_requirements_nv(
                            _device: crate::vk::Device,
                            _p_info: *const AccelerationStructureMemoryRequirementsInfoNV<'_>,
                            _p_memory_requirements: *mut MemoryRequirements2KHR<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_acceleration_structure_memory_requirements_nv)
                            ))
                        }
                        let val = _f(c"vkGetAccelerationStructureMemoryRequirementsNV");
                        if val.is_null() {
                            get_acceleration_structure_memory_requirements_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetAccelerationStructureMemoryRequirementsNV,
                            >(val)
                        }
                    },
                    bind_acceleration_structure_memory_nv: unsafe {
                        unsafe extern "system" fn bind_acceleration_structure_memory_nv(
                            _device: crate::vk::Device,
                            _bind_info_count: u32,
                            _p_bind_infos: *const BindAccelerationStructureMemoryInfoNV<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(bind_acceleration_structure_memory_nv)
                            ))
                        }
                        let val = _f(c"vkBindAccelerationStructureMemoryNV");
                        if val.is_null() {
                            bind_acceleration_structure_memory_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkBindAccelerationStructureMemoryNV,
                            >(val)
                        }
                    },
                    cmd_build_acceleration_structure_nv: unsafe {
                        unsafe extern "system" fn cmd_build_acceleration_structure_nv(
                            _command_buffer: CommandBuffer,
                            _p_info: *const AccelerationStructureInfoNV<'_>,
                            _instance_data: Buffer,
                            _instance_offset: DeviceSize,
                            _update: Bool32,
                            _dst: AccelerationStructureNV,
                            _src: AccelerationStructureNV,
                            _scratch: Buffer,
                            _scratch_offset: DeviceSize,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_build_acceleration_structure_nv)
                            ))
                        }
                        let val = _f(c"vkCmdBuildAccelerationStructureNV");
                        if val.is_null() {
                            cmd_build_acceleration_structure_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdBuildAccelerationStructureNV,
                            >(val)
                        }
                    },
                    cmd_copy_acceleration_structure_nv: unsafe {
                        unsafe extern "system" fn cmd_copy_acceleration_structure_nv(
                            _command_buffer: CommandBuffer,
                            _dst: AccelerationStructureNV,
                            _src: AccelerationStructureNV,
                            _mode: CopyAccelerationStructureModeKHR,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_copy_acceleration_structure_nv)
                            ))
                        }
                        let val = _f(c"vkCmdCopyAccelerationStructureNV");
                        if val.is_null() {
                            cmd_copy_acceleration_structure_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdCopyAccelerationStructureNV,
                            >(val)
                        }
                    },
                    cmd_trace_rays_nv: unsafe {
                        unsafe extern "system" fn cmd_trace_rays_nv(
                            _command_buffer: CommandBuffer,
                            _raygen_shader_binding_table_buffer: Buffer,
                            _raygen_shader_binding_offset: DeviceSize,
                            _miss_shader_binding_table_buffer: Buffer,
                            _miss_shader_binding_offset: DeviceSize,
                            _miss_shader_binding_stride: DeviceSize,
                            _hit_shader_binding_table_buffer: Buffer,
                            _hit_shader_binding_offset: DeviceSize,
                            _hit_shader_binding_stride: DeviceSize,
                            _callable_shader_binding_table_buffer: Buffer,
                            _callable_shader_binding_offset: DeviceSize,
                            _callable_shader_binding_stride: DeviceSize,
                            _width: u32,
                            _height: u32,
                            _depth: u32,
                        ) {
                            panic!(concat!("Unable to load ", stringify!(cmd_trace_rays_nv)))
                        }
                        let val = _f(c"vkCmdTraceRaysNV");
                        if val.is_null() {
                            cmd_trace_rays_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdTraceRaysNV>(val)
                        }
                    },
                    create_ray_tracing_pipelines_nv: unsafe {
                        unsafe extern "system" fn create_ray_tracing_pipelines_nv(
                            _device: crate::vk::Device,
                            _pipeline_cache: PipelineCache,
                            _create_info_count: u32,
                            _p_create_infos: *const RayTracingPipelineCreateInfoNV<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_pipelines: *mut Pipeline,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_ray_tracing_pipelines_nv)
                            ))
                        }
                        let val = _f(c"vkCreateRayTracingPipelinesNV");
                        if val.is_null() {
                            create_ray_tracing_pipelines_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateRayTracingPipelinesNV>(
                                val,
                            )
                        }
                    },
                    get_ray_tracing_shader_group_handles_nv: unsafe {
                        unsafe extern "system" fn get_ray_tracing_shader_group_handles_nv(
                            _device: crate::vk::Device,
                            _pipeline: Pipeline,
                            _first_group: u32,
                            _group_count: u32,
                            _data_size: usize,
                            _p_data: *mut c_void,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_ray_tracing_shader_group_handles_nv)
                            ))
                        }
                        let val = _f(c"vkGetRayTracingShaderGroupHandlesNV");
                        if val.is_null() {
                            get_ray_tracing_shader_group_handles_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetRayTracingShaderGroupHandlesKHR,
                            >(val)
                        }
                    },
                    get_acceleration_structure_handle_nv: unsafe {
                        unsafe extern "system" fn get_acceleration_structure_handle_nv(
                            _device: crate::vk::Device,
                            _acceleration_structure: AccelerationStructureNV,
                            _data_size: usize,
                            _p_data: *mut c_void,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_acceleration_structure_handle_nv)
                            ))
                        }
                        let val = _f(c"vkGetAccelerationStructureHandleNV");
                        if val.is_null() {
                            get_acceleration_structure_handle_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetAccelerationStructureHandleNV,
                            >(val)
                        }
                    },
                    cmd_write_acceleration_structures_properties_nv: unsafe {
                        unsafe extern "system" fn cmd_write_acceleration_structures_properties_nv(
                            _command_buffer: CommandBuffer,
                            _acceleration_structure_count: u32,
                            _p_acceleration_structures: *const AccelerationStructureNV,
                            _query_type: QueryType,
                            _query_pool: QueryPool,
                            _first_query: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_write_acceleration_structures_properties_nv)
                            ))
                        }
                        let val = _f(c"vkCmdWriteAccelerationStructuresPropertiesNV");
                        if val.is_null() {
                            cmd_write_acceleration_structures_properties_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdWriteAccelerationStructuresPropertiesNV,
                            >(val)
                        }
                    },
                    compile_deferred_nv: unsafe {
                        unsafe extern "system" fn compile_deferred_nv(
                            _device: crate::vk::Device,
                            _pipeline: Pipeline,
                            _shader: u32,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(compile_deferred_nv)))
                        }
                        let val = _f(c"vkCompileDeferredNV");
                        if val.is_null() {
                            compile_deferred_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCompileDeferredNV>(val)
                        }
                    },
                }
            }
        }
    }
20713 #[doc = "VK_NV_representative_fragment_test"]
20714 pub mod representative_fragment_test {
20715 use super::super::*;
20716 pub use {
20717 crate::vk::NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME as NAME,
20718 crate::vk::NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION as SPEC_VERSION,
20719 };
20720 }
20721 #[doc = "VK_NV_shader_subgroup_partitioned"]
20722 pub mod shader_subgroup_partitioned {
20723 use super::super::*;
20724 pub use {
20725 crate::vk::NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME as NAME,
20726 crate::vk::NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION as SPEC_VERSION,
20727 };
20728 }
20729 #[doc = "VK_NV_compute_shader_derivatives"]
20730 pub mod compute_shader_derivatives {
20731 use super::super::*;
20732 pub use {
20733 crate::vk::NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME as NAME,
20734 crate::vk::NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION as SPEC_VERSION,
20735 };
20736 }
    #[doc = "VK_NV_mesh_shader"]
    pub mod mesh_shader {
        use super::super::*;
        pub use {
            crate::vk::NV_MESH_SHADER_EXTENSION_NAME as NAME,
            crate::vk::NV_MESH_SHADER_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_mesh_shader device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The VkDevice the pointers were loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's entry points via `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Reinterpret the returned PFN as a raw pointer so the
                    // loader can null-check it.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// The `VkDevice` this table was loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_mesh_shader device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_draw_mesh_tasks_nv: PFN_vkCmdDrawMeshTasksNV,
            pub cmd_draw_mesh_tasks_indirect_nv: PFN_vkCmdDrawMeshTasksIndirectNV,
            pub cmd_draw_mesh_tasks_indirect_count_nv: PFN_vkCmdDrawMeshTasksIndirectCountNV,
        }
        // SAFETY: only plain `extern "system"` function pointers are stored.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads each entry point by name, falling back to a panicking
            /// stub when the loader returns null.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased inner loader; compiled once regardless of `F`.
            // Each field: panicking stub when the symbol is absent, otherwise
            // the non-null pointer is transmuted to the matching PFN type
            // (SAFETY: the loader guarantees the signature for that name).
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_draw_mesh_tasks_nv: unsafe {
                        unsafe extern "system" fn cmd_draw_mesh_tasks_nv(
                            _command_buffer: CommandBuffer,
                            _task_count: u32,
                            _first_task: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_draw_mesh_tasks_nv)
                            ))
                        }
                        let val = _f(c"vkCmdDrawMeshTasksNV");
                        if val.is_null() {
                            cmd_draw_mesh_tasks_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDrawMeshTasksNV>(val)
                        }
                    },
                    cmd_draw_mesh_tasks_indirect_nv: unsafe {
                        unsafe extern "system" fn cmd_draw_mesh_tasks_indirect_nv(
                            _command_buffer: CommandBuffer,
                            _buffer: Buffer,
                            _offset: DeviceSize,
                            _draw_count: u32,
                            _stride: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_draw_mesh_tasks_indirect_nv)
                            ))
                        }
                        let val = _f(c"vkCmdDrawMeshTasksIndirectNV");
                        if val.is_null() {
                            cmd_draw_mesh_tasks_indirect_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDrawMeshTasksIndirectNV>(
                                val,
                            )
                        }
                    },
                    cmd_draw_mesh_tasks_indirect_count_nv: unsafe {
                        unsafe extern "system" fn cmd_draw_mesh_tasks_indirect_count_nv(
                            _command_buffer: CommandBuffer,
                            _buffer: Buffer,
                            _offset: DeviceSize,
                            _count_buffer: Buffer,
                            _count_buffer_offset: DeviceSize,
                            _max_draw_count: u32,
                            _stride: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_draw_mesh_tasks_indirect_count_nv)
                            ))
                        }
                        let val = _f(c"vkCmdDrawMeshTasksIndirectCountNV");
                        if val.is_null() {
                            cmd_draw_mesh_tasks_indirect_count_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdDrawMeshTasksIndirectCountNV,
                            >(val)
                        }
                    },
                }
            }
        }
    }
20853 #[doc = "VK_NV_fragment_shader_barycentric"]
20854 pub mod fragment_shader_barycentric {
20855 use super::super::*;
20856 pub use {
20857 crate::vk::NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME as NAME,
20858 crate::vk::NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION as SPEC_VERSION,
20859 };
20860 }
20861 #[doc = "VK_NV_shader_image_footprint"]
20862 pub mod shader_image_footprint {
20863 use super::super::*;
20864 pub use {
20865 crate::vk::NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME as NAME,
20866 crate::vk::NV_SHADER_IMAGE_FOOTPRINT_SPEC_VERSION as SPEC_VERSION,
20867 };
20868 }
    #[doc = "VK_NV_scissor_exclusive"]
    pub mod scissor_exclusive {
        use super::super::*;
        // Conventional aliases for this extension's name and version constants.
        pub use {
            crate::vk::NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME as NAME,
            crate::vk::NV_SCISSOR_EXCLUSIVE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_scissor_exclusive device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The `VkDevice` the table was loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's device-level entry points for `device`,
            /// resolving each one through `get_device_proc_addr` from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // SAFETY: reinterprets the returned function pointer as a raw
                    // pointer for the generic loader closure — assumes identical
                    // layout, the standard Vulkan-loader idiom.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table loaded for this extension.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// The `VkDevice` the function pointers were loaded against.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_scissor_exclusive device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_set_exclusive_scissor_enable_nv: PFN_vkCmdSetExclusiveScissorEnableNV,
            pub cmd_set_exclusive_scissor_nv: PFN_vkCmdSetExclusiveScissorNV,
        }
        // SAFETY: the struct holds only function pointers, which carry no
        // thread-bound state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads the table, resolving each entry point by name through `f`.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            /// Type-erased loader shared by all instantiations of [`Self::load`].
            /// Null lookups are replaced with placeholders that panic on call.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_set_exclusive_scissor_enable_nv: unsafe {
                        unsafe extern "system" fn cmd_set_exclusive_scissor_enable_nv(
                            _command_buffer: CommandBuffer,
                            _first_exclusive_scissor: u32,
                            _exclusive_scissor_count: u32,
                            _p_exclusive_scissor_enables: *const Bool32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_exclusive_scissor_enable_nv)
                            ))
                        }
                        let val = _f(c"vkCmdSetExclusiveScissorEnableNV");
                        if val.is_null() {
                            cmd_set_exclusive_scissor_enable_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdSetExclusiveScissorEnableNV,
                            >(val)
                        }
                    },
                    cmd_set_exclusive_scissor_nv: unsafe {
                        unsafe extern "system" fn cmd_set_exclusive_scissor_nv(
                            _command_buffer: CommandBuffer,
                            _first_exclusive_scissor: u32,
                            _exclusive_scissor_count: u32,
                            _p_exclusive_scissors: *const Rect2D,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_exclusive_scissor_nv)
                            ))
                        }
                        let val = _f(c"vkCmdSetExclusiveScissorNV");
                        if val.is_null() {
                            cmd_set_exclusive_scissor_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetExclusiveScissorNV>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_NV_device_diagnostic_checkpoints"]
    pub mod device_diagnostic_checkpoints {
        use super::super::*;
        // Conventional aliases for this extension's name and version constants.
        pub use {
            crate::vk::NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME as NAME,
            crate::vk::NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_device_diagnostic_checkpoints device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The `VkDevice` the table was loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's device-level entry points for `device`,
            /// resolving each one through `get_device_proc_addr` from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // SAFETY: reinterprets the returned function pointer as a raw
                    // pointer for the generic loader closure (standard idiom).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table loaded for this extension.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// The `VkDevice` the function pointers were loaded against.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_device_diagnostic_checkpoints device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_set_checkpoint_nv: PFN_vkCmdSetCheckpointNV,
            pub get_queue_checkpoint_data_nv: PFN_vkGetQueueCheckpointDataNV,
            pub get_queue_checkpoint_data2_nv: PFN_vkGetQueueCheckpointData2NV,
        }
        // SAFETY: the struct holds only function pointers, which carry no
        // thread-bound state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads the table, resolving each entry point by name through `f`.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            /// Type-erased loader shared by all instantiations of [`Self::load`].
            /// Null lookups are replaced with placeholders that panic on call.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_set_checkpoint_nv: unsafe {
                        unsafe extern "system" fn cmd_set_checkpoint_nv(
                            _command_buffer: CommandBuffer,
                            _p_checkpoint_marker: *const c_void,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_checkpoint_nv)
                            ))
                        }
                        let val = _f(c"vkCmdSetCheckpointNV");
                        if val.is_null() {
                            cmd_set_checkpoint_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdSetCheckpointNV>(val)
                        }
                    },
                    get_queue_checkpoint_data_nv: unsafe {
                        unsafe extern "system" fn get_queue_checkpoint_data_nv(
                            _queue: Queue,
                            _p_checkpoint_data_count: *mut u32,
                            _p_checkpoint_data: *mut CheckpointDataNV<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_queue_checkpoint_data_nv)
                            ))
                        }
                        let val = _f(c"vkGetQueueCheckpointDataNV");
                        if val.is_null() {
                            get_queue_checkpoint_data_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetQueueCheckpointDataNV>(
                                val,
                            )
                        }
                    },
                    get_queue_checkpoint_data2_nv: unsafe {
                        unsafe extern "system" fn get_queue_checkpoint_data2_nv(
                            _queue: Queue,
                            _p_checkpoint_data_count: *mut u32,
                            _p_checkpoint_data: *mut CheckpointData2NV<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_queue_checkpoint_data2_nv)
                            ))
                        }
                        let val = _f(c"vkGetQueueCheckpointData2NV");
                        if val.is_null() {
                            get_queue_checkpoint_data2_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetQueueCheckpointData2NV>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
21070 #[doc = "VK_NV_dedicated_allocation_image_aliasing"]
21071 pub mod dedicated_allocation_image_aliasing {
21072 use super::super::*;
21073 pub use {
21074 crate::vk::NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME as NAME,
21075 crate::vk::NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_SPEC_VERSION as SPEC_VERSION,
21076 };
21077 }
    #[doc = "VK_NV_cooperative_matrix"]
    pub mod cooperative_matrix {
        use super::super::*;
        // Conventional aliases for this extension's name and version constants.
        pub use {
            crate::vk::NV_COOPERATIVE_MATRIX_EXTENSION_NAME as NAME,
            crate::vk::NV_COOPERATIVE_MATRIX_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_cooperative_matrix instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: InstanceFn,
            // The `VkInstance` the table was loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Loads this extension's instance-level entry points for `instance`,
            /// resolving each one through `get_instance_proc_addr` from `entry`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // SAFETY: reinterprets the returned function pointer as a raw
                    // pointer for the generic loader closure (standard idiom).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table loaded for this extension.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            /// The `VkInstance` the function pointers were loaded against.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_cooperative_matrix instance-level function pointers"]
        pub struct InstanceFn {
            pub get_physical_device_cooperative_matrix_properties_nv:
                PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV,
        }
        // SAFETY: the struct holds only function pointers, which carry no
        // thread-bound state.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Loads the table, resolving each entry point by name through `f`.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            /// Type-erased loader shared by all instantiations of [`Self::load`].
            /// Null lookups are replaced with placeholders that panic on call.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_physical_device_cooperative_matrix_properties_nv: unsafe {
                        unsafe extern "system" fn get_physical_device_cooperative_matrix_properties_nv(
                            _physical_device: PhysicalDevice,
                            _p_property_count: *mut u32,
                            _p_properties: *mut CooperativeMatrixPropertiesNV<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_cooperative_matrix_properties_nv)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceCooperativeMatrixPropertiesNV");
                        if val.is_null() {
                            get_physical_device_cooperative_matrix_properties_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_NV_coverage_reduction_mode"]
    pub mod coverage_reduction_mode {
        use super::super::*;
        // Conventional aliases for this extension's name and version constants.
        pub use {
            crate::vk::NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME as NAME,
            crate::vk::NV_COVERAGE_REDUCTION_MODE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_coverage_reduction_mode instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: InstanceFn,
            // The `VkInstance` the table was loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Loads this extension's instance-level entry points for `instance`,
            /// resolving each one through `get_instance_proc_addr` from `entry`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // SAFETY: reinterprets the returned function pointer as a raw
                    // pointer for the generic loader closure (standard idiom).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table loaded for this extension.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            /// The `VkInstance` the function pointers were loaded against.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_coverage_reduction_mode instance-level function pointers"]
        pub struct InstanceFn {
            pub get_physical_device_supported_framebuffer_mixed_samples_combinations_nv:
                PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV,
        }
        // SAFETY: the struct holds only function pointers, which carry no
        // thread-bound state.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Loads the table, resolving each entry point by name through `f`.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            /// Type-erased loader shared by all instantiations of [`Self::load`].
            /// Null lookups are replaced with placeholders that panic on call.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_physical_device_supported_framebuffer_mixed_samples_combinations_nv: unsafe {
                        unsafe extern "system" fn get_physical_device_supported_framebuffer_mixed_samples_combinations_nv(
                            _physical_device: PhysicalDevice,
                            _p_combination_count: *mut u32,
                            _p_combinations: *mut FramebufferMixedSamplesCombinationNV<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(
                                    get_physical_device_supported_framebuffer_mixed_samples_combinations_nv
                                )
                            ))
                        }
                        let val = _f(
                            c"vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV",
                        );
                        if val.is_null() {
                            get_physical_device_supported_framebuffer_mixed_samples_combinations_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_NV_device_generated_commands"]
    pub mod device_generated_commands {
        use super::super::*;
        // Conventional aliases for this extension's name and version constants.
        pub use {
            crate::vk::NV_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME as NAME,
            crate::vk::NV_DEVICE_GENERATED_COMMANDS_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_device_generated_commands device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The `VkDevice` the table was loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's device-level entry points for `device`,
            /// resolving each one through `get_device_proc_addr` from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // SAFETY: reinterprets the returned function pointer as a raw
                    // pointer for the generic loader closure (standard idiom).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table loaded for this extension.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// The `VkDevice` the function pointers were loaded against.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_device_generated_commands device-level function pointers"]
        pub struct DeviceFn {
            pub get_generated_commands_memory_requirements_nv:
                PFN_vkGetGeneratedCommandsMemoryRequirementsNV,
            pub cmd_preprocess_generated_commands_nv: PFN_vkCmdPreprocessGeneratedCommandsNV,
            pub cmd_execute_generated_commands_nv: PFN_vkCmdExecuteGeneratedCommandsNV,
            pub cmd_bind_pipeline_shader_group_nv: PFN_vkCmdBindPipelineShaderGroupNV,
            pub create_indirect_commands_layout_nv: PFN_vkCreateIndirectCommandsLayoutNV,
            pub destroy_indirect_commands_layout_nv: PFN_vkDestroyIndirectCommandsLayoutNV,
        }
        // SAFETY: the struct holds only function pointers, which carry no
        // thread-bound state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads the table, resolving each entry point by name through `f`.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            /// Type-erased loader shared by all instantiations of [`Self::load`].
            /// Null lookups are replaced with placeholders that panic on call.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_generated_commands_memory_requirements_nv: unsafe {
                        unsafe extern "system" fn get_generated_commands_memory_requirements_nv(
                            _device: crate::vk::Device,
                            _p_info: *const GeneratedCommandsMemoryRequirementsInfoNV<'_>,
                            _p_memory_requirements: *mut MemoryRequirements2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_generated_commands_memory_requirements_nv)
                            ))
                        }
                        let val = _f(c"vkGetGeneratedCommandsMemoryRequirementsNV");
                        if val.is_null() {
                            get_generated_commands_memory_requirements_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetGeneratedCommandsMemoryRequirementsNV,
                            >(val)
                        }
                    },
                    cmd_preprocess_generated_commands_nv: unsafe {
                        unsafe extern "system" fn cmd_preprocess_generated_commands_nv(
                            _command_buffer: CommandBuffer,
                            _p_generated_commands_info: *const GeneratedCommandsInfoNV<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_preprocess_generated_commands_nv)
                            ))
                        }
                        let val = _f(c"vkCmdPreprocessGeneratedCommandsNV");
                        if val.is_null() {
                            cmd_preprocess_generated_commands_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdPreprocessGeneratedCommandsNV,
                            >(val)
                        }
                    },
                    cmd_execute_generated_commands_nv: unsafe {
                        unsafe extern "system" fn cmd_execute_generated_commands_nv(
                            _command_buffer: CommandBuffer,
                            _is_preprocessed: Bool32,
                            _p_generated_commands_info: *const GeneratedCommandsInfoNV<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_execute_generated_commands_nv)
                            ))
                        }
                        let val = _f(c"vkCmdExecuteGeneratedCommandsNV");
                        if val.is_null() {
                            cmd_execute_generated_commands_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdExecuteGeneratedCommandsNV,
                            >(val)
                        }
                    },
                    cmd_bind_pipeline_shader_group_nv: unsafe {
                        unsafe extern "system" fn cmd_bind_pipeline_shader_group_nv(
                            _command_buffer: CommandBuffer,
                            _pipeline_bind_point: PipelineBindPoint,
                            _pipeline: Pipeline,
                            _group_index: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_bind_pipeline_shader_group_nv)
                            ))
                        }
                        let val = _f(c"vkCmdBindPipelineShaderGroupNV");
                        if val.is_null() {
                            cmd_bind_pipeline_shader_group_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdBindPipelineShaderGroupNV,
                            >(val)
                        }
                    },
                    create_indirect_commands_layout_nv: unsafe {
                        unsafe extern "system" fn create_indirect_commands_layout_nv(
                            _device: crate::vk::Device,
                            _p_create_info: *const IndirectCommandsLayoutCreateInfoNV<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_indirect_commands_layout: *mut IndirectCommandsLayoutNV,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_indirect_commands_layout_nv)
                            ))
                        }
                        let val = _f(c"vkCreateIndirectCommandsLayoutNV");
                        if val.is_null() {
                            create_indirect_commands_layout_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCreateIndirectCommandsLayoutNV,
                            >(val)
                        }
                    },
                    destroy_indirect_commands_layout_nv: unsafe {
                        unsafe extern "system" fn destroy_indirect_commands_layout_nv(
                            _device: crate::vk::Device,
                            _indirect_commands_layout: IndirectCommandsLayoutNV,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(destroy_indirect_commands_layout_nv)
                            ))
                        }
                        let val = _f(c"vkDestroyIndirectCommandsLayoutNV");
                        if val.is_null() {
                            destroy_indirect_commands_layout_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkDestroyIndirectCommandsLayoutNV,
                            >(val)
                        }
                    },
                }
            }
        }
    }
21398 #[doc = "VK_NV_inherited_viewport_scissor"]
21399 pub mod inherited_viewport_scissor {
21400 use super::super::*;
21401 pub use {
21402 crate::vk::NV_INHERITED_VIEWPORT_SCISSOR_EXTENSION_NAME as NAME,
21403 crate::vk::NV_INHERITED_VIEWPORT_SCISSOR_SPEC_VERSION as SPEC_VERSION,
21404 };
21405 }
21406 #[doc = "VK_NV_present_barrier"]
21407 pub mod present_barrier {
21408 use super::super::*;
21409 pub use {
21410 crate::vk::NV_PRESENT_BARRIER_EXTENSION_NAME as NAME,
21411 crate::vk::NV_PRESENT_BARRIER_SPEC_VERSION as SPEC_VERSION,
21412 };
21413 }
21414 #[doc = "VK_NV_device_diagnostics_config"]
21415 pub mod device_diagnostics_config {
21416 use super::super::*;
21417 pub use {
21418 crate::vk::NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME as NAME,
21419 crate::vk::NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION as SPEC_VERSION,
21420 };
21421 }
    #[doc = "VK_NV_cuda_kernel_launch"]
    pub mod cuda_kernel_launch {
        use super::super::*;
        // Conventional aliases for this extension's name and version constants.
        pub use {
            crate::vk::NV_CUDA_KERNEL_LAUNCH_EXTENSION_NAME as NAME,
            crate::vk::NV_CUDA_KERNEL_LAUNCH_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_cuda_kernel_launch device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The `VkDevice` the table was loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's device-level entry points for `device`,
            /// resolving each one through `get_device_proc_addr` from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // SAFETY: reinterprets the returned function pointer as a raw
                    // pointer for the generic loader closure (standard idiom).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table loaded for this extension.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// The `VkDevice` the function pointers were loaded against.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_cuda_kernel_launch device-level function pointers"]
        pub struct DeviceFn {
            pub create_cuda_module_nv: PFN_vkCreateCudaModuleNV,
            pub get_cuda_module_cache_nv: PFN_vkGetCudaModuleCacheNV,
            pub create_cuda_function_nv: PFN_vkCreateCudaFunctionNV,
            pub destroy_cuda_module_nv: PFN_vkDestroyCudaModuleNV,
            pub destroy_cuda_function_nv: PFN_vkDestroyCudaFunctionNV,
            pub cmd_cuda_launch_kernel_nv: PFN_vkCmdCudaLaunchKernelNV,
        }
        // SAFETY: the struct holds only function pointers, which carry no
        // thread-bound state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads the table, resolving each entry point by name through `f`.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            /// Type-erased loader shared by all instantiations of [`Self::load`].
            /// Null lookups are replaced with placeholders that panic on call.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_cuda_module_nv: unsafe {
                        unsafe extern "system" fn create_cuda_module_nv(
                            _device: crate::vk::Device,
                            _p_create_info: *const CudaModuleCreateInfoNV<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_module: *mut CudaModuleNV,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_cuda_module_nv)
                            ))
                        }
                        let val = _f(c"vkCreateCudaModuleNV");
                        if val.is_null() {
                            create_cuda_module_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateCudaModuleNV>(val)
                        }
                    },
                    get_cuda_module_cache_nv: unsafe {
                        unsafe extern "system" fn get_cuda_module_cache_nv(
                            _device: crate::vk::Device,
                            _module: CudaModuleNV,
                            _p_cache_size: *mut usize,
                            _p_cache_data: *mut c_void,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_cuda_module_cache_nv)
                            ))
                        }
                        let val = _f(c"vkGetCudaModuleCacheNV");
                        if val.is_null() {
                            get_cuda_module_cache_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetCudaModuleCacheNV>(val)
                        }
                    },
                    create_cuda_function_nv: unsafe {
                        unsafe extern "system" fn create_cuda_function_nv(
                            _device: crate::vk::Device,
                            _p_create_info: *const CudaFunctionCreateInfoNV<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_function: *mut CudaFunctionNV,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_cuda_function_nv)
                            ))
                        }
                        let val = _f(c"vkCreateCudaFunctionNV");
                        if val.is_null() {
                            create_cuda_function_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateCudaFunctionNV>(val)
                        }
                    },
                    destroy_cuda_module_nv: unsafe {
                        unsafe extern "system" fn destroy_cuda_module_nv(
                            _device: crate::vk::Device,
                            _module: CudaModuleNV,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(destroy_cuda_module_nv)
                            ))
                        }
                        let val = _f(c"vkDestroyCudaModuleNV");
                        if val.is_null() {
                            destroy_cuda_module_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkDestroyCudaModuleNV>(val)
                        }
                    },
                    destroy_cuda_function_nv: unsafe {
                        unsafe extern "system" fn destroy_cuda_function_nv(
                            _device: crate::vk::Device,
                            _function: CudaFunctionNV,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(destroy_cuda_function_nv)
                            ))
                        }
                        let val = _f(c"vkDestroyCudaFunctionNV");
                        if val.is_null() {
                            destroy_cuda_function_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkDestroyCudaFunctionNV>(
                                val,
                            )
                        }
                    },
                    cmd_cuda_launch_kernel_nv: unsafe {
                        unsafe extern "system" fn cmd_cuda_launch_kernel_nv(
                            _command_buffer: CommandBuffer,
                            _p_launch_info: *const CudaLaunchInfoNV<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_cuda_launch_kernel_nv)
                            ))
                        }
                        let val = _f(c"vkCmdCudaLaunchKernelNV");
                        if val.is_null() {
                            cmd_cuda_launch_kernel_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdCudaLaunchKernelNV>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
21590 #[doc = "VK_NV_low_latency"]
21591 pub mod low_latency {
21592 use super::super::*;
21593 pub use {
21594 crate::vk::NV_LOW_LATENCY_EXTENSION_NAME as NAME,
21595 crate::vk::NV_LOW_LATENCY_SPEC_VERSION as SPEC_VERSION,
21596 };
21597 }
    #[doc = "VK_NV_fragment_shading_rate_enums"]
    pub mod fragment_shading_rate_enums {
        use super::super::*;
        // Conventional aliases for this extension's name and version constants.
        pub use {
            crate::vk::NV_FRAGMENT_SHADING_RATE_ENUMS_EXTENSION_NAME as NAME,
            crate::vk::NV_FRAGMENT_SHADING_RATE_ENUMS_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_fragment_shading_rate_enums device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The `VkDevice` the table was loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's device-level entry points for `device`,
            /// resolving each one through `get_device_proc_addr` from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // SAFETY: reinterprets the returned function pointer as a raw
                    // pointer for the generic loader closure (standard idiom).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table loaded for this extension.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// The `VkDevice` the function pointers were loaded against.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_fragment_shading_rate_enums device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_set_fragment_shading_rate_enum_nv: PFN_vkCmdSetFragmentShadingRateEnumNV,
        }
        // SAFETY: the struct holds only function pointers, which carry no
        // thread-bound state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads the table, resolving each entry point by name through `f`.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            /// Type-erased loader shared by all instantiations of [`Self::load`].
            /// Null lookups are replaced with placeholders that panic on call.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_set_fragment_shading_rate_enum_nv: unsafe {
                        unsafe extern "system" fn cmd_set_fragment_shading_rate_enum_nv(
                            _command_buffer: CommandBuffer,
                            _shading_rate: FragmentShadingRateNV,
                            _combiner_ops: *const [FragmentShadingRateCombinerOpKHR; 2usize],
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_set_fragment_shading_rate_enum_nv)
                            ))
                        }
                        let val = _f(c"vkCmdSetFragmentShadingRateEnumNV");
                        if val.is_null() {
                            cmd_set_fragment_shading_rate_enum_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdSetFragmentShadingRateEnumNV,
                            >(val)
                        }
                    },
                }
            }
        }
    }
21668 #[doc = "VK_NV_ray_tracing_motion_blur"]
21669 pub mod ray_tracing_motion_blur {
21670 use super::super::*;
21671 pub use {
21672 crate::vk::NV_RAY_TRACING_MOTION_BLUR_EXTENSION_NAME as NAME,
21673 crate::vk::NV_RAY_TRACING_MOTION_BLUR_SPEC_VERSION as SPEC_VERSION,
21674 };
21675 }
    #[doc = "VK_NV_acquire_winrt_display"]
    pub mod acquire_winrt_display {
        use super::super::*;
        // Conventional aliases for this extension's name and version constants.
        pub use {
            crate::vk::NV_ACQUIRE_WINRT_DISPLAY_EXTENSION_NAME as NAME,
            crate::vk::NV_ACQUIRE_WINRT_DISPLAY_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_acquire_winrt_display instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: InstanceFn,
            // The `VkInstance` the table was loaded for.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Loads this extension's instance-level entry points for `instance`,
            /// resolving each one through `get_instance_proc_addr` from `entry`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // SAFETY: reinterprets the returned function pointer as a raw
                    // pointer for the generic loader closure (standard idiom).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table loaded for this extension.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            /// The `VkInstance` the function pointers were loaded against.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_acquire_winrt_display instance-level function pointers"]
        pub struct InstanceFn {
            pub acquire_winrt_display_nv: PFN_vkAcquireWinrtDisplayNV,
            pub get_winrt_display_nv: PFN_vkGetWinrtDisplayNV,
        }
        // SAFETY: the struct holds only function pointers, which carry no
        // thread-bound state.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Loads the table, resolving each entry point by name through `f`.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            /// Type-erased loader shared by all instantiations of [`Self::load`].
            /// Null lookups are replaced with placeholders that panic on call.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    acquire_winrt_display_nv: unsafe {
                        unsafe extern "system" fn acquire_winrt_display_nv(
                            _physical_device: PhysicalDevice,
                            _display: DisplayKHR,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(acquire_winrt_display_nv)
                            ))
                        }
                        let val = _f(c"vkAcquireWinrtDisplayNV");
                        if val.is_null() {
                            acquire_winrt_display_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkAcquireWinrtDisplayNV>(
                                val,
                            )
                        }
                    },
                    get_winrt_display_nv: unsafe {
                        unsafe extern "system" fn get_winrt_display_nv(
                            _physical_device: PhysicalDevice,
                            _device_relative_id: u32,
                            _p_display: *mut DisplayKHR,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(get_winrt_display_nv)))
                        }
                        let val = _f(c"vkGetWinrtDisplayNV");
                        if val.is_null() {
                            get_winrt_display_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetWinrtDisplayNV>(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_NV_external_memory_rdma"]
    pub mod external_memory_rdma {
        use super::super::*;
        // Conventional aliases for this extension's name and version constants.
        pub use {
            crate::vk::NV_EXTERNAL_MEMORY_RDMA_EXTENSION_NAME as NAME,
            crate::vk::NV_EXTERNAL_MEMORY_RDMA_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_external_memory_rdma device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            // Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            // The `VkDevice` the table was loaded for.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads this extension's device-level entry points for `device`,
            /// resolving each one through `get_device_proc_addr` from `instance`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // SAFETY: reinterprets the returned function pointer as a raw
                    // pointer for the generic loader closure (standard idiom).
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table loaded for this extension.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// The `VkDevice` the function pointers were loaded against.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_external_memory_rdma device-level function pointers"]
        pub struct DeviceFn {
            pub get_memory_remote_address_nv: PFN_vkGetMemoryRemoteAddressNV,
        }
        // SAFETY: the struct holds only function pointers, which carry no
        // thread-bound state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads the table, resolving each entry point by name through `f`.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            /// Type-erased loader shared by all instantiations of [`Self::load`].
            /// Null lookups are replaced with placeholders that panic on call.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_memory_remote_address_nv: unsafe {
                        unsafe extern "system" fn get_memory_remote_address_nv(
                            _device: crate::vk::Device,
                            _p_memory_get_remote_address_info: *const MemoryGetRemoteAddressInfoNV<
                                '_,
                            >,
                            _p_address: *mut RemoteAddressNV,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_memory_remote_address_nv)
                            ))
                        }
                        let val = _f(c"vkGetMemoryRemoteAddressNV");
                        if val.is_null() {
                            get_memory_remote_address_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetMemoryRemoteAddressNV>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
21831 #[doc = "VK_NV_displacement_micromap"]
21832 pub mod displacement_micromap {
21833 use super::super::*;
21834 pub use {
21835 crate::vk::NV_DISPLACEMENT_MICROMAP_EXTENSION_NAME as NAME,
21836 crate::vk::NV_DISPLACEMENT_MICROMAP_SPEC_VERSION as SPEC_VERSION,
21837 };
21838 }
    #[doc = "VK_NV_copy_memory_indirect"]
    pub mod copy_memory_indirect {
        use super::super::*;
        pub use {
            crate::vk::NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME as NAME,
            crate::vk::NV_COPY_MEMORY_INDIRECT_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_copy_memory_indirect device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            /// Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            /// Device handle the function pointers were loaded against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads the extension's device-level entry points for `device`
            /// through `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Cast PFN_vkVoidFunction to a raw pointer so load_erased
                    // can null-check missing symbols.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle these function pointers were loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_copy_memory_indirect device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_copy_memory_indirect_nv: PFN_vkCmdCopyMemoryIndirectNV,
            pub cmd_copy_memory_to_image_indirect_nv: PFN_vkCmdCopyMemoryToImageIndirectNV,
        }
        // SAFETY: only plain function pointers are stored; no thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads every entry point by querying `f` with its Vulkan symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body (`dyn FnMut`), presumably to limit
            // monomorphization of this large function.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_copy_memory_indirect_nv: unsafe {
                        // Panicking fallback stub, installed when the loader
                        // returns null for this symbol. Same pattern below.
                        unsafe extern "system" fn cmd_copy_memory_indirect_nv(
                            _command_buffer: CommandBuffer,
                            _copy_buffer_address: DeviceAddress,
                            _copy_count: u32,
                            _stride: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_copy_memory_indirect_nv)
                            ))
                        }
                        let val = _f(c"vkCmdCopyMemoryIndirectNV");
                        if val.is_null() {
                            cmd_copy_memory_indirect_nv
                        } else {
                            // SAFETY: non-null pointer was returned by the loader
                            // for this exact symbol; assumed to match the PFN type.
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdCopyMemoryIndirectNV>(
                                val,
                            )
                        }
                    },
                    cmd_copy_memory_to_image_indirect_nv: unsafe {
                        unsafe extern "system" fn cmd_copy_memory_to_image_indirect_nv(
                            _command_buffer: CommandBuffer,
                            _copy_buffer_address: DeviceAddress,
                            _copy_count: u32,
                            _stride: u32,
                            _dst_image: Image,
                            _dst_image_layout: ImageLayout,
                            _p_image_subresources: *const ImageSubresourceLayers,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_copy_memory_to_image_indirect_nv)
                            ))
                        }
                        let val = _f(c"vkCmdCopyMemoryToImageIndirectNV");
                        if val.is_null() {
                            cmd_copy_memory_to_image_indirect_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdCopyMemoryToImageIndirectNV,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_NV_memory_decompression"]
    pub mod memory_decompression {
        use super::super::*;
        pub use {
            crate::vk::NV_MEMORY_DECOMPRESSION_EXTENSION_NAME as NAME,
            crate::vk::NV_MEMORY_DECOMPRESSION_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_memory_decompression device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            /// Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            /// Device handle the function pointers were loaded against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads the extension's device-level entry points for `device`
            /// through `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Cast PFN_vkVoidFunction to a raw pointer so load_erased
                    // can null-check missing symbols.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle these function pointers were loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_memory_decompression device-level function pointers"]
        pub struct DeviceFn {
            pub cmd_decompress_memory_nv: PFN_vkCmdDecompressMemoryNV,
            pub cmd_decompress_memory_indirect_count_nv: PFN_vkCmdDecompressMemoryIndirectCountNV,
        }
        // SAFETY: only plain function pointers are stored; no thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads every entry point by querying `f` with its Vulkan symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body (`dyn FnMut`), presumably to limit
            // monomorphization of this large function.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    cmd_decompress_memory_nv: unsafe {
                        // Panicking fallback stub, installed when the loader
                        // returns null for this symbol. Same pattern below.
                        unsafe extern "system" fn cmd_decompress_memory_nv(
                            _command_buffer: CommandBuffer,
                            _decompress_region_count: u32,
                            _p_decompress_memory_regions: *const DecompressMemoryRegionNV,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_decompress_memory_nv)
                            ))
                        }
                        let val = _f(c"vkCmdDecompressMemoryNV");
                        if val.is_null() {
                            cmd_decompress_memory_nv
                        } else {
                            // SAFETY: non-null pointer was returned by the loader
                            // for this exact symbol; assumed to match the PFN type.
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdDecompressMemoryNV>(
                                val,
                            )
                        }
                    },
                    cmd_decompress_memory_indirect_count_nv: unsafe {
                        unsafe extern "system" fn cmd_decompress_memory_indirect_count_nv(
                            _command_buffer: CommandBuffer,
                            _indirect_commands_address: DeviceAddress,
                            _indirect_commands_count_address: DeviceAddress,
                            _stride: u32,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_decompress_memory_indirect_count_nv)
                            ))
                        }
                        let val = _f(c"vkCmdDecompressMemoryIndirectCountNV");
                        if val.is_null() {
                            cmd_decompress_memory_indirect_count_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdDecompressMemoryIndirectCountNV,
                            >(val)
                        }
                    },
                }
            }
        }
    }
    #[doc = "VK_NV_device_generated_commands_compute"]
    pub mod device_generated_commands_compute {
        use super::super::*;
        pub use {
            crate::vk::NV_DEVICE_GENERATED_COMMANDS_COMPUTE_EXTENSION_NAME as NAME,
            crate::vk::NV_DEVICE_GENERATED_COMMANDS_COMPUTE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_device_generated_commands_compute device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            /// Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            /// Device handle the function pointers were loaded against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads the extension's device-level entry points for `device`
            /// through `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Cast PFN_vkVoidFunction to a raw pointer so load_erased
                    // can null-check missing symbols.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle these function pointers were loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_device_generated_commands_compute device-level function pointers"]
        pub struct DeviceFn {
            pub get_pipeline_indirect_memory_requirements_nv:
                PFN_vkGetPipelineIndirectMemoryRequirementsNV,
            pub cmd_update_pipeline_indirect_buffer_nv: PFN_vkCmdUpdatePipelineIndirectBufferNV,
            pub get_pipeline_indirect_device_address_nv: PFN_vkGetPipelineIndirectDeviceAddressNV,
        }
        // SAFETY: only plain function pointers are stored; no thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads every entry point by querying `f` with its Vulkan symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body (`dyn FnMut`), presumably to limit
            // monomorphization of this large function.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_pipeline_indirect_memory_requirements_nv: unsafe {
                        // Panicking fallback stub, installed when the loader
                        // returns null for this symbol. Same pattern below.
                        unsafe extern "system" fn get_pipeline_indirect_memory_requirements_nv(
                            _device: crate::vk::Device,
                            _p_create_info: *const ComputePipelineCreateInfo<'_>,
                            _p_memory_requirements: *mut MemoryRequirements2<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_pipeline_indirect_memory_requirements_nv)
                            ))
                        }
                        let val = _f(c"vkGetPipelineIndirectMemoryRequirementsNV");
                        if val.is_null() {
                            get_pipeline_indirect_memory_requirements_nv
                        } else {
                            // SAFETY: non-null pointer was returned by the loader
                            // for this exact symbol; assumed to match the PFN type.
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPipelineIndirectMemoryRequirementsNV,
                            >(val)
                        }
                    },
                    cmd_update_pipeline_indirect_buffer_nv: unsafe {
                        unsafe extern "system" fn cmd_update_pipeline_indirect_buffer_nv(
                            _command_buffer: CommandBuffer,
                            _pipeline_bind_point: PipelineBindPoint,
                            _pipeline: Pipeline,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_update_pipeline_indirect_buffer_nv)
                            ))
                        }
                        let val = _f(c"vkCmdUpdatePipelineIndirectBufferNV");
                        if val.is_null() {
                            cmd_update_pipeline_indirect_buffer_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdUpdatePipelineIndirectBufferNV,
                            >(val)
                        }
                    },
                    get_pipeline_indirect_device_address_nv: unsafe {
                        unsafe extern "system" fn get_pipeline_indirect_device_address_nv(
                            _device: crate::vk::Device,
                            _p_info: *const PipelineIndirectDeviceAddressInfoNV<'_>,
                        ) -> DeviceAddress {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_pipeline_indirect_device_address_nv)
                            ))
                        }
                        let val = _f(c"vkGetPipelineIndirectDeviceAddressNV");
                        if val.is_null() {
                            get_pipeline_indirect_device_address_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPipelineIndirectDeviceAddressNV,
                            >(val)
                        }
                    },
                }
            }
        }
    }
22141 #[doc = "VK_NV_ray_tracing_linear_swept_spheres"]
22142 pub mod ray_tracing_linear_swept_spheres {
22143 use super::super::*;
22144 pub use {
22145 crate::vk::NV_RAY_TRACING_LINEAR_SWEPT_SPHERES_EXTENSION_NAME as NAME,
22146 crate::vk::NV_RAY_TRACING_LINEAR_SWEPT_SPHERES_SPEC_VERSION as SPEC_VERSION,
22147 };
22148 }
22149 #[doc = "VK_NV_linear_color_attachment"]
22150 pub mod linear_color_attachment {
22151 use super::super::*;
22152 pub use {
22153 crate::vk::NV_LINEAR_COLOR_ATTACHMENT_EXTENSION_NAME as NAME,
22154 crate::vk::NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION as SPEC_VERSION,
22155 };
22156 }
    #[doc = "VK_NV_optical_flow"]
    pub mod optical_flow {
        use super::super::*;
        pub use {
            crate::vk::NV_OPTICAL_FLOW_EXTENSION_NAME as NAME,
            crate::vk::NV_OPTICAL_FLOW_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_optical_flow instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            /// Loaded instance-level function-pointer table.
            pub(crate) fp: InstanceFn,
            /// Instance handle the function pointers were loaded against.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Loads the extension's instance-level entry points for `instance`
            /// through `vkGetInstanceProcAddr`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // Cast PFN_vkVoidFunction to a raw pointer so load_erased
                    // can null-check missing symbols.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            /// Instance handle these function pointers were loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_optical_flow instance-level function pointers"]
        pub struct InstanceFn {
            pub get_physical_device_optical_flow_image_formats_nv:
                PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV,
        }
        // SAFETY: only plain function pointers are stored; no thread-affine state.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Loads every entry point by querying `f` with its Vulkan symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body (`dyn FnMut`), presumably to limit
            // monomorphization of this large function.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_physical_device_optical_flow_image_formats_nv: unsafe {
                        // Panicking fallback stub, installed when the loader
                        // returns null for this symbol.
                        unsafe extern "system" fn get_physical_device_optical_flow_image_formats_nv(
                            _physical_device: PhysicalDevice,
                            _p_optical_flow_image_format_info: *const OpticalFlowImageFormatInfoNV<
                                '_,
                            >,
                            _p_format_count: *mut u32,
                            _p_image_format_properties: *mut OpticalFlowImageFormatPropertiesNV<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_optical_flow_image_formats_nv)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceOpticalFlowImageFormatsNV");
                        if val.is_null() {
                            get_physical_device_optical_flow_image_formats_nv
                        } else {
                            // SAFETY: non-null pointer was returned by the loader
                            // for this exact symbol; assumed to match the PFN type.
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV,
                            >(val)
                        }
                    },
                }
            }
        }
        #[doc = "VK_NV_optical_flow device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            /// Loaded device-level function-pointer table.
            pub(crate) fp: DeviceFn,
            /// Device handle the function pointers were loaded against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads the extension's device-level entry points for `device`
            /// through `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle these function pointers were loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_optical_flow device-level function pointers"]
        pub struct DeviceFn {
            pub create_optical_flow_session_nv: PFN_vkCreateOpticalFlowSessionNV,
            pub destroy_optical_flow_session_nv: PFN_vkDestroyOpticalFlowSessionNV,
            pub bind_optical_flow_session_image_nv: PFN_vkBindOpticalFlowSessionImageNV,
            pub cmd_optical_flow_execute_nv: PFN_vkCmdOpticalFlowExecuteNV,
        }
        // SAFETY: only plain function pointers are stored; no thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads every entry point by querying `f` with its Vulkan symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body; panicking stubs are substituted for any
            // symbol the loader reports as null (same pattern for each field).
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_optical_flow_session_nv: unsafe {
                        unsafe extern "system" fn create_optical_flow_session_nv(
                            _device: crate::vk::Device,
                            _p_create_info: *const OpticalFlowSessionCreateInfoNV<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_session: *mut OpticalFlowSessionNV,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_optical_flow_session_nv)
                            ))
                        }
                        let val = _f(c"vkCreateOpticalFlowSessionNV");
                        if val.is_null() {
                            create_optical_flow_session_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCreateOpticalFlowSessionNV>(
                                val,
                            )
                        }
                    },
                    destroy_optical_flow_session_nv: unsafe {
                        unsafe extern "system" fn destroy_optical_flow_session_nv(
                            _device: crate::vk::Device,
                            _session: OpticalFlowSessionNV,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(destroy_optical_flow_session_nv)
                            ))
                        }
                        let val = _f(c"vkDestroyOpticalFlowSessionNV");
                        if val.is_null() {
                            destroy_optical_flow_session_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkDestroyOpticalFlowSessionNV>(
                                val,
                            )
                        }
                    },
                    bind_optical_flow_session_image_nv: unsafe {
                        unsafe extern "system" fn bind_optical_flow_session_image_nv(
                            _device: crate::vk::Device,
                            _session: OpticalFlowSessionNV,
                            _binding_point: OpticalFlowSessionBindingPointNV,
                            _view: ImageView,
                            _layout: ImageLayout,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(bind_optical_flow_session_image_nv)
                            ))
                        }
                        let val = _f(c"vkBindOpticalFlowSessionImageNV");
                        if val.is_null() {
                            bind_optical_flow_session_image_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkBindOpticalFlowSessionImageNV,
                            >(val)
                        }
                    },
                    cmd_optical_flow_execute_nv: unsafe {
                        unsafe extern "system" fn cmd_optical_flow_execute_nv(
                            _command_buffer: CommandBuffer,
                            _session: OpticalFlowSessionNV,
                            _p_execute_info: *const OpticalFlowExecuteInfoNV<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_optical_flow_execute_nv)
                            ))
                        }
                        let val = _f(c"vkCmdOpticalFlowExecuteNV");
                        if val.is_null() {
                            cmd_optical_flow_execute_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkCmdOpticalFlowExecuteNV>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
22359 #[doc = "VK_NV_ray_tracing_invocation_reorder"]
22360 pub mod ray_tracing_invocation_reorder {
22361 use super::super::*;
22362 pub use {
22363 crate::vk::NV_RAY_TRACING_INVOCATION_REORDER_EXTENSION_NAME as NAME,
22364 crate::vk::NV_RAY_TRACING_INVOCATION_REORDER_SPEC_VERSION as SPEC_VERSION,
22365 };
22366 }
    #[doc = "VK_NV_cooperative_vector"]
    pub mod cooperative_vector {
        use super::super::*;
        pub use {
            crate::vk::NV_COOPERATIVE_VECTOR_EXTENSION_NAME as NAME,
            crate::vk::NV_COOPERATIVE_VECTOR_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_cooperative_vector instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            /// Loaded instance-level function-pointer table.
            pub(crate) fp: InstanceFn,
            /// Instance handle the function pointers were loaded against.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Loads the extension's instance-level entry points for `instance`
            /// through `vkGetInstanceProcAddr`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // Cast PFN_vkVoidFunction to a raw pointer so load_erased
                    // can null-check missing symbols.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            /// Instance handle these function pointers were loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_cooperative_vector instance-level function pointers"]
        pub struct InstanceFn {
            pub get_physical_device_cooperative_vector_properties_nv:
                PFN_vkGetPhysicalDeviceCooperativeVectorPropertiesNV,
        }
        // SAFETY: only plain function pointers are stored; no thread-affine state.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Loads every entry point by querying `f` with its Vulkan symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body (`dyn FnMut`), presumably to limit
            // monomorphization of this large function.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_physical_device_cooperative_vector_properties_nv: unsafe {
                        // Panicking fallback stub, installed when the loader
                        // returns null for this symbol.
                        unsafe extern "system" fn get_physical_device_cooperative_vector_properties_nv(
                            _physical_device: PhysicalDevice,
                            _p_property_count: *mut u32,
                            _p_properties: *mut CooperativeVectorPropertiesNV<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_physical_device_cooperative_vector_properties_nv)
                            ))
                        }
                        let val = _f(c"vkGetPhysicalDeviceCooperativeVectorPropertiesNV");
                        if val.is_null() {
                            get_physical_device_cooperative_vector_properties_nv
                        } else {
                            // SAFETY: non-null pointer was returned by the loader
                            // for this exact symbol; assumed to match the PFN type.
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetPhysicalDeviceCooperativeVectorPropertiesNV,
                            >(val)
                        }
                    },
                }
            }
        }
        #[doc = "VK_NV_cooperative_vector device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            /// Loaded device-level function-pointer table.
            pub(crate) fp: DeviceFn,
            /// Device handle the function pointers were loaded against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads the extension's device-level entry points for `device`
            /// through `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle these function pointers were loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_cooperative_vector device-level function pointers"]
        pub struct DeviceFn {
            pub convert_cooperative_vector_matrix_nv: PFN_vkConvertCooperativeVectorMatrixNV,
            pub cmd_convert_cooperative_vector_matrix_nv: PFN_vkCmdConvertCooperativeVectorMatrixNV,
        }
        // SAFETY: only plain function pointers are stored; no thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads every entry point by querying `f` with its Vulkan symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body; panicking stubs are substituted for any
            // symbol the loader reports as null (same pattern for each field).
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    convert_cooperative_vector_matrix_nv: unsafe {
                        unsafe extern "system" fn convert_cooperative_vector_matrix_nv(
                            _device: crate::vk::Device,
                            _p_info: *const ConvertCooperativeVectorMatrixInfoNV<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(convert_cooperative_vector_matrix_nv)
                            ))
                        }
                        let val = _f(c"vkConvertCooperativeVectorMatrixNV");
                        if val.is_null() {
                            convert_cooperative_vector_matrix_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkConvertCooperativeVectorMatrixNV,
                            >(val)
                        }
                    },
                    cmd_convert_cooperative_vector_matrix_nv: unsafe {
                        unsafe extern "system" fn cmd_convert_cooperative_vector_matrix_nv(
                            _command_buffer: CommandBuffer,
                            _info_count: u32,
                            _p_infos: *const ConvertCooperativeVectorMatrixInfoNV<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(cmd_convert_cooperative_vector_matrix_nv)
                            ))
                        }
                        let val = _f(c"vkCmdConvertCooperativeVectorMatrixNV");
                        if val.is_null() {
                            cmd_convert_cooperative_vector_matrix_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCmdConvertCooperativeVectorMatrixNV,
                            >(val)
                        }
                    },
                }
            }
        }
    }
22521 #[doc = "VK_NV_extended_sparse_address_space"]
22522 pub mod extended_sparse_address_space {
22523 use super::super::*;
22524 pub use {
22525 crate::vk::NV_EXTENDED_SPARSE_ADDRESS_SPACE_EXTENSION_NAME as NAME,
22526 crate::vk::NV_EXTENDED_SPARSE_ADDRESS_SPACE_SPEC_VERSION as SPEC_VERSION,
22527 };
22528 }
    #[doc = "VK_NV_low_latency2"]
    pub mod low_latency2 {
        use super::super::*;
        pub use {
            crate::vk::NV_LOW_LATENCY_2_EXTENSION_NAME as NAME,
            crate::vk::NV_LOW_LATENCY_2_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_low_latency2 device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            /// Loaded function-pointer table for this extension.
            pub(crate) fp: DeviceFn,
            /// Device handle the function pointers were loaded against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads the extension's device-level entry points for `device`
            /// through `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    // Cast PFN_vkVoidFunction to a raw pointer so load_erased
                    // can null-check missing symbols.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle these function pointers were loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_low_latency2 device-level function pointers"]
        pub struct DeviceFn {
            pub set_latency_sleep_mode_nv: PFN_vkSetLatencySleepModeNV,
            pub latency_sleep_nv: PFN_vkLatencySleepNV,
            pub set_latency_marker_nv: PFN_vkSetLatencyMarkerNV,
            pub get_latency_timings_nv: PFN_vkGetLatencyTimingsNV,
            pub queue_notify_out_of_band_nv: PFN_vkQueueNotifyOutOfBandNV,
        }
        // SAFETY: only plain function pointers are stored; no thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads every entry point by querying `f` with its Vulkan symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body; panicking stubs are substituted for any
            // symbol the loader reports as null (same pattern for each field).
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    set_latency_sleep_mode_nv: unsafe {
                        unsafe extern "system" fn set_latency_sleep_mode_nv(
                            _device: crate::vk::Device,
                            _swapchain: SwapchainKHR,
                            _p_sleep_mode_info: *const LatencySleepModeInfoNV<'_>,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(set_latency_sleep_mode_nv)
                            ))
                        }
                        let val = _f(c"vkSetLatencySleepModeNV");
                        if val.is_null() {
                            set_latency_sleep_mode_nv
                        } else {
                            // SAFETY: non-null pointer was returned by the loader
                            // for this exact symbol; assumed to match the PFN type.
                            ::core::mem::transmute::<*const c_void, PFN_vkSetLatencySleepModeNV>(
                                val,
                            )
                        }
                    },
                    latency_sleep_nv: unsafe {
                        unsafe extern "system" fn latency_sleep_nv(
                            _device: crate::vk::Device,
                            _swapchain: SwapchainKHR,
                            _p_sleep_info: *const LatencySleepInfoNV<'_>,
                        ) -> Result {
                            panic!(concat!("Unable to load ", stringify!(latency_sleep_nv)))
                        }
                        let val = _f(c"vkLatencySleepNV");
                        if val.is_null() {
                            latency_sleep_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkLatencySleepNV>(val)
                        }
                    },
                    set_latency_marker_nv: unsafe {
                        unsafe extern "system" fn set_latency_marker_nv(
                            _device: crate::vk::Device,
                            _swapchain: SwapchainKHR,
                            _p_latency_marker_info: *const SetLatencyMarkerInfoNV<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(set_latency_marker_nv)
                            ))
                        }
                        let val = _f(c"vkSetLatencyMarkerNV");
                        if val.is_null() {
                            set_latency_marker_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkSetLatencyMarkerNV>(val)
                        }
                    },
                    get_latency_timings_nv: unsafe {
                        unsafe extern "system" fn get_latency_timings_nv(
                            _device: crate::vk::Device,
                            _swapchain: SwapchainKHR,
                            _p_latency_marker_info: *mut GetLatencyMarkerInfoNV<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_latency_timings_nv)
                            ))
                        }
                        let val = _f(c"vkGetLatencyTimingsNV");
                        if val.is_null() {
                            get_latency_timings_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkGetLatencyTimingsNV>(val)
                        }
                    },
                    queue_notify_out_of_band_nv: unsafe {
                        unsafe extern "system" fn queue_notify_out_of_band_nv(
                            _queue: Queue,
                            _p_queue_type_info: *const OutOfBandQueueTypeInfoNV<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(queue_notify_out_of_band_nv)
                            ))
                        }
                        let val = _f(c"vkQueueNotifyOutOfBandNV");
                        if val.is_null() {
                            queue_notify_out_of_band_nv
                        } else {
                            ::core::mem::transmute::<*const c_void, PFN_vkQueueNotifyOutOfBandNV>(
                                val,
                            )
                        }
                    },
                }
            }
        }
    }
22672 #[doc = "VK_NV_per_stage_descriptor_set"]
22673 pub mod per_stage_descriptor_set {
22674 use super::super::*;
22675 pub use {
22676 crate::vk::NV_PER_STAGE_DESCRIPTOR_SET_EXTENSION_NAME as NAME,
22677 crate::vk::NV_PER_STAGE_DESCRIPTOR_SET_SPEC_VERSION as SPEC_VERSION,
22678 };
22679 }
22680 #[doc = "VK_NV_descriptor_pool_overallocation"]
22681 pub mod descriptor_pool_overallocation {
22682 use super::super::*;
22683 pub use {
22684 crate::vk::NV_DESCRIPTOR_POOL_OVERALLOCATION_EXTENSION_NAME as NAME,
22685 crate::vk::NV_DESCRIPTOR_POOL_OVERALLOCATION_SPEC_VERSION as SPEC_VERSION,
22686 };
22687 }
22688 #[doc = "VK_NV_display_stereo"]
22689 pub mod display_stereo {
22690 use super::super::*;
22691 pub use {
22692 crate::vk::NV_DISPLAY_STEREO_EXTENSION_NAME as NAME,
22693 crate::vk::NV_DISPLAY_STEREO_SPEC_VERSION as SPEC_VERSION,
22694 };
22695 }
22696 #[doc = "VK_NV_raw_access_chains"]
22697 pub mod raw_access_chains {
22698 use super::super::*;
22699 pub use {
22700 crate::vk::NV_RAW_ACCESS_CHAINS_EXTENSION_NAME as NAME,
22701 crate::vk::NV_RAW_ACCESS_CHAINS_SPEC_VERSION as SPEC_VERSION,
22702 };
22703 }
    #[doc = "VK_NV_external_compute_queue"]
    pub mod external_compute_queue {
        use super::super::*;
        pub use {
            crate::vk::NV_EXTERNAL_COMPUTE_QUEUE_EXTENSION_NAME as NAME,
            crate::vk::NV_EXTERNAL_COMPUTE_QUEUE_SPEC_VERSION as SPEC_VERSION,
        };
        #[doc = "VK_NV_external_compute_queue instance-level functions"]
        #[derive(Clone)]
        pub struct Instance {
            /// Loaded instance-level function-pointer table.
            pub(crate) fp: InstanceFn,
            /// Instance handle the function pointers were loaded against.
            pub(crate) handle: crate::vk::Instance,
        }
        impl Instance {
            /// Loads the extension's instance-level entry points for `instance`
            /// through `vkGetInstanceProcAddr`.
            pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
                let handle = instance.handle();
                let fp = InstanceFn::load(|name| unsafe {
                    // Cast PFN_vkVoidFunction to a raw pointer so load_erased
                    // can null-check missing symbols.
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        entry.get_instance_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &InstanceFn {
                &self.fp
            }
            /// Instance handle these function pointers were loaded for.
            #[inline]
            pub fn instance(&self) -> crate::vk::Instance {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_external_compute_queue instance-level function pointers"]
        pub struct InstanceFn {
            pub get_external_compute_queue_data_nv: PFN_vkGetExternalComputeQueueDataNV,
        }
        // SAFETY: only plain function pointers are stored; no thread-affine state.
        unsafe impl Send for InstanceFn {}
        unsafe impl Sync for InstanceFn {}
        impl InstanceFn {
            /// Loads every entry point by querying `f` with its Vulkan symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body (`dyn FnMut`), presumably to limit
            // monomorphization of this large function.
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    get_external_compute_queue_data_nv: unsafe {
                        // Panicking fallback stub, installed when the loader
                        // returns null for this symbol.
                        unsafe extern "system" fn get_external_compute_queue_data_nv(
                            _external_queue: ExternalComputeQueueNV,
                            _params: *mut ExternalComputeQueueDataParamsNV<'_>,
                            _p_data: *mut c_void,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(get_external_compute_queue_data_nv)
                            ))
                        }
                        let val = _f(c"vkGetExternalComputeQueueDataNV");
                        if val.is_null() {
                            get_external_compute_queue_data_nv
                        } else {
                            // SAFETY: non-null pointer was returned by the loader
                            // for this exact symbol; assumed to match the PFN type.
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkGetExternalComputeQueueDataNV,
                            >(val)
                        }
                    },
                }
            }
        }
        #[doc = "VK_NV_external_compute_queue device-level functions"]
        #[derive(Clone)]
        pub struct Device {
            /// Loaded device-level function-pointer table.
            pub(crate) fp: DeviceFn,
            /// Device handle the function pointers were loaded against.
            pub(crate) handle: crate::vk::Device,
        }
        impl Device {
            /// Loads the extension's device-level entry points for `device`
            /// through `vkGetDeviceProcAddr`.
            pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
                let handle = device.handle();
                let fp = DeviceFn::load(|name| unsafe {
                    core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
                        instance.get_device_proc_addr(handle, name.as_ptr()),
                    )
                });
                Self { handle, fp }
            }
            /// Raw function-pointer table.
            #[inline]
            pub fn fp(&self) -> &DeviceFn {
                &self.fp
            }
            /// Device handle these function pointers were loaded for.
            #[inline]
            pub fn device(&self) -> crate::vk::Device {
                self.handle
            }
        }
        #[derive(Clone)]
        #[doc = "Raw VK_NV_external_compute_queue device-level function pointers"]
        pub struct DeviceFn {
            pub create_external_compute_queue_nv: PFN_vkCreateExternalComputeQueueNV,
            pub destroy_external_compute_queue_nv: PFN_vkDestroyExternalComputeQueueNV,
        }
        // SAFETY: only plain function pointers are stored; no thread-affine state.
        unsafe impl Send for DeviceFn {}
        unsafe impl Sync for DeviceFn {}
        impl DeviceFn {
            /// Loads every entry point by querying `f` with its Vulkan symbol name.
            pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
                Self::load_erased(&mut f)
            }
            // Type-erased loader body; panicking stubs are substituted for any
            // symbol the loader reports as null (same pattern for each field).
            fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
                Self {
                    create_external_compute_queue_nv: unsafe {
                        unsafe extern "system" fn create_external_compute_queue_nv(
                            _device: crate::vk::Device,
                            _p_create_info: *const ExternalComputeQueueCreateInfoNV<'_>,
                            _p_allocator: *const AllocationCallbacks<'_>,
                            _p_external_queue: *mut ExternalComputeQueueNV,
                        ) -> Result {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(create_external_compute_queue_nv)
                            ))
                        }
                        let val = _f(c"vkCreateExternalComputeQueueNV");
                        if val.is_null() {
                            create_external_compute_queue_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkCreateExternalComputeQueueNV,
                            >(val)
                        }
                    },
                    destroy_external_compute_queue_nv: unsafe {
                        unsafe extern "system" fn destroy_external_compute_queue_nv(
                            _device: crate::vk::Device,
                            _external_queue: ExternalComputeQueueNV,
                            _p_allocator: *const AllocationCallbacks<'_>,
                        ) {
                            panic!(concat!(
                                "Unable to load ",
                                stringify!(destroy_external_compute_queue_nv)
                            ))
                        }
                        let val = _f(c"vkDestroyExternalComputeQueueNV");
                        if val.is_null() {
                            destroy_external_compute_queue_nv
                        } else {
                            ::core::mem::transmute::<
                                *const c_void,
                                PFN_vkDestroyExternalComputeQueueNV,
                            >(val)
                        }
                    },
                }
            }
        }
    }
22859 #[doc = "VK_NV_command_buffer_inheritance"]
22860 pub mod command_buffer_inheritance {
22861 use super::super::*;
22862 pub use {
22863 crate::vk::NV_COMMAND_BUFFER_INHERITANCE_EXTENSION_NAME as NAME,
22864 crate::vk::NV_COMMAND_BUFFER_INHERITANCE_SPEC_VERSION as SPEC_VERSION,
22865 };
22866 }
22867 #[doc = "VK_NV_shader_atomic_float16_vector"]
22868 pub mod shader_atomic_float16_vector {
22869 use super::super::*;
22870 pub use {
22871 crate::vk::NV_SHADER_ATOMIC_FLOAT16_VECTOR_EXTENSION_NAME as NAME,
22872 crate::vk::NV_SHADER_ATOMIC_FLOAT16_VECTOR_SPEC_VERSION as SPEC_VERSION,
22873 };
22874 }
22875 #[doc = "VK_NV_ray_tracing_validation"]
22876 pub mod ray_tracing_validation {
22877 use super::super::*;
22878 pub use {
22879 crate::vk::NV_RAY_TRACING_VALIDATION_EXTENSION_NAME as NAME,
22880 crate::vk::NV_RAY_TRACING_VALIDATION_SPEC_VERSION as SPEC_VERSION,
22881 };
22882 }
22883 #[doc = "VK_NV_cluster_acceleration_structure"]
22884 pub mod cluster_acceleration_structure {
22885 use super::super::*;
22886 pub use {
22887 crate::vk::NV_CLUSTER_ACCELERATION_STRUCTURE_EXTENSION_NAME as NAME,
22888 crate::vk::NV_CLUSTER_ACCELERATION_STRUCTURE_SPEC_VERSION as SPEC_VERSION,
22889 };
22890 #[doc = "VK_NV_cluster_acceleration_structure device-level functions"]
22891 #[derive(Clone)]
22892 pub struct Device {
22893 pub(crate) fp: DeviceFn,
22894 pub(crate) handle: crate::vk::Device,
22895 }
22896 impl Device {
22897 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
22898 let handle = device.handle();
22899 let fp = DeviceFn::load(|name| unsafe {
22900 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
22901 instance.get_device_proc_addr(handle, name.as_ptr()),
22902 )
22903 });
22904 Self { handle, fp }
22905 }
22906 #[inline]
22907 pub fn fp(&self) -> &DeviceFn {
22908 &self.fp
22909 }
22910 #[inline]
22911 pub fn device(&self) -> crate::vk::Device {
22912 self.handle
22913 }
22914 }
22915 #[derive(Clone)]
22916 #[doc = "Raw VK_NV_cluster_acceleration_structure device-level function pointers"]
22917 pub struct DeviceFn {
22918 pub get_cluster_acceleration_structure_build_sizes_nv:
22919 PFN_vkGetClusterAccelerationStructureBuildSizesNV,
22920 pub cmd_build_cluster_acceleration_structure_indirect_nv:
22921 PFN_vkCmdBuildClusterAccelerationStructureIndirectNV,
22922 }
22923 unsafe impl Send for DeviceFn {}
22924 unsafe impl Sync for DeviceFn {}
22925 impl DeviceFn {
22926 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
22927 Self::load_erased(&mut f)
22928 }
22929 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
22930 Self {
22931 get_cluster_acceleration_structure_build_sizes_nv: unsafe {
22932 unsafe extern "system" fn get_cluster_acceleration_structure_build_sizes_nv(
22933 _device: crate::vk::Device,
22934 _p_info: *const ClusterAccelerationStructureInputInfoNV<'_>,
22935 _p_size_info: *mut AccelerationStructureBuildSizesInfoKHR<'_>,
22936 ) {
22937 panic!(concat!(
22938 "Unable to load ",
22939 stringify!(get_cluster_acceleration_structure_build_sizes_nv)
22940 ))
22941 }
22942 let val = _f(c"vkGetClusterAccelerationStructureBuildSizesNV");
22943 if val.is_null() {
22944 get_cluster_acceleration_structure_build_sizes_nv
22945 } else {
22946 ::core::mem::transmute::<
22947 *const c_void,
22948 PFN_vkGetClusterAccelerationStructureBuildSizesNV,
22949 >(val)
22950 }
22951 },
22952 cmd_build_cluster_acceleration_structure_indirect_nv: unsafe {
22953 unsafe extern "system" fn cmd_build_cluster_acceleration_structure_indirect_nv(
22954 _command_buffer: CommandBuffer,
22955 _p_command_infos: *const ClusterAccelerationStructureCommandsInfoNV<'_>,
22956 ) {
22957 panic!(concat!(
22958 "Unable to load ",
22959 stringify!(cmd_build_cluster_acceleration_structure_indirect_nv)
22960 ))
22961 }
22962 let val = _f(c"vkCmdBuildClusterAccelerationStructureIndirectNV");
22963 if val.is_null() {
22964 cmd_build_cluster_acceleration_structure_indirect_nv
22965 } else {
22966 ::core::mem::transmute::<
22967 *const c_void,
22968 PFN_vkCmdBuildClusterAccelerationStructureIndirectNV,
22969 >(val)
22970 }
22971 },
22972 }
22973 }
22974 }
22975 }
22976 #[doc = "VK_NV_partitioned_acceleration_structure"]
22977 pub mod partitioned_acceleration_structure {
22978 use super::super::*;
22979 pub use {
22980 crate::vk::NV_PARTITIONED_ACCELERATION_STRUCTURE_EXTENSION_NAME as NAME,
22981 crate::vk::NV_PARTITIONED_ACCELERATION_STRUCTURE_SPEC_VERSION as SPEC_VERSION,
22982 };
22983 #[doc = "VK_NV_partitioned_acceleration_structure device-level functions"]
22984 #[derive(Clone)]
22985 pub struct Device {
22986 pub(crate) fp: DeviceFn,
22987 pub(crate) handle: crate::vk::Device,
22988 }
22989 impl Device {
22990 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
22991 let handle = device.handle();
22992 let fp = DeviceFn::load(|name| unsafe {
22993 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
22994 instance.get_device_proc_addr(handle, name.as_ptr()),
22995 )
22996 });
22997 Self { handle, fp }
22998 }
22999 #[inline]
23000 pub fn fp(&self) -> &DeviceFn {
23001 &self.fp
23002 }
23003 #[inline]
23004 pub fn device(&self) -> crate::vk::Device {
23005 self.handle
23006 }
23007 }
23008 #[derive(Clone)]
23009 #[doc = "Raw VK_NV_partitioned_acceleration_structure device-level function pointers"]
23010 pub struct DeviceFn {
23011 pub get_partitioned_acceleration_structures_build_sizes_nv:
23012 PFN_vkGetPartitionedAccelerationStructuresBuildSizesNV,
23013 pub cmd_build_partitioned_acceleration_structures_nv:
23014 PFN_vkCmdBuildPartitionedAccelerationStructuresNV,
23015 }
23016 unsafe impl Send for DeviceFn {}
23017 unsafe impl Sync for DeviceFn {}
23018 impl DeviceFn {
23019 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
23020 Self::load_erased(&mut f)
23021 }
23022 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
23023 Self {
23024 get_partitioned_acceleration_structures_build_sizes_nv: unsafe {
23025 unsafe extern "system" fn get_partitioned_acceleration_structures_build_sizes_nv(
23026 _device: crate::vk::Device,
23027 _p_info: *const PartitionedAccelerationStructureInstancesInputNV<'_>,
23028 _p_size_info: *mut AccelerationStructureBuildSizesInfoKHR<'_>,
23029 ) {
23030 panic!(concat!(
23031 "Unable to load ",
23032 stringify!(get_partitioned_acceleration_structures_build_sizes_nv)
23033 ))
23034 }
23035 let val = _f(c"vkGetPartitionedAccelerationStructuresBuildSizesNV");
23036 if val.is_null() {
23037 get_partitioned_acceleration_structures_build_sizes_nv
23038 } else {
23039 ::core::mem::transmute::<
23040 *const c_void,
23041 PFN_vkGetPartitionedAccelerationStructuresBuildSizesNV,
23042 >(val)
23043 }
23044 },
23045 cmd_build_partitioned_acceleration_structures_nv: unsafe {
23046 unsafe extern "system" fn cmd_build_partitioned_acceleration_structures_nv(
23047 _command_buffer: CommandBuffer,
23048 _p_build_info: *const BuildPartitionedAccelerationStructureInfoNV<'_>,
23049 ) {
23050 panic!(concat!(
23051 "Unable to load ",
23052 stringify!(cmd_build_partitioned_acceleration_structures_nv)
23053 ))
23054 }
23055 let val = _f(c"vkCmdBuildPartitionedAccelerationStructuresNV");
23056 if val.is_null() {
23057 cmd_build_partitioned_acceleration_structures_nv
23058 } else {
23059 ::core::mem::transmute::<
23060 *const c_void,
23061 PFN_vkCmdBuildPartitionedAccelerationStructuresNV,
23062 >(val)
23063 }
23064 },
23065 }
23066 }
23067 }
23068 }
23069 #[doc = "VK_NV_push_constant_bank"]
23070 pub mod push_constant_bank {
23071 use super::super::*;
23072 pub use {
23073 crate::vk::NV_PUSH_CONSTANT_BANK_EXTENSION_NAME as NAME,
23074 crate::vk::NV_PUSH_CONSTANT_BANK_SPEC_VERSION as SPEC_VERSION,
23075 };
23076 }
23077 #[doc = "VK_NV_cooperative_matrix2"]
23078 pub mod cooperative_matrix2 {
23079 use super::super::*;
23080 pub use {
23081 crate::vk::NV_COOPERATIVE_MATRIX_2_EXTENSION_NAME as NAME,
23082 crate::vk::NV_COOPERATIVE_MATRIX_2_SPEC_VERSION as SPEC_VERSION,
23083 };
23084 #[doc = "VK_NV_cooperative_matrix2 instance-level functions"]
23085 #[derive(Clone)]
23086 pub struct Instance {
23087 pub(crate) fp: InstanceFn,
23088 pub(crate) handle: crate::vk::Instance,
23089 }
23090 impl Instance {
23091 pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
23092 let handle = instance.handle();
23093 let fp = InstanceFn::load(|name| unsafe {
23094 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
23095 entry.get_instance_proc_addr(handle, name.as_ptr()),
23096 )
23097 });
23098 Self { handle, fp }
23099 }
23100 #[inline]
23101 pub fn fp(&self) -> &InstanceFn {
23102 &self.fp
23103 }
23104 #[inline]
23105 pub fn instance(&self) -> crate::vk::Instance {
23106 self.handle
23107 }
23108 }
23109 #[derive(Clone)]
23110 #[doc = "Raw VK_NV_cooperative_matrix2 instance-level function pointers"]
23111 pub struct InstanceFn {
23112 pub get_physical_device_cooperative_matrix_flexible_dimensions_properties_nv:
23113 PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV,
23114 }
23115 unsafe impl Send for InstanceFn {}
23116 unsafe impl Sync for InstanceFn {}
23117 impl InstanceFn {
23118 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
23119 Self::load_erased(&mut f)
23120 }
23121 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
23122 Self {
23123 get_physical_device_cooperative_matrix_flexible_dimensions_properties_nv: unsafe {
23124 unsafe extern "system" fn get_physical_device_cooperative_matrix_flexible_dimensions_properties_nv(
23125 _physical_device: PhysicalDevice,
23126 _p_property_count: *mut u32,
23127 _p_properties: *mut CooperativeMatrixFlexibleDimensionsPropertiesNV<'_>,
23128 ) -> Result {
23129 panic ! (concat ! ("Unable to load " , stringify ! (get_physical_device_cooperative_matrix_flexible_dimensions_properties_nv)))
23130 }
23131 let val = _f(
23132 c"vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV",
23133 );
23134 if val.is_null() {
23135 get_physical_device_cooperative_matrix_flexible_dimensions_properties_nv
23136 } else {
23137 :: core :: mem :: transmute :: < * const c_void , PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV > (val)
23138 }
23139 },
23140 }
23141 }
23142 }
23143 }
23144 #[doc = "VK_NV_present_metering"]
23145 pub mod present_metering {
23146 use super::super::*;
23147 pub use {
23148 crate::vk::NV_PRESENT_METERING_EXTENSION_NAME as NAME,
23149 crate::vk::NV_PRESENT_METERING_SPEC_VERSION as SPEC_VERSION,
23150 };
23151 }
23152 #[doc = "VK_NV_compute_occupancy_priority"]
23153 pub mod compute_occupancy_priority {
23154 use super::super::*;
23155 pub use {
23156 crate::vk::NV_COMPUTE_OCCUPANCY_PRIORITY_EXTENSION_NAME as NAME,
23157 crate::vk::NV_COMPUTE_OCCUPANCY_PRIORITY_SPEC_VERSION as SPEC_VERSION,
23158 };
23159 #[doc = "VK_NV_compute_occupancy_priority device-level functions"]
23160 #[derive(Clone)]
23161 pub struct Device {
23162 pub(crate) fp: DeviceFn,
23163 pub(crate) handle: crate::vk::Device,
23164 }
23165 impl Device {
23166 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
23167 let handle = device.handle();
23168 let fp = DeviceFn::load(|name| unsafe {
23169 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
23170 instance.get_device_proc_addr(handle, name.as_ptr()),
23171 )
23172 });
23173 Self { handle, fp }
23174 }
23175 #[inline]
23176 pub fn fp(&self) -> &DeviceFn {
23177 &self.fp
23178 }
23179 #[inline]
23180 pub fn device(&self) -> crate::vk::Device {
23181 self.handle
23182 }
23183 }
23184 #[derive(Clone)]
23185 #[doc = "Raw VK_NV_compute_occupancy_priority device-level function pointers"]
23186 pub struct DeviceFn {
23187 pub cmd_set_compute_occupancy_priority_nv: PFN_vkCmdSetComputeOccupancyPriorityNV,
23188 }
23189 unsafe impl Send for DeviceFn {}
23190 unsafe impl Sync for DeviceFn {}
23191 impl DeviceFn {
23192 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
23193 Self::load_erased(&mut f)
23194 }
23195 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
23196 Self {
23197 cmd_set_compute_occupancy_priority_nv: unsafe {
23198 unsafe extern "system" fn cmd_set_compute_occupancy_priority_nv(
23199 _command_buffer: CommandBuffer,
23200 _p_parameters: *const ComputeOccupancyPriorityParametersNV<'_>,
23201 ) {
23202 panic!(concat!(
23203 "Unable to load ",
23204 stringify!(cmd_set_compute_occupancy_priority_nv)
23205 ))
23206 }
23207 let val = _f(c"vkCmdSetComputeOccupancyPriorityNV");
23208 if val.is_null() {
23209 cmd_set_compute_occupancy_priority_nv
23210 } else {
23211 ::core::mem::transmute::<
23212 *const c_void,
23213 PFN_vkCmdSetComputeOccupancyPriorityNV,
23214 >(val)
23215 }
23216 },
23217 }
23218 }
23219 }
23220 }
23221}
23222#[doc = "Extensions tagged NVX"]
23223pub mod nvx {
23224 #[doc = "VK_NVX_binary_import"]
23225 pub mod binary_import {
23226 use super::super::*;
23227 pub use {
23228 crate::vk::NVX_BINARY_IMPORT_EXTENSION_NAME as NAME,
23229 crate::vk::NVX_BINARY_IMPORT_SPEC_VERSION as SPEC_VERSION,
23230 };
23231 #[doc = "VK_NVX_binary_import device-level functions"]
23232 #[derive(Clone)]
23233 pub struct Device {
23234 pub(crate) fp: DeviceFn,
23235 pub(crate) handle: crate::vk::Device,
23236 }
23237 impl Device {
23238 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
23239 let handle = device.handle();
23240 let fp = DeviceFn::load(|name| unsafe {
23241 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
23242 instance.get_device_proc_addr(handle, name.as_ptr()),
23243 )
23244 });
23245 Self { handle, fp }
23246 }
23247 #[inline]
23248 pub fn fp(&self) -> &DeviceFn {
23249 &self.fp
23250 }
23251 #[inline]
23252 pub fn device(&self) -> crate::vk::Device {
23253 self.handle
23254 }
23255 }
23256 #[derive(Clone)]
23257 #[doc = "Raw VK_NVX_binary_import device-level function pointers"]
23258 pub struct DeviceFn {
23259 pub create_cu_module_nvx: PFN_vkCreateCuModuleNVX,
23260 pub create_cu_function_nvx: PFN_vkCreateCuFunctionNVX,
23261 pub destroy_cu_module_nvx: PFN_vkDestroyCuModuleNVX,
23262 pub destroy_cu_function_nvx: PFN_vkDestroyCuFunctionNVX,
23263 pub cmd_cu_launch_kernel_nvx: PFN_vkCmdCuLaunchKernelNVX,
23264 }
23265 unsafe impl Send for DeviceFn {}
23266 unsafe impl Sync for DeviceFn {}
23267 impl DeviceFn {
23268 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
23269 Self::load_erased(&mut f)
23270 }
23271 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
23272 Self {
23273 create_cu_module_nvx: unsafe {
23274 unsafe extern "system" fn create_cu_module_nvx(
23275 _device: crate::vk::Device,
23276 _p_create_info: *const CuModuleCreateInfoNVX<'_>,
23277 _p_allocator: *const AllocationCallbacks<'_>,
23278 _p_module: *mut CuModuleNVX,
23279 ) -> Result {
23280 panic!(concat!("Unable to load ", stringify!(create_cu_module_nvx)))
23281 }
23282 let val = _f(c"vkCreateCuModuleNVX");
23283 if val.is_null() {
23284 create_cu_module_nvx
23285 } else {
23286 ::core::mem::transmute::<*const c_void, PFN_vkCreateCuModuleNVX>(val)
23287 }
23288 },
23289 create_cu_function_nvx: unsafe {
23290 unsafe extern "system" fn create_cu_function_nvx(
23291 _device: crate::vk::Device,
23292 _p_create_info: *const CuFunctionCreateInfoNVX<'_>,
23293 _p_allocator: *const AllocationCallbacks<'_>,
23294 _p_function: *mut CuFunctionNVX,
23295 ) -> Result {
23296 panic!(concat!(
23297 "Unable to load ",
23298 stringify!(create_cu_function_nvx)
23299 ))
23300 }
23301 let val = _f(c"vkCreateCuFunctionNVX");
23302 if val.is_null() {
23303 create_cu_function_nvx
23304 } else {
23305 ::core::mem::transmute::<*const c_void, PFN_vkCreateCuFunctionNVX>(val)
23306 }
23307 },
23308 destroy_cu_module_nvx: unsafe {
23309 unsafe extern "system" fn destroy_cu_module_nvx(
23310 _device: crate::vk::Device,
23311 _module: CuModuleNVX,
23312 _p_allocator: *const AllocationCallbacks<'_>,
23313 ) {
23314 panic!(concat!(
23315 "Unable to load ",
23316 stringify!(destroy_cu_module_nvx)
23317 ))
23318 }
23319 let val = _f(c"vkDestroyCuModuleNVX");
23320 if val.is_null() {
23321 destroy_cu_module_nvx
23322 } else {
23323 ::core::mem::transmute::<*const c_void, PFN_vkDestroyCuModuleNVX>(val)
23324 }
23325 },
23326 destroy_cu_function_nvx: unsafe {
23327 unsafe extern "system" fn destroy_cu_function_nvx(
23328 _device: crate::vk::Device,
23329 _function: CuFunctionNVX,
23330 _p_allocator: *const AllocationCallbacks<'_>,
23331 ) {
23332 panic!(concat!(
23333 "Unable to load ",
23334 stringify!(destroy_cu_function_nvx)
23335 ))
23336 }
23337 let val = _f(c"vkDestroyCuFunctionNVX");
23338 if val.is_null() {
23339 destroy_cu_function_nvx
23340 } else {
23341 ::core::mem::transmute::<*const c_void, PFN_vkDestroyCuFunctionNVX>(val)
23342 }
23343 },
23344 cmd_cu_launch_kernel_nvx: unsafe {
23345 unsafe extern "system" fn cmd_cu_launch_kernel_nvx(
23346 _command_buffer: CommandBuffer,
23347 _p_launch_info: *const CuLaunchInfoNVX<'_>,
23348 ) {
23349 panic!(concat!(
23350 "Unable to load ",
23351 stringify!(cmd_cu_launch_kernel_nvx)
23352 ))
23353 }
23354 let val = _f(c"vkCmdCuLaunchKernelNVX");
23355 if val.is_null() {
23356 cmd_cu_launch_kernel_nvx
23357 } else {
23358 ::core::mem::transmute::<*const c_void, PFN_vkCmdCuLaunchKernelNVX>(val)
23359 }
23360 },
23361 }
23362 }
23363 }
23364 }
23365 #[doc = "VK_NVX_image_view_handle"]
23366 pub mod image_view_handle {
23367 use super::super::*;
23368 pub use {
23369 crate::vk::NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME as NAME,
23370 crate::vk::NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION as SPEC_VERSION,
23371 };
23372 #[doc = "VK_NVX_image_view_handle device-level functions"]
23373 #[derive(Clone)]
23374 pub struct Device {
23375 pub(crate) fp: DeviceFn,
23376 pub(crate) handle: crate::vk::Device,
23377 }
23378 impl Device {
23379 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
23380 let handle = device.handle();
23381 let fp = DeviceFn::load(|name| unsafe {
23382 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
23383 instance.get_device_proc_addr(handle, name.as_ptr()),
23384 )
23385 });
23386 Self { handle, fp }
23387 }
23388 #[inline]
23389 pub fn fp(&self) -> &DeviceFn {
23390 &self.fp
23391 }
23392 #[inline]
23393 pub fn device(&self) -> crate::vk::Device {
23394 self.handle
23395 }
23396 }
23397 #[derive(Clone)]
23398 #[doc = "Raw VK_NVX_image_view_handle device-level function pointers"]
23399 pub struct DeviceFn {
23400 pub get_image_view_handle_nvx: PFN_vkGetImageViewHandleNVX,
23401 pub get_image_view_handle64_nvx: PFN_vkGetImageViewHandle64NVX,
23402 pub get_image_view_address_nvx: PFN_vkGetImageViewAddressNVX,
23403 pub get_device_combined_image_sampler_index_nvx:
23404 PFN_vkGetDeviceCombinedImageSamplerIndexNVX,
23405 }
23406 unsafe impl Send for DeviceFn {}
23407 unsafe impl Sync for DeviceFn {}
23408 impl DeviceFn {
23409 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
23410 Self::load_erased(&mut f)
23411 }
23412 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
23413 Self {
23414 get_image_view_handle_nvx: unsafe {
23415 unsafe extern "system" fn get_image_view_handle_nvx(
23416 _device: crate::vk::Device,
23417 _p_info: *const ImageViewHandleInfoNVX<'_>,
23418 ) -> u32 {
23419 panic!(concat!(
23420 "Unable to load ",
23421 stringify!(get_image_view_handle_nvx)
23422 ))
23423 }
23424 let val = _f(c"vkGetImageViewHandleNVX");
23425 if val.is_null() {
23426 get_image_view_handle_nvx
23427 } else {
23428 ::core::mem::transmute::<*const c_void, PFN_vkGetImageViewHandleNVX>(
23429 val,
23430 )
23431 }
23432 },
23433 get_image_view_handle64_nvx: unsafe {
23434 unsafe extern "system" fn get_image_view_handle64_nvx(
23435 _device: crate::vk::Device,
23436 _p_info: *const ImageViewHandleInfoNVX<'_>,
23437 ) -> u64 {
23438 panic!(concat!(
23439 "Unable to load ",
23440 stringify!(get_image_view_handle64_nvx)
23441 ))
23442 }
23443 let val = _f(c"vkGetImageViewHandle64NVX");
23444 if val.is_null() {
23445 get_image_view_handle64_nvx
23446 } else {
23447 ::core::mem::transmute::<*const c_void, PFN_vkGetImageViewHandle64NVX>(
23448 val,
23449 )
23450 }
23451 },
23452 get_image_view_address_nvx: unsafe {
23453 unsafe extern "system" fn get_image_view_address_nvx(
23454 _device: crate::vk::Device,
23455 _image_view: ImageView,
23456 _p_properties: *mut ImageViewAddressPropertiesNVX<'_>,
23457 ) -> Result {
23458 panic!(concat!(
23459 "Unable to load ",
23460 stringify!(get_image_view_address_nvx)
23461 ))
23462 }
23463 let val = _f(c"vkGetImageViewAddressNVX");
23464 if val.is_null() {
23465 get_image_view_address_nvx
23466 } else {
23467 ::core::mem::transmute::<*const c_void, PFN_vkGetImageViewAddressNVX>(
23468 val,
23469 )
23470 }
23471 },
23472 get_device_combined_image_sampler_index_nvx: unsafe {
23473 unsafe extern "system" fn get_device_combined_image_sampler_index_nvx(
23474 _device: crate::vk::Device,
23475 _image_view_index: u64,
23476 _sampler_index: u64,
23477 ) -> u64 {
23478 panic!(concat!(
23479 "Unable to load ",
23480 stringify!(get_device_combined_image_sampler_index_nvx)
23481 ))
23482 }
23483 let val = _f(c"vkGetDeviceCombinedImageSamplerIndexNVX");
23484 if val.is_null() {
23485 get_device_combined_image_sampler_index_nvx
23486 } else {
23487 ::core::mem::transmute::<
23488 *const c_void,
23489 PFN_vkGetDeviceCombinedImageSamplerIndexNVX,
23490 >(val)
23491 }
23492 },
23493 }
23494 }
23495 }
23496 }
23497 #[doc = "VK_NVX_multiview_per_view_attributes"]
23498 pub mod multiview_per_view_attributes {
23499 use super::super::*;
23500 pub use {
23501 crate::vk::NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME as NAME,
23502 crate::vk::NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION as SPEC_VERSION,
23503 };
23504 }
23505}
23506#[doc = "Extensions tagged OHOS"]
23507pub mod ohos {
23508 #[doc = "VK_OHOS_external_memory"]
23509 pub mod external_memory {
23510 use super::super::*;
23511 pub use {
23512 crate::vk::OHOS_EXTERNAL_MEMORY_EXTENSION_NAME as NAME,
23513 crate::vk::OHOS_EXTERNAL_MEMORY_SPEC_VERSION as SPEC_VERSION,
23514 };
23515 #[doc = "VK_OHOS_external_memory device-level functions"]
23516 #[derive(Clone)]
23517 pub struct Device {
23518 pub(crate) fp: DeviceFn,
23519 pub(crate) handle: crate::vk::Device,
23520 }
23521 impl Device {
23522 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
23523 let handle = device.handle();
23524 let fp = DeviceFn::load(|name| unsafe {
23525 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
23526 instance.get_device_proc_addr(handle, name.as_ptr()),
23527 )
23528 });
23529 Self { handle, fp }
23530 }
23531 #[inline]
23532 pub fn fp(&self) -> &DeviceFn {
23533 &self.fp
23534 }
23535 #[inline]
23536 pub fn device(&self) -> crate::vk::Device {
23537 self.handle
23538 }
23539 }
23540 #[derive(Clone)]
23541 #[doc = "Raw VK_OHOS_external_memory device-level function pointers"]
23542 pub struct DeviceFn {
23543 pub get_native_buffer_properties_ohos: PFN_vkGetNativeBufferPropertiesOHOS,
23544 pub get_memory_native_buffer_ohos: PFN_vkGetMemoryNativeBufferOHOS,
23545 }
23546 unsafe impl Send for DeviceFn {}
23547 unsafe impl Sync for DeviceFn {}
23548 impl DeviceFn {
23549 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
23550 Self::load_erased(&mut f)
23551 }
23552 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
23553 Self {
23554 get_native_buffer_properties_ohos: unsafe {
23555 unsafe extern "system" fn get_native_buffer_properties_ohos(
23556 _device: crate::vk::Device,
23557 _buffer: *const OH_NativeBuffer,
23558 _p_properties: *mut NativeBufferPropertiesOHOS<'_>,
23559 ) -> Result {
23560 panic!(concat!(
23561 "Unable to load ",
23562 stringify!(get_native_buffer_properties_ohos)
23563 ))
23564 }
23565 let val = _f(c"vkGetNativeBufferPropertiesOHOS");
23566 if val.is_null() {
23567 get_native_buffer_properties_ohos
23568 } else {
23569 ::core::mem::transmute::<
23570 *const c_void,
23571 PFN_vkGetNativeBufferPropertiesOHOS,
23572 >(val)
23573 }
23574 },
23575 get_memory_native_buffer_ohos: unsafe {
23576 unsafe extern "system" fn get_memory_native_buffer_ohos(
23577 _device: crate::vk::Device,
23578 _p_info: *const MemoryGetNativeBufferInfoOHOS<'_>,
23579 _p_buffer: *mut *mut OH_NativeBuffer,
23580 ) -> Result {
23581 panic!(concat!(
23582 "Unable to load ",
23583 stringify!(get_memory_native_buffer_ohos)
23584 ))
23585 }
23586 let val = _f(c"vkGetMemoryNativeBufferOHOS");
23587 if val.is_null() {
23588 get_memory_native_buffer_ohos
23589 } else {
23590 ::core::mem::transmute::<*const c_void, PFN_vkGetMemoryNativeBufferOHOS>(
23591 val,
23592 )
23593 }
23594 },
23595 }
23596 }
23597 }
23598 }
23599 #[doc = "VK_OHOS_surface"]
23600 pub mod surface {
23601 use super::super::*;
23602 pub use {
23603 crate::vk::OHOS_SURFACE_EXTENSION_NAME as NAME,
23604 crate::vk::OHOS_SURFACE_SPEC_VERSION as SPEC_VERSION,
23605 };
23606 #[doc = "VK_OHOS_surface instance-level functions"]
23607 #[derive(Clone)]
23608 pub struct Instance {
23609 pub(crate) fp: InstanceFn,
23610 pub(crate) handle: crate::vk::Instance,
23611 }
23612 impl Instance {
23613 pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
23614 let handle = instance.handle();
23615 let fp = InstanceFn::load(|name| unsafe {
23616 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
23617 entry.get_instance_proc_addr(handle, name.as_ptr()),
23618 )
23619 });
23620 Self { handle, fp }
23621 }
23622 #[inline]
23623 pub fn fp(&self) -> &InstanceFn {
23624 &self.fp
23625 }
23626 #[inline]
23627 pub fn instance(&self) -> crate::vk::Instance {
23628 self.handle
23629 }
23630 }
23631 #[derive(Clone)]
23632 #[doc = "Raw VK_OHOS_surface instance-level function pointers"]
23633 pub struct InstanceFn {
23634 pub create_surface_ohos: PFN_vkCreateSurfaceOHOS,
23635 }
23636 unsafe impl Send for InstanceFn {}
23637 unsafe impl Sync for InstanceFn {}
23638 impl InstanceFn {
23639 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
23640 Self::load_erased(&mut f)
23641 }
23642 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
23643 Self {
23644 create_surface_ohos: unsafe {
23645 unsafe extern "system" fn create_surface_ohos(
23646 _instance: crate::vk::Instance,
23647 _p_create_info: *const SurfaceCreateInfoOHOS<'_>,
23648 _p_allocator: *const AllocationCallbacks<'_>,
23649 _p_surface: *mut SurfaceKHR,
23650 ) -> Result {
23651 panic!(concat!("Unable to load ", stringify!(create_surface_ohos)))
23652 }
23653 let val = _f(c"vkCreateSurfaceOHOS");
23654 if val.is_null() {
23655 create_surface_ohos
23656 } else {
23657 ::core::mem::transmute::<*const c_void, PFN_vkCreateSurfaceOHOS>(val)
23658 }
23659 },
23660 }
23661 }
23662 }
23663 }
23664}
23665#[doc = "Extensions tagged QCOM"]
23666pub mod qcom {
23667 #[doc = "VK_QCOM_render_pass_shader_resolve"]
23668 pub mod render_pass_shader_resolve {
23669 use super::super::*;
23670 pub use {
23671 crate::vk::QCOM_RENDER_PASS_SHADER_RESOLVE_EXTENSION_NAME as NAME,
23672 crate::vk::QCOM_RENDER_PASS_SHADER_RESOLVE_SPEC_VERSION as SPEC_VERSION,
23673 };
23674 }
23675 #[doc = "VK_QCOM_render_pass_transform"]
23676 pub mod render_pass_transform {
23677 use super::super::*;
23678 pub use {
23679 crate::vk::QCOM_RENDER_PASS_TRANSFORM_EXTENSION_NAME as NAME,
23680 crate::vk::QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION as SPEC_VERSION,
23681 };
23682 }
23683 #[doc = "VK_QCOM_render_pass_store_ops"]
23684 pub mod render_pass_store_ops {
23685 use super::super::*;
23686 pub use {
23687 crate::vk::QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME as NAME,
23688 crate::vk::QCOM_RENDER_PASS_STORE_OPS_SPEC_VERSION as SPEC_VERSION,
23689 };
23690 }
23691 #[doc = "VK_QCOM_tile_shading"]
23692 pub mod tile_shading {
23693 use super::super::*;
23694 pub use {
23695 crate::vk::QCOM_TILE_SHADING_EXTENSION_NAME as NAME,
23696 crate::vk::QCOM_TILE_SHADING_SPEC_VERSION as SPEC_VERSION,
23697 };
23698 #[doc = "VK_QCOM_tile_shading device-level functions"]
23699 #[derive(Clone)]
23700 pub struct Device {
23701 pub(crate) fp: DeviceFn,
23702 pub(crate) handle: crate::vk::Device,
23703 }
23704 impl Device {
23705 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
23706 let handle = device.handle();
23707 let fp = DeviceFn::load(|name| unsafe {
23708 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
23709 instance.get_device_proc_addr(handle, name.as_ptr()),
23710 )
23711 });
23712 Self { handle, fp }
23713 }
23714 #[inline]
23715 pub fn fp(&self) -> &DeviceFn {
23716 &self.fp
23717 }
23718 #[inline]
23719 pub fn device(&self) -> crate::vk::Device {
23720 self.handle
23721 }
23722 }
23723 #[derive(Clone)]
23724 #[doc = "Raw VK_QCOM_tile_shading device-level function pointers"]
23725 pub struct DeviceFn {
23726 pub cmd_dispatch_tile_qcom: PFN_vkCmdDispatchTileQCOM,
23727 pub cmd_begin_per_tile_execution_qcom: PFN_vkCmdBeginPerTileExecutionQCOM,
23728 pub cmd_end_per_tile_execution_qcom: PFN_vkCmdEndPerTileExecutionQCOM,
23729 }
23730 unsafe impl Send for DeviceFn {}
23731 unsafe impl Sync for DeviceFn {}
23732 impl DeviceFn {
23733 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
23734 Self::load_erased(&mut f)
23735 }
23736 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
23737 Self {
23738 cmd_dispatch_tile_qcom: unsafe {
23739 unsafe extern "system" fn cmd_dispatch_tile_qcom(
23740 _command_buffer: CommandBuffer,
23741 _p_dispatch_tile_info: *const DispatchTileInfoQCOM<'_>,
23742 ) {
23743 panic!(concat!(
23744 "Unable to load ",
23745 stringify!(cmd_dispatch_tile_qcom)
23746 ))
23747 }
23748 let val = _f(c"vkCmdDispatchTileQCOM");
23749 if val.is_null() {
23750 cmd_dispatch_tile_qcom
23751 } else {
23752 ::core::mem::transmute::<*const c_void, PFN_vkCmdDispatchTileQCOM>(val)
23753 }
23754 },
23755 cmd_begin_per_tile_execution_qcom: unsafe {
23756 unsafe extern "system" fn cmd_begin_per_tile_execution_qcom(
23757 _command_buffer: CommandBuffer,
23758 _p_per_tile_begin_info: *const PerTileBeginInfoQCOM<'_>,
23759 ) {
23760 panic!(concat!(
23761 "Unable to load ",
23762 stringify!(cmd_begin_per_tile_execution_qcom)
23763 ))
23764 }
23765 let val = _f(c"vkCmdBeginPerTileExecutionQCOM");
23766 if val.is_null() {
23767 cmd_begin_per_tile_execution_qcom
23768 } else {
23769 ::core::mem::transmute::<
23770 *const c_void,
23771 PFN_vkCmdBeginPerTileExecutionQCOM,
23772 >(val)
23773 }
23774 },
23775 cmd_end_per_tile_execution_qcom: unsafe {
23776 unsafe extern "system" fn cmd_end_per_tile_execution_qcom(
23777 _command_buffer: CommandBuffer,
23778 _p_per_tile_end_info: *const PerTileEndInfoQCOM<'_>,
23779 ) {
23780 panic!(concat!(
23781 "Unable to load ",
23782 stringify!(cmd_end_per_tile_execution_qcom)
23783 ))
23784 }
23785 let val = _f(c"vkCmdEndPerTileExecutionQCOM");
23786 if val.is_null() {
23787 cmd_end_per_tile_execution_qcom
23788 } else {
23789 ::core::mem::transmute::<*const c_void, PFN_vkCmdEndPerTileExecutionQCOM>(
23790 val,
23791 )
23792 }
23793 },
23794 }
23795 }
23796 }
23797 }
23798 #[doc = "VK_QCOM_rotated_copy_commands"]
23799 pub mod rotated_copy_commands {
23800 use super::super::*;
23801 pub use {
23802 crate::vk::QCOM_ROTATED_COPY_COMMANDS_EXTENSION_NAME as NAME,
23803 crate::vk::QCOM_ROTATED_COPY_COMMANDS_SPEC_VERSION as SPEC_VERSION,
23804 };
23805 }
23806 #[doc = "VK_QCOM_fragment_density_map_offset"]
23807 pub mod fragment_density_map_offset {
23808 use super::super::*;
23809 pub use {
23810 crate::vk::QCOM_FRAGMENT_DENSITY_MAP_OFFSET_EXTENSION_NAME as NAME,
23811 crate::vk::QCOM_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION as SPEC_VERSION,
23812 };
23813 }
23814 #[doc = "VK_QCOM_image_processing"]
23815 pub mod image_processing {
23816 use super::super::*;
23817 pub use {
23818 crate::vk::QCOM_IMAGE_PROCESSING_EXTENSION_NAME as NAME,
23819 crate::vk::QCOM_IMAGE_PROCESSING_SPEC_VERSION as SPEC_VERSION,
23820 };
23821 }
23822 #[doc = "VK_QCOM_tile_properties"]
23823 pub mod tile_properties {
23824 use super::super::*;
23825 pub use {
23826 crate::vk::QCOM_TILE_PROPERTIES_EXTENSION_NAME as NAME,
23827 crate::vk::QCOM_TILE_PROPERTIES_SPEC_VERSION as SPEC_VERSION,
23828 };
23829 #[doc = "VK_QCOM_tile_properties device-level functions"]
23830 #[derive(Clone)]
23831 pub struct Device {
23832 pub(crate) fp: DeviceFn,
23833 pub(crate) handle: crate::vk::Device,
23834 }
23835 impl Device {
23836 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
23837 let handle = device.handle();
23838 let fp = DeviceFn::load(|name| unsafe {
23839 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
23840 instance.get_device_proc_addr(handle, name.as_ptr()),
23841 )
23842 });
23843 Self { handle, fp }
23844 }
23845 #[inline]
23846 pub fn fp(&self) -> &DeviceFn {
23847 &self.fp
23848 }
23849 #[inline]
23850 pub fn device(&self) -> crate::vk::Device {
23851 self.handle
23852 }
23853 }
23854 #[derive(Clone)]
23855 #[doc = "Raw VK_QCOM_tile_properties device-level function pointers"]
23856 pub struct DeviceFn {
23857 pub get_framebuffer_tile_properties_qcom: PFN_vkGetFramebufferTilePropertiesQCOM,
23858 pub get_dynamic_rendering_tile_properties_qcom:
23859 PFN_vkGetDynamicRenderingTilePropertiesQCOM,
23860 }
23861 unsafe impl Send for DeviceFn {}
23862 unsafe impl Sync for DeviceFn {}
23863 impl DeviceFn {
23864 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
23865 Self::load_erased(&mut f)
23866 }
23867 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
23868 Self {
23869 get_framebuffer_tile_properties_qcom: unsafe {
23870 unsafe extern "system" fn get_framebuffer_tile_properties_qcom(
23871 _device: crate::vk::Device,
23872 _framebuffer: Framebuffer,
23873 _p_properties_count: *mut u32,
23874 _p_properties: *mut TilePropertiesQCOM<'_>,
23875 ) -> Result {
23876 panic!(concat!(
23877 "Unable to load ",
23878 stringify!(get_framebuffer_tile_properties_qcom)
23879 ))
23880 }
23881 let val = _f(c"vkGetFramebufferTilePropertiesQCOM");
23882 if val.is_null() {
23883 get_framebuffer_tile_properties_qcom
23884 } else {
23885 ::core::mem::transmute::<
23886 *const c_void,
23887 PFN_vkGetFramebufferTilePropertiesQCOM,
23888 >(val)
23889 }
23890 },
23891 get_dynamic_rendering_tile_properties_qcom: unsafe {
23892 unsafe extern "system" fn get_dynamic_rendering_tile_properties_qcom(
23893 _device: crate::vk::Device,
23894 _p_rendering_info: *const RenderingInfo<'_>,
23895 _p_properties: *mut TilePropertiesQCOM<'_>,
23896 ) -> Result {
23897 panic!(concat!(
23898 "Unable to load ",
23899 stringify!(get_dynamic_rendering_tile_properties_qcom)
23900 ))
23901 }
23902 let val = _f(c"vkGetDynamicRenderingTilePropertiesQCOM");
23903 if val.is_null() {
23904 get_dynamic_rendering_tile_properties_qcom
23905 } else {
23906 ::core::mem::transmute::<
23907 *const c_void,
23908 PFN_vkGetDynamicRenderingTilePropertiesQCOM,
23909 >(val)
23910 }
23911 },
23912 }
23913 }
23914 }
23915 }
23916 #[doc = "VK_QCOM_multiview_per_view_viewports"]
23917 pub mod multiview_per_view_viewports {
23918 use super::super::*;
23919 pub use {
23920 crate::vk::QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_EXTENSION_NAME as NAME,
23921 crate::vk::QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_SPEC_VERSION as SPEC_VERSION,
23922 };
23923 }
23924 #[doc = "VK_QCOM_multiview_per_view_render_areas"]
23925 pub mod multiview_per_view_render_areas {
23926 use super::super::*;
23927 pub use {
23928 crate::vk::QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_EXTENSION_NAME as NAME,
23929 crate::vk::QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_SPEC_VERSION as SPEC_VERSION,
23930 };
23931 }
23932 #[doc = "VK_QCOM_image_processing2"]
23933 pub mod image_processing2 {
23934 use super::super::*;
23935 pub use {
23936 crate::vk::QCOM_IMAGE_PROCESSING_2_EXTENSION_NAME as NAME,
23937 crate::vk::QCOM_IMAGE_PROCESSING_2_SPEC_VERSION as SPEC_VERSION,
23938 };
23939 }
23940 #[doc = "VK_QCOM_filter_cubic_weights"]
23941 pub mod filter_cubic_weights {
23942 use super::super::*;
23943 pub use {
23944 crate::vk::QCOM_FILTER_CUBIC_WEIGHTS_EXTENSION_NAME as NAME,
23945 crate::vk::QCOM_FILTER_CUBIC_WEIGHTS_SPEC_VERSION as SPEC_VERSION,
23946 };
23947 }
23948 #[doc = "VK_QCOM_ycbcr_degamma"]
23949 pub mod ycbcr_degamma {
23950 use super::super::*;
23951 pub use {
23952 crate::vk::QCOM_YCBCR_DEGAMMA_EXTENSION_NAME as NAME,
23953 crate::vk::QCOM_YCBCR_DEGAMMA_SPEC_VERSION as SPEC_VERSION,
23954 };
23955 }
23956 #[doc = "VK_QCOM_filter_cubic_clamp"]
23957 pub mod filter_cubic_clamp {
23958 use super::super::*;
23959 pub use {
23960 crate::vk::QCOM_FILTER_CUBIC_CLAMP_EXTENSION_NAME as NAME,
23961 crate::vk::QCOM_FILTER_CUBIC_CLAMP_SPEC_VERSION as SPEC_VERSION,
23962 };
23963 }
23964 #[doc = "VK_QCOM_tile_memory_heap"]
23965 pub mod tile_memory_heap {
23966 use super::super::*;
23967 pub use {
23968 crate::vk::QCOM_TILE_MEMORY_HEAP_EXTENSION_NAME as NAME,
23969 crate::vk::QCOM_TILE_MEMORY_HEAP_SPEC_VERSION as SPEC_VERSION,
23970 };
23971 #[doc = "VK_QCOM_tile_memory_heap device-level functions"]
23972 #[derive(Clone)]
23973 pub struct Device {
23974 pub(crate) fp: DeviceFn,
23975 pub(crate) handle: crate::vk::Device,
23976 }
23977 impl Device {
23978 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
23979 let handle = device.handle();
23980 let fp = DeviceFn::load(|name| unsafe {
23981 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
23982 instance.get_device_proc_addr(handle, name.as_ptr()),
23983 )
23984 });
23985 Self { handle, fp }
23986 }
23987 #[inline]
23988 pub fn fp(&self) -> &DeviceFn {
23989 &self.fp
23990 }
23991 #[inline]
23992 pub fn device(&self) -> crate::vk::Device {
23993 self.handle
23994 }
23995 }
23996 #[derive(Clone)]
23997 #[doc = "Raw VK_QCOM_tile_memory_heap device-level function pointers"]
23998 pub struct DeviceFn {
23999 pub cmd_bind_tile_memory_qcom: PFN_vkCmdBindTileMemoryQCOM,
24000 }
24001 unsafe impl Send for DeviceFn {}
24002 unsafe impl Sync for DeviceFn {}
24003 impl DeviceFn {
24004 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
24005 Self::load_erased(&mut f)
24006 }
24007 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
24008 Self {
24009 cmd_bind_tile_memory_qcom: unsafe {
24010 unsafe extern "system" fn cmd_bind_tile_memory_qcom(
24011 _command_buffer: CommandBuffer,
24012 _p_tile_memory_bind_info: *const TileMemoryBindInfoQCOM<'_>,
24013 ) {
24014 panic!(concat!(
24015 "Unable to load ",
24016 stringify!(cmd_bind_tile_memory_qcom)
24017 ))
24018 }
24019 let val = _f(c"vkCmdBindTileMemoryQCOM");
24020 if val.is_null() {
24021 cmd_bind_tile_memory_qcom
24022 } else {
24023 ::core::mem::transmute::<*const c_void, PFN_vkCmdBindTileMemoryQCOM>(
24024 val,
24025 )
24026 }
24027 },
24028 }
24029 }
24030 }
24031 }
24032 #[doc = "VK_QCOM_data_graph_model"]
24033 pub mod data_graph_model {
24034 use super::super::*;
24035 pub use {
24036 crate::vk::QCOM_DATA_GRAPH_MODEL_EXTENSION_NAME as NAME,
24037 crate::vk::QCOM_DATA_GRAPH_MODEL_SPEC_VERSION as SPEC_VERSION,
24038 };
24039 }
24040}
24041#[doc = "Extensions tagged QNX"]
24042pub mod qnx {
24043 #[doc = "VK_QNX_screen_surface"]
24044 pub mod screen_surface {
24045 use super::super::*;
24046 pub use {
24047 crate::vk::QNX_SCREEN_SURFACE_EXTENSION_NAME as NAME,
24048 crate::vk::QNX_SCREEN_SURFACE_SPEC_VERSION as SPEC_VERSION,
24049 };
24050 #[doc = "VK_QNX_screen_surface instance-level functions"]
24051 #[derive(Clone)]
24052 pub struct Instance {
24053 pub(crate) fp: InstanceFn,
24054 pub(crate) handle: crate::vk::Instance,
24055 }
24056 impl Instance {
24057 pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self {
24058 let handle = instance.handle();
24059 let fp = InstanceFn::load(|name| unsafe {
24060 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
24061 entry.get_instance_proc_addr(handle, name.as_ptr()),
24062 )
24063 });
24064 Self { handle, fp }
24065 }
24066 #[inline]
24067 pub fn fp(&self) -> &InstanceFn {
24068 &self.fp
24069 }
24070 #[inline]
24071 pub fn instance(&self) -> crate::vk::Instance {
24072 self.handle
24073 }
24074 }
24075 #[derive(Clone)]
24076 #[doc = "Raw VK_QNX_screen_surface instance-level function pointers"]
24077 pub struct InstanceFn {
24078 pub create_screen_surface_qnx: PFN_vkCreateScreenSurfaceQNX,
24079 pub get_physical_device_screen_presentation_support_qnx:
24080 PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX,
24081 }
24082 unsafe impl Send for InstanceFn {}
24083 unsafe impl Sync for InstanceFn {}
24084 impl InstanceFn {
24085 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
24086 Self::load_erased(&mut f)
24087 }
24088 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
24089 Self {
24090 create_screen_surface_qnx: unsafe {
24091 unsafe extern "system" fn create_screen_surface_qnx(
24092 _instance: crate::vk::Instance,
24093 _p_create_info: *const ScreenSurfaceCreateInfoQNX<'_>,
24094 _p_allocator: *const AllocationCallbacks<'_>,
24095 _p_surface: *mut SurfaceKHR,
24096 ) -> Result {
24097 panic!(concat!(
24098 "Unable to load ",
24099 stringify!(create_screen_surface_qnx)
24100 ))
24101 }
24102 let val = _f(c"vkCreateScreenSurfaceQNX");
24103 if val.is_null() {
24104 create_screen_surface_qnx
24105 } else {
24106 ::core::mem::transmute::<*const c_void, PFN_vkCreateScreenSurfaceQNX>(
24107 val,
24108 )
24109 }
24110 },
24111 get_physical_device_screen_presentation_support_qnx: unsafe {
24112 unsafe extern "system" fn get_physical_device_screen_presentation_support_qnx(
24113 _physical_device: PhysicalDevice,
24114 _queue_family_index: u32,
24115 _window: *mut _screen_window,
24116 ) -> Bool32 {
24117 panic!(concat!(
24118 "Unable to load ",
24119 stringify!(get_physical_device_screen_presentation_support_qnx)
24120 ))
24121 }
24122 let val = _f(c"vkGetPhysicalDeviceScreenPresentationSupportQNX");
24123 if val.is_null() {
24124 get_physical_device_screen_presentation_support_qnx
24125 } else {
24126 ::core::mem::transmute::<
24127 *const c_void,
24128 PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX,
24129 >(val)
24130 }
24131 },
24132 }
24133 }
24134 }
24135 }
24136 #[doc = "VK_QNX_external_memory_screen_buffer"]
24137 pub mod external_memory_screen_buffer {
24138 use super::super::*;
24139 pub use {
24140 crate::vk::QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_EXTENSION_NAME as NAME,
24141 crate::vk::QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_SPEC_VERSION as SPEC_VERSION,
24142 };
24143 #[doc = "VK_QNX_external_memory_screen_buffer device-level functions"]
24144 #[derive(Clone)]
24145 pub struct Device {
24146 pub(crate) fp: DeviceFn,
24147 pub(crate) handle: crate::vk::Device,
24148 }
24149 impl Device {
24150 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
24151 let handle = device.handle();
24152 let fp = DeviceFn::load(|name| unsafe {
24153 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
24154 instance.get_device_proc_addr(handle, name.as_ptr()),
24155 )
24156 });
24157 Self { handle, fp }
24158 }
24159 #[inline]
24160 pub fn fp(&self) -> &DeviceFn {
24161 &self.fp
24162 }
24163 #[inline]
24164 pub fn device(&self) -> crate::vk::Device {
24165 self.handle
24166 }
24167 }
24168 #[derive(Clone)]
24169 #[doc = "Raw VK_QNX_external_memory_screen_buffer device-level function pointers"]
24170 pub struct DeviceFn {
24171 pub get_screen_buffer_properties_qnx: PFN_vkGetScreenBufferPropertiesQNX,
24172 }
24173 unsafe impl Send for DeviceFn {}
24174 unsafe impl Sync for DeviceFn {}
24175 impl DeviceFn {
24176 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
24177 Self::load_erased(&mut f)
24178 }
24179 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
24180 Self {
24181 get_screen_buffer_properties_qnx: unsafe {
24182 unsafe extern "system" fn get_screen_buffer_properties_qnx(
24183 _device: crate::vk::Device,
24184 _buffer: *const _screen_buffer,
24185 _p_properties: *mut ScreenBufferPropertiesQNX<'_>,
24186 ) -> Result {
24187 panic!(concat!(
24188 "Unable to load ",
24189 stringify!(get_screen_buffer_properties_qnx)
24190 ))
24191 }
24192 let val = _f(c"vkGetScreenBufferPropertiesQNX");
24193 if val.is_null() {
24194 get_screen_buffer_properties_qnx
24195 } else {
24196 ::core::mem::transmute::<
24197 *const c_void,
24198 PFN_vkGetScreenBufferPropertiesQNX,
24199 >(val)
24200 }
24201 },
24202 }
24203 }
24204 }
24205 }
24206}
24207#[doc = "Extensions tagged SEC"]
24208pub mod sec {
24209 #[doc = "VK_SEC_amigo_profiling"]
24210 pub mod amigo_profiling {
24211 use super::super::*;
24212 pub use {
24213 crate::vk::SEC_AMIGO_PROFILING_EXTENSION_NAME as NAME,
24214 crate::vk::SEC_AMIGO_PROFILING_SPEC_VERSION as SPEC_VERSION,
24215 };
24216 }
24217 #[doc = "VK_SEC_pipeline_cache_incremental_mode"]
24218 pub mod pipeline_cache_incremental_mode {
24219 use super::super::*;
24220 pub use {
24221 crate::vk::SEC_PIPELINE_CACHE_INCREMENTAL_MODE_EXTENSION_NAME as NAME,
24222 crate::vk::SEC_PIPELINE_CACHE_INCREMENTAL_MODE_SPEC_VERSION as SPEC_VERSION,
24223 };
24224 }
24225}
24226#[doc = "Extensions tagged VALVE"]
24227pub mod valve {
24228 #[doc = "VK_VALVE_mutable_descriptor_type"]
24229 pub mod mutable_descriptor_type {
24230 use super::super::*;
24231 pub use {
24232 crate::vk::VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME as NAME,
24233 crate::vk::VALVE_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION as SPEC_VERSION,
24234 };
24235 }
24236 #[doc = "VK_VALVE_video_encode_rgb_conversion"]
24237 pub mod video_encode_rgb_conversion {
24238 use super::super::*;
24239 pub use {
24240 crate::vk::VALVE_VIDEO_ENCODE_RGB_CONVERSION_EXTENSION_NAME as NAME,
24241 crate::vk::VALVE_VIDEO_ENCODE_RGB_CONVERSION_SPEC_VERSION as SPEC_VERSION,
24242 };
24243 }
24244 #[doc = "VK_VALVE_descriptor_set_host_mapping"]
24245 pub mod descriptor_set_host_mapping {
24246 use super::super::*;
24247 pub use {
24248 crate::vk::VALVE_DESCRIPTOR_SET_HOST_MAPPING_EXTENSION_NAME as NAME,
24249 crate::vk::VALVE_DESCRIPTOR_SET_HOST_MAPPING_SPEC_VERSION as SPEC_VERSION,
24250 };
24251 #[doc = "VK_VALVE_descriptor_set_host_mapping device-level functions"]
24252 #[derive(Clone)]
24253 pub struct Device {
24254 pub(crate) fp: DeviceFn,
24255 pub(crate) handle: crate::vk::Device,
24256 }
24257 impl Device {
24258 pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self {
24259 let handle = device.handle();
24260 let fp = DeviceFn::load(|name| unsafe {
24261 core::mem::transmute::<PFN_vkVoidFunction, *const c_void>(
24262 instance.get_device_proc_addr(handle, name.as_ptr()),
24263 )
24264 });
24265 Self { handle, fp }
24266 }
24267 #[inline]
24268 pub fn fp(&self) -> &DeviceFn {
24269 &self.fp
24270 }
24271 #[inline]
24272 pub fn device(&self) -> crate::vk::Device {
24273 self.handle
24274 }
24275 }
24276 #[derive(Clone)]
24277 #[doc = "Raw VK_VALVE_descriptor_set_host_mapping device-level function pointers"]
24278 pub struct DeviceFn {
24279 pub get_descriptor_set_layout_host_mapping_info_valve:
24280 PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE,
24281 pub get_descriptor_set_host_mapping_valve: PFN_vkGetDescriptorSetHostMappingVALVE,
24282 }
24283 unsafe impl Send for DeviceFn {}
24284 unsafe impl Sync for DeviceFn {}
24285 impl DeviceFn {
24286 pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self {
24287 Self::load_erased(&mut f)
24288 }
24289 fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self {
24290 Self {
24291 get_descriptor_set_layout_host_mapping_info_valve: unsafe {
24292 unsafe extern "system" fn get_descriptor_set_layout_host_mapping_info_valve(
24293 _device: crate::vk::Device,
24294 _p_binding_reference: *const DescriptorSetBindingReferenceVALVE<'_>,
24295 _p_host_mapping: *mut DescriptorSetLayoutHostMappingInfoVALVE<'_>,
24296 ) {
24297 panic!(concat!(
24298 "Unable to load ",
24299 stringify!(get_descriptor_set_layout_host_mapping_info_valve)
24300 ))
24301 }
24302 let val = _f(c"vkGetDescriptorSetLayoutHostMappingInfoVALVE");
24303 if val.is_null() {
24304 get_descriptor_set_layout_host_mapping_info_valve
24305 } else {
24306 ::core::mem::transmute::<
24307 *const c_void,
24308 PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE,
24309 >(val)
24310 }
24311 },
24312 get_descriptor_set_host_mapping_valve: unsafe {
24313 unsafe extern "system" fn get_descriptor_set_host_mapping_valve(
24314 _device: crate::vk::Device,
24315 _descriptor_set: DescriptorSet,
24316 _pp_data: *mut *mut c_void,
24317 ) {
24318 panic!(concat!(
24319 "Unable to load ",
24320 stringify!(get_descriptor_set_host_mapping_valve)
24321 ))
24322 }
24323 let val = _f(c"vkGetDescriptorSetHostMappingVALVE");
24324 if val.is_null() {
24325 get_descriptor_set_host_mapping_valve
24326 } else {
24327 ::core::mem::transmute::<
24328 *const c_void,
24329 PFN_vkGetDescriptorSetHostMappingVALVE,
24330 >(val)
24331 }
24332 },
24333 }
24334 }
24335 }
24336 }
24337 #[doc = "VK_VALVE_fragment_density_map_layered"]
24338 pub mod fragment_density_map_layered {
24339 use super::super::*;
24340 pub use {
24341 crate::vk::VALVE_FRAGMENT_DENSITY_MAP_LAYERED_EXTENSION_NAME as NAME,
24342 crate::vk::VALVE_FRAGMENT_DENSITY_MAP_LAYERED_SPEC_VERSION as SPEC_VERSION,
24343 };
24344 }
24345}