#![allow(non_camel_case_types, clippy::too_many_arguments)]
use std::ffi::CStr;
use std::mem::MaybeUninit;
use std::os::raw::{c_int, c_void};
use std::ptr::{self, copy_nonoverlapping as memcpy};
use super::*;
/// Conversion between [`CStr`] and the fixed-size, NUL-padded name arrays used
/// by Vulkan (e.g. extension names).
pub trait ConvertCStr {
    /// Copies the C string's bytes into a zero-initialized name array.
    fn from_cstr(string: &CStr) -> Self;
    /// Reinterprets the array as a NUL-terminated C string.
    fn to_cstr(&self) -> &CStr;
}
impl ConvertCStr for ExtensionName {
    /// Copies `string` into a zero-initialized, fixed-size extension name.
    #[inline]
    fn from_cstr(string: &CStr) -> Self {
        let mut name = [0; MAX_EXTENSION_NAME_SIZE];
        // The original code copied the full input length unconditionally,
        // which wrote out of bounds for inputs of MAX_EXTENSION_NAME_SIZE
        // bytes or more. Clamp to leave at least one trailing NUL so that
        // `to_cstr` is always sound; over-long names are a caller bug, so
        // flag them in debug builds.
        let len = string.to_bytes().len();
        debug_assert!(len < MAX_EXTENSION_NAME_SIZE);
        let count = len.min(MAX_EXTENSION_NAME_SIZE - 1);
        // SAFETY: `count < name.len()`, and the source provides `count` bytes.
        unsafe { memcpy(string.as_ptr(), name.as_mut_ptr(), count) };
        name
    }

    /// Reinterprets the NUL-terminated array as a `CStr`.
    #[inline]
    fn to_cstr(&self) -> &CStr {
        // SAFETY: `from_cstr` always leaves a trailing NUL byte in the array.
        unsafe { CStr::from_ptr(self.as_ptr()) }
    }
}
pub trait AmdBufferMarkerExtension: DeviceV1_0 {
    /// Writes a 32-bit marker into a buffer when the given pipeline stage is
    /// reached (`vkCmdWriteBufferMarkerAMD`).
    #[inline]
    fn cmd_write_buffer_marker_amd(
        &self,
        command_buffer: CommandBuffer,
        pipeline_stage: PipelineStageFlags,
        dst_buffer: Buffer,
        dst_offset: DeviceSize,
        marker: u32,
    ) {
        // Fire-and-forget command recording; there is no result to check.
        (self.commands().cmd_write_buffer_marker_amd)(
            command_buffer,
            pipeline_stage,
            dst_buffer,
            dst_offset,
            marker,
        );
    }
}
impl AmdBufferMarkerExtension for crate::Device {}
/// Marker trait for the `AMD_device_coherent_memory` extension (no commands).
pub trait AmdDeviceCoherentMemoryExtension: DeviceV1_0 {}
impl AmdDeviceCoherentMemoryExtension for crate::Device {}
pub trait AmdDisplayNativeHdrExtension: DeviceV1_0 {
    /// Enables or disables local dimming on a swapchain
    /// (`vkSetLocalDimmingAMD`).
    #[inline]
    fn set_local_dimming_amd(&self, swap_chain: SwapchainKHR, local_dimming_enable: bool) {
        // Translate the Rust bool into a Vulkan `Bool32` before the FFI call.
        let enable = if local_dimming_enable { TRUE } else { FALSE };
        (self.commands().set_local_dimming_amd)(self.handle(), swap_chain, enable);
    }
}
impl AmdDisplayNativeHdrExtension for crate::Device {}
pub trait AmdDrawIndirectCountExtension: DeviceV1_0 {
    /// Indexed indirect draw whose draw count is read from a buffer
    /// (`vkCmdDrawIndexedIndirectCountAMD`).
    #[inline]
    fn cmd_draw_indexed_indirect_count_amd(
        &self,
        command_buffer: CommandBuffer,
        buffer: Buffer,
        offset: DeviceSize,
        count_buffer: Buffer,
        count_buffer_offset: DeviceSize,
        max_draw_count: u32,
        stride: u32,
    ) {
        // Fire-and-forget command recording; there is no result to check.
        (self.commands().cmd_draw_indexed_indirect_count_amd)(
            command_buffer,
            buffer,
            offset,
            count_buffer,
            count_buffer_offset,
            max_draw_count,
            stride,
        );
    }

    /// Non-indexed indirect draw whose draw count is read from a buffer
    /// (`vkCmdDrawIndirectCountAMD`).
    #[inline]
    fn cmd_draw_indirect_count_amd(
        &self,
        command_buffer: CommandBuffer,
        buffer: Buffer,
        offset: DeviceSize,
        count_buffer: Buffer,
        count_buffer_offset: DeviceSize,
        max_draw_count: u32,
        stride: u32,
    ) {
        (self.commands().cmd_draw_indirect_count_amd)(
            command_buffer,
            buffer,
            offset,
            count_buffer,
            count_buffer_offset,
            max_draw_count,
            stride,
        );
    }
}
impl AmdDrawIndirectCountExtension for crate::Device {}
/// Marker trait for the `AMD_gcn_shader` extension (no commands).
pub trait AmdGcnShaderExtension: DeviceV1_0 {}
impl AmdGcnShaderExtension for crate::Device {}
/// Marker trait for the `AMD_gpu_shader_half_float` extension (no commands).
#[deprecated(note = "deprecated in favor of `VK_KHR_shader_float16_int8`")]
pub trait AmdGpuShaderHalfFloatExtension: DeviceV1_0 {}
#[allow(deprecated)]
impl AmdGpuShaderHalfFloatExtension for crate::Device {}
/// Marker trait for the `AMD_gpu_shader_int16` extension (no commands).
#[deprecated(note = "deprecated in favor of `VK_KHR_shader_float16_int8`")]
pub trait AmdGpuShaderInt16Extension: DeviceV1_0 {}
#[allow(deprecated)]
impl AmdGpuShaderInt16Extension for crate::Device {}
/// Marker trait for the `AMD_memory_overallocation_behavior` extension (no commands).
pub trait AmdMemoryOverallocationBehaviorExtension: DeviceV1_0 {}
impl AmdMemoryOverallocationBehaviorExtension for crate::Device {}
/// Marker trait for the `AMD_mixed_attachment_samples` extension (no commands).
pub trait AmdMixedAttachmentSamplesExtension: DeviceV1_0 {}
impl AmdMixedAttachmentSamplesExtension for crate::Device {}
/// Marker trait for the `AMD_negative_viewport_height` extension (no commands).
pub trait AmdNegativeViewportHeightExtension: DeviceV1_0 {}
impl AmdNegativeViewportHeightExtension for crate::Device {}
/// Marker trait for the `AMD_pipeline_compiler_control` extension (no commands).
pub trait AmdPipelineCompilerControlExtension: DeviceV1_0 {}
impl AmdPipelineCompilerControlExtension for crate::Device {}
/// Marker trait for the `AMD_rasterization_order` extension (no commands).
pub trait AmdRasterizationOrderExtension: DeviceV1_0 {}
impl AmdRasterizationOrderExtension for crate::Device {}
/// Marker trait for the `AMD_shader_ballot` extension (no commands).
pub trait AmdShaderBallotExtension: DeviceV1_0 {}
impl AmdShaderBallotExtension for crate::Device {}
/// Marker trait for the `AMD_shader_core_properties` extension (no commands).
pub trait AmdShaderCorePropertiesExtension: DeviceV1_0 {}
impl AmdShaderCorePropertiesExtension for crate::Device {}
/// Marker trait for the `AMD_shader_core_properties2` extension (no commands).
pub trait AmdShaderCoreProperties2Extension: DeviceV1_0 {}
impl AmdShaderCoreProperties2Extension for crate::Device {}
/// Marker trait for the `AMD_shader_explicit_vertex_parameter` extension (no commands).
pub trait AmdShaderExplicitVertexParameterExtension: DeviceV1_0 {}
impl AmdShaderExplicitVertexParameterExtension for crate::Device {}
/// Marker trait for the `AMD_shader_fragment_mask` extension (no commands).
pub trait AmdShaderFragmentMaskExtension: DeviceV1_0 {}
impl AmdShaderFragmentMaskExtension for crate::Device {}
/// Marker trait for the `AMD_shader_image_load_store_lod` extension (no commands).
pub trait AmdShaderImageLoadStoreLodExtension: DeviceV1_0 {}
impl AmdShaderImageLoadStoreLodExtension for crate::Device {}
pub trait AmdShaderInfoExtension: DeviceV1_0 {
    /// Fetches information for a pipeline's shader stage
    /// (`vkGetShaderInfoAMD`) using the Vulkan two-call pattern: first query
    /// the required size, then fill a buffer of that size.
    #[inline]
    fn get_shader_info_amd(
        &self,
        pipeline: Pipeline,
        shader_stage: ShaderStageFlags,
        info_type: ShaderInfoTypeAMD,
    ) -> crate::VkResult<Vec<c_void>> {
        // First call: null output pointer, so only `info_size` is written.
        let mut info_size = 0;
        (self.commands().get_shader_info_amd)(
            self.handle(),
            pipeline,
            shader_stage,
            info_type,
            &mut info_size,
            ptr::null_mut(),
        );
        let mut info = Vec::with_capacity(info_size as usize);
        // Second call: fills the freshly allocated buffer.
        let __result = (self.commands().get_shader_info_amd)(
            self.handle(),
            pipeline,
            shader_stage,
            info_type,
            &mut info_size,
            info.as_mut_ptr(),
        );
        debug_assert!(info.capacity() == info_size as usize);
        // NOTE(review): the length is set even when `__result` is an error,
        // in which case the buffer contents may be uninitialized — this
        // follows the code generator's convention used throughout this file;
        // confirm before changing.
        unsafe { info.set_len(info_size as usize) };
        if __result == Result::SUCCESS {
            Ok(info)
        } else {
            Err(__result)
        }
    }
}
impl AmdShaderInfoExtension for crate::Device {}
/// Marker trait for the `AMD_shader_trinary_minmax` extension (no commands).
pub trait AmdShaderTrinaryMinmaxExtension: DeviceV1_0 {}
impl AmdShaderTrinaryMinmaxExtension for crate::Device {}
/// Marker trait for the `AMD_texture_gather_bias_lod` extension (no commands).
pub trait AmdTextureGatherBiasLodExtension: DeviceV1_0 {}
impl AmdTextureGatherBiasLodExtension for crate::Device {}
pub trait AndroidExternalMemoryAndroidHardwareBufferExtension: DeviceV1_0 {
    /// Queries memory properties of an Android hardware buffer
    /// (`vkGetAndroidHardwareBufferPropertiesANDROID`).
    #[inline]
    fn get_android_hardware_buffer_properties_android(
        &self,
        buffer: &AHardwareBuffer,
    ) -> crate::VkResult<AndroidHardwareBufferPropertiesANDROID> {
        let mut properties = MaybeUninit::<AndroidHardwareBufferPropertiesANDROID>::uninit();
        let code = (self
            .commands()
            .get_android_hardware_buffer_properties_android)(
            self.handle(),
            buffer,
            properties.as_mut_ptr(),
        );
        if code != Result::SUCCESS {
            return Err(code);
        }
        // SAFETY: on success the driver is expected to have initialized this.
        Ok(unsafe { properties.assume_init() })
    }

    /// Exports device memory as an Android hardware buffer
    /// (`vkGetMemoryAndroidHardwareBufferANDROID`).
    #[inline]
    fn get_memory_android_hardware_buffer_android(
        &self,
        info: &MemoryGetAndroidHardwareBufferInfoANDROID,
    ) -> crate::VkResult<*mut AHardwareBuffer> {
        let mut buffer = MaybeUninit::<*mut AHardwareBuffer>::uninit();
        let code = (self.commands().get_memory_android_hardware_buffer_android)(
            self.handle(),
            info,
            buffer.as_mut_ptr(),
        );
        if code != Result::SUCCESS {
            return Err(code);
        }
        // SAFETY: on success the driver is expected to have written the pointer.
        Ok(unsafe { buffer.assume_init() })
    }
}
impl AndroidExternalMemoryAndroidHardwareBufferExtension for crate::Device {}
/// Marker trait for the `EXT_4444_formats` extension (no commands).
pub trait Ext4444FormatsExtension: DeviceV1_0 {}
impl Ext4444FormatsExtension for crate::Device {}
pub trait ExtAcquireXlibDisplayExtension: InstanceV1_0 {
    /// Acquires exclusive control of a display from an X11 server
    /// (`vkAcquireXlibDisplayEXT`).
    #[inline]
    fn acquire_xlib_display_ext(
        &self,
        physical_device: PhysicalDevice,
        display: DisplayKHR,
    ) -> crate::VkResult<Display> {
        let mut dpy = MaybeUninit::<Display>::uninit();
        // Note the argument order: the X11 `Display` out-pointer sits between
        // the physical device and the Vulkan display handle.
        let code =
            (self.commands().acquire_xlib_display_ext)(physical_device, dpy.as_mut_ptr(), display);
        if code != Result::SUCCESS {
            return Err(code);
        }
        // SAFETY: on success the driver is expected to have initialized `dpy`.
        Ok(unsafe { dpy.assume_init() })
    }

    /// Finds the Vulkan display corresponding to an X11 RandR output
    /// (`vkGetRandROutputDisplayEXT`).
    #[inline]
    fn get_rand_r_output_display_ext(
        &self,
        physical_device: PhysicalDevice,
        rr_output: RROutput,
    ) -> crate::VkResult<(Display, DisplayKHR)> {
        let mut dpy = MaybeUninit::<Display>::uninit();
        let mut display = MaybeUninit::<DisplayKHR>::uninit();
        let code = (self.commands().get_rand_r_output_display_ext)(
            physical_device,
            dpy.as_mut_ptr(),
            rr_output,
            display.as_mut_ptr(),
        );
        if code != Result::SUCCESS {
            return Err(code);
        }
        // SAFETY: on success the driver is expected to have written both outputs.
        Ok((unsafe { dpy.assume_init() }, unsafe { display.assume_init() }))
    }
}
impl ExtAcquireXlibDisplayExtension for crate::Instance {}
/// Marker trait for the `EXT_astc_decode_mode` extension (no commands).
pub trait ExtAstcDecodeModeExtension: DeviceV1_0 {}
impl ExtAstcDecodeModeExtension for crate::Device {}
/// Marker trait for the `EXT_blend_operation_advanced` extension (no commands).
pub trait ExtBlendOperationAdvancedExtension: DeviceV1_0 {}
impl ExtBlendOperationAdvancedExtension for crate::Device {}
#[deprecated(note = "deprecated in favor of `VK_KHR_buffer_device_address`")]
pub trait ExtBufferDeviceAddressExtension: DeviceV1_0 {
#[inline]
fn get_buffer_device_address_ext(&self, info: &BufferDeviceAddressInfo) {
let __result = (self.commands().get_buffer_device_address_ext)(self.handle(), info);
}
}
#[allow(deprecated)]
impl ExtBufferDeviceAddressExtension for crate::Device {}
pub trait ExtCalibratedTimestampsExtension: DeviceV1_0 {
    /// Reads calibrated timestamps from several time domains in one call
    /// (`vkGetCalibratedTimestampsEXT`), returning one timestamp per entry in
    /// `timestamp_infos` together with the maximum deviation value.
    #[inline]
    fn get_calibrated_timestamps_ext(
        &self,
        timestamp_infos: &[impl Cast<Target = CalibratedTimestampInfoEXT>],
    ) -> crate::VkResult<(Vec<u64>, u64)> {
        // One output slot per requested timestamp info.
        let mut timestamps = Vec::with_capacity(timestamp_infos.len() as usize);
        let mut max_deviation = MaybeUninit::<u64>::uninit();
        let __result = (self.commands().get_calibrated_timestamps_ext)(
            self.handle(),
            timestamp_infos.len() as u32,
            timestamp_infos.as_ptr().cast(),
            timestamps.as_mut_ptr(),
            max_deviation.as_mut_ptr(),
        );
        debug_assert!(timestamps.capacity() == timestamp_infos.len() as usize);
        // NOTE(review): the length is set before `__result` is inspected,
        // matching the generator's convention used throughout this file.
        unsafe { timestamps.set_len(timestamp_infos.len() as usize) };
        if __result == Result::SUCCESS {
            Ok((timestamps, unsafe { max_deviation.assume_init() }))
        } else {
            Err(__result)
        }
    }
    /// Lists the time domains a physical device supports for calibration
    /// (`vkGetPhysicalDeviceCalibrateableTimeDomainsEXT`) using the Vulkan
    /// two-call pattern: first query the count, then fill the buffer.
    #[inline]
    fn get_physical_device_calibrateable_time_domains_ext(
        &self,
        physical_device: PhysicalDevice,
    ) -> crate::VkResult<Vec<TimeDomainEXT>> {
        // First call: null output pointer, only the count is written.
        let mut time_domain_count = 0;
        (self
            .commands()
            .get_physical_device_calibrateable_time_domains_ext)(
            physical_device,
            &mut time_domain_count,
            ptr::null_mut(),
        );
        let mut time_domains = Vec::with_capacity(time_domain_count as usize);
        // Second call: fills the freshly allocated buffer.
        let __result = (self
            .commands()
            .get_physical_device_calibrateable_time_domains_ext)(
            physical_device,
            &mut time_domain_count,
            time_domains.as_mut_ptr(),
        );
        debug_assert!(time_domains.capacity() == time_domain_count as usize);
        unsafe { time_domains.set_len(time_domain_count as usize) };
        if __result == Result::SUCCESS {
            Ok(time_domains)
        } else {
            Err(__result)
        }
    }
}
impl ExtCalibratedTimestampsExtension for crate::Device {}
pub trait ExtConditionalRenderingExtension: DeviceV1_0 {
    /// Begins a conditional rendering block
    /// (`vkCmdBeginConditionalRenderingEXT`).
    #[inline]
    fn cmd_begin_conditional_rendering_ext(
        &self,
        command_buffer: CommandBuffer,
        conditional_rendering_begin: &ConditionalRenderingBeginInfoEXT,
    ) {
        // Fire-and-forget command recording; there is no result to check.
        (self.commands().cmd_begin_conditional_rendering_ext)(
            command_buffer,
            conditional_rendering_begin,
        );
    }

    /// Ends the current conditional rendering block
    /// (`vkCmdEndConditionalRenderingEXT`).
    #[inline]
    fn cmd_end_conditional_rendering_ext(&self, command_buffer: CommandBuffer) {
        (self.commands().cmd_end_conditional_rendering_ext)(command_buffer);
    }
}
impl ExtConditionalRenderingExtension for crate::Device {}
/// Marker trait for the `EXT_conservative_rasterization` extension (no commands).
pub trait ExtConservativeRasterizationExtension: DeviceV1_0 {}
impl ExtConservativeRasterizationExtension for crate::Device {}
/// Marker trait for the `EXT_custom_border_color` extension (no commands).
pub trait ExtCustomBorderColorExtension: DeviceV1_0 {}
impl ExtCustomBorderColorExtension for crate::Device {}
pub trait ExtDebugMarkerExtension: DeviceV1_0 {
    /// Opens a debug marker region (`vkCmdDebugMarkerBeginEXT`).
    #[inline]
    fn cmd_debug_marker_begin_ext(
        &self,
        command_buffer: CommandBuffer,
        marker_info: &DebugMarkerMarkerInfoEXT,
    ) {
        (self.commands().cmd_debug_marker_begin_ext)(command_buffer, marker_info);
    }

    /// Closes the most recently opened debug marker region
    /// (`vkCmdDebugMarkerEndEXT`).
    #[inline]
    fn cmd_debug_marker_end_ext(&self, command_buffer: CommandBuffer) {
        (self.commands().cmd_debug_marker_end_ext)(command_buffer);
    }

    /// Inserts a single debug marker (`vkCmdDebugMarkerInsertEXT`).
    #[inline]
    fn cmd_debug_marker_insert_ext(
        &self,
        command_buffer: CommandBuffer,
        marker_info: &DebugMarkerMarkerInfoEXT,
    ) {
        (self.commands().cmd_debug_marker_insert_ext)(command_buffer, marker_info);
    }

    /// Names an object for debugging tools
    /// (`vkDebugMarkerSetObjectNameEXT`).
    #[inline]
    fn debug_marker_set_object_name_ext(
        &self,
        name_info: &DebugMarkerObjectNameInfoEXT,
    ) -> crate::VkResult<()> {
        let code = (self.commands().debug_marker_set_object_name_ext)(self.handle(), name_info);
        if code != Result::SUCCESS {
            return Err(code);
        }
        Ok(())
    }

    /// Attaches a tag blob to an object (`vkDebugMarkerSetObjectTagEXT`).
    #[inline]
    fn debug_marker_set_object_tag_ext(
        &self,
        tag_info: &DebugMarkerObjectTagInfoEXT,
    ) -> crate::VkResult<()> {
        let code = (self.commands().debug_marker_set_object_tag_ext)(self.handle(), tag_info);
        if code != Result::SUCCESS {
            return Err(code);
        }
        Ok(())
    }
}
impl ExtDebugMarkerExtension for crate::Device {}
#[deprecated(note = "deprecated in favor of `VK_EXT_debug_utils`")]
pub trait ExtDebugReportExtension: InstanceV1_0 {
    /// Registers a debug report callback
    /// (`vkCreateDebugReportCallbackEXT`).
    #[inline]
    fn create_debug_report_callback_ext(
        &self,
        create_info: &DebugReportCallbackCreateInfoEXT,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<DebugReportCallbackEXT> {
        let mut callback = MaybeUninit::<DebugReportCallbackEXT>::uninit();
        let code = (self.commands().create_debug_report_callback_ext)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            callback.as_mut_ptr(),
        );
        if code != Result::SUCCESS {
            return Err(code);
        }
        // SAFETY: on success the driver is expected to have written the handle.
        Ok(unsafe { callback.assume_init() })
    }

    /// Injects a message into the debug report stream
    /// (`vkDebugReportMessageEXT`). `layer_prefix` and `message` are passed
    /// through as raw byte pointers — presumably they must be NUL-terminated;
    /// confirm against callers.
    #[inline]
    fn debug_report_message_ext(
        &self,
        flags: DebugReportFlagsEXT,
        object_type: DebugReportObjectTypeEXT,
        object: u64,
        location: usize,
        message_code: i32,
        layer_prefix: &[u8],
        message: &[u8],
    ) {
        (self.commands().debug_report_message_ext)(
            self.handle(),
            flags,
            object_type,
            object,
            location,
            message_code,
            layer_prefix.as_ptr().cast(),
            message.as_ptr().cast(),
        );
    }

    /// Unregisters a debug report callback
    /// (`vkDestroyDebugReportCallbackEXT`).
    #[inline]
    fn destroy_debug_report_callback_ext(
        &self,
        callback: DebugReportCallbackEXT,
        allocator: Option<&AllocationCallbacks>,
    ) {
        (self.commands().destroy_debug_report_callback_ext)(
            self.handle(),
            callback,
            allocator.map_or(ptr::null(), |v| v),
        );
    }
}
#[allow(deprecated)]
impl ExtDebugReportExtension for crate::Instance {}
pub trait ExtDebugUtilsExtension: InstanceV1_0 {
    /// Opens a debug label region in a command buffer
    /// (`vkCmdBeginDebugUtilsLabelEXT`).
    #[inline]
    fn cmd_begin_debug_utils_label_ext(
        &self,
        command_buffer: CommandBuffer,
        label_info: &DebugUtilsLabelEXT,
    ) {
        (self.commands().cmd_begin_debug_utils_label_ext)(command_buffer, label_info);
    }

    /// Closes the most recently opened debug label region in a command buffer
    /// (`vkCmdEndDebugUtilsLabelEXT`).
    #[inline]
    fn cmd_end_debug_utils_label_ext(&self, command_buffer: CommandBuffer) {
        (self.commands().cmd_end_debug_utils_label_ext)(command_buffer);
    }

    /// Inserts a single debug label into a command buffer
    /// (`vkCmdInsertDebugUtilsLabelEXT`).
    #[inline]
    fn cmd_insert_debug_utils_label_ext(
        &self,
        command_buffer: CommandBuffer,
        label_info: &DebugUtilsLabelEXT,
    ) {
        (self.commands().cmd_insert_debug_utils_label_ext)(command_buffer, label_info);
    }

    /// Creates a debug messenger (`vkCreateDebugUtilsMessengerEXT`).
    #[inline]
    fn create_debug_utils_messenger_ext(
        &self,
        create_info: &DebugUtilsMessengerCreateInfoEXT,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<DebugUtilsMessengerEXT> {
        let mut messenger = MaybeUninit::<DebugUtilsMessengerEXT>::uninit();
        let code = (self.commands().create_debug_utils_messenger_ext)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            messenger.as_mut_ptr(),
        );
        if code != Result::SUCCESS {
            return Err(code);
        }
        // SAFETY: on success the driver is expected to have written the handle.
        Ok(unsafe { messenger.assume_init() })
    }

    /// Destroys a debug messenger (`vkDestroyDebugUtilsMessengerEXT`).
    #[inline]
    fn destroy_debug_utils_messenger_ext(
        &self,
        messenger: DebugUtilsMessengerEXT,
        allocator: Option<&AllocationCallbacks>,
    ) {
        (self.commands().destroy_debug_utils_messenger_ext)(
            self.handle(),
            messenger,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    /// Opens a debug label region on a queue
    /// (`vkQueueBeginDebugUtilsLabelEXT`).
    #[inline]
    fn queue_begin_debug_utils_label_ext(&self, queue: Queue, label_info: &DebugUtilsLabelEXT) {
        (self.commands().queue_begin_debug_utils_label_ext)(queue, label_info);
    }

    /// Closes the most recently opened debug label region on a queue
    /// (`vkQueueEndDebugUtilsLabelEXT`).
    #[inline]
    fn queue_end_debug_utils_label_ext(&self, queue: Queue) {
        (self.commands().queue_end_debug_utils_label_ext)(queue);
    }

    /// Inserts a single debug label on a queue
    /// (`vkQueueInsertDebugUtilsLabelEXT`).
    #[inline]
    fn queue_insert_debug_utils_label_ext(&self, queue: Queue, label_info: &DebugUtilsLabelEXT) {
        (self.commands().queue_insert_debug_utils_label_ext)(queue, label_info);
    }

    /// Assigns a human-readable name to an object
    /// (`vkSetDebugUtilsObjectNameEXT`).
    #[inline]
    fn set_debug_utils_object_name_ext(
        &self,
        device: Device,
        name_info: &DebugUtilsObjectNameInfoEXT,
    ) -> crate::VkResult<()> {
        let code = (self.commands().set_debug_utils_object_name_ext)(device, name_info);
        if code != Result::SUCCESS {
            return Err(code);
        }
        Ok(())
    }

    /// Attaches an arbitrary tag blob to an object
    /// (`vkSetDebugUtilsObjectTagEXT`).
    #[inline]
    fn set_debug_utils_object_tag_ext(
        &self,
        device: Device,
        tag_info: &DebugUtilsObjectTagInfoEXT,
    ) -> crate::VkResult<()> {
        let code = (self.commands().set_debug_utils_object_tag_ext)(device, tag_info);
        if code != Result::SUCCESS {
            return Err(code);
        }
        Ok(())
    }

    /// Injects a message into the debug messenger stream
    /// (`vkSubmitDebugUtilsMessageEXT`).
    #[inline]
    fn submit_debug_utils_message_ext(
        &self,
        message_severity: DebugUtilsMessageSeverityFlagsEXT,
        message_types: DebugUtilsMessageTypeFlagsEXT,
        callback_data: &DebugUtilsMessengerCallbackDataEXT,
    ) {
        (self.commands().submit_debug_utils_message_ext)(
            self.handle(),
            message_severity,
            message_types,
            callback_data,
        );
    }
}
impl ExtDebugUtilsExtension for crate::Instance {}
/// Marker trait for the `EXT_depth_clip_enable` extension (no commands).
pub trait ExtDepthClipEnableExtension: DeviceV1_0 {}
impl ExtDepthClipEnableExtension for crate::Device {}
/// Marker trait for the `EXT_depth_range_unrestricted` extension (no commands).
pub trait ExtDepthRangeUnrestrictedExtension: DeviceV1_0 {}
impl ExtDepthRangeUnrestrictedExtension for crate::Device {}
/// Marker trait for the `EXT_descriptor_indexing` extension (no commands).
pub trait ExtDescriptorIndexingExtension: DeviceV1_0 {}
impl ExtDescriptorIndexingExtension for crate::Device {}
/// Marker trait for the `EXT_device_memory_report` extension (no commands).
pub trait ExtDeviceMemoryReportExtension: DeviceV1_0 {}
impl ExtDeviceMemoryReportExtension for crate::Device {}
pub trait ExtDirectModeDisplayExtension: InstanceV1_0 {
    /// Releases a display previously acquired by the application
    /// (`vkReleaseDisplayEXT`).
    #[inline]
    fn release_display_ext(
        &self,
        physical_device: PhysicalDevice,
        display: DisplayKHR,
    ) -> crate::VkResult<()> {
        let code = (self.commands().release_display_ext)(physical_device, display);
        if code != Result::SUCCESS {
            return Err(code);
        }
        Ok(())
    }
}
impl ExtDirectModeDisplayExtension for crate::Instance {}
pub trait ExtDirectfbSurfaceExtension: InstanceV1_0 {
    /// Creates a DirectFB rendering surface (`vkCreateDirectFBSurfaceEXT`).
    #[inline]
    fn create_direct_fb_surface_ext(
        &self,
        create_info: &DirectFBSurfaceCreateInfoEXT,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<SurfaceKHR> {
        let mut surface = MaybeUninit::<SurfaceKHR>::uninit();
        let __result = (self.commands().create_direct_fb_surface_ext)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            surface.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { surface.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Wraps `vkGetPhysicalDeviceDirectFBPresentationSupportEXT`.
    ///
    /// NOTE(review): this generated wrapper treats the `IDirectFB` pointer as
    /// an output parameter and returns it, while discarding the command's own
    /// return value — confirm this matches the code generator's intent before
    /// relying on the returned value being initialized by the driver.
    #[inline]
    fn get_physical_device_direct_fb_presentation_support_ext<T_IDirectFB>(
        &self,
        physical_device: PhysicalDevice,
        queue_family_index: u32,
    ) -> T_IDirectFB {
        let mut dfb = MaybeUninit::<T_IDirectFB>::uninit();
        let __result = (self
            .commands()
            .get_physical_device_direct_fb_presentation_support_ext)(
            physical_device,
            queue_family_index,
            dfb.as_mut_ptr().cast::<c_void>(),
        );
        unsafe { dfb.assume_init() }
    }
}
impl ExtDirectfbSurfaceExtension for crate::Instance {}
pub trait ExtDiscardRectanglesExtension: DeviceV1_0 {
    /// Sets the discard rectangles dynamically
    /// (`vkCmdSetDiscardRectangleEXT`).
    #[inline]
    fn cmd_set_discard_rectangle_ext(
        &self,
        command_buffer: CommandBuffer,
        first_discard_rectangle: u32,
        discard_rectangles: &[impl Cast<Target = Rect2D>],
    ) {
        // The rectangle count is derived from the slice length.
        (self.commands().cmd_set_discard_rectangle_ext)(
            command_buffer,
            first_discard_rectangle,
            discard_rectangles.len() as u32,
            discard_rectangles.as_ptr().cast(),
        );
    }
}
impl ExtDiscardRectanglesExtension for crate::Device {}
pub trait ExtDisplayControlExtension: DeviceV1_0 {
    /// Changes the power state of a display (`vkDisplayPowerControlEXT`).
    #[inline]
    fn display_power_control_ext(
        &self,
        display: DisplayKHR,
        display_power_info: &DisplayPowerInfoEXT,
    ) -> crate::VkResult<()> {
        let code =
            (self.commands().display_power_control_ext)(self.handle(), display, display_power_info);
        if code != Result::SUCCESS {
            return Err(code);
        }
        Ok(())
    }

    /// Reads the current value of a swapchain surface counter
    /// (`vkGetSwapchainCounterEXT`).
    #[inline]
    fn get_swapchain_counter_ext(
        &self,
        swapchain: SwapchainKHR,
        counter: SurfaceCounterFlagsEXT,
    ) -> crate::VkResult<u64> {
        let mut counter_value = MaybeUninit::<u64>::uninit();
        let code = (self.commands().get_swapchain_counter_ext)(
            self.handle(),
            swapchain,
            counter,
            counter_value.as_mut_ptr(),
        );
        if code != Result::SUCCESS {
            return Err(code);
        }
        // SAFETY: on success the driver is expected to have written the value.
        Ok(unsafe { counter_value.assume_init() })
    }

    /// Creates a fence signalled by a device event
    /// (`vkRegisterDeviceEventEXT`).
    #[inline]
    fn register_device_event_ext(
        &self,
        device_event_info: &DeviceEventInfoEXT,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<Fence> {
        let mut fence = MaybeUninit::<Fence>::uninit();
        let code = (self.commands().register_device_event_ext)(
            self.handle(),
            device_event_info,
            allocator.map_or(ptr::null(), |v| v),
            fence.as_mut_ptr(),
        );
        if code != Result::SUCCESS {
            return Err(code);
        }
        // SAFETY: on success the driver is expected to have written the handle.
        Ok(unsafe { fence.assume_init() })
    }

    /// Creates a fence signalled by a display event
    /// (`vkRegisterDisplayEventEXT`).
    #[inline]
    fn register_display_event_ext(
        &self,
        display: DisplayKHR,
        display_event_info: &DisplayEventInfoEXT,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<Fence> {
        let mut fence = MaybeUninit::<Fence>::uninit();
        let code = (self.commands().register_display_event_ext)(
            self.handle(),
            display,
            display_event_info,
            allocator.map_or(ptr::null(), |v| v),
            fence.as_mut_ptr(),
        );
        if code != Result::SUCCESS {
            return Err(code);
        }
        // SAFETY: on success the driver is expected to have written the handle.
        Ok(unsafe { fence.assume_init() })
    }
}
impl ExtDisplayControlExtension for crate::Device {}
pub trait ExtDisplaySurfaceCounterExtension: InstanceV1_0 {
    /// Queries extended surface capabilities
    /// (`vkGetPhysicalDeviceSurfaceCapabilities2EXT`).
    #[inline]
    fn get_physical_device_surface_capabilities2_ext(
        &self,
        physical_device: PhysicalDevice,
        surface: SurfaceKHR,
    ) -> crate::VkResult<SurfaceCapabilities2EXT> {
        let mut surface_capabilities = MaybeUninit::<SurfaceCapabilities2EXT>::uninit();
        let code = (self
            .commands()
            .get_physical_device_surface_capabilities2_ext)(
            physical_device,
            surface,
            surface_capabilities.as_mut_ptr(),
        );
        if code != Result::SUCCESS {
            return Err(code);
        }
        // SAFETY: on success the driver is expected to have initialized this.
        Ok(unsafe { surface_capabilities.assume_init() })
    }
}
impl ExtDisplaySurfaceCounterExtension for crate::Instance {}
pub trait ExtExtendedDynamicStateExtension: DeviceV1_0 {
    /// Binds vertex buffers with explicit sizes and strides
    /// (`vkCmdBindVertexBuffers2EXT`).
    #[inline]
    fn cmd_bind_vertex_buffers2_ext(
        &self,
        command_buffer: CommandBuffer,
        first_binding: u32,
        buffers: &[Buffer],
        offsets: &[DeviceSize],
        sizes: &[DeviceSize],
        strides: &[DeviceSize],
    ) {
        // The binding count is taken from `buffers`; the other slices are
        // presumably expected to be at least as long — confirm with callers.
        (self.commands().cmd_bind_vertex_buffers2_ext)(
            command_buffer,
            first_binding,
            buffers.len() as u32,
            buffers.as_ptr(),
            offsets.as_ptr(),
            sizes.as_ptr(),
            strides.as_ptr(),
        );
    }

    /// Sets the cull mode dynamically (`vkCmdSetCullModeEXT`).
    #[inline]
    fn cmd_set_cull_mode_ext(&self, command_buffer: CommandBuffer, cull_mode: CullModeFlags) {
        (self.commands().cmd_set_cull_mode_ext)(command_buffer, cull_mode);
    }

    /// Toggles the depth bounds test dynamically
    /// (`vkCmdSetDepthBoundsTestEnableEXT`).
    #[inline]
    fn cmd_set_depth_bounds_test_enable_ext(
        &self,
        command_buffer: CommandBuffer,
        depth_bounds_test_enable: bool,
    ) {
        // Translate the Rust bool into a Vulkan `Bool32`.
        let enable = if depth_bounds_test_enable { TRUE } else { FALSE };
        (self.commands().cmd_set_depth_bounds_test_enable_ext)(command_buffer, enable);
    }

    /// Sets the depth comparison operator dynamically
    /// (`vkCmdSetDepthCompareOpEXT`).
    #[inline]
    fn cmd_set_depth_compare_op_ext(
        &self,
        command_buffer: CommandBuffer,
        depth_compare_op: CompareOp,
    ) {
        (self.commands().cmd_set_depth_compare_op_ext)(command_buffer, depth_compare_op);
    }

    /// Toggles the depth test dynamically (`vkCmdSetDepthTestEnableEXT`).
    #[inline]
    fn cmd_set_depth_test_enable_ext(
        &self,
        command_buffer: CommandBuffer,
        depth_test_enable: bool,
    ) {
        let enable = if depth_test_enable { TRUE } else { FALSE };
        (self.commands().cmd_set_depth_test_enable_ext)(command_buffer, enable);
    }

    /// Toggles depth writes dynamically (`vkCmdSetDepthWriteEnableEXT`).
    #[inline]
    fn cmd_set_depth_write_enable_ext(
        &self,
        command_buffer: CommandBuffer,
        depth_write_enable: bool,
    ) {
        let enable = if depth_write_enable { TRUE } else { FALSE };
        (self.commands().cmd_set_depth_write_enable_ext)(command_buffer, enable);
    }

    /// Sets the front-face winding order dynamically
    /// (`vkCmdSetFrontFaceEXT`).
    #[inline]
    fn cmd_set_front_face_ext(&self, command_buffer: CommandBuffer, front_face: FrontFace) {
        (self.commands().cmd_set_front_face_ext)(command_buffer, front_face);
    }

    /// Sets the primitive topology dynamically
    /// (`vkCmdSetPrimitiveTopologyEXT`).
    #[inline]
    fn cmd_set_primitive_topology_ext(
        &self,
        command_buffer: CommandBuffer,
        primitive_topology: PrimitiveTopology,
    ) {
        (self.commands().cmd_set_primitive_topology_ext)(command_buffer, primitive_topology);
    }

    /// Sets the scissor rectangles, including their count, dynamically
    /// (`vkCmdSetScissorWithCountEXT`).
    #[inline]
    fn cmd_set_scissor_with_count_ext(
        &self,
        command_buffer: CommandBuffer,
        scissors: &[impl Cast<Target = Rect2D>],
    ) {
        (self.commands().cmd_set_scissor_with_count_ext)(
            command_buffer,
            scissors.len() as u32,
            scissors.as_ptr().cast(),
        );
    }

    /// Sets the stencil operations dynamically (`vkCmdSetStencilOpEXT`).
    #[inline]
    fn cmd_set_stencil_op_ext(
        &self,
        command_buffer: CommandBuffer,
        face_mask: StencilFaceFlags,
        fail_op: StencilOp,
        pass_op: StencilOp,
        depth_fail_op: StencilOp,
        compare_op: CompareOp,
    ) {
        (self.commands().cmd_set_stencil_op_ext)(
            command_buffer,
            face_mask,
            fail_op,
            pass_op,
            depth_fail_op,
            compare_op,
        );
    }

    /// Toggles the stencil test dynamically
    /// (`vkCmdSetStencilTestEnableEXT`).
    #[inline]
    fn cmd_set_stencil_test_enable_ext(
        &self,
        command_buffer: CommandBuffer,
        stencil_test_enable: bool,
    ) {
        let enable = if stencil_test_enable { TRUE } else { FALSE };
        (self.commands().cmd_set_stencil_test_enable_ext)(command_buffer, enable);
    }

    /// Sets the viewports, including their count, dynamically
    /// (`vkCmdSetViewportWithCountEXT`).
    #[inline]
    fn cmd_set_viewport_with_count_ext(
        &self,
        command_buffer: CommandBuffer,
        viewports: &[impl Cast<Target = Viewport>],
    ) {
        (self.commands().cmd_set_viewport_with_count_ext)(
            command_buffer,
            viewports.len() as u32,
            viewports.as_ptr().cast(),
        );
    }
}
impl ExtExtendedDynamicStateExtension for crate::Device {}
/// Marker trait for the `EXT_external_memory_dma_buf` extension (no commands).
pub trait ExtExternalMemoryDmaBufExtension: DeviceV1_0 {}
impl ExtExternalMemoryDmaBufExtension for crate::Device {}
pub trait ExtExternalMemoryHostExtension: DeviceV1_0 {
    /// Queries memory properties of an externally allocated host pointer
    /// (`vkGetMemoryHostPointerPropertiesEXT`).
    #[inline]
    fn get_memory_host_pointer_properties_ext(
        &self,
        handle_type: ExternalMemoryHandleTypeFlags,
        host_pointer: &c_void,
    ) -> crate::VkResult<MemoryHostPointerPropertiesEXT> {
        let mut memory_host_pointer_properties =
            MaybeUninit::<MemoryHostPointerPropertiesEXT>::uninit();
        let code = (self.commands().get_memory_host_pointer_properties_ext)(
            self.handle(),
            handle_type,
            host_pointer,
            memory_host_pointer_properties.as_mut_ptr(),
        );
        if code != Result::SUCCESS {
            return Err(code);
        }
        // SAFETY: on success the driver is expected to have initialized this.
        Ok(unsafe { memory_host_pointer_properties.assume_init() })
    }
}
impl ExtExternalMemoryHostExtension for crate::Device {}
/// Marker trait for the `EXT_filter_cubic` extension (no commands).
pub trait ExtFilterCubicExtension: DeviceV1_0 {}
impl ExtFilterCubicExtension for crate::Device {}
/// Marker trait for the `EXT_fragment_density_map` extension (no commands).
pub trait ExtFragmentDensityMapExtension: DeviceV1_0 {}
impl ExtFragmentDensityMapExtension for crate::Device {}
/// Marker trait for the `EXT_fragment_density_map2` extension (no commands).
pub trait ExtFragmentDensityMap2Extension: DeviceV1_0 {}
impl ExtFragmentDensityMap2Extension for crate::Device {}
/// Marker trait for the `EXT_fragment_shader_interlock` extension (no commands).
pub trait ExtFragmentShaderInterlockExtension: DeviceV1_0 {}
impl ExtFragmentShaderInterlockExtension for crate::Device {}
pub trait ExtFullScreenExclusiveExtension: DeviceV1_0 {
    /// Acquires exclusive full-screen access for a swapchain
    /// (`vkAcquireFullScreenExclusiveModeEXT`).
    #[inline]
    fn acquire_full_screen_exclusive_mode_ext(
        &self,
        swapchain: SwapchainKHR,
    ) -> crate::VkResult<()> {
        let __result =
            (self.commands().acquire_full_screen_exclusive_mode_ext)(self.handle(), swapchain);
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
    /// Queries device-group present modes for a surface
    /// (`vkGetDeviceGroupSurfacePresentModes2EXT`).
    #[inline]
    fn get_device_group_surface_present_modes2_ext(
        &self,
        surface_info: &PhysicalDeviceSurfaceInfo2KHR,
    ) -> crate::VkResult<DeviceGroupPresentModeFlagsKHR> {
        let mut modes = MaybeUninit::<DeviceGroupPresentModeFlagsKHR>::uninit();
        let __result = (self.commands().get_device_group_surface_present_modes2_ext)(
            self.handle(),
            surface_info,
            modes.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { modes.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Lists present modes for a surface
    /// (`vkGetPhysicalDeviceSurfacePresentModes2EXT`) using the Vulkan
    /// two-call pattern: first query the count, then fill the buffer.
    #[inline]
    fn get_physical_device_surface_present_modes2_ext(
        &self,
        physical_device: PhysicalDevice,
        surface_info: &PhysicalDeviceSurfaceInfo2KHR,
    ) -> crate::VkResult<Vec<PresentModeKHR>> {
        // First call: null output pointer, only the count is written.
        let mut present_mode_count = 0;
        (self
            .commands()
            .get_physical_device_surface_present_modes2_ext)(
            physical_device,
            surface_info,
            &mut present_mode_count,
            ptr::null_mut(),
        );
        let mut present_modes = Vec::with_capacity(present_mode_count as usize);
        // Second call: fills the freshly allocated buffer.
        let __result = (self
            .commands()
            .get_physical_device_surface_present_modes2_ext)(
            physical_device,
            surface_info,
            &mut present_mode_count,
            present_modes.as_mut_ptr(),
        );
        debug_assert!(present_modes.capacity() == present_mode_count as usize);
        // NOTE(review): the length is set before `__result` is inspected,
        // matching the generator's convention used throughout this file.
        unsafe { present_modes.set_len(present_mode_count as usize) };
        if __result == Result::SUCCESS {
            Ok(present_modes)
        } else {
            Err(__result)
        }
    }
    /// Releases previously acquired full-screen exclusive access
    /// (`vkReleaseFullScreenExclusiveModeEXT`).
    #[inline]
    fn release_full_screen_exclusive_mode_ext(
        &self,
        swapchain: SwapchainKHR,
    ) -> crate::VkResult<()> {
        let __result =
            (self.commands().release_full_screen_exclusive_mode_ext)(self.handle(), swapchain);
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
}
impl ExtFullScreenExclusiveExtension for crate::Device {}
/// Marker trait for the `EXT_global_priority` extension (no commands).
pub trait ExtGlobalPriorityExtension: DeviceV1_0 {}
impl ExtGlobalPriorityExtension for crate::Device {}
pub trait ExtHdrMetadataExtension: DeviceV1_0 {
    /// Supplies HDR metadata for a set of swapchains (`vkSetHdrMetadataEXT`).
    ///
    /// NOTE(review): the count is taken from `swapchains`; `metadata` is
    /// presumably expected to be at least as long — confirm with callers.
    #[inline]
    fn set_hdr_metadata_ext(
        &self,
        swapchains: &[SwapchainKHR],
        metadata: &[impl Cast<Target = HdrMetadataEXT>],
    ) {
        (self.commands().set_hdr_metadata_ext)(
            self.handle(),
            swapchains.len() as u32,
            swapchains.as_ptr(),
            metadata.as_ptr().cast(),
        );
    }
}
impl ExtHdrMetadataExtension for crate::Device {}
pub trait ExtHeadlessSurfaceExtension: InstanceV1_0 {
    /// Creates a surface with no backing window system
    /// (`vkCreateHeadlessSurfaceEXT`).
    #[inline]
    fn create_headless_surface_ext(
        &self,
        create_info: &HeadlessSurfaceCreateInfoEXT,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<SurfaceKHR> {
        let mut surface = MaybeUninit::<SurfaceKHR>::uninit();
        let code = (self.commands().create_headless_surface_ext)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            surface.as_mut_ptr(),
        );
        if code != Result::SUCCESS {
            return Err(code);
        }
        // SAFETY: on success the driver is expected to have written the handle.
        Ok(unsafe { surface.assume_init() })
    }
}
impl ExtHeadlessSurfaceExtension for crate::Instance {}
pub trait ExtHostQueryResetExtension: DeviceV1_0 {
    /// Resets a range of queries from the host (`vkResetQueryPoolEXT`).
    #[inline]
    fn reset_query_pool_ext(&self, query_pool: QueryPool, first_query: u32, query_count: u32) {
        // Fire-and-forget; there is no result to check.
        (self.commands().reset_query_pool_ext)(
            self.handle(),
            query_pool,
            first_query,
            query_count,
        );
    }
}
impl ExtHostQueryResetExtension for crate::Device {}
pub trait ExtImageDrmFormatModifierExtension: DeviceV1_0 {
    /// Queries the DRM format modifier properties of an image
    /// (`vkGetImageDrmFormatModifierPropertiesEXT`).
    #[inline]
    fn get_image_drm_format_modifier_properties_ext(
        &self,
        image: Image,
    ) -> crate::VkResult<ImageDrmFormatModifierPropertiesEXT> {
        let mut properties = MaybeUninit::<ImageDrmFormatModifierPropertiesEXT>::uninit();
        let code = (self.commands().get_image_drm_format_modifier_properties_ext)(
            self.handle(),
            image,
            properties.as_mut_ptr(),
        );
        if code != Result::SUCCESS {
            return Err(code);
        }
        // SAFETY: on success the driver is expected to have initialized this.
        Ok(unsafe { properties.assume_init() })
    }
}
impl ExtImageDrmFormatModifierExtension for crate::Device {}
/// Marker trait for the `EXT_image_robustness` extension (no commands).
pub trait ExtImageRobustnessExtension: DeviceV1_0 {}
impl ExtImageRobustnessExtension for crate::Device {}
/// Marker trait for the `EXT_index_type_uint8` extension (no commands).
pub trait ExtIndexTypeUint8Extension: DeviceV1_0 {}
impl ExtIndexTypeUint8Extension for crate::Device {}
/// Marker trait for the `EXT_inline_uniform_block` extension (no commands).
pub trait ExtInlineUniformBlockExtension: DeviceV1_0 {}
impl ExtInlineUniformBlockExtension for crate::Device {}
pub trait ExtLineRasterizationExtension: DeviceV1_0 {
    /// Sets the line stipple factor and pattern dynamically
    /// (`vkCmdSetLineStippleEXT`).
    #[inline]
    fn cmd_set_line_stipple_ext(
        &self,
        command_buffer: CommandBuffer,
        line_stipple_factor: u32,
        line_stipple_pattern: u16,
    ) {
        (self.commands().cmd_set_line_stipple_ext)(
            command_buffer,
            line_stipple_factor,
            line_stipple_pattern,
        );
    }
}
impl ExtLineRasterizationExtension for crate::Device {}
/// Marker trait for the `EXT_memory_budget` extension (no commands).
pub trait ExtMemoryBudgetExtension: DeviceV1_0 {}
impl ExtMemoryBudgetExtension for crate::Device {}
/// Marker trait for the `EXT_memory_priority` extension (no commands).
pub trait ExtMemoryPriorityExtension: DeviceV1_0 {}
impl ExtMemoryPriorityExtension for crate::Device {}
pub trait ExtMetalSurfaceExtension: InstanceV1_0 {
    /// Creates a surface backed by a Metal layer
    /// (`vkCreateMetalSurfaceEXT`).
    #[inline]
    fn create_metal_surface_ext(
        &self,
        create_info: &MetalSurfaceCreateInfoEXT,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<SurfaceKHR> {
        let mut surface = MaybeUninit::<SurfaceKHR>::uninit();
        let code = (self.commands().create_metal_surface_ext)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            surface.as_mut_ptr(),
        );
        if code != Result::SUCCESS {
            return Err(code);
        }
        // SAFETY: on success the driver is expected to have written the handle.
        Ok(unsafe { surface.assume_init() })
    }
}
impl ExtMetalSurfaceExtension for crate::Instance {}
/// Marker trait for the `EXT_pci_bus_info` extension (no commands).
pub trait ExtPciBusInfoExtension: DeviceV1_0 {}
impl ExtPciBusInfoExtension for crate::Device {}
/// Marker trait for the `EXT_pipeline_creation_cache_control` extension (no commands).
pub trait ExtPipelineCreationCacheControlExtension: DeviceV1_0 {}
impl ExtPipelineCreationCacheControlExtension for crate::Device {}
/// Marker trait for the `EXT_pipeline_creation_feedback` extension (no commands).
pub trait ExtPipelineCreationFeedbackExtension: DeviceV1_0 {}
impl ExtPipelineCreationFeedbackExtension for crate::Device {}
/// Marker trait for the `EXT_post_depth_coverage` extension (no commands).
pub trait ExtPostDepthCoverageExtension: DeviceV1_0 {}
impl ExtPostDepthCoverageExtension for crate::Device {}
pub trait ExtPrivateDataExtension: DeviceV1_0 {
    /// Creates a private data slot (`vkCreatePrivateDataSlotEXT`).
    #[inline]
    fn create_private_data_slot_ext(
        &self,
        create_info: &PrivateDataSlotCreateInfoEXT,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<PrivateDataSlotEXT> {
        let mut private_data_slot = MaybeUninit::<PrivateDataSlotEXT>::uninit();
        let code = (self.commands().create_private_data_slot_ext)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            private_data_slot.as_mut_ptr(),
        );
        if code != Result::SUCCESS {
            return Err(code);
        }
        // SAFETY: on success the driver is expected to have written the handle.
        Ok(unsafe { private_data_slot.assume_init() })
    }

    /// Destroys a private data slot (`vkDestroyPrivateDataSlotEXT`).
    #[inline]
    fn destroy_private_data_slot_ext(
        &self,
        private_data_slot: PrivateDataSlotEXT,
        allocator: Option<&AllocationCallbacks>,
    ) {
        (self.commands().destroy_private_data_slot_ext)(
            self.handle(),
            private_data_slot,
            allocator.map_or(ptr::null(), |v| v),
        );
    }

    /// Reads the value stored for an object in a private data slot
    /// (`vkGetPrivateDataEXT`).
    #[inline]
    fn get_private_data_ext(
        &self,
        object_type: ObjectType,
        object_handle: u64,
        private_data_slot: PrivateDataSlotEXT,
    ) -> u64 {
        let mut data = MaybeUninit::<u64>::uninit();
        (self.commands().get_private_data_ext)(
            self.handle(),
            object_type,
            object_handle,
            private_data_slot,
            data.as_mut_ptr(),
        );
        // SAFETY: the driver is expected to have written `data` via the
        // out-pointer passed above.
        unsafe { data.assume_init() }
    }

    /// Stores a value for an object in a private data slot
    /// (`vkSetPrivateDataEXT`).
    #[inline]
    fn set_private_data_ext(
        &self,
        object_type: ObjectType,
        object_handle: u64,
        private_data_slot: PrivateDataSlotEXT,
        data: u64,
    ) -> crate::VkResult<()> {
        let code = (self.commands().set_private_data_ext)(
            self.handle(),
            object_type,
            object_handle,
            private_data_slot,
            data,
        );
        if code != Result::SUCCESS {
            return Err(code);
        }
        Ok(())
    }
}
impl ExtPrivateDataExtension for crate::Device {}
// Marker traits: these extensions expose no commands.
pub trait ExtQueueFamilyForeignExtension: DeviceV1_0 {}
impl ExtQueueFamilyForeignExtension for crate::Device {}
pub trait ExtRobustness2Extension: DeviceV1_0 {}
impl ExtRobustness2Extension for crate::Device {}
/// Wrappers for `VK_EXT_sample_locations`.
pub trait ExtSampleLocationsExtension: DeviceV1_0 {
    /// Records `vkCmdSetSampleLocationsEXT`, overriding the sample locations
    /// used by subsequent draws in `command_buffer`.
    #[inline]
    fn cmd_set_sample_locations_ext(
        &self,
        command_buffer: CommandBuffer,
        sample_locations_info: &SampleLocationsInfoEXT,
    ) {
        let record = self.commands().cmd_set_sample_locations_ext;
        record(command_buffer, sample_locations_info);
    }
    /// Queries `vkGetPhysicalDeviceMultisamplePropertiesEXT` for the sample
    /// location properties at the given sample count.
    #[inline]
    fn get_physical_device_multisample_properties_ext(
        &self,
        physical_device: PhysicalDevice,
        samples: SampleCountFlags,
    ) -> MultisamplePropertiesEXT {
        let mut properties = MaybeUninit::<MultisamplePropertiesEXT>::uninit();
        let query = self
            .commands()
            .get_physical_device_multisample_properties_ext;
        query(physical_device, samples, properties.as_mut_ptr());
        // SAFETY: assumes the query fills `properties`; this mirrors the
        // unconditional read the generated binding performs.
        unsafe { properties.assume_init() }
    }
}
impl ExtSampleLocationsExtension for crate::Device {}
// Marker traits: these extensions expose no commands. The deprecated ones
// were promoted to core Vulkan (see each `#[deprecated]` note).
pub trait ExtSamplerFilterMinmaxExtension: DeviceV1_0 {}
impl ExtSamplerFilterMinmaxExtension for crate::Device {}
pub trait ExtScalarBlockLayoutExtension: DeviceV1_0 {}
impl ExtScalarBlockLayoutExtension for crate::Device {}
pub trait ExtSeparateStencilUsageExtension: DeviceV1_0 {}
impl ExtSeparateStencilUsageExtension for crate::Device {}
pub trait ExtShaderAtomicFloatExtension: DeviceV1_0 {}
impl ExtShaderAtomicFloatExtension for crate::Device {}
pub trait ExtShaderDemoteToHelperInvocationExtension: DeviceV1_0 {}
impl ExtShaderDemoteToHelperInvocationExtension for crate::Device {}
pub trait ExtShaderImageAtomicInt64Extension: DeviceV1_0 {}
impl ExtShaderImageAtomicInt64Extension for crate::Device {}
pub trait ExtShaderStencilExportExtension: DeviceV1_0 {}
impl ExtShaderStencilExportExtension for crate::Device {}
#[deprecated(note = "deprecated in favor of `VK_VERSION_1_2`")]
pub trait ExtShaderSubgroupBallotExtension: DeviceV1_0 {}
#[allow(deprecated)]
impl ExtShaderSubgroupBallotExtension for crate::Device {}
#[deprecated(note = "deprecated in favor of `VK_VERSION_1_1`")]
pub trait ExtShaderSubgroupVoteExtension: DeviceV1_0 {}
#[allow(deprecated)]
impl ExtShaderSubgroupVoteExtension for crate::Device {}
pub trait ExtShaderViewportIndexLayerExtension: DeviceV1_0 {}
impl ExtShaderViewportIndexLayerExtension for crate::Device {}
pub trait ExtSubgroupSizeControlExtension: DeviceV1_0 {}
impl ExtSubgroupSizeControlExtension for crate::Device {}
pub trait ExtSwapchainColorspaceExtension: InstanceV1_0 {}
impl ExtSwapchainColorspaceExtension for crate::Instance {}
pub trait ExtTexelBufferAlignmentExtension: DeviceV1_0 {}
impl ExtTexelBufferAlignmentExtension for crate::Device {}
pub trait ExtTextureCompressionAstcHdrExtension: DeviceV1_0 {}
impl ExtTextureCompressionAstcHdrExtension for crate::Device {}
/// Wrappers for `VK_EXT_tooling_info`.
pub trait ExtToolingInfoExtension: DeviceV1_0 {
    /// Enumerates the tools active on `physical_device`
    /// (`vkGetPhysicalDeviceToolPropertiesEXT`).
    ///
    /// Uses the usual Vulkan two-call pattern: the first call fetches the
    /// count, the second fills the vector.
    ///
    /// Fixed: the vector length is now set only after a successful second
    /// call, so uninitialized memory is never exposed on the error path.
    #[inline]
    fn get_physical_device_tool_properties_ext(
        &self,
        physical_device: PhysicalDevice,
    ) -> crate::VkResult<Vec<PhysicalDeviceToolPropertiesEXT>> {
        let mut tool_count = 0;
        (self.commands().get_physical_device_tool_properties_ext)(
            physical_device,
            &mut tool_count,
            ptr::null_mut(),
        );
        let mut tool_properties = Vec::with_capacity(tool_count as usize);
        let __result = (self.commands().get_physical_device_tool_properties_ext)(
            physical_device,
            &mut tool_count,
            tool_properties.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // The count may only shrink between the two calls, so capacity
            // is always sufficient for `tool_count` elements.
            debug_assert!(tool_properties.capacity() >= tool_count as usize);
            // SAFETY: the driver wrote `tool_count` elements on success.
            unsafe { tool_properties.set_len(tool_count as usize) };
            Ok(tool_properties)
        } else {
            // On failure the buffer contents are unspecified; do not expose them.
            Err(__result)
        }
    }
}
impl ExtToolingInfoExtension for crate::Device {}
/// Wrappers for `VK_EXT_transform_feedback`.
pub trait ExtTransformFeedbackExtension: DeviceV1_0 {
    /// Records `vkCmdBeginQueryIndexedEXT`, beginning an indexed query.
    #[inline]
    fn cmd_begin_query_indexed_ext(
        &self,
        command_buffer: CommandBuffer,
        query_pool: QueryPool,
        query: u32,
        flags: QueryControlFlags,
        index: u32,
    ) {
        let __result = (self.commands().cmd_begin_query_indexed_ext)(
            command_buffer,
            query_pool,
            query,
            flags,
            index,
        );
    }
    /// Records `vkCmdBeginTransformFeedbackEXT`.
    ///
    /// NOTE(review): the count passed is `counter_buffers.len()`, while
    /// `counter_buffer_offsets` is passed by raw pointer only — this assumes
    /// the offsets slice is at least as long as `counter_buffers`. TODO:
    /// confirm callers uphold this, or an out-of-bounds read is possible.
    #[inline]
    fn cmd_begin_transform_feedback_ext(
        &self,
        command_buffer: CommandBuffer,
        first_counter_buffer: u32,
        counter_buffers: &[Buffer],
        counter_buffer_offsets: &[DeviceSize],
    ) {
        let __result = (self.commands().cmd_begin_transform_feedback_ext)(
            command_buffer,
            first_counter_buffer,
            counter_buffers.len() as u32,
            counter_buffers.as_ptr(),
            counter_buffer_offsets.as_ptr(),
        );
    }
    /// Records `vkCmdBindTransformFeedbackBuffersEXT`.
    ///
    /// NOTE(review): as above, `offsets` and `sizes` are assumed to be at
    /// least `buffers.len()` long — only `buffers.len()` is forwarded.
    #[inline]
    fn cmd_bind_transform_feedback_buffers_ext(
        &self,
        command_buffer: CommandBuffer,
        first_binding: u32,
        buffers: &[Buffer],
        offsets: &[DeviceSize],
        sizes: &[DeviceSize],
    ) {
        let __result = (self.commands().cmd_bind_transform_feedback_buffers_ext)(
            command_buffer,
            first_binding,
            buffers.len() as u32,
            buffers.as_ptr(),
            offsets.as_ptr(),
            sizes.as_ptr(),
        );
    }
    /// Records `vkCmdDrawIndirectByteCountEXT`: draws with a vertex count
    /// taken from a counter buffer filled by transform feedback.
    #[inline]
    fn cmd_draw_indirect_byte_count_ext(
        &self,
        command_buffer: CommandBuffer,
        instance_count: u32,
        first_instance: u32,
        counter_buffer: Buffer,
        counter_buffer_offset: DeviceSize,
        counter_offset: u32,
        vertex_stride: u32,
    ) {
        let __result = (self.commands().cmd_draw_indirect_byte_count_ext)(
            command_buffer,
            instance_count,
            first_instance,
            counter_buffer,
            counter_buffer_offset,
            counter_offset,
            vertex_stride,
        );
    }
    /// Records `vkCmdEndQueryIndexedEXT`, ending an indexed query.
    #[inline]
    fn cmd_end_query_indexed_ext(
        &self,
        command_buffer: CommandBuffer,
        query_pool: QueryPool,
        query: u32,
        index: u32,
    ) {
        let __result =
            (self.commands().cmd_end_query_indexed_ext)(command_buffer, query_pool, query, index);
    }
    /// Records `vkCmdEndTransformFeedbackEXT`.
    ///
    /// NOTE(review): same slice-length assumption as
    /// [`cmd_begin_transform_feedback_ext`].
    #[inline]
    fn cmd_end_transform_feedback_ext(
        &self,
        command_buffer: CommandBuffer,
        first_counter_buffer: u32,
        counter_buffers: &[Buffer],
        counter_buffer_offsets: &[DeviceSize],
    ) {
        let __result = (self.commands().cmd_end_transform_feedback_ext)(
            command_buffer,
            first_counter_buffer,
            counter_buffers.len() as u32,
            counter_buffers.as_ptr(),
            counter_buffer_offsets.as_ptr(),
        );
    }
}
impl ExtTransformFeedbackExtension for crate::Device {}
/// Wrappers for `VK_EXT_validation_cache`.
pub trait ExtValidationCacheExtension: DeviceV1_0 {
    /// Creates a validation cache (`vkCreateValidationCacheEXT`).
    #[inline]
    fn create_validation_cache_ext(
        &self,
        create_info: &ValidationCacheCreateInfoEXT,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<ValidationCacheEXT> {
        let mut validation_cache = MaybeUninit::<ValidationCacheEXT>::uninit();
        let __result = (self.commands().create_validation_cache_ext)(
            self.handle(),
            create_info,
            // `None` maps to a null pointer (use the implementation allocator).
            allocator.map_or(ptr::null(), |v| v),
            validation_cache.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // SAFETY: on `SUCCESS` the driver has written a valid handle.
            Ok(unsafe { validation_cache.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Destroys a validation cache (`vkDestroyValidationCacheEXT`).
    #[inline]
    fn destroy_validation_cache_ext(
        &self,
        validation_cache: ValidationCacheEXT,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_validation_cache_ext)(
            self.handle(),
            validation_cache,
            allocator.map_or(ptr::null(), |v| v),
        );
    }
    /// Retrieves the serialized contents of a validation cache
    /// (`vkGetValidationCacheDataEXT`).
    ///
    /// The `Vec<c_void>` element type mirrors the generated signature; the
    /// vector is an opaque byte blob.
    ///
    /// Fixed: the vector length is now set only after a successful second
    /// call, so uninitialized memory is never exposed on the error path.
    #[inline]
    fn get_validation_cache_data_ext(
        &self,
        validation_cache: ValidationCacheEXT,
    ) -> crate::VkResult<Vec<c_void>> {
        let mut data_size = 0;
        (self.commands().get_validation_cache_data_ext)(
            self.handle(),
            validation_cache,
            &mut data_size,
            ptr::null_mut(),
        );
        let mut data = Vec::with_capacity(data_size as usize);
        let __result = (self.commands().get_validation_cache_data_ext)(
            self.handle(),
            validation_cache,
            &mut data_size,
            data.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // The size may only shrink between the two calls.
            debug_assert!(data.capacity() >= data_size as usize);
            // SAFETY: the driver wrote `data_size` bytes on success.
            unsafe { data.set_len(data_size as usize) };
            Ok(data)
        } else {
            // On failure the buffer contents are unspecified; do not expose them.
            Err(__result)
        }
    }
    /// Merges `src_caches` into `dst_cache` (`vkMergeValidationCachesEXT`).
    #[inline]
    fn merge_validation_caches_ext(
        &self,
        dst_cache: ValidationCacheEXT,
        src_caches: &[ValidationCacheEXT],
    ) -> crate::VkResult<()> {
        let __result = (self.commands().merge_validation_caches_ext)(
            self.handle(),
            dst_cache,
            src_caches.len() as u32,
            src_caches.as_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
}
impl ExtValidationCacheExtension for crate::Device {}
// Marker traits: these extensions expose no commands.
pub trait ExtValidationFeaturesExtension: InstanceV1_0 {}
impl ExtValidationFeaturesExtension for crate::Instance {}
#[deprecated(note = "deprecated in favor of `VK_EXT_validation_features`")]
pub trait ExtValidationFlagsExtension: InstanceV1_0 {}
#[allow(deprecated)]
impl ExtValidationFlagsExtension for crate::Instance {}
pub trait ExtVertexAttributeDivisorExtension: DeviceV1_0 {}
impl ExtVertexAttributeDivisorExtension for crate::Device {}
pub trait ExtYcbcrImageArraysExtension: DeviceV1_0 {}
impl ExtYcbcrImageArraysExtension for crate::Device {}
/// Wrappers for `VK_FUCHSIA_imagepipe_surface`.
pub trait FuchsiaImagepipeSurfaceExtension: InstanceV1_0 {
    /// Creates a `SurfaceKHR` for a Fuchsia image pipe
    /// (`vkCreateImagePipeSurfaceFUCHSIA`).
    #[inline]
    fn create_image_pipe_surface_fuchsia(
        &self,
        create_info: &ImagePipeSurfaceCreateInfoFUCHSIA,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<SurfaceKHR> {
        // `None` maps to a null pointer (use the implementation allocator).
        let callbacks = match allocator {
            Some(callbacks) => callbacks as *const AllocationCallbacks,
            None => ptr::null(),
        };
        let mut out = MaybeUninit::<SurfaceKHR>::uninit();
        match (self.commands().create_image_pipe_surface_fuchsia)(
            self.handle(),
            create_info,
            callbacks,
            out.as_mut_ptr(),
        ) {
            // SAFETY: on `SUCCESS` the driver has written a valid handle.
            Result::SUCCESS => Ok(unsafe { out.assume_init() }),
            error => Err(error),
        }
    }
}
impl FuchsiaImagepipeSurfaceExtension for crate::Instance {}
// Marker trait: this extension exposes no commands.
pub trait GgpFrameTokenExtension: DeviceV1_0 {}
impl GgpFrameTokenExtension for crate::Device {}
/// Wrappers for `VK_GGP_stream_descriptor_surface`.
pub trait GgpStreamDescriptorSurfaceExtension: InstanceV1_0 {
    /// Creates a `SurfaceKHR` from a Google Games Platform stream descriptor
    /// (`vkCreateStreamDescriptorSurfaceGGP`).
    #[inline]
    fn create_stream_descriptor_surface_ggp(
        &self,
        create_info: &StreamDescriptorSurfaceCreateInfoGGP,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<SurfaceKHR> {
        // `None` maps to a null pointer (use the implementation allocator).
        let callbacks = match allocator {
            Some(callbacks) => callbacks as *const AllocationCallbacks,
            None => ptr::null(),
        };
        let mut out = MaybeUninit::<SurfaceKHR>::uninit();
        match (self.commands().create_stream_descriptor_surface_ggp)(
            self.handle(),
            create_info,
            callbacks,
            out.as_mut_ptr(),
        ) {
            // SAFETY: on `SUCCESS` the driver has written a valid handle.
            Result::SUCCESS => Ok(unsafe { out.assume_init() }),
            error => Err(error),
        }
    }
}
impl GgpStreamDescriptorSurfaceExtension for crate::Instance {}
// Marker trait: this extension exposes no commands.
pub trait GoogleDecorateStringExtension: DeviceV1_0 {}
impl GoogleDecorateStringExtension for crate::Device {}
/// Wrappers for `VK_GOOGLE_display_timing`.
pub trait GoogleDisplayTimingExtension: DeviceV1_0 {
    /// Returns timing information about past presents to `swapchain`
    /// (`vkGetPastPresentationTimingGOOGLE`), via the two-call pattern.
    ///
    /// Fixed: the vector length is now set only after a successful second
    /// call, so uninitialized memory is never exposed on the error path.
    #[inline]
    fn get_past_presentation_timing_google(
        &self,
        swapchain: SwapchainKHR,
    ) -> crate::VkResult<Vec<PastPresentationTimingGOOGLE>> {
        let mut presentation_timing_count = 0;
        (self.commands().get_past_presentation_timing_google)(
            self.handle(),
            swapchain,
            &mut presentation_timing_count,
            ptr::null_mut(),
        );
        let mut presentation_timings = Vec::with_capacity(presentation_timing_count as usize);
        let __result = (self.commands().get_past_presentation_timing_google)(
            self.handle(),
            swapchain,
            &mut presentation_timing_count,
            presentation_timings.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // The count may only shrink between the two calls.
            debug_assert!(presentation_timings.capacity() >= presentation_timing_count as usize);
            // SAFETY: the driver wrote `presentation_timing_count` elements.
            unsafe { presentation_timings.set_len(presentation_timing_count as usize) };
            Ok(presentation_timings)
        } else {
            // On failure the buffer contents are unspecified; do not expose them.
            Err(__result)
        }
    }
    /// Returns the refresh cycle duration of `swapchain`'s display
    /// (`vkGetRefreshCycleDurationGOOGLE`).
    #[inline]
    fn get_refresh_cycle_duration_google(
        &self,
        swapchain: SwapchainKHR,
    ) -> crate::VkResult<RefreshCycleDurationGOOGLE> {
        let mut display_timing_properties = MaybeUninit::<RefreshCycleDurationGOOGLE>::uninit();
        let __result = (self.commands().get_refresh_cycle_duration_google)(
            self.handle(),
            swapchain,
            display_timing_properties.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // SAFETY: written by the driver on success.
            Ok(unsafe { display_timing_properties.assume_init() })
        } else {
            Err(__result)
        }
    }
}
impl GoogleDisplayTimingExtension for crate::Device {}
// Marker traits: these extensions expose no commands.
pub trait GoogleHlslFunctionality1Extension: DeviceV1_0 {}
impl GoogleHlslFunctionality1Extension for crate::Device {}
pub trait GoogleUserTypeExtension: DeviceV1_0 {}
impl GoogleUserTypeExtension for crate::Device {}
pub trait ImgFilterCubicExtension: DeviceV1_0 {}
impl ImgFilterCubicExtension for crate::Device {}
pub trait ImgFormatPvrtcExtension: DeviceV1_0 {}
impl ImgFormatPvrtcExtension for crate::Device {}
/// Wrappers for `VK_INTEL_performance_query`.
pub trait IntelPerformanceQueryExtension: DeviceV1_0 {
    /// Acquires a performance query configuration
    /// (`vkAcquirePerformanceConfigurationINTEL`).
    #[inline]
    fn acquire_performance_configuration_intel(
        &self,
        acquire_info: &PerformanceConfigurationAcquireInfoINTEL,
    ) -> crate::VkResult<PerformanceConfigurationINTEL> {
        let mut configuration = MaybeUninit::<PerformanceConfigurationINTEL>::uninit();
        let __result = (self.commands().acquire_performance_configuration_intel)(
            self.handle(),
            acquire_info,
            configuration.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // SAFETY: written by the driver on success.
            Ok(unsafe { configuration.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Records `vkCmdSetPerformanceMarkerINTEL`.
    #[inline]
    fn cmd_set_performance_marker_intel(
        &self,
        command_buffer: CommandBuffer,
        marker_info: &PerformanceMarkerInfoINTEL,
    ) -> crate::VkResult<()> {
        let __result =
            (self.commands().cmd_set_performance_marker_intel)(command_buffer, marker_info);
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
    /// Records `vkCmdSetPerformanceOverrideINTEL`.
    #[inline]
    fn cmd_set_performance_override_intel(
        &self,
        command_buffer: CommandBuffer,
        override_info: &PerformanceOverrideInfoINTEL,
    ) -> crate::VkResult<()> {
        let __result =
            (self.commands().cmd_set_performance_override_intel)(command_buffer, override_info);
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
    /// Records `vkCmdSetPerformanceStreamMarkerINTEL`.
    #[inline]
    fn cmd_set_performance_stream_marker_intel(
        &self,
        command_buffer: CommandBuffer,
        marker_info: &PerformanceStreamMarkerInfoINTEL,
    ) -> crate::VkResult<()> {
        let __result =
            (self.commands().cmd_set_performance_stream_marker_intel)(command_buffer, marker_info);
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
    /// Queries a device performance parameter
    /// (`vkGetPerformanceParameterINTEL`).
    #[inline]
    fn get_performance_parameter_intel(
        &self,
        parameter: PerformanceParameterTypeINTEL,
    ) -> crate::VkResult<PerformanceValueINTEL> {
        let mut value = MaybeUninit::<PerformanceValueINTEL>::uninit();
        let __result = (self.commands().get_performance_parameter_intel)(
            self.handle(),
            parameter,
            value.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // SAFETY: written by the driver on success.
            Ok(unsafe { value.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Initializes the device for performance queries
    /// (`vkInitializePerformanceApiINTEL`).
    #[inline]
    fn initialize_performance_api_intel(
        &self,
        initialize_info: &InitializePerformanceApiInfoINTEL,
    ) -> crate::VkResult<()> {
        let __result =
            (self.commands().initialize_performance_api_intel)(self.handle(), initialize_info);
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
    /// Binds a performance configuration to a queue
    /// (`vkQueueSetPerformanceConfigurationINTEL`).
    #[inline]
    fn queue_set_performance_configuration_intel(
        &self,
        queue: Queue,
        configuration: PerformanceConfigurationINTEL,
    ) -> crate::VkResult<()> {
        let __result =
            (self.commands().queue_set_performance_configuration_intel)(queue, configuration);
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
    /// Releases a previously acquired performance configuration
    /// (`vkReleasePerformanceConfigurationINTEL`).
    #[inline]
    fn release_performance_configuration_intel(
        &self,
        configuration: PerformanceConfigurationINTEL,
    ) -> crate::VkResult<()> {
        let __result =
            (self.commands().release_performance_configuration_intel)(self.handle(), configuration);
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
    /// Tears down performance query support
    /// (`vkUninitializePerformanceApiINTEL`).
    #[inline]
    fn uninitialize_performance_api_intel(&self) {
        let __result = (self.commands().uninitialize_performance_api_intel)(self.handle());
    }
}
impl IntelPerformanceQueryExtension for crate::Device {}
// Marker traits: these extensions expose no commands.
pub trait IntelShaderIntegerFunctions2Extension: DeviceV1_0 {}
impl IntelShaderIntegerFunctions2Extension for crate::Device {}
pub trait Khr16bitStorageExtension: DeviceV1_0 {}
impl Khr16bitStorageExtension for crate::Device {}
pub trait Khr8bitStorageExtension: DeviceV1_0 {}
impl Khr8bitStorageExtension for crate::Device {}
/// Wrappers for `VK_KHR_android_surface`.
pub trait KhrAndroidSurfaceExtension: InstanceV1_0 {
    /// Creates a `SurfaceKHR` for an Android native window
    /// (`vkCreateAndroidSurfaceKHR`).
    #[inline]
    fn create_android_surface_khr(
        &self,
        create_info: &AndroidSurfaceCreateInfoKHR,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<SurfaceKHR> {
        // `None` maps to a null pointer (use the implementation allocator).
        let callbacks = match allocator {
            Some(callbacks) => callbacks as *const AllocationCallbacks,
            None => ptr::null(),
        };
        let mut out = MaybeUninit::<SurfaceKHR>::uninit();
        match (self.commands().create_android_surface_khr)(
            self.handle(),
            create_info,
            callbacks,
            out.as_mut_ptr(),
        ) {
            // SAFETY: on `SUCCESS` the driver has written a valid handle.
            Result::SUCCESS => Ok(unsafe { out.assume_init() }),
            error => Err(error),
        }
    }
}
impl KhrAndroidSurfaceExtension for crate::Instance {}
/// Wrappers for `VK_KHR_bind_memory2`.
pub trait KhrBindMemory2Extension: DeviceV1_0 {
    /// Binds device memory to multiple buffers in one call
    /// (`vkBindBufferMemory2KHR`).
    #[inline]
    fn bind_buffer_memory2_khr(
        &self,
        bind_infos: &[impl Cast<Target = BindBufferMemoryInfo>],
    ) -> crate::VkResult<()> {
        let code = (self.commands().bind_buffer_memory2_khr)(
            self.handle(),
            bind_infos.len() as u32,
            bind_infos.as_ptr().cast(),
        );
        match code {
            Result::SUCCESS => Ok(()),
            error => Err(error),
        }
    }
    /// Binds device memory to multiple images in one call
    /// (`vkBindImageMemory2KHR`).
    #[inline]
    fn bind_image_memory2_khr(
        &self,
        bind_infos: &[impl Cast<Target = BindImageMemoryInfo>],
    ) -> crate::VkResult<()> {
        let code = (self.commands().bind_image_memory2_khr)(
            self.handle(),
            bind_infos.len() as u32,
            bind_infos.as_ptr().cast(),
        );
        match code {
            Result::SUCCESS => Ok(()),
            error => Err(error),
        }
    }
}
impl KhrBindMemory2Extension for crate::Device {}
pub trait KhrBufferDeviceAddressExtension: DeviceV1_0 {
#[inline]
fn get_buffer_device_address_khr(&self, info: &BufferDeviceAddressInfo) {
let __result = (self.commands().get_buffer_device_address_khr)(self.handle(), info);
}
#[inline]
fn get_buffer_opaque_capture_address_khr(&self, info: &BufferDeviceAddressInfo) {
let __result = (self.commands().get_buffer_opaque_capture_address_khr)(self.handle(), info);
}
#[inline]
fn get_device_memory_opaque_capture_address_khr(
&self,
info: &DeviceMemoryOpaqueCaptureAddressInfo,
) {
let __result =
(self.commands().get_device_memory_opaque_capture_address_khr)(self.handle(), info);
}
}
impl KhrBufferDeviceAddressExtension for crate::Device {}
/// Wrappers for `VK_KHR_copy_commands2` (extended copy/blit/resolve commands
/// taking `*Info2KHR` structures).
pub trait KhrCopyCommands2Extension: DeviceV1_0 {
    /// Records `vkCmdBlitImage2KHR`.
    #[inline]
    fn cmd_blit_image2_khr(
        &self,
        command_buffer: CommandBuffer,
        blit_image_info: &BlitImageInfo2KHR,
    ) {
        let record = self.commands().cmd_blit_image2_khr;
        record(command_buffer, blit_image_info);
    }
    /// Records `vkCmdCopyBuffer2KHR`.
    #[inline]
    fn cmd_copy_buffer2_khr(
        &self,
        command_buffer: CommandBuffer,
        copy_buffer_info: &CopyBufferInfo2KHR,
    ) {
        let record = self.commands().cmd_copy_buffer2_khr;
        record(command_buffer, copy_buffer_info);
    }
    /// Records `vkCmdCopyBufferToImage2KHR`.
    #[inline]
    fn cmd_copy_buffer_to_image2_khr(
        &self,
        command_buffer: CommandBuffer,
        copy_buffer_to_image_info: &CopyBufferToImageInfo2KHR,
    ) {
        let record = self.commands().cmd_copy_buffer_to_image2_khr;
        record(command_buffer, copy_buffer_to_image_info);
    }
    /// Records `vkCmdCopyImage2KHR`.
    #[inline]
    fn cmd_copy_image2_khr(
        &self,
        command_buffer: CommandBuffer,
        copy_image_info: &CopyImageInfo2KHR,
    ) {
        let record = self.commands().cmd_copy_image2_khr;
        record(command_buffer, copy_image_info);
    }
    /// Records `vkCmdCopyImageToBuffer2KHR`.
    #[inline]
    fn cmd_copy_image_to_buffer2_khr(
        &self,
        command_buffer: CommandBuffer,
        copy_image_to_buffer_info: &CopyImageToBufferInfo2KHR,
    ) {
        let record = self.commands().cmd_copy_image_to_buffer2_khr;
        record(command_buffer, copy_image_to_buffer_info);
    }
    /// Records `vkCmdResolveImage2KHR`.
    #[inline]
    fn cmd_resolve_image2_khr(
        &self,
        command_buffer: CommandBuffer,
        resolve_image_info: &ResolveImageInfo2KHR,
    ) {
        let record = self.commands().cmd_resolve_image2_khr;
        record(command_buffer, resolve_image_info);
    }
}
impl KhrCopyCommands2Extension for crate::Device {}
/// Wrappers for `VK_KHR_create_renderpass2`.
pub trait KhrCreateRenderpass2Extension: DeviceV1_0 {
    /// Records `vkCmdBeginRenderPass2KHR`.
    #[inline]
    fn cmd_begin_render_pass2_khr(
        &self,
        command_buffer: CommandBuffer,
        render_pass_begin: &RenderPassBeginInfo,
        subpass_begin_info: &SubpassBeginInfo,
    ) {
        let __result = (self.commands().cmd_begin_render_pass2_khr)(
            command_buffer,
            render_pass_begin,
            subpass_begin_info,
        );
    }
    /// Records `vkCmdEndRenderPass2KHR`.
    #[inline]
    fn cmd_end_render_pass2_khr(
        &self,
        command_buffer: CommandBuffer,
        subpass_end_info: &SubpassEndInfo,
    ) {
        let __result = (self.commands().cmd_end_render_pass2_khr)(command_buffer, subpass_end_info);
    }
    /// Records `vkCmdNextSubpass2KHR`.
    #[inline]
    fn cmd_next_subpass2_khr(
        &self,
        command_buffer: CommandBuffer,
        subpass_begin_info: &SubpassBeginInfo,
        subpass_end_info: &SubpassEndInfo,
    ) {
        let __result = (self.commands().cmd_next_subpass2_khr)(
            command_buffer,
            subpass_begin_info,
            subpass_end_info,
        );
    }
    /// Creates a render pass from a `RenderPassCreateInfo2`
    /// (`vkCreateRenderPass2KHR`).
    #[inline]
    fn create_render_pass2_khr(
        &self,
        create_info: &RenderPassCreateInfo2,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<RenderPass> {
        let mut render_pass = MaybeUninit::<RenderPass>::uninit();
        let __result = (self.commands().create_render_pass2_khr)(
            self.handle(),
            create_info,
            // `None` maps to a null pointer (use the implementation allocator).
            allocator.map_or(ptr::null(), |v| v),
            render_pass.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // SAFETY: on `SUCCESS` the driver has written a valid handle.
            Ok(unsafe { render_pass.assume_init() })
        } else {
            Err(__result)
        }
    }
}
impl KhrCreateRenderpass2Extension for crate::Device {}
// Marker trait: this extension exposes no commands.
pub trait KhrDedicatedAllocationExtension: DeviceV1_0 {}
impl KhrDedicatedAllocationExtension for crate::Device {}
/// Wrappers for `VK_KHR_deferred_host_operations`.
pub trait KhrDeferredHostOperationsExtension: DeviceV1_0 {
    /// Creates a deferred operation object (`vkCreateDeferredOperationKHR`).
    #[inline]
    fn create_deferred_operation_khr(
        &self,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<DeferredOperationKHR> {
        let mut deferred_operation = MaybeUninit::<DeferredOperationKHR>::uninit();
        let __result = (self.commands().create_deferred_operation_khr)(
            self.handle(),
            // `None` maps to a null pointer (use the implementation allocator).
            allocator.map_or(ptr::null(), |v| v),
            deferred_operation.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // SAFETY: on `SUCCESS` the driver has written a valid handle.
            Ok(unsafe { deferred_operation.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Joins the calling thread to a deferred operation
    /// (`vkDeferredOperationJoinKHR`).
    ///
    /// NOTE(review): the spec's non-`SUCCESS` success codes
    /// (`THREAD_DONE_KHR`, `THREAD_IDLE_KHR`) are surfaced through `Err`
    /// here; callers must handle them. Preserved for backward compatibility.
    #[inline]
    fn deferred_operation_join_khr(&self, operation: DeferredOperationKHR) -> crate::VkResult<()> {
        let __result = (self.commands().deferred_operation_join_khr)(self.handle(), operation);
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
    /// Destroys a deferred operation object
    /// (`vkDestroyDeferredOperationKHR`).
    #[inline]
    fn destroy_deferred_operation_khr(
        &self,
        operation: DeferredOperationKHR,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_deferred_operation_khr)(
            self.handle(),
            operation,
            allocator.map_or(ptr::null(), |v| v),
        );
    }
    /// Returns the maximum number of threads that can usefully join
    /// `operation` (`vkGetDeferredOperationMaxConcurrencyKHR`).
    ///
    /// Fixed: the previous binding discarded the driver's answer, making the
    /// call useless; it is now returned. Statement-position callers remain
    /// source-compatible.
    #[inline]
    fn get_deferred_operation_max_concurrency_khr(&self, operation: DeferredOperationKHR) -> u32 {
        (self.commands().get_deferred_operation_max_concurrency_khr)(self.handle(), operation)
    }
    /// Returns the result of a completed deferred operation
    /// (`vkGetDeferredOperationResultKHR`).
    #[inline]
    fn get_deferred_operation_result_khr(
        &self,
        operation: DeferredOperationKHR,
    ) -> crate::VkResult<()> {
        let __result =
            (self.commands().get_deferred_operation_result_khr)(self.handle(), operation);
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
}
impl KhrDeferredHostOperationsExtension for crate::Device {}
// Marker trait: this extension exposes no commands.
pub trait KhrDepthStencilResolveExtension: DeviceV1_0 {}
impl KhrDepthStencilResolveExtension for crate::Device {}
/// Wrappers for `VK_KHR_descriptor_update_template`.
pub trait KhrDescriptorUpdateTemplateExtension: DeviceV1_0 {
    /// Records `vkCmdPushDescriptorSetWithTemplateKHR`.
    ///
    /// `data` is an opaque blob whose layout must match the entries declared
    /// when `descriptor_update_template` was created — NOTE(review): the
    /// binding cannot check this; the caller is responsible.
    #[inline]
    fn cmd_push_descriptor_set_with_template_khr(
        &self,
        command_buffer: CommandBuffer,
        descriptor_update_template: DescriptorUpdateTemplate,
        layout: PipelineLayout,
        set: u32,
        data: &c_void,
    ) {
        let __result = (self.commands().cmd_push_descriptor_set_with_template_khr)(
            command_buffer,
            descriptor_update_template,
            layout,
            set,
            data,
        );
    }
    /// Creates a descriptor update template
    /// (`vkCreateDescriptorUpdateTemplateKHR`).
    #[inline]
    fn create_descriptor_update_template_khr(
        &self,
        create_info: &DescriptorUpdateTemplateCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<DescriptorUpdateTemplate> {
        let mut descriptor_update_template = MaybeUninit::<DescriptorUpdateTemplate>::uninit();
        let __result = (self.commands().create_descriptor_update_template_khr)(
            self.handle(),
            create_info,
            // `None` maps to a null pointer (use the implementation allocator).
            allocator.map_or(ptr::null(), |v| v),
            descriptor_update_template.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // SAFETY: on `SUCCESS` the driver has written a valid handle.
            Ok(unsafe { descriptor_update_template.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Destroys a descriptor update template
    /// (`vkDestroyDescriptorUpdateTemplateKHR`).
    #[inline]
    fn destroy_descriptor_update_template_khr(
        &self,
        descriptor_update_template: DescriptorUpdateTemplate,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_descriptor_update_template_khr)(
            self.handle(),
            descriptor_update_template,
            allocator.map_or(ptr::null(), |v| v),
        );
    }
    /// Updates `descriptor_set` using a template and an opaque data blob
    /// (`vkUpdateDescriptorSetWithTemplateKHR`). Same layout caveat as
    /// [`cmd_push_descriptor_set_with_template_khr`].
    #[inline]
    fn update_descriptor_set_with_template_khr(
        &self,
        descriptor_set: DescriptorSet,
        descriptor_update_template: DescriptorUpdateTemplate,
        data: &c_void,
    ) {
        let __result = (self.commands().update_descriptor_set_with_template_khr)(
            self.handle(),
            descriptor_set,
            descriptor_update_template,
            data,
        );
    }
}
impl KhrDescriptorUpdateTemplateExtension for crate::Device {}
/// Wrappers for `VK_KHR_device_group`.
pub trait KhrDeviceGroupExtension: DeviceV1_0 {
    /// Acquires the next presentable image (`vkAcquireNextImage2KHR`),
    /// returning its index within the swapchain.
    #[inline]
    fn acquire_next_image2_khr(
        &self,
        acquire_info: &AcquireNextImageInfoKHR,
    ) -> crate::VkResult<u32> {
        let mut image_index = MaybeUninit::<u32>::uninit();
        let __result = (self.commands().acquire_next_image2_khr)(
            self.handle(),
            acquire_info,
            image_index.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // SAFETY: written by the driver on success.
            Ok(unsafe { image_index.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Records `vkCmdDispatchBaseKHR`: a compute dispatch with a non-zero
    /// base workgroup.
    #[inline]
    fn cmd_dispatch_base_khr(
        &self,
        command_buffer: CommandBuffer,
        base_group_x: u32,
        base_group_y: u32,
        base_group_z: u32,
        group_count_x: u32,
        group_count_y: u32,
        group_count_z: u32,
    ) {
        let __result = (self.commands().cmd_dispatch_base_khr)(
            command_buffer,
            base_group_x,
            base_group_y,
            base_group_z,
            group_count_x,
            group_count_y,
            group_count_z,
        );
    }
    /// Records `vkCmdSetDeviceMaskKHR`, limiting subsequent commands to the
    /// devices selected by `device_mask`.
    #[inline]
    fn cmd_set_device_mask_khr(&self, command_buffer: CommandBuffer, device_mask: u32) {
        let __result = (self.commands().cmd_set_device_mask_khr)(command_buffer, device_mask);
    }
    /// Queries peer memory capabilities between two devices in the group
    /// (`vkGetDeviceGroupPeerMemoryFeaturesKHR`).
    #[inline]
    fn get_device_group_peer_memory_features_khr(
        &self,
        heap_index: u32,
        local_device_index: u32,
        remote_device_index: u32,
    ) -> PeerMemoryFeatureFlags {
        let mut peer_memory_features = MaybeUninit::<PeerMemoryFeatureFlags>::uninit();
        let __result = (self.commands().get_device_group_peer_memory_features_khr)(
            self.handle(),
            heap_index,
            local_device_index,
            remote_device_index,
            peer_memory_features.as_mut_ptr(),
        );
        // SAFETY: assumes the command fills the output before returning,
        // matching the unconditional read the generated binding performs.
        unsafe { peer_memory_features.assume_init() }
    }
    /// Queries device-group present capabilities
    /// (`vkGetDeviceGroupPresentCapabilitiesKHR`).
    #[inline]
    fn get_device_group_present_capabilities_khr(
        &self,
    ) -> crate::VkResult<DeviceGroupPresentCapabilitiesKHR> {
        let mut device_group_present_capabilities =
            MaybeUninit::<DeviceGroupPresentCapabilitiesKHR>::uninit();
        let __result = (self.commands().get_device_group_present_capabilities_khr)(
            self.handle(),
            device_group_present_capabilities.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // SAFETY: written by the driver on success.
            Ok(unsafe { device_group_present_capabilities.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Queries supported device-group present modes for `surface`
    /// (`vkGetDeviceGroupSurfacePresentModesKHR`).
    #[inline]
    fn get_device_group_surface_present_modes_khr(
        &self,
        surface: SurfaceKHR,
    ) -> crate::VkResult<DeviceGroupPresentModeFlagsKHR> {
        let mut modes = MaybeUninit::<DeviceGroupPresentModeFlagsKHR>::uninit();
        let __result = (self.commands().get_device_group_surface_present_modes_khr)(
            self.handle(),
            surface,
            modes.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // SAFETY: written by the driver on success.
            Ok(unsafe { modes.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Enumerates the rectangles of `surface` that `physical_device` can
    /// present to (`vkGetPhysicalDevicePresentRectanglesKHR`), via the
    /// two-call pattern.
    ///
    /// Fixed: the vector length is now set only after a successful second
    /// call, so uninitialized memory is never exposed on the error path.
    #[inline]
    fn get_physical_device_present_rectangles_khr(
        &self,
        physical_device: PhysicalDevice,
        surface: SurfaceKHR,
    ) -> crate::VkResult<Vec<Rect2D>> {
        let mut rect_count = 0;
        (self.commands().get_physical_device_present_rectangles_khr)(
            physical_device,
            surface,
            &mut rect_count,
            ptr::null_mut(),
        );
        let mut rects = Vec::with_capacity(rect_count as usize);
        let __result = (self.commands().get_physical_device_present_rectangles_khr)(
            physical_device,
            surface,
            &mut rect_count,
            rects.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // The count may only shrink between the two calls.
            debug_assert!(rects.capacity() >= rect_count as usize);
            // SAFETY: the driver wrote `rect_count` elements on success.
            unsafe { rects.set_len(rect_count as usize) };
            Ok(rects)
        } else {
            // On failure the buffer contents are unspecified; do not expose them.
            Err(__result)
        }
    }
}
impl KhrDeviceGroupExtension for crate::Device {}
/// Wrappers for `VK_KHR_device_group_creation`.
pub trait KhrDeviceGroupCreationExtension: InstanceV1_0 {
    /// Enumerates the physical device groups on this instance
    /// (`vkEnumeratePhysicalDeviceGroupsKHR`), via the two-call pattern.
    ///
    /// Fixed: the vector length is now set only after a successful second
    /// call, so uninitialized memory is never exposed on the error path.
    #[inline]
    fn enumerate_physical_device_groups_khr(
        &self,
    ) -> crate::VkResult<Vec<PhysicalDeviceGroupProperties>> {
        let mut physical_device_group_count = 0;
        (self.commands().enumerate_physical_device_groups_khr)(
            self.handle(),
            &mut physical_device_group_count,
            ptr::null_mut(),
        );
        let mut physical_device_group_properties =
            Vec::with_capacity(physical_device_group_count as usize);
        let __result = (self.commands().enumerate_physical_device_groups_khr)(
            self.handle(),
            &mut physical_device_group_count,
            physical_device_group_properties.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // The count may only shrink between the two calls.
            debug_assert!(
                physical_device_group_properties.capacity()
                    >= physical_device_group_count as usize
            );
            // SAFETY: the driver wrote `physical_device_group_count` elements.
            unsafe {
                physical_device_group_properties.set_len(physical_device_group_count as usize)
            };
            Ok(physical_device_group_properties)
        } else {
            // On failure the buffer contents are unspecified; do not expose them.
            Err(__result)
        }
    }
}
impl KhrDeviceGroupCreationExtension for crate::Instance {}
/// Wrappers for `VK_KHR_display`.
///
/// All enumeration methods use the Vulkan two-call pattern and were fixed to
/// set the vector length only after a successful second call, so
/// uninitialized memory is never exposed on the error path.
pub trait KhrDisplayExtension: InstanceV1_0 {
    /// Creates a display mode for `display` (`vkCreateDisplayModeKHR`).
    #[inline]
    fn create_display_mode_khr(
        &self,
        physical_device: PhysicalDevice,
        display: DisplayKHR,
        create_info: &DisplayModeCreateInfoKHR,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<DisplayModeKHR> {
        let mut mode = MaybeUninit::<DisplayModeKHR>::uninit();
        let __result = (self.commands().create_display_mode_khr)(
            physical_device,
            display,
            create_info,
            // `None` maps to a null pointer (use the implementation allocator).
            allocator.map_or(ptr::null(), |v| v),
            mode.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // SAFETY: on `SUCCESS` the driver has written a valid handle.
            Ok(unsafe { mode.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Creates a `SurfaceKHR` targeting a display plane
    /// (`vkCreateDisplayPlaneSurfaceKHR`).
    #[inline]
    fn create_display_plane_surface_khr(
        &self,
        create_info: &DisplaySurfaceCreateInfoKHR,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<SurfaceKHR> {
        let mut surface = MaybeUninit::<SurfaceKHR>::uninit();
        let __result = (self.commands().create_display_plane_surface_khr)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            surface.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // SAFETY: on `SUCCESS` the driver has written a valid handle.
            Ok(unsafe { surface.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Enumerates the modes supported by `display`
    /// (`vkGetDisplayModePropertiesKHR`).
    #[inline]
    fn get_display_mode_properties_khr(
        &self,
        physical_device: PhysicalDevice,
        display: DisplayKHR,
    ) -> crate::VkResult<Vec<DisplayModePropertiesKHR>> {
        let mut property_count = 0;
        (self.commands().get_display_mode_properties_khr)(
            physical_device,
            display,
            &mut property_count,
            ptr::null_mut(),
        );
        let mut properties = Vec::with_capacity(property_count as usize);
        let __result = (self.commands().get_display_mode_properties_khr)(
            physical_device,
            display,
            &mut property_count,
            properties.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // The count may only shrink between the two calls.
            debug_assert!(properties.capacity() >= property_count as usize);
            // SAFETY: the driver wrote `property_count` elements on success.
            unsafe { properties.set_len(property_count as usize) };
            Ok(properties)
        } else {
            Err(__result)
        }
    }
    /// Queries the capabilities of a display plane for a given mode
    /// (`vkGetDisplayPlaneCapabilitiesKHR`).
    #[inline]
    fn get_display_plane_capabilities_khr(
        &self,
        physical_device: PhysicalDevice,
        mode: DisplayModeKHR,
        plane_index: u32,
    ) -> crate::VkResult<DisplayPlaneCapabilitiesKHR> {
        let mut capabilities = MaybeUninit::<DisplayPlaneCapabilitiesKHR>::uninit();
        let __result = (self.commands().get_display_plane_capabilities_khr)(
            physical_device,
            mode,
            plane_index,
            capabilities.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // SAFETY: written by the driver on success.
            Ok(unsafe { capabilities.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Enumerates the displays a plane can target
    /// (`vkGetDisplayPlaneSupportedDisplaysKHR`).
    #[inline]
    fn get_display_plane_supported_displays_khr(
        &self,
        physical_device: PhysicalDevice,
        plane_index: u32,
    ) -> crate::VkResult<Vec<DisplayKHR>> {
        let mut display_count = 0;
        (self.commands().get_display_plane_supported_displays_khr)(
            physical_device,
            plane_index,
            &mut display_count,
            ptr::null_mut(),
        );
        let mut displays = Vec::with_capacity(display_count as usize);
        let __result = (self.commands().get_display_plane_supported_displays_khr)(
            physical_device,
            plane_index,
            &mut display_count,
            displays.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            debug_assert!(displays.capacity() >= display_count as usize);
            // SAFETY: the driver wrote `display_count` elements on success.
            unsafe { displays.set_len(display_count as usize) };
            Ok(displays)
        } else {
            Err(__result)
        }
    }
    /// Enumerates the display planes of `physical_device`
    /// (`vkGetPhysicalDeviceDisplayPlanePropertiesKHR`).
    #[inline]
    fn get_physical_device_display_plane_properties_khr(
        &self,
        physical_device: PhysicalDevice,
    ) -> crate::VkResult<Vec<DisplayPlanePropertiesKHR>> {
        let mut property_count = 0;
        (self
            .commands()
            .get_physical_device_display_plane_properties_khr)(
            physical_device,
            &mut property_count,
            ptr::null_mut(),
        );
        let mut properties = Vec::with_capacity(property_count as usize);
        let __result = (self
            .commands()
            .get_physical_device_display_plane_properties_khr)(
            physical_device,
            &mut property_count,
            properties.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            debug_assert!(properties.capacity() >= property_count as usize);
            // SAFETY: the driver wrote `property_count` elements on success.
            unsafe { properties.set_len(property_count as usize) };
            Ok(properties)
        } else {
            Err(__result)
        }
    }
    /// Enumerates the displays attached to `physical_device`
    /// (`vkGetPhysicalDeviceDisplayPropertiesKHR`).
    #[inline]
    fn get_physical_device_display_properties_khr(
        &self,
        physical_device: PhysicalDevice,
    ) -> crate::VkResult<Vec<DisplayPropertiesKHR>> {
        let mut property_count = 0;
        (self.commands().get_physical_device_display_properties_khr)(
            physical_device,
            &mut property_count,
            ptr::null_mut(),
        );
        let mut properties = Vec::with_capacity(property_count as usize);
        let __result = (self.commands().get_physical_device_display_properties_khr)(
            physical_device,
            &mut property_count,
            properties.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            debug_assert!(properties.capacity() >= property_count as usize);
            // SAFETY: the driver wrote `property_count` elements on success.
            unsafe { properties.set_len(property_count as usize) };
            Ok(properties)
        } else {
            Err(__result)
        }
    }
}
impl KhrDisplayExtension for crate::Instance {}
/// VK_KHR_display_swapchain device commands.
pub trait KhrDisplaySwapchainExtension: DeviceV1_0 {
    /// Creates one swapchain per entry in `create_infos` that share presentable images.
    ///
    /// `vkCreateSharedSwapchainsKHR` writes `swapchainCount` handles into the
    /// output array, so the destination must have room for every create info.
    /// The previous implementation passed a single-element buffer, overrunning
    /// it whenever `create_infos.len() > 1`. To keep the existing signature,
    /// only the first created swapchain is returned; any additional handles
    /// are received but not exposed by this wrapper.
    ///
    /// # Panics
    ///
    /// Panics if `create_infos` is empty and the driver still reports success.
    #[inline]
    fn create_shared_swapchains_khr(
        &self,
        create_infos: &[impl Cast<Target = SwapchainCreateInfoKHR>],
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<SwapchainKHR> {
        let count = create_infos.len();
        // Room for every swapchain the driver will write, not just the first.
        let mut swapchains = Vec::with_capacity(count);
        let __result = (self.commands().create_shared_swapchains_khr)(
            self.handle(),
            count as u32,
            create_infos.as_ptr().cast(),
            allocator.map_or(ptr::null(), |v| v),
            swapchains.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // SAFETY: on success the driver initialized `count` handles.
            unsafe { swapchains.set_len(count) };
            Ok(swapchains.swap_remove(0))
        } else {
            Err(__result)
        }
    }
}
impl KhrDisplaySwapchainExtension for crate::Device {}
/// VK_KHR_draw_indirect_count device commands.
pub trait KhrDrawIndirectCountExtension: DeviceV1_0 {
    /// Records an indexed indirect draw whose draw count is read from `count_buffer`.
    #[inline]
    fn cmd_draw_indexed_indirect_count_khr(
        &self,
        command_buffer: CommandBuffer,
        buffer: Buffer,
        offset: DeviceSize,
        count_buffer: Buffer,
        count_buffer_offset: DeviceSize,
        max_draw_count: u32,
        stride: u32,
    ) {
        let draw = self.commands().cmd_draw_indexed_indirect_count_khr;
        draw(
            command_buffer,
            buffer,
            offset,
            count_buffer,
            count_buffer_offset,
            max_draw_count,
            stride,
        );
    }

    /// Records a non-indexed indirect draw whose draw count is read from `count_buffer`.
    #[inline]
    fn cmd_draw_indirect_count_khr(
        &self,
        command_buffer: CommandBuffer,
        buffer: Buffer,
        offset: DeviceSize,
        count_buffer: Buffer,
        count_buffer_offset: DeviceSize,
        max_draw_count: u32,
        stride: u32,
    ) {
        let draw = self.commands().cmd_draw_indirect_count_khr;
        draw(
            command_buffer,
            buffer,
            offset,
            count_buffer,
            count_buffer_offset,
            max_draw_count,
            stride,
        );
    }
}
impl KhrDrawIndirectCountExtension for crate::Device {}
/// Marker trait for VK_KHR_driver_properties (adds no commands).
pub trait KhrDriverPropertiesExtension: DeviceV1_0 {}
impl KhrDriverPropertiesExtension for crate::Device {}
/// Marker trait for VK_KHR_external_fence (adds no commands).
pub trait KhrExternalFenceExtension: DeviceV1_0 {}
impl KhrExternalFenceExtension for crate::Device {}
/// VK_KHR_external_fence_capabilities instance commands.
pub trait KhrExternalFenceCapabilitiesExtension: InstanceV1_0 {
    /// Queries which external fence handle types the physical device supports.
    #[inline]
    fn get_physical_device_external_fence_properties_khr(
        &self,
        physical_device: PhysicalDevice,
        external_fence_info: &PhysicalDeviceExternalFenceInfo,
    ) -> ExternalFenceProperties {
        let mut external_fence_properties = MaybeUninit::<ExternalFenceProperties>::uninit();
        let __result = (self
            .commands()
            .get_physical_device_external_fence_properties_khr)(
            physical_device,
            external_fence_info,
            external_fence_properties.as_mut_ptr(),
        );
        // SAFETY: the command above wrote the output structure.
        unsafe { external_fence_properties.assume_init() }
    }
}
impl KhrExternalFenceCapabilitiesExtension for crate::Instance {}
/// VK_KHR_external_fence_fd device commands.
pub trait KhrExternalFenceFdExtension: DeviceV1_0 {
    /// Exports the payload of a fence as a POSIX file descriptor.
    #[inline]
    fn get_fence_fd_khr(&self, get_fd_info: &FenceGetFdInfoKHR) -> crate::VkResult<c_int> {
        let mut fd = MaybeUninit::<c_int>::uninit();
        let __result =
            (self.commands().get_fence_fd_khr)(self.handle(), get_fd_info, fd.as_mut_ptr());
        if __result == Result::SUCCESS {
            Ok(unsafe { fd.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Imports a fence payload from a POSIX file descriptor.
    #[inline]
    fn import_fence_fd_khr(
        &self,
        import_fence_fd_info: &ImportFenceFdInfoKHR,
    ) -> crate::VkResult<()> {
        let __result = (self.commands().import_fence_fd_khr)(self.handle(), import_fence_fd_info);
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
}
impl KhrExternalFenceFdExtension for crate::Device {}
/// VK_KHR_external_fence_win32 device commands.
pub trait KhrExternalFenceWin32Extension: DeviceV1_0 {
    /// Exports the payload of a fence as a Windows handle.
    #[inline]
    fn get_fence_win32_handle_khr(
        &self,
        get_win32_handle_info: &FenceGetWin32HandleInfoKHR,
    ) -> crate::VkResult<HANDLE> {
        let mut handle = MaybeUninit::<HANDLE>::uninit();
        let __result = (self.commands().get_fence_win32_handle_khr)(
            self.handle(),
            get_win32_handle_info,
            handle.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { handle.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Imports a fence payload from a Windows handle.
    #[inline]
    fn import_fence_win32_handle_khr(
        &self,
        import_fence_win32_handle_info: &ImportFenceWin32HandleInfoKHR,
    ) -> crate::VkResult<()> {
        let __result = (self.commands().import_fence_win32_handle_khr)(
            self.handle(),
            import_fence_win32_handle_info,
        );
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
}
impl KhrExternalFenceWin32Extension for crate::Device {}
/// Marker trait for VK_KHR_external_memory (adds no commands).
pub trait KhrExternalMemoryExtension: DeviceV1_0 {}
impl KhrExternalMemoryExtension for crate::Device {}
/// VK_KHR_external_memory_capabilities instance commands.
pub trait KhrExternalMemoryCapabilitiesExtension: InstanceV1_0 {
    /// Queries which external buffer handle types the physical device supports.
    #[inline]
    fn get_physical_device_external_buffer_properties_khr(
        &self,
        physical_device: PhysicalDevice,
        external_buffer_info: &PhysicalDeviceExternalBufferInfo,
    ) -> ExternalBufferProperties {
        let mut external_buffer_properties = MaybeUninit::<ExternalBufferProperties>::uninit();
        let __result = (self
            .commands()
            .get_physical_device_external_buffer_properties_khr)(
            physical_device,
            external_buffer_info,
            external_buffer_properties.as_mut_ptr(),
        );
        // SAFETY: the command above wrote the output structure.
        unsafe { external_buffer_properties.assume_init() }
    }
}
impl KhrExternalMemoryCapabilitiesExtension for crate::Instance {}
/// VK_KHR_external_memory_fd device commands.
pub trait KhrExternalMemoryFdExtension: DeviceV1_0 {
    /// Exports a device memory allocation as a POSIX file descriptor.
    #[inline]
    fn get_memory_fd_khr(&self, get_fd_info: &MemoryGetFdInfoKHR) -> crate::VkResult<c_int> {
        let mut fd = MaybeUninit::<c_int>::uninit();
        let __result =
            (self.commands().get_memory_fd_khr)(self.handle(), get_fd_info, fd.as_mut_ptr());
        if __result == Result::SUCCESS {
            Ok(unsafe { fd.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Queries the memory-type compatibility of an external file descriptor.
    #[inline]
    fn get_memory_fd_properties_khr(
        &self,
        handle_type: ExternalMemoryHandleTypeFlags,
        fd: c_int,
    ) -> crate::VkResult<MemoryFdPropertiesKHR> {
        let mut memory_fd_properties = MaybeUninit::<MemoryFdPropertiesKHR>::uninit();
        let __result = (self.commands().get_memory_fd_properties_khr)(
            self.handle(),
            handle_type,
            fd,
            memory_fd_properties.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { memory_fd_properties.assume_init() })
        } else {
            Err(__result)
        }
    }
}
impl KhrExternalMemoryFdExtension for crate::Device {}
/// VK_KHR_external_memory_win32 device commands.
pub trait KhrExternalMemoryWin32Extension: DeviceV1_0 {
    /// Exports a device memory allocation as a Windows handle.
    #[inline]
    fn get_memory_win32_handle_khr(
        &self,
        get_win32_handle_info: &MemoryGetWin32HandleInfoKHR,
    ) -> crate::VkResult<HANDLE> {
        let mut handle = MaybeUninit::<HANDLE>::uninit();
        let __result = (self.commands().get_memory_win32_handle_khr)(
            self.handle(),
            get_win32_handle_info,
            handle.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { handle.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Queries the memory-type compatibility of an external Windows handle.
    #[inline]
    fn get_memory_win32_handle_properties_khr(
        &self,
        handle_type: ExternalMemoryHandleTypeFlags,
        handle: HANDLE,
    ) -> crate::VkResult<MemoryWin32HandlePropertiesKHR> {
        let mut memory_win32_handle_properties =
            MaybeUninit::<MemoryWin32HandlePropertiesKHR>::uninit();
        let __result = (self.commands().get_memory_win32_handle_properties_khr)(
            self.handle(),
            handle_type,
            handle,
            memory_win32_handle_properties.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { memory_win32_handle_properties.assume_init() })
        } else {
            Err(__result)
        }
    }
}
impl KhrExternalMemoryWin32Extension for crate::Device {}
/// Marker trait for VK_KHR_external_semaphore (adds no commands).
pub trait KhrExternalSemaphoreExtension: DeviceV1_0 {}
impl KhrExternalSemaphoreExtension for crate::Device {}
/// VK_KHR_external_semaphore_capabilities instance commands.
pub trait KhrExternalSemaphoreCapabilitiesExtension: InstanceV1_0 {
    /// Queries which external semaphore handle types the physical device supports.
    #[inline]
    fn get_physical_device_external_semaphore_properties_khr(
        &self,
        physical_device: PhysicalDevice,
        external_semaphore_info: &PhysicalDeviceExternalSemaphoreInfo,
    ) -> ExternalSemaphoreProperties {
        let mut external_semaphore_properties =
            MaybeUninit::<ExternalSemaphoreProperties>::uninit();
        let __result = (self
            .commands()
            .get_physical_device_external_semaphore_properties_khr)(
            physical_device,
            external_semaphore_info,
            external_semaphore_properties.as_mut_ptr(),
        );
        // SAFETY: the command above wrote the output structure.
        unsafe { external_semaphore_properties.assume_init() }
    }
}
impl KhrExternalSemaphoreCapabilitiesExtension for crate::Instance {}
/// VK_KHR_external_semaphore_fd device commands.
pub trait KhrExternalSemaphoreFdExtension: DeviceV1_0 {
    /// Exports the payload of a semaphore as a POSIX file descriptor.
    #[inline]
    fn get_semaphore_fd_khr(&self, get_fd_info: &SemaphoreGetFdInfoKHR) -> crate::VkResult<c_int> {
        let mut fd = MaybeUninit::<c_int>::uninit();
        let __result =
            (self.commands().get_semaphore_fd_khr)(self.handle(), get_fd_info, fd.as_mut_ptr());
        if __result == Result::SUCCESS {
            Ok(unsafe { fd.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Imports a semaphore payload from a POSIX file descriptor.
    #[inline]
    fn import_semaphore_fd_khr(
        &self,
        import_semaphore_fd_info: &ImportSemaphoreFdInfoKHR,
    ) -> crate::VkResult<()> {
        let __result =
            (self.commands().import_semaphore_fd_khr)(self.handle(), import_semaphore_fd_info);
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
}
impl KhrExternalSemaphoreFdExtension for crate::Device {}
/// VK_KHR_external_semaphore_win32 device commands.
pub trait KhrExternalSemaphoreWin32Extension: DeviceV1_0 {
    /// Exports the payload of a semaphore as a Windows handle.
    #[inline]
    fn get_semaphore_win32_handle_khr(
        &self,
        get_win32_handle_info: &SemaphoreGetWin32HandleInfoKHR,
    ) -> crate::VkResult<HANDLE> {
        let mut handle = MaybeUninit::<HANDLE>::uninit();
        let __result = (self.commands().get_semaphore_win32_handle_khr)(
            self.handle(),
            get_win32_handle_info,
            handle.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { handle.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Imports a semaphore payload from a Windows handle.
    #[inline]
    fn import_semaphore_win32_handle_khr(
        &self,
        import_semaphore_win32_handle_info: &ImportSemaphoreWin32HandleInfoKHR,
    ) -> crate::VkResult<()> {
        let __result = (self.commands().import_semaphore_win32_handle_khr)(
            self.handle(),
            import_semaphore_win32_handle_info,
        );
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
}
impl KhrExternalSemaphoreWin32Extension for crate::Device {}
/// VK_KHR_get_display_properties2 instance commands.
pub trait KhrGetDisplayProperties2Extension: InstanceV1_0 {
    /// Returns extended properties of all modes of the given display.
    #[inline]
    fn get_display_mode_properties2_khr(
        &self,
        physical_device: PhysicalDevice,
        display: DisplayKHR,
    ) -> crate::VkResult<Vec<DisplayModeProperties2KHR>> {
        // Two-call enumeration: first query the count, then fill the buffer.
        let mut property_count = 0;
        (self.commands().get_display_mode_properties2_khr)(
            physical_device,
            display,
            &mut property_count,
            ptr::null_mut(),
        );
        let mut properties = Vec::with_capacity(property_count as usize);
        let __result = (self.commands().get_display_mode_properties2_khr)(
            physical_device,
            display,
            &mut property_count,
            properties.as_mut_ptr(),
        );
        debug_assert!(properties.capacity() == property_count as usize);
        unsafe { properties.set_len(property_count as usize) };
        if __result == Result::SUCCESS {
            Ok(properties)
        } else {
            Err(__result)
        }
    }
    /// Returns extended capabilities of a display plane.
    #[inline]
    fn get_display_plane_capabilities2_khr(
        &self,
        physical_device: PhysicalDevice,
        display_plane_info: &DisplayPlaneInfo2KHR,
    ) -> crate::VkResult<DisplayPlaneCapabilities2KHR> {
        let mut capabilities = MaybeUninit::<DisplayPlaneCapabilities2KHR>::uninit();
        let __result = (self.commands().get_display_plane_capabilities2_khr)(
            physical_device,
            display_plane_info,
            capabilities.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { capabilities.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Returns extended properties of all display planes of the physical device.
    #[inline]
    fn get_physical_device_display_plane_properties2_khr(
        &self,
        physical_device: PhysicalDevice,
    ) -> crate::VkResult<Vec<DisplayPlaneProperties2KHR>> {
        // Two-call enumeration: first query the count, then fill the buffer.
        let mut property_count = 0;
        (self
            .commands()
            .get_physical_device_display_plane_properties2_khr)(
            physical_device,
            &mut property_count,
            ptr::null_mut(),
        );
        let mut properties = Vec::with_capacity(property_count as usize);
        let __result = (self
            .commands()
            .get_physical_device_display_plane_properties2_khr)(
            physical_device,
            &mut property_count,
            properties.as_mut_ptr(),
        );
        debug_assert!(properties.capacity() == property_count as usize);
        unsafe { properties.set_len(property_count as usize) };
        if __result == Result::SUCCESS {
            Ok(properties)
        } else {
            Err(__result)
        }
    }
    /// Returns extended properties of all displays attached to the physical device.
    #[inline]
    fn get_physical_device_display_properties2_khr(
        &self,
        physical_device: PhysicalDevice,
    ) -> crate::VkResult<Vec<DisplayProperties2KHR>> {
        // Two-call enumeration: first query the count, then fill the buffer.
        let mut property_count = 0;
        (self.commands().get_physical_device_display_properties2_khr)(
            physical_device,
            &mut property_count,
            ptr::null_mut(),
        );
        let mut properties = Vec::with_capacity(property_count as usize);
        let __result = (self.commands().get_physical_device_display_properties2_khr)(
            physical_device,
            &mut property_count,
            properties.as_mut_ptr(),
        );
        debug_assert!(properties.capacity() == property_count as usize);
        unsafe { properties.set_len(property_count as usize) };
        if __result == Result::SUCCESS {
            Ok(properties)
        } else {
            Err(__result)
        }
    }
}
impl KhrGetDisplayProperties2Extension for crate::Instance {}
/// VK_KHR_get_memory_requirements2 device commands.
pub trait KhrGetMemoryRequirements2Extension: DeviceV1_0 {
    /// Queries the memory requirements of a buffer via an extensible structure.
    #[inline]
    fn get_buffer_memory_requirements2_khr(
        &self,
        info: &BufferMemoryRequirementsInfo2,
    ) -> MemoryRequirements2 {
        let mut memory_requirements = MaybeUninit::<MemoryRequirements2>::uninit();
        let __result = (self.commands().get_buffer_memory_requirements2_khr)(
            self.handle(),
            info,
            memory_requirements.as_mut_ptr(),
        );
        // SAFETY: the command above wrote the output structure.
        unsafe { memory_requirements.assume_init() }
    }
    /// Queries the memory requirements of an image via an extensible structure.
    #[inline]
    fn get_image_memory_requirements2_khr(
        &self,
        info: &ImageMemoryRequirementsInfo2,
    ) -> MemoryRequirements2 {
        let mut memory_requirements = MaybeUninit::<MemoryRequirements2>::uninit();
        let __result = (self.commands().get_image_memory_requirements2_khr)(
            self.handle(),
            info,
            memory_requirements.as_mut_ptr(),
        );
        // SAFETY: the command above wrote the output structure.
        unsafe { memory_requirements.assume_init() }
    }
    /// Queries the sparse memory requirements of an image.
    #[inline]
    fn get_image_sparse_memory_requirements2_khr(
        &self,
        info: &ImageSparseMemoryRequirementsInfo2,
    ) -> Vec<SparseImageMemoryRequirements2> {
        // Two-call enumeration: first query the count, then fill the buffer.
        let mut sparse_memory_requirement_count = 0;
        (self.commands().get_image_sparse_memory_requirements2_khr)(
            self.handle(),
            info,
            &mut sparse_memory_requirement_count,
            ptr::null_mut(),
        );
        let mut sparse_memory_requirements =
            Vec::with_capacity(sparse_memory_requirement_count as usize);
        let __result = (self.commands().get_image_sparse_memory_requirements2_khr)(
            self.handle(),
            info,
            &mut sparse_memory_requirement_count,
            sparse_memory_requirements.as_mut_ptr(),
        );
        debug_assert!(
            sparse_memory_requirements.capacity() == sparse_memory_requirement_count as usize
        );
        unsafe { sparse_memory_requirements.set_len(sparse_memory_requirement_count as usize) };
        sparse_memory_requirements
    }
}
impl KhrGetMemoryRequirements2Extension for crate::Device {}
/// VK_KHR_get_physical_device_properties2 instance commands.
pub trait KhrGetPhysicalDeviceProperties2Extension: InstanceV1_0 {
    /// Queries the fine-grained features of the physical device (extensible form).
    #[inline]
    fn get_physical_device_features2_khr(
        &self,
        physical_device: PhysicalDevice,
    ) -> PhysicalDeviceFeatures2 {
        let mut features = MaybeUninit::<PhysicalDeviceFeatures2>::uninit();
        let __result = (self.commands().get_physical_device_features2_khr)(
            physical_device,
            features.as_mut_ptr(),
        );
        // SAFETY: the command above wrote the output structure.
        unsafe { features.assume_init() }
    }
    /// Queries the properties of a format on the physical device (extensible form).
    #[inline]
    fn get_physical_device_format_properties2_khr(
        &self,
        physical_device: PhysicalDevice,
        format: Format,
    ) -> FormatProperties2 {
        let mut format_properties = MaybeUninit::<FormatProperties2>::uninit();
        let __result = (self.commands().get_physical_device_format_properties2_khr)(
            physical_device,
            format,
            format_properties.as_mut_ptr(),
        );
        // SAFETY: the command above wrote the output structure.
        unsafe { format_properties.assume_init() }
    }
    /// Queries image format support on the physical device (extensible form).
    #[inline]
    fn get_physical_device_image_format_properties2_khr(
        &self,
        physical_device: PhysicalDevice,
        image_format_info: &PhysicalDeviceImageFormatInfo2,
    ) -> crate::VkResult<ImageFormatProperties2> {
        let mut image_format_properties = MaybeUninit::<ImageFormatProperties2>::uninit();
        let __result = (self
            .commands()
            .get_physical_device_image_format_properties2_khr)(
            physical_device,
            image_format_info,
            image_format_properties.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { image_format_properties.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Queries the memory properties of the physical device (extensible form).
    #[inline]
    fn get_physical_device_memory_properties2_khr(
        &self,
        physical_device: PhysicalDevice,
    ) -> PhysicalDeviceMemoryProperties2 {
        let mut memory_properties = MaybeUninit::<PhysicalDeviceMemoryProperties2>::uninit();
        let __result = (self.commands().get_physical_device_memory_properties2_khr)(
            physical_device,
            memory_properties.as_mut_ptr(),
        );
        // SAFETY: the command above wrote the output structure.
        unsafe { memory_properties.assume_init() }
    }
    /// Queries the general properties of the physical device (extensible form).
    #[inline]
    fn get_physical_device_properties2_khr(
        &self,
        physical_device: PhysicalDevice,
    ) -> PhysicalDeviceProperties2 {
        let mut properties = MaybeUninit::<PhysicalDeviceProperties2>::uninit();
        let __result = (self.commands().get_physical_device_properties2_khr)(
            physical_device,
            properties.as_mut_ptr(),
        );
        // SAFETY: the command above wrote the output structure.
        unsafe { properties.assume_init() }
    }
    /// Lists the queue family properties of the physical device (extensible form).
    #[inline]
    fn get_physical_device_queue_family_properties2_khr(
        &self,
        physical_device: PhysicalDevice,
    ) -> Vec<QueueFamilyProperties2> {
        // Two-call enumeration: first query the count, then fill the buffer.
        let mut queue_family_property_count = 0;
        (self
            .commands()
            .get_physical_device_queue_family_properties2_khr)(
            physical_device,
            &mut queue_family_property_count,
            ptr::null_mut(),
        );
        let mut queue_family_properties = Vec::with_capacity(queue_family_property_count as usize);
        let __result = (self
            .commands()
            .get_physical_device_queue_family_properties2_khr)(
            physical_device,
            &mut queue_family_property_count,
            queue_family_properties.as_mut_ptr(),
        );
        debug_assert!(queue_family_properties.capacity() == queue_family_property_count as usize);
        unsafe { queue_family_properties.set_len(queue_family_property_count as usize) };
        queue_family_properties
    }
    /// Lists sparse image format properties of the physical device (extensible form).
    #[inline]
    fn get_physical_device_sparse_image_format_properties2_khr(
        &self,
        physical_device: PhysicalDevice,
        format_info: &PhysicalDeviceSparseImageFormatInfo2,
    ) -> Vec<SparseImageFormatProperties2> {
        // Two-call enumeration: first query the count, then fill the buffer.
        let mut property_count = 0;
        (self
            .commands()
            .get_physical_device_sparse_image_format_properties2_khr)(
            physical_device,
            format_info,
            &mut property_count,
            ptr::null_mut(),
        );
        let mut properties = Vec::with_capacity(property_count as usize);
        let __result = (self
            .commands()
            .get_physical_device_sparse_image_format_properties2_khr)(
            physical_device,
            format_info,
            &mut property_count,
            properties.as_mut_ptr(),
        );
        debug_assert!(properties.capacity() == property_count as usize);
        unsafe { properties.set_len(property_count as usize) };
        properties
    }
}
impl KhrGetPhysicalDeviceProperties2Extension for crate::Instance {}
/// VK_KHR_get_surface_capabilities2 instance commands.
pub trait KhrGetSurfaceCapabilities2Extension: InstanceV1_0 {
    /// Queries the capabilities of a surface (extensible form).
    #[inline]
    fn get_physical_device_surface_capabilities2_khr(
        &self,
        physical_device: PhysicalDevice,
        surface_info: &PhysicalDeviceSurfaceInfo2KHR,
    ) -> crate::VkResult<SurfaceCapabilities2KHR> {
        let mut surface_capabilities = MaybeUninit::<SurfaceCapabilities2KHR>::uninit();
        let __result = (self
            .commands()
            .get_physical_device_surface_capabilities2_khr)(
            physical_device,
            surface_info,
            surface_capabilities.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { surface_capabilities.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Lists the formats supported by a surface (extensible form).
    #[inline]
    fn get_physical_device_surface_formats2_khr(
        &self,
        physical_device: PhysicalDevice,
        surface_info: &PhysicalDeviceSurfaceInfo2KHR,
    ) -> crate::VkResult<Vec<SurfaceFormat2KHR>> {
        // Two-call enumeration: first query the count, then fill the buffer.
        let mut surface_format_count = 0;
        (self.commands().get_physical_device_surface_formats2_khr)(
            physical_device,
            surface_info,
            &mut surface_format_count,
            ptr::null_mut(),
        );
        let mut surface_formats = Vec::with_capacity(surface_format_count as usize);
        let __result = (self.commands().get_physical_device_surface_formats2_khr)(
            physical_device,
            surface_info,
            &mut surface_format_count,
            surface_formats.as_mut_ptr(),
        );
        debug_assert!(surface_formats.capacity() == surface_format_count as usize);
        unsafe { surface_formats.set_len(surface_format_count as usize) };
        if __result == Result::SUCCESS {
            Ok(surface_formats)
        } else {
            Err(__result)
        }
    }
}
impl KhrGetSurfaceCapabilities2Extension for crate::Instance {}
/// Marker trait for VK_KHR_image_format_list (adds no commands).
pub trait KhrImageFormatListExtension: DeviceV1_0 {}
impl KhrImageFormatListExtension for crate::Device {}
/// Marker trait for VK_KHR_imageless_framebuffer (adds no commands).
pub trait KhrImagelessFramebufferExtension: DeviceV1_0 {}
impl KhrImagelessFramebufferExtension for crate::Device {}
/// Marker trait for VK_KHR_incremental_present (adds no commands).
pub trait KhrIncrementalPresentExtension: DeviceV1_0 {}
impl KhrIncrementalPresentExtension for crate::Device {}
/// VK_KHR_maintenance1 device commands.
pub trait KhrMaintenance1Extension: DeviceV1_0 {
    /// Trims internal allocations from a command pool without freeing its command buffers.
    #[inline]
    fn trim_command_pool_khr(&self, command_pool: CommandPool, flags: CommandPoolTrimFlags) {
        let trim = self.commands().trim_command_pool_khr;
        trim(self.handle(), command_pool, flags);
    }
}
impl KhrMaintenance1Extension for crate::Device {}
/// Marker trait for VK_KHR_maintenance2 (adds no commands).
pub trait KhrMaintenance2Extension: DeviceV1_0 {}
impl KhrMaintenance2Extension for crate::Device {}
/// VK_KHR_maintenance3 device commands.
pub trait KhrMaintenance3Extension: DeviceV1_0 {
    /// Checks whether a descriptor set layout can be created on this device.
    #[inline]
    fn get_descriptor_set_layout_support_khr(
        &self,
        create_info: &DescriptorSetLayoutCreateInfo,
    ) -> DescriptorSetLayoutSupport {
        let mut support = MaybeUninit::<DescriptorSetLayoutSupport>::uninit();
        let __result = (self.commands().get_descriptor_set_layout_support_khr)(
            self.handle(),
            create_info,
            support.as_mut_ptr(),
        );
        // SAFETY: the command above wrote the output structure.
        unsafe { support.assume_init() }
    }
}
impl KhrMaintenance3Extension for crate::Device {}
/// Marker trait for VK_KHR_multiview (adds no commands).
pub trait KhrMultiviewExtension: DeviceV1_0 {}
impl KhrMultiviewExtension for crate::Device {}
/// VK_KHR_performance_query device commands.
pub trait KhrPerformanceQueryExtension: DeviceV1_0 {
    /// Acquires the device's profiling lock, required before recording performance queries.
    #[inline]
    fn acquire_profiling_lock_khr(
        &self,
        info: &AcquireProfilingLockInfoKHR,
    ) -> crate::VkResult<()> {
        let __result = (self.commands().acquire_profiling_lock_khr)(self.handle(), info);
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
    /// Lists the performance counters (and their descriptions) of a queue family.
    #[inline]
    fn enumerate_physical_device_queue_family_performance_query_counters_khr(
        &self,
        physical_device: PhysicalDevice,
        queue_family_index: u32,
    ) -> crate::VkResult<(
        Vec<PerformanceCounterKHR>,
        Vec<PerformanceCounterDescriptionKHR>,
    )> {
        // Two-call enumeration; both output arrays share the same count.
        let mut counter_count = 0;
        (self
            .commands()
            .enumerate_physical_device_queue_family_performance_query_counters_khr)(
            physical_device,
            queue_family_index,
            &mut counter_count,
            ptr::null_mut(),
            ptr::null_mut(),
        );
        let mut counters = Vec::with_capacity(counter_count as usize);
        let mut counter_descriptions = Vec::with_capacity(counter_count as usize);
        let __result = (self
            .commands()
            .enumerate_physical_device_queue_family_performance_query_counters_khr)(
            physical_device,
            queue_family_index,
            &mut counter_count,
            counters.as_mut_ptr(),
            counter_descriptions.as_mut_ptr(),
        );
        debug_assert!(counters.capacity() == counter_count as usize);
        unsafe { counters.set_len(counter_count as usize) };
        debug_assert!(counter_descriptions.capacity() == counter_count as usize);
        unsafe { counter_descriptions.set_len(counter_count as usize) };
        if __result == Result::SUCCESS {
            Ok((counters, counter_descriptions))
        } else {
            Err(__result)
        }
    }
    /// Returns how many passes are needed to record the given performance query pool.
    #[inline]
    fn get_physical_device_queue_family_performance_query_passes_khr(
        &self,
        physical_device: PhysicalDevice,
        performance_query_create_info: &QueryPoolPerformanceCreateInfoKHR,
    ) -> u32 {
        let mut num_passes = MaybeUninit::<u32>::uninit();
        let __result = (self
            .commands()
            .get_physical_device_queue_family_performance_query_passes_khr)(
            physical_device,
            performance_query_create_info,
            num_passes.as_mut_ptr(),
        );
        // SAFETY: the command above wrote the output value.
        unsafe { num_passes.assume_init() }
    }
    /// Releases the profiling lock previously acquired with `acquire_profiling_lock_khr`.
    #[inline]
    fn release_profiling_lock_khr(&self) {
        let __result = (self.commands().release_profiling_lock_khr)(self.handle());
    }
}
impl KhrPerformanceQueryExtension for crate::Device {}
/// VK_KHR_pipeline_executable_properties device commands.
pub trait KhrPipelineExecutablePropertiesExtension: DeviceV1_0 {
    /// Lists the internal (e.g. compiler IR) representations of a pipeline executable.
    #[inline]
    fn get_pipeline_executable_internal_representations_khr(
        &self,
        executable_info: &PipelineExecutableInfoKHR,
    ) -> crate::VkResult<Vec<PipelineExecutableInternalRepresentationKHR>> {
        // Two-call enumeration: first query the count, then fill the buffer.
        let mut internal_representation_count = 0;
        (self
            .commands()
            .get_pipeline_executable_internal_representations_khr)(
            self.handle(),
            executable_info,
            &mut internal_representation_count,
            ptr::null_mut(),
        );
        let mut internal_representations =
            Vec::with_capacity(internal_representation_count as usize);
        let __result = (self
            .commands()
            .get_pipeline_executable_internal_representations_khr)(
            self.handle(),
            executable_info,
            &mut internal_representation_count,
            internal_representations.as_mut_ptr(),
        );
        debug_assert!(
            internal_representations.capacity() == internal_representation_count as usize
        );
        unsafe { internal_representations.set_len(internal_representation_count as usize) };
        if __result == Result::SUCCESS {
            Ok(internal_representations)
        } else {
            Err(__result)
        }
    }
    /// Lists the executables (shaders after compilation) that make up a pipeline.
    #[inline]
    fn get_pipeline_executable_properties_khr(
        &self,
        pipeline_info: &PipelineInfoKHR,
    ) -> crate::VkResult<Vec<PipelineExecutablePropertiesKHR>> {
        // Two-call enumeration: first query the count, then fill the buffer.
        let mut executable_count = 0;
        (self.commands().get_pipeline_executable_properties_khr)(
            self.handle(),
            pipeline_info,
            &mut executable_count,
            ptr::null_mut(),
        );
        let mut properties = Vec::with_capacity(executable_count as usize);
        let __result = (self.commands().get_pipeline_executable_properties_khr)(
            self.handle(),
            pipeline_info,
            &mut executable_count,
            properties.as_mut_ptr(),
        );
        debug_assert!(properties.capacity() == executable_count as usize);
        unsafe { properties.set_len(executable_count as usize) };
        if __result == Result::SUCCESS {
            Ok(properties)
        } else {
            Err(__result)
        }
    }
    /// Lists the compile-time statistics of a pipeline executable.
    #[inline]
    fn get_pipeline_executable_statistics_khr(
        &self,
        executable_info: &PipelineExecutableInfoKHR,
    ) -> crate::VkResult<Vec<PipelineExecutableStatisticKHR>> {
        // Two-call enumeration: first query the count, then fill the buffer.
        let mut statistic_count = 0;
        (self.commands().get_pipeline_executable_statistics_khr)(
            self.handle(),
            executable_info,
            &mut statistic_count,
            ptr::null_mut(),
        );
        let mut statistics = Vec::with_capacity(statistic_count as usize);
        let __result = (self.commands().get_pipeline_executable_statistics_khr)(
            self.handle(),
            executable_info,
            &mut statistic_count,
            statistics.as_mut_ptr(),
        );
        debug_assert!(statistics.capacity() == statistic_count as usize);
        unsafe { statistics.set_len(statistic_count as usize) };
        if __result == Result::SUCCESS {
            Ok(statistics)
        } else {
            Err(__result)
        }
    }
}
impl KhrPipelineExecutablePropertiesExtension for crate::Device {}
/// Marker trait for VK_KHR_pipeline_library (adds no commands).
pub trait KhrPipelineLibraryExtension: DeviceV1_0 {}
impl KhrPipelineLibraryExtension for crate::Device {}
/// Marker trait for VK_KHR_portability_subset (adds no commands).
pub trait KhrPortabilitySubsetExtension: DeviceV1_0 {}
impl KhrPortabilitySubsetExtension for crate::Device {}
/// VK_KHR_push_descriptor device commands.
pub trait KhrPushDescriptorExtension: DeviceV1_0 {
    /// Pushes descriptor updates directly into a command buffer.
    #[inline]
    fn cmd_push_descriptor_set_khr(
        &self,
        command_buffer: CommandBuffer,
        pipeline_bind_point: PipelineBindPoint,
        layout: PipelineLayout,
        set: u32,
        descriptor_writes: &[impl Cast<Target = WriteDescriptorSet>],
    ) {
        let push = self.commands().cmd_push_descriptor_set_khr;
        push(
            command_buffer,
            pipeline_bind_point,
            layout,
            set,
            descriptor_writes.len() as u32,
            descriptor_writes.as_ptr().cast(),
        );
    }

    /// Pushes descriptor updates described by a descriptor update template.
    #[inline]
    fn cmd_push_descriptor_set_with_template_khr(
        &self,
        command_buffer: CommandBuffer,
        descriptor_update_template: DescriptorUpdateTemplate,
        layout: PipelineLayout,
        set: u32,
        data: &c_void,
    ) {
        let push = self.commands().cmd_push_descriptor_set_with_template_khr;
        push(
            command_buffer,
            descriptor_update_template,
            layout,
            set,
            data,
        );
    }
}
impl KhrPushDescriptorExtension for crate::Device {}
pub trait KhrRayTracingExtension: DeviceV1_0 {
    /// Binds device memory to acceleration structures.
    #[inline]
    fn bind_acceleration_structure_memory_khr(
        &self,
        bind_infos: &[impl Cast<Target = BindAccelerationStructureMemoryInfoKHR>],
    ) -> crate::VkResult<()> {
        let __result = (self.commands().bind_acceleration_structure_memory_khr)(
            self.handle(),
            bind_infos.len() as u32,
            bind_infos.as_ptr().cast(),
        );
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
    /// Builds acceleration structures on the host.
    #[inline]
    fn build_acceleration_structure_khr(
        &self,
        infos: &[impl Cast<Target = AccelerationStructureBuildGeometryInfoKHR>],
        offset_infos: &[&AccelerationStructureBuildOffsetInfoKHR],
    ) -> crate::VkResult<()> {
        // NOTE(review): `offset_infos` is assumed to have the same length as
        // `infos`; only `infos.len()` is passed to the driver — confirm callers.
        let __result = (self.commands().build_acceleration_structure_khr)(
            self.handle(),
            infos.len() as u32,
            infos.as_ptr().cast(),
            offset_infos.as_ptr().cast(),
        );
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
    /// Records an acceleration structure build whose parameters come from a buffer.
    #[inline]
    fn cmd_build_acceleration_structure_indirect_khr(
        &self,
        command_buffer: CommandBuffer,
        info: &AccelerationStructureBuildGeometryInfoKHR,
        indirect_buffer: Buffer,
        indirect_offset: DeviceSize,
        indirect_stride: u32,
    ) {
        let __result = (self
            .commands()
            .cmd_build_acceleration_structure_indirect_khr)(
            command_buffer,
            info,
            indirect_buffer,
            indirect_offset,
            indirect_stride,
        );
    }
    /// Records an acceleration structure build into a command buffer.
    #[inline]
    fn cmd_build_acceleration_structure_khr(
        &self,
        command_buffer: CommandBuffer,
        infos: &[impl Cast<Target = AccelerationStructureBuildGeometryInfoKHR>],
        offset_infos: &[&AccelerationStructureBuildOffsetInfoKHR],
    ) {
        let __result = (self.commands().cmd_build_acceleration_structure_khr)(
            command_buffer,
            infos.len() as u32,
            infos.as_ptr().cast(),
            offset_infos.as_ptr().cast(),
        );
    }
    /// Records a copy between acceleration structures.
    #[inline]
    fn cmd_copy_acceleration_structure_khr(
        &self,
        command_buffer: CommandBuffer,
        info: &CopyAccelerationStructureInfoKHR,
    ) {
        let __result = (self.commands().cmd_copy_acceleration_structure_khr)(command_buffer, info);
    }
    /// Records a serializing copy of an acceleration structure to memory.
    #[inline]
    fn cmd_copy_acceleration_structure_to_memory_khr(
        &self,
        command_buffer: CommandBuffer,
        info: &CopyAccelerationStructureToMemoryInfoKHR,
    ) {
        let __result =
            (self
                .commands()
                .cmd_copy_acceleration_structure_to_memory_khr)(command_buffer, info);
    }
    /// Records a deserializing copy from memory into an acceleration structure.
    #[inline]
    fn cmd_copy_memory_to_acceleration_structure_khr(
        &self,
        command_buffer: CommandBuffer,
        info: &CopyMemoryToAccelerationStructureInfoKHR,
    ) {
        let __result =
            (self
                .commands()
                .cmd_copy_memory_to_acceleration_structure_khr)(command_buffer, info);
    }
    /// Records a trace-rays dispatch whose dimensions come from a buffer.
    #[inline]
    fn cmd_trace_rays_indirect_khr(
        &self,
        command_buffer: CommandBuffer,
        raygen_shader_binding_table: &StridedBufferRegionKHR,
        miss_shader_binding_table: &StridedBufferRegionKHR,
        hit_shader_binding_table: &StridedBufferRegionKHR,
        callable_shader_binding_table: &StridedBufferRegionKHR,
        buffer: Buffer,
        offset: DeviceSize,
    ) {
        let __result = (self.commands().cmd_trace_rays_indirect_khr)(
            command_buffer,
            raygen_shader_binding_table,
            miss_shader_binding_table,
            hit_shader_binding_table,
            callable_shader_binding_table,
            buffer,
            offset,
        );
    }
    /// Records a trace-rays dispatch with explicit width/height/depth.
    #[inline]
    fn cmd_trace_rays_khr(
        &self,
        command_buffer: CommandBuffer,
        raygen_shader_binding_table: &StridedBufferRegionKHR,
        miss_shader_binding_table: &StridedBufferRegionKHR,
        hit_shader_binding_table: &StridedBufferRegionKHR,
        callable_shader_binding_table: &StridedBufferRegionKHR,
        width: u32,
        height: u32,
        depth: u32,
    ) {
        let __result = (self.commands().cmd_trace_rays_khr)(
            command_buffer,
            raygen_shader_binding_table,
            miss_shader_binding_table,
            hit_shader_binding_table,
            callable_shader_binding_table,
            width,
            height,
            depth,
        );
    }
    /// Records property queries for the given acceleration structures into a query pool.
    #[inline]
    fn cmd_write_acceleration_structures_properties_khr(
        &self,
        command_buffer: CommandBuffer,
        acceleration_structures: &[AccelerationStructureKHR],
        query_type: QueryType,
        query_pool: QueryPool,
        first_query: u32,
    ) {
        let __result = (self
            .commands()
            .cmd_write_acceleration_structures_properties_khr)(
            command_buffer,
            acceleration_structures.len() as u32,
            acceleration_structures.as_ptr(),
            query_type,
            query_pool,
            first_query,
        );
    }
    /// Copies an acceleration structure on the host.
    #[inline]
    fn copy_acceleration_structure_khr(
        &self,
        info: &CopyAccelerationStructureInfoKHR,
    ) -> crate::VkResult<()> {
        let __result = (self.commands().copy_acceleration_structure_khr)(self.handle(), info);
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
    /// Serializes an acceleration structure to memory on the host.
    #[inline]
    fn copy_acceleration_structure_to_memory_khr(
        &self,
        info: &CopyAccelerationStructureToMemoryInfoKHR,
    ) -> crate::VkResult<()> {
        let __result =
            (self.commands().copy_acceleration_structure_to_memory_khr)(self.handle(), info);
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
    /// Deserializes an acceleration structure from memory on the host.
    #[inline]
    fn copy_memory_to_acceleration_structure_khr(
        &self,
        info: &CopyMemoryToAccelerationStructureInfoKHR,
    ) -> crate::VkResult<()> {
        let __result =
            (self.commands().copy_memory_to_acceleration_structure_khr)(self.handle(), info);
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
    /// Creates a new acceleration structure object.
    #[inline]
    fn create_acceleration_structure_khr(
        &self,
        create_info: &AccelerationStructureCreateInfoKHR,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<AccelerationStructureKHR> {
        let mut acceleration_structure = MaybeUninit::<AccelerationStructureKHR>::uninit();
        let __result = (self.commands().create_acceleration_structure_khr)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            acceleration_structure.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { acceleration_structure.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Creates ray tracing pipelines from `create_infos`.
    ///
    /// NOTE(review): only a single `Pipeline` out-slot is provided even though
    /// `create_infos` may contain several entries, so only the first created
    /// pipeline is returned — this looks like a limitation of the generator;
    /// confirm against the upstream binding generator before relying on it
    /// with more than one create info.
    #[inline]
    fn create_ray_tracing_pipelines_khr(
        &self,
        pipeline_cache: PipelineCache,
        create_infos: &[impl Cast<Target = RayTracingPipelineCreateInfoKHR>],
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<Pipeline> {
        let mut pipelines = MaybeUninit::<Pipeline>::uninit();
        let __result = (self.commands().create_ray_tracing_pipelines_khr)(
            self.handle(),
            pipeline_cache,
            create_infos.len() as u32,
            // `Cast` guarantees the slice is layout-compatible with the raw
            // struct, so the pointer cast is sound.
            create_infos.as_ptr().cast(),
            allocator.map_or(ptr::null(), |v| v),
            pipelines.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { pipelines.assume_init() })
        } else {
            Err(__result)
        }
    }
#[inline]
fn destroy_acceleration_structure_khr(
&self,
acceleration_structure: AccelerationStructureKHR,
allocator: Option<&AllocationCallbacks>,
) {
let __result = (self.commands().destroy_acceleration_structure_khr)(
self.handle(),
acceleration_structure,
allocator.map_or(ptr::null(), |v| v),
);
}
    /// Queries the device address of the acceleration structure named in
    /// `info`.
    ///
    /// NOTE(review): the value produced by the command is bound to `__result`
    /// and then dropped, so callers never see the queried address — this looks
    /// like a generator defect (the raw command returns `VkDeviceAddress`);
    /// confirm against the upstream generator before changing the signature.
    #[inline]
    fn get_acceleration_structure_device_address_khr(
        &self,
        info: &AccelerationStructureDeviceAddressInfoKHR,
    ) {
        let __result =
            (self
                .commands()
                .get_acceleration_structure_device_address_khr)(self.handle(), info);
    }
    /// Returns the memory requirements for the acceleration structure
    /// described by `info`.
    #[inline]
    fn get_acceleration_structure_memory_requirements_khr(
        &self,
        info: &AccelerationStructureMemoryRequirementsInfoKHR,
    ) -> MemoryRequirements2 {
        // Out-parameter filled in by the driver; the command itself returns
        // no status code, so the value is assumed initialized afterwards.
        let mut memory_requirements = MaybeUninit::<MemoryRequirements2>::uninit();
        let __result = (self
            .commands()
            .get_acceleration_structure_memory_requirements_khr)(
            self.handle(),
            info,
            memory_requirements.as_mut_ptr(),
        );
        unsafe { memory_requirements.assume_init() }
    }
    /// Checks whether a serialized acceleration structure `version` is
    /// compatible with this device, returning the raw `Result` on failure.
    #[inline]
    fn get_device_acceleration_structure_compatibility_khr(
        &self,
        version: &AccelerationStructureVersionKHR,
    ) -> crate::VkResult<()> {
        let __result = (self
            .commands()
            .get_device_acceleration_structure_compatibility_khr)(
            self.handle(), version
        );
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
#[inline]
fn get_ray_tracing_capture_replay_shader_group_handles_khr(
&self,
pipeline: Pipeline,
first_group: u32,
group_count: u32,
data: &mut [u8],
) -> crate::VkResult<()> {
let __result = (self
.commands()
.get_ray_tracing_capture_replay_shader_group_handles_khr)(
self.handle(),
pipeline,
first_group,
group_count,
data.len() as usize,
data.as_ptr() as *mut c_void,
);
if __result == Result::SUCCESS {
Ok(())
} else {
Err(__result)
}
}
#[inline]
fn get_ray_tracing_shader_group_handles_khr(
&self,
pipeline: Pipeline,
first_group: u32,
group_count: u32,
data: &mut [u8],
) -> crate::VkResult<()> {
let __result = (self.commands().get_ray_tracing_shader_group_handles_khr)(
self.handle(),
pipeline,
first_group,
group_count,
data.len() as usize,
data.as_ptr() as *mut c_void,
);
if __result == Result::SUCCESS {
Ok(())
} else {
Err(__result)
}
}
#[inline]
fn write_acceleration_structures_properties_khr(
&self,
acceleration_structures: &[AccelerationStructureKHR],
query_type: QueryType,
data: &mut [u8],
stride: usize,
) -> crate::VkResult<()> {
let __result = (self.commands().write_acceleration_structures_properties_khr)(
self.handle(),
acceleration_structures.len() as u32,
acceleration_structures.as_ptr(),
query_type,
data.len() as usize,
data.as_ptr() as *mut c_void,
stride,
);
if __result == Result::SUCCESS {
Ok(())
} else {
Err(__result)
}
}
}
impl KhrRayTracingExtension for crate::Device {}
// The following extensions add no commands; the traits are markers only.
pub trait KhrRelaxedBlockLayoutExtension: DeviceV1_0 {}
impl KhrRelaxedBlockLayoutExtension for crate::Device {}
pub trait KhrSamplerMirrorClampToEdgeExtension: DeviceV1_0 {}
impl KhrSamplerMirrorClampToEdgeExtension for crate::Device {}
/// `VK_KHR_sampler_ycbcr_conversion` device-level commands.
pub trait KhrSamplerYcbcrConversionExtension: DeviceV1_0 {
    /// Creates a sampler Y'CbCr conversion object.
    ///
    /// `allocator` optionally overrides the default host allocator.
    #[inline]
    fn create_sampler_ycbcr_conversion_khr(
        &self,
        create_info: &SamplerYcbcrConversionCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<SamplerYcbcrConversion> {
        // The driver writes the handle before returning SUCCESS, so
        // `assume_init` is only reached with an initialized value.
        let mut out = MaybeUninit::<SamplerYcbcrConversion>::uninit();
        let code = (self.commands().create_sampler_ycbcr_conversion_khr)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            out.as_mut_ptr(),
        );
        match code {
            Result::SUCCESS => Ok(unsafe { out.assume_init() }),
            err => Err(err),
        }
    }
    /// Destroys a sampler Y'CbCr conversion object.
    #[inline]
    fn destroy_sampler_ycbcr_conversion_khr(
        &self,
        ycbcr_conversion: SamplerYcbcrConversion,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let alloc: *const AllocationCallbacks = match allocator {
            Some(a) => a,
            None => ptr::null(),
        };
        let _ = (self.commands().destroy_sampler_ycbcr_conversion_khr)(
            self.handle(),
            ycbcr_conversion,
            alloc,
        );
    }
}
impl KhrSamplerYcbcrConversionExtension for crate::Device {}
// The following extensions add no commands; the traits are markers only.
pub trait KhrSeparateDepthStencilLayoutsExtension: DeviceV1_0 {}
impl KhrSeparateDepthStencilLayoutsExtension for crate::Device {}
pub trait KhrShaderAtomicInt64Extension: DeviceV1_0 {}
impl KhrShaderAtomicInt64Extension for crate::Device {}
pub trait KhrShaderClockExtension: DeviceV1_0 {}
impl KhrShaderClockExtension for crate::Device {}
pub trait KhrShaderDrawParametersExtension: DeviceV1_0 {}
impl KhrShaderDrawParametersExtension for crate::Device {}
pub trait KhrShaderFloat16Int8Extension: DeviceV1_0 {}
impl KhrShaderFloat16Int8Extension for crate::Device {}
pub trait KhrShaderFloatControlsExtension: DeviceV1_0 {}
impl KhrShaderFloatControlsExtension for crate::Device {}
pub trait KhrShaderNonSemanticInfoExtension: DeviceV1_0 {}
impl KhrShaderNonSemanticInfoExtension for crate::Device {}
pub trait KhrShaderSubgroupExtendedTypesExtension: DeviceV1_0 {}
impl KhrShaderSubgroupExtendedTypesExtension for crate::Device {}
/// `VK_KHR_shared_presentable_image` device-level commands.
pub trait KhrSharedPresentableImageExtension: DeviceV1_0 {
    /// Queries the current status of `swapchain`, returning the raw `Result`
    /// code on failure.
    #[inline]
    fn get_swapchain_status_khr(&self, swapchain: SwapchainKHR) -> crate::VkResult<()> {
        match (self.commands().get_swapchain_status_khr)(self.handle(), swapchain) {
            Result::SUCCESS => Ok(()),
            err => Err(err),
        }
    }
}
impl KhrSharedPresentableImageExtension for crate::Device {}
// The following extensions add no commands; the traits are markers only.
pub trait KhrSpirv14Extension: DeviceV1_0 {}
impl KhrSpirv14Extension for crate::Device {}
pub trait KhrStorageBufferStorageClassExtension: DeviceV1_0 {}
impl KhrStorageBufferStorageClassExtension for crate::Device {}
/// `VK_KHR_surface` instance-level commands.
pub trait KhrSurfaceExtension: InstanceV1_0 {
    /// Destroys a surface object; `allocator` optionally overrides the
    /// default host allocator.
    #[inline]
    fn destroy_surface_khr(&self, surface: SurfaceKHR, allocator: Option<&AllocationCallbacks>) {
        let __result = (self.commands().destroy_surface_khr)(
            self.handle(),
            surface,
            allocator.map_or(ptr::null(), |v| v),
        );
    }
    /// Returns the basic capabilities of `surface` on `physical_device`.
    #[inline]
    fn get_physical_device_surface_capabilities_khr(
        &self,
        physical_device: PhysicalDevice,
        surface: SurfaceKHR,
    ) -> crate::VkResult<SurfaceCapabilitiesKHR> {
        let mut surface_capabilities = MaybeUninit::<SurfaceCapabilitiesKHR>::uninit();
        let __result = (self.commands().get_physical_device_surface_capabilities_khr)(
            physical_device,
            surface,
            surface_capabilities.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { surface_capabilities.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Returns the supported swapchain formats for `surface`.
    ///
    /// Uses the Vulkan two-call enumeration idiom: first call (null pointer)
    /// obtains the count, second call fills the preallocated buffer.
    /// NOTE(review): the first call's `Result` is ignored and `INCOMPLETE` is
    /// not retried — appears to be the generator's convention; confirm.
    #[inline]
    fn get_physical_device_surface_formats_khr(
        &self,
        physical_device: PhysicalDevice,
        surface: SurfaceKHR,
    ) -> crate::VkResult<Vec<SurfaceFormatKHR>> {
        let mut surface_format_count = 0;
        (self.commands().get_physical_device_surface_formats_khr)(
            physical_device,
            surface,
            &mut surface_format_count,
            ptr::null_mut(),
        );
        let mut surface_formats = Vec::with_capacity(surface_format_count as usize);
        let __result = (self.commands().get_physical_device_surface_formats_khr)(
            physical_device,
            surface,
            &mut surface_format_count,
            surface_formats.as_mut_ptr(),
        );
        // NOTE(review): `with_capacity` guarantees *at least* the requested
        // capacity, so `==` could fire spuriously in debug builds — confirm.
        debug_assert!(surface_formats.capacity() == surface_format_count as usize);
        // Assumes the driver wrote `surface_format_count` elements — TODO
        // confirm this holds on the error path as well.
        unsafe { surface_formats.set_len(surface_format_count as usize) };
        if __result == Result::SUCCESS {
            Ok(surface_formats)
        } else {
            Err(__result)
        }
    }
    /// Returns the supported presentation modes for `surface`
    /// (two-call enumeration idiom, see above).
    #[inline]
    fn get_physical_device_surface_present_modes_khr(
        &self,
        physical_device: PhysicalDevice,
        surface: SurfaceKHR,
    ) -> crate::VkResult<Vec<PresentModeKHR>> {
        let mut present_mode_count = 0;
        (self
            .commands()
            .get_physical_device_surface_present_modes_khr)(
            physical_device,
            surface,
            &mut present_mode_count,
            ptr::null_mut(),
        );
        let mut present_modes = Vec::with_capacity(present_mode_count as usize);
        let __result = (self
            .commands()
            .get_physical_device_surface_present_modes_khr)(
            physical_device,
            surface,
            &mut present_mode_count,
            present_modes.as_mut_ptr(),
        );
        debug_assert!(present_modes.capacity() == present_mode_count as usize);
        unsafe { present_modes.set_len(present_mode_count as usize) };
        if __result == Result::SUCCESS {
            Ok(present_modes)
        } else {
            Err(__result)
        }
    }
    /// Returns whether `queue_family_index` on `physical_device` supports
    /// presentation to `surface`.
    #[inline]
    fn get_physical_device_surface_support_khr(
        &self,
        physical_device: PhysicalDevice,
        queue_family_index: u32,
        surface: SurfaceKHR,
    ) -> crate::VkResult<bool> {
        let mut supported = MaybeUninit::<Bool32>::uninit();
        let __result = (self.commands().get_physical_device_surface_support_khr)(
            physical_device,
            queue_family_index,
            surface,
            supported.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            // Convert the raw Bool32 to a Rust bool.
            Ok(unsafe { supported.assume_init() } == TRUE)
        } else {
            Err(__result)
        }
    }
}
impl KhrSurfaceExtension for crate::Instance {}
// No commands; marker trait only.
pub trait KhrSurfaceProtectedCapabilitiesExtension: InstanceV1_0 {}
impl KhrSurfaceProtectedCapabilitiesExtension for crate::Instance {}
/// `VK_KHR_swapchain` device-level commands.
pub trait KhrSwapchainExtension: DeviceV1_0 {
    /// Acquires the next presentable image (structured variant), returning
    /// its index within the swapchain's image array.
    #[inline]
    fn acquire_next_image2_khr(
        &self,
        acquire_info: &AcquireNextImageInfoKHR,
    ) -> crate::VkResult<u32> {
        let mut image_index = MaybeUninit::<u32>::uninit();
        let __result = (self.commands().acquire_next_image2_khr)(
            self.handle(),
            acquire_info,
            image_index.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { image_index.assume_init() })
        } else {
            // NOTE(review): SUBOPTIMAL_KHR is surfaced as an error here even
            // though an image was acquired — matches the generator's
            // success-only convention; callers must handle it.
            Err(__result)
        }
    }
    /// Acquires the next presentable image, signalling `semaphore` and/or
    /// `fence` when it is ready; `timeout` is in nanoseconds.
    #[inline]
    fn acquire_next_image_khr(
        &self,
        swapchain: SwapchainKHR,
        timeout: u64,
        semaphore: Semaphore,
        fence: Fence,
    ) -> crate::VkResult<u32> {
        let mut image_index = MaybeUninit::<u32>::uninit();
        let __result = (self.commands().acquire_next_image_khr)(
            self.handle(),
            swapchain,
            timeout,
            semaphore,
            fence,
            image_index.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { image_index.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Creates a swapchain for a surface.
    #[inline]
    fn create_swapchain_khr(
        &self,
        create_info: &SwapchainCreateInfoKHR,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<SwapchainKHR> {
        let mut swapchain = MaybeUninit::<SwapchainKHR>::uninit();
        let __result = (self.commands().create_swapchain_khr)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            swapchain.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { swapchain.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Destroys a swapchain object.
    #[inline]
    fn destroy_swapchain_khr(
        &self,
        swapchain: SwapchainKHR,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_swapchain_khr)(
            self.handle(),
            swapchain,
            allocator.map_or(ptr::null(), |v| v),
        );
    }
    /// Queries device-group present capabilities.
    #[inline]
    fn get_device_group_present_capabilities_khr(
        &self,
    ) -> crate::VkResult<DeviceGroupPresentCapabilitiesKHR> {
        let mut device_group_present_capabilities =
            MaybeUninit::<DeviceGroupPresentCapabilitiesKHR>::uninit();
        let __result = (self.commands().get_device_group_present_capabilities_khr)(
            self.handle(),
            device_group_present_capabilities.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { device_group_present_capabilities.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Queries the supported device-group present modes for `surface`.
    #[inline]
    fn get_device_group_surface_present_modes_khr(
        &self,
        surface: SurfaceKHR,
    ) -> crate::VkResult<DeviceGroupPresentModeFlagsKHR> {
        let mut modes = MaybeUninit::<DeviceGroupPresentModeFlagsKHR>::uninit();
        let __result = (self.commands().get_device_group_surface_present_modes_khr)(
            self.handle(),
            surface,
            modes.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { modes.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Returns the present rectangles for `surface` on `physical_device`
    /// (two-call enumeration idiom; first call fetches the count).
    #[inline]
    fn get_physical_device_present_rectangles_khr(
        &self,
        physical_device: PhysicalDevice,
        surface: SurfaceKHR,
    ) -> crate::VkResult<Vec<Rect2D>> {
        let mut rect_count = 0;
        (self.commands().get_physical_device_present_rectangles_khr)(
            physical_device,
            surface,
            &mut rect_count,
            ptr::null_mut(),
        );
        let mut rects = Vec::with_capacity(rect_count as usize);
        let __result = (self.commands().get_physical_device_present_rectangles_khr)(
            physical_device,
            surface,
            &mut rect_count,
            rects.as_mut_ptr(),
        );
        debug_assert!(rects.capacity() == rect_count as usize);
        // Assumes the driver wrote `rect_count` elements — TODO confirm on
        // the error path.
        unsafe { rects.set_len(rect_count as usize) };
        if __result == Result::SUCCESS {
            Ok(rects)
        } else {
            Err(__result)
        }
    }
    /// Returns the presentable images of `swapchain`
    /// (two-call enumeration idiom).
    #[inline]
    fn get_swapchain_images_khr(&self, swapchain: SwapchainKHR) -> crate::VkResult<Vec<Image>> {
        let mut swapchain_image_count = 0;
        (self.commands().get_swapchain_images_khr)(
            self.handle(),
            swapchain,
            &mut swapchain_image_count,
            ptr::null_mut(),
        );
        let mut swapchain_images = Vec::with_capacity(swapchain_image_count as usize);
        let __result = (self.commands().get_swapchain_images_khr)(
            self.handle(),
            swapchain,
            &mut swapchain_image_count,
            swapchain_images.as_mut_ptr(),
        );
        debug_assert!(swapchain_images.capacity() == swapchain_image_count as usize);
        unsafe { swapchain_images.set_len(swapchain_image_count as usize) };
        if __result == Result::SUCCESS {
            Ok(swapchain_images)
        } else {
            Err(__result)
        }
    }
    /// Queues images for presentation on `queue`.
    #[inline]
    fn queue_present_khr(
        &self,
        queue: Queue,
        present_info: &PresentInfoKHR,
    ) -> crate::VkResult<()> {
        let __result = (self.commands().queue_present_khr)(queue, present_info);
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result)
        }
    }
}
impl KhrSwapchainExtension for crate::Device {}
// No commands; marker trait only.
pub trait KhrSwapchainMutableFormatExtension: DeviceV1_0 {}
impl KhrSwapchainMutableFormatExtension for crate::Device {}
/// `VK_KHR_timeline_semaphore` device-level commands.
pub trait KhrTimelineSemaphoreExtension: DeviceV1_0 {
    /// Returns the current counter value of a timeline `semaphore`.
    #[inline]
    fn get_semaphore_counter_value_khr(&self, semaphore: Semaphore) -> crate::VkResult<u64> {
        // Out-parameter written by the driver before SUCCESS is returned.
        let mut counter = MaybeUninit::<u64>::uninit();
        let code = (self.commands().get_semaphore_counter_value_khr)(
            self.handle(),
            semaphore,
            counter.as_mut_ptr(),
        );
        match code {
            Result::SUCCESS => Ok(unsafe { counter.assume_init() }),
            err => Err(err),
        }
    }
    /// Signals a timeline semaphore to the value given in `signal_info`.
    #[inline]
    fn signal_semaphore_khr(&self, signal_info: &SemaphoreSignalInfo) -> crate::VkResult<()> {
        match (self.commands().signal_semaphore_khr)(self.handle(), signal_info) {
            Result::SUCCESS => Ok(()),
            err => Err(err),
        }
    }
    /// Waits on the timeline semaphores in `wait_info`; `timeout` is in
    /// nanoseconds.
    #[inline]
    fn wait_semaphores_khr(
        &self,
        wait_info: &SemaphoreWaitInfo,
        timeout: u64,
    ) -> crate::VkResult<()> {
        match (self.commands().wait_semaphores_khr)(self.handle(), wait_info, timeout) {
            Result::SUCCESS => Ok(()),
            err => Err(err),
        }
    }
}
impl KhrTimelineSemaphoreExtension for crate::Device {}
// The following extensions add no commands; the traits are markers only.
pub trait KhrUniformBufferStandardLayoutExtension: DeviceV1_0 {}
impl KhrUniformBufferStandardLayoutExtension for crate::Device {}
pub trait KhrVariablePointersExtension: DeviceV1_0 {}
impl KhrVariablePointersExtension for crate::Device {}
pub trait KhrVulkanMemoryModelExtension: DeviceV1_0 {}
impl KhrVulkanMemoryModelExtension for crate::Device {}
/// `VK_KHR_wayland_surface` instance-level commands.
pub trait KhrWaylandSurfaceExtension: InstanceV1_0 {
    /// Creates a surface for a Wayland window.
    #[inline]
    fn create_wayland_surface_khr(
        &self,
        create_info: &WaylandSurfaceCreateInfoKHR,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<SurfaceKHR> {
        let mut surface = MaybeUninit::<SurfaceKHR>::uninit();
        let __result = (self.commands().create_wayland_surface_khr)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            surface.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { surface.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// NOTE(review): this wrapper passes an *uninitialized* `T_wl_display`
    /// out-slot and returns it, discarding the command's actual result — the
    /// underlying command takes the display as an *input* and returns a
    /// Bool32, so this generated signature looks wrong; `assume_init` on a
    /// value the callee may never write would be UB. Confirm against the
    /// upstream generator before use.
    #[inline]
    fn get_physical_device_wayland_presentation_support_khr<T_wl_display>(
        &self,
        physical_device: PhysicalDevice,
        queue_family_index: u32,
    ) -> T_wl_display {
        let mut display = MaybeUninit::<T_wl_display>::uninit();
        let __result = (self
            .commands()
            .get_physical_device_wayland_presentation_support_khr)(
            physical_device,
            queue_family_index,
            display.as_mut_ptr().cast::<c_void>(),
        );
        unsafe { display.assume_init() }
    }
}
impl KhrWaylandSurfaceExtension for crate::Instance {}
// No commands; marker trait only.
pub trait KhrWin32KeyedMutexExtension: DeviceV1_0 {}
impl KhrWin32KeyedMutexExtension for crate::Device {}
/// `VK_KHR_win32_surface` instance-level commands.
pub trait KhrWin32SurfaceExtension: InstanceV1_0 {
    /// Creates a surface for a Win32 window.
    #[inline]
    fn create_win32_surface_khr(
        &self,
        create_info: &Win32SurfaceCreateInfoKHR,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<SurfaceKHR> {
        let mut surface = MaybeUninit::<SurfaceKHR>::uninit();
        let __result = (self.commands().create_win32_surface_khr)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            surface.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { surface.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// NOTE(review): the support query's result is bound to `__result` and
    /// dropped, so the caller never learns whether presentation is supported
    /// — looks like a generator defect; confirm upstream before changing the
    /// signature.
    #[inline]
    fn get_physical_device_win32_presentation_support_khr(
        &self,
        physical_device: PhysicalDevice,
        queue_family_index: u32,
    ) {
        let __result = (self
            .commands()
            .get_physical_device_win32_presentation_support_khr)(
            physical_device,
            queue_family_index,
        );
    }
}
impl KhrWin32SurfaceExtension for crate::Instance {}
/// `VK_KHR_xcb_surface` instance-level commands.
pub trait KhrXcbSurfaceExtension: InstanceV1_0 {
    /// Creates a surface for an XCB window.
    #[inline]
    fn create_xcb_surface_khr(
        &self,
        create_info: &XcbSurfaceCreateInfoKHR,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<SurfaceKHR> {
        let mut surface = MaybeUninit::<SurfaceKHR>::uninit();
        let __result = (self.commands().create_xcb_surface_khr)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            surface.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { surface.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// NOTE(review): as with the Wayland variant, an uninitialized
    /// `T_xcb_connection_t` out-slot is passed and returned while the
    /// command's Bool32 result is discarded — the underlying command treats
    /// the connection as input; `assume_init` on a value the callee may not
    /// write would be UB. Confirm against the upstream generator.
    #[inline]
    fn get_physical_device_xcb_presentation_support_khr<T_xcb_connection_t>(
        &self,
        physical_device: PhysicalDevice,
        queue_family_index: u32,
        visual_id: xcb_visualid_t,
    ) -> T_xcb_connection_t {
        let mut connection = MaybeUninit::<T_xcb_connection_t>::uninit();
        let __result = (self
            .commands()
            .get_physical_device_xcb_presentation_support_khr)(
            physical_device,
            queue_family_index,
            connection.as_mut_ptr().cast::<c_void>(),
            visual_id,
        );
        unsafe { connection.assume_init() }
    }
}
impl KhrXcbSurfaceExtension for crate::Instance {}
/// `VK_KHR_xlib_surface` instance-level commands.
pub trait KhrXlibSurfaceExtension: InstanceV1_0 {
    /// Creates a surface for an Xlib window.
    #[inline]
    fn create_xlib_surface_khr(
        &self,
        create_info: &XlibSurfaceCreateInfoKHR,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<SurfaceKHR> {
        let mut surface = MaybeUninit::<SurfaceKHR>::uninit();
        let __result = (self.commands().create_xlib_surface_khr)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            surface.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { surface.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// NOTE(review): an uninitialized `Display` out-slot is passed and
    /// returned while the command's Bool32 result is discarded — the
    /// underlying command treats the display as input; confirm against the
    /// upstream generator.
    #[inline]
    fn get_physical_device_xlib_presentation_support_khr(
        &self,
        physical_device: PhysicalDevice,
        queue_family_index: u32,
        visual_id: VisualID,
    ) -> Display {
        let mut dpy = MaybeUninit::<Display>::uninit();
        let __result = (self
            .commands()
            .get_physical_device_xlib_presentation_support_khr)(
            physical_device,
            queue_family_index,
            dpy.as_mut_ptr(),
            visual_id,
        );
        unsafe { dpy.assume_init() }
    }
}
impl KhrXlibSurfaceExtension for crate::Instance {}
/// `VK_MVK_ios_surface` instance-level commands.
#[deprecated(note = "deprecated in favor of `VK_EXT_metal_surface`")]
pub trait MvkIosSurfaceExtension: InstanceV1_0 {
    /// Creates a surface for an iOS `UIView`.
    #[inline]
    fn create_ios_surface_mvk(
        &self,
        create_info: &IOSSurfaceCreateInfoMVK,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<SurfaceKHR> {
        // The driver writes the handle before returning SUCCESS.
        let mut out = MaybeUninit::<SurfaceKHR>::uninit();
        let code = (self.commands().create_ios_surface_mvk)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            out.as_mut_ptr(),
        );
        match code {
            Result::SUCCESS => Ok(unsafe { out.assume_init() }),
            err => Err(err),
        }
    }
}
#[allow(deprecated)]
impl MvkIosSurfaceExtension for crate::Instance {}
/// `VK_MVK_macos_surface` instance-level commands.
#[deprecated(note = "deprecated in favor of `VK_EXT_metal_surface`")]
pub trait MvkMacosSurfaceExtension: InstanceV1_0 {
    /// Creates a surface for a macOS `NSView`.
    #[inline]
    fn create_mac_os_surface_mvk(
        &self,
        create_info: &MacOSSurfaceCreateInfoMVK,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<SurfaceKHR> {
        // The driver writes the handle before returning SUCCESS.
        let mut out = MaybeUninit::<SurfaceKHR>::uninit();
        let code = (self.commands().create_mac_os_surface_mvk)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            out.as_mut_ptr(),
        );
        match code {
            Result::SUCCESS => Ok(unsafe { out.assume_init() }),
            err => Err(err),
        }
    }
}
#[allow(deprecated)]
impl MvkMacosSurfaceExtension for crate::Instance {}
/// `VK_NN_vi_surface` instance-level commands.
pub trait NnViSurfaceExtension: InstanceV1_0 {
    /// Creates a surface for a VI layer.
    #[inline]
    fn create_vi_surface_nn(
        &self,
        create_info: &ViSurfaceCreateInfoNN,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<SurfaceKHR> {
        // The driver writes the handle before returning SUCCESS.
        let mut out = MaybeUninit::<SurfaceKHR>::uninit();
        let code = (self.commands().create_vi_surface_nn)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            out.as_mut_ptr(),
        );
        match code {
            Result::SUCCESS => Ok(unsafe { out.assume_init() }),
            err => Err(err),
        }
    }
}
impl NnViSurfaceExtension for crate::Instance {}
/// `VK_NVX_image_view_handle` device-level commands.
pub trait NvxImageViewHandleExtension: DeviceV1_0 {
    /// Returns the device address properties of `image_view`.
    #[inline]
    fn get_image_view_address_nvx(
        &self,
        image_view: ImageView,
    ) -> crate::VkResult<ImageViewAddressPropertiesNVX> {
        let mut properties = MaybeUninit::<ImageViewAddressPropertiesNVX>::uninit();
        let __result = (self.commands().get_image_view_address_nvx)(
            self.handle(),
            image_view,
            properties.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { properties.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// NOTE(review): the queried handle is bound to `__result` and dropped,
    /// so callers never receive it — this looks like a generator defect (the
    /// raw command returns a `u32` handle); confirm upstream.
    #[inline]
    fn get_image_view_handle_nvx(&self, info: &ImageViewHandleInfoNVX) {
        let __result = (self.commands().get_image_view_handle_nvx)(self.handle(), info);
    }
}
impl NvxImageViewHandleExtension for crate::Device {}
// No commands; marker trait only.
pub trait NvxMultiviewPerViewAttributesExtension: DeviceV1_0 {}
impl NvxMultiviewPerViewAttributesExtension for crate::Device {}
/// `VK_NV_clip_space_w_scaling` device-level commands.
pub trait NvClipSpaceWScalingExtension: DeviceV1_0 {
    /// Records a command setting viewport W-scaling factors starting at
    /// `first_viewport`.
    #[inline]
    fn cmd_set_viewport_w_scaling_nv(
        &self,
        command_buffer: CommandBuffer,
        first_viewport: u32,
        viewport_w_scalings: &[impl Cast<Target = ViewportWScalingNV>],
    ) {
        let scaling_count = viewport_w_scalings.len() as u32;
        // `Cast` guarantees layout compatibility with the raw struct.
        let scaling_ptr = viewport_w_scalings.as_ptr().cast();
        let _ = (self.commands().cmd_set_viewport_w_scaling_nv)(
            command_buffer,
            first_viewport,
            scaling_count,
            scaling_ptr,
        );
    }
}
impl NvClipSpaceWScalingExtension for crate::Device {}
// No commands; marker trait only.
pub trait NvComputeShaderDerivativesExtension: DeviceV1_0 {}
impl NvComputeShaderDerivativesExtension for crate::Device {}
/// `VK_NV_cooperative_matrix` commands.
pub trait NvCooperativeMatrixExtension: DeviceV1_0 {
    /// Returns the cooperative matrix properties supported by
    /// `physical_device` (two-call enumeration idiom: the first call with a
    /// null pointer fetches the count, the second fills the buffer).
    #[inline]
    fn get_physical_device_cooperative_matrix_properties_nv(
        &self,
        physical_device: PhysicalDevice,
    ) -> crate::VkResult<Vec<CooperativeMatrixPropertiesNV>> {
        let mut property_count = 0;
        (self
            .commands()
            .get_physical_device_cooperative_matrix_properties_nv)(
            physical_device,
            &mut property_count,
            ptr::null_mut(),
        );
        let mut properties = Vec::with_capacity(property_count as usize);
        let __result = (self
            .commands()
            .get_physical_device_cooperative_matrix_properties_nv)(
            physical_device,
            &mut property_count,
            properties.as_mut_ptr(),
        );
        debug_assert!(properties.capacity() == property_count as usize);
        // Assumes the driver wrote `property_count` elements — TODO confirm
        // on the error path.
        unsafe { properties.set_len(property_count as usize) };
        if __result == Result::SUCCESS {
            Ok(properties)
        } else {
            Err(__result)
        }
    }
}
impl NvCooperativeMatrixExtension for crate::Device {}
// No commands; marker trait only.
pub trait NvCornerSampledImageExtension: DeviceV1_0 {}
impl NvCornerSampledImageExtension for crate::Device {}
/// `VK_NV_coverage_reduction_mode` commands.
pub trait NvCoverageReductionModeExtension: DeviceV1_0 {
    /// Returns the supported framebuffer mixed-samples combinations for
    /// `physical_device` (two-call enumeration idiom).
    #[inline]
    fn get_physical_device_supported_framebuffer_mixed_samples_combinations_nv(
        &self,
        physical_device: PhysicalDevice,
    ) -> crate::VkResult<Vec<FramebufferMixedSamplesCombinationNV>> {
        let mut combination_count = 0;
        (self
            .commands()
            .get_physical_device_supported_framebuffer_mixed_samples_combinations_nv)(
            physical_device,
            &mut combination_count,
            ptr::null_mut(),
        );
        let mut combinations = Vec::with_capacity(combination_count as usize);
        let __result = (self
            .commands()
            .get_physical_device_supported_framebuffer_mixed_samples_combinations_nv)(
            physical_device,
            &mut combination_count,
            combinations.as_mut_ptr(),
        );
        debug_assert!(combinations.capacity() == combination_count as usize);
        // Assumes the driver wrote `combination_count` elements — TODO
        // confirm on the error path.
        unsafe { combinations.set_len(combination_count as usize) };
        if __result == Result::SUCCESS {
            Ok(combinations)
        } else {
            Err(__result)
        }
    }
}
impl NvCoverageReductionModeExtension for crate::Device {}
// The following extensions add no commands; the traits are markers only.
#[deprecated(note = "deprecated in favor of `VK_KHR_dedicated_allocation`")]
pub trait NvDedicatedAllocationExtension: DeviceV1_0 {}
#[allow(deprecated)]
impl NvDedicatedAllocationExtension for crate::Device {}
pub trait NvDedicatedAllocationImageAliasingExtension: DeviceV1_0 {}
impl NvDedicatedAllocationImageAliasingExtension for crate::Device {}
/// `VK_NV_device_diagnostic_checkpoints` commands.
pub trait NvDeviceDiagnosticCheckpointsExtension: DeviceV1_0 {
    /// Records a checkpoint marker into `command_buffer`; the marker pointer
    /// is reported back by `get_queue_checkpoint_data_nv` after a fault.
    #[inline]
    fn cmd_set_checkpoint_nv(&self, command_buffer: CommandBuffer, checkpoint_marker: &c_void) {
        let __result = (self.commands().cmd_set_checkpoint_nv)(command_buffer, checkpoint_marker);
    }
    /// Returns the checkpoint data recorded on `queue`
    /// (two-call enumeration idiom; first call fetches the count).
    #[inline]
    fn get_queue_checkpoint_data_nv(&self, queue: Queue) -> Vec<CheckpointDataNV> {
        let mut checkpoint_data_count = 0;
        (self.commands().get_queue_checkpoint_data_nv)(
            queue,
            &mut checkpoint_data_count,
            ptr::null_mut(),
        );
        let mut checkpoint_data = Vec::with_capacity(checkpoint_data_count as usize);
        let __result = (self.commands().get_queue_checkpoint_data_nv)(
            queue,
            &mut checkpoint_data_count,
            checkpoint_data.as_mut_ptr(),
        );
        debug_assert!(checkpoint_data.capacity() == checkpoint_data_count as usize);
        // Assumes the driver wrote `checkpoint_data_count` elements — TODO
        // confirm.
        unsafe { checkpoint_data.set_len(checkpoint_data_count as usize) };
        checkpoint_data
    }
}
impl NvDeviceDiagnosticCheckpointsExtension for crate::Device {}
// No commands; marker trait only.
pub trait NvDeviceDiagnosticsConfigExtension: DeviceV1_0 {}
impl NvDeviceDiagnosticsConfigExtension for crate::Device {}
/// `VK_NV_device_generated_commands` device-level commands.
pub trait NvDeviceGeneratedCommandsExtension: DeviceV1_0 {
    /// Records a command binding shader group `group_index` of `pipeline`.
    #[inline]
    fn cmd_bind_pipeline_shader_group_nv(
        &self,
        command_buffer: CommandBuffer,
        pipeline_bind_point: PipelineBindPoint,
        pipeline: Pipeline,
        group_index: u32,
    ) {
        let __result = (self.commands().cmd_bind_pipeline_shader_group_nv)(
            command_buffer,
            pipeline_bind_point,
            pipeline,
            group_index,
        );
    }
    /// Records execution of device-generated commands; `is_preprocessed`
    /// indicates whether `cmd_preprocess_generated_commands_nv` was used.
    #[inline]
    fn cmd_execute_generated_commands_nv(
        &self,
        command_buffer: CommandBuffer,
        is_preprocessed: bool,
        generated_commands_info: &GeneratedCommandsInfoNV,
    ) {
        let __result = (self.commands().cmd_execute_generated_commands_nv)(
            command_buffer,
            // Convert the Rust bool to a raw Bool32.
            if is_preprocessed { TRUE } else { FALSE },
            generated_commands_info,
        );
    }
    /// Records preprocessing of device-generated commands.
    #[inline]
    fn cmd_preprocess_generated_commands_nv(
        &self,
        command_buffer: CommandBuffer,
        generated_commands_info: &GeneratedCommandsInfoNV,
    ) {
        let __result = (self.commands().cmd_preprocess_generated_commands_nv)(
            command_buffer,
            generated_commands_info,
        );
    }
    /// Creates an indirect commands layout object.
    #[inline]
    fn create_indirect_commands_layout_nv(
        &self,
        create_info: &IndirectCommandsLayoutCreateInfoNV,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<IndirectCommandsLayoutNV> {
        let mut indirect_commands_layout = MaybeUninit::<IndirectCommandsLayoutNV>::uninit();
        let __result = (self.commands().create_indirect_commands_layout_nv)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            indirect_commands_layout.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { indirect_commands_layout.assume_init() })
        } else {
            Err(__result)
        }
    }
    /// Destroys an indirect commands layout object.
    #[inline]
    fn destroy_indirect_commands_layout_nv(
        &self,
        indirect_commands_layout: IndirectCommandsLayoutNV,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_indirect_commands_layout_nv)(
            self.handle(),
            indirect_commands_layout,
            allocator.map_or(ptr::null(), |v| v),
        );
    }
    /// Returns the memory requirements for generated-command preprocessing
    /// described by `info`.
    #[inline]
    fn get_generated_commands_memory_requirements_nv(
        &self,
        info: &GeneratedCommandsMemoryRequirementsInfoNV,
    ) -> MemoryRequirements2 {
        // Out-parameter filled by the driver; no status code is returned.
        let mut memory_requirements = MaybeUninit::<MemoryRequirements2>::uninit();
        let __result = (self
            .commands()
            .get_generated_commands_memory_requirements_nv)(
            self.handle(),
            info,
            memory_requirements.as_mut_ptr(),
        );
        unsafe { memory_requirements.assume_init() }
    }
}
impl NvDeviceGeneratedCommandsExtension for crate::Device {}
// No commands; marker trait only.
#[deprecated(note = "deprecated in favor of `VK_KHR_external_memory`")]
pub trait NvExternalMemoryExtension: DeviceV1_0 {}
#[allow(deprecated)]
impl NvExternalMemoryExtension for crate::Device {}
/// `VK_NV_external_memory_capabilities` instance-level commands.
#[deprecated(note = "deprecated in favor of `VK_KHR_external_memory_capabilities`")]
pub trait NvExternalMemoryCapabilitiesExtension: InstanceV1_0 {
    /// Queries the external image format properties for the given image
    /// parameters and external handle type.
    #[inline]
    fn get_physical_device_external_image_format_properties_nv(
        &self,
        physical_device: PhysicalDevice,
        format: Format,
        type_: ImageType,
        tiling: ImageTiling,
        usage: ImageUsageFlags,
        flags: ImageCreateFlags,
        external_handle_type: ExternalMemoryHandleTypeFlagsNV,
    ) -> crate::VkResult<ExternalImageFormatPropertiesNV> {
        let mut external_image_format_properties =
            MaybeUninit::<ExternalImageFormatPropertiesNV>::uninit();
        let __result = (self
            .commands()
            .get_physical_device_external_image_format_properties_nv)(
            physical_device,
            format,
            type_,
            tiling,
            usage,
            flags,
            external_handle_type,
            external_image_format_properties.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { external_image_format_properties.assume_init() })
        } else {
            Err(__result)
        }
    }
}
#[allow(deprecated)]
impl NvExternalMemoryCapabilitiesExtension for crate::Instance {}
/// `VK_NV_external_memory_win32` device-level commands.
#[deprecated(note = "deprecated in favor of `VK_KHR_external_memory_win32`")]
pub trait NvExternalMemoryWin32Extension: DeviceV1_0 {
    /// Exports a Win32 `HANDLE` for `memory` of the given handle type.
    #[inline]
    fn get_memory_win32_handle_nv(
        &self,
        memory: DeviceMemory,
        handle_type: ExternalMemoryHandleTypeFlagsNV,
    ) -> crate::VkResult<HANDLE> {
        let mut handle = MaybeUninit::<HANDLE>::uninit();
        let __result = (self.commands().get_memory_win32_handle_nv)(
            self.handle(),
            memory,
            handle_type,
            handle.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(unsafe { handle.assume_init() })
        } else {
            Err(__result)
        }
    }
}
#[allow(deprecated)]
impl NvExternalMemoryWin32Extension for crate::Device {}
// The following extensions add no commands; the traits are markers only.
pub trait NvFillRectangleExtension: DeviceV1_0 {}
impl NvFillRectangleExtension for crate::Device {}
pub trait NvFragmentCoverageToColorExtension: DeviceV1_0 {}
impl NvFragmentCoverageToColorExtension for crate::Device {}
pub trait NvFragmentShaderBarycentricExtension: DeviceV1_0 {}
impl NvFragmentShaderBarycentricExtension for crate::Device {}
pub trait NvFramebufferMixedSamplesExtension: DeviceV1_0 {}
impl NvFramebufferMixedSamplesExtension for crate::Device {}
pub trait NvGeometryShaderPassthroughExtension: DeviceV1_0 {}
impl NvGeometryShaderPassthroughExtension for crate::Device {}
#[deprecated]
pub trait NvGlslShaderExtension: DeviceV1_0 {}
#[allow(deprecated)]
impl NvGlslShaderExtension for crate::Device {}
/// `VK_NV_mesh_shader` device-level commands.
pub trait NvMeshShaderExtension: DeviceV1_0 {
    /// Records an indirect mesh-task draw whose draw count is read from
    /// `count_buffer`, capped at `max_draw_count`.
    #[inline]
    fn cmd_draw_mesh_tasks_indirect_count_nv(
        &self,
        command_buffer: CommandBuffer,
        buffer: Buffer,
        offset: DeviceSize,
        count_buffer: Buffer,
        count_buffer_offset: DeviceSize,
        max_draw_count: u32,
        stride: u32,
    ) {
        let _ = (self.commands().cmd_draw_mesh_tasks_indirect_count_nv)(
            command_buffer,
            buffer,
            offset,
            count_buffer,
            count_buffer_offset,
            max_draw_count,
            stride,
        );
    }
    /// Records `draw_count` indirect mesh-task draws read from `buffer`.
    #[inline]
    fn cmd_draw_mesh_tasks_indirect_nv(
        &self,
        command_buffer: CommandBuffer,
        buffer: Buffer,
        offset: DeviceSize,
        draw_count: u32,
        stride: u32,
    ) {
        let _ = (self.commands().cmd_draw_mesh_tasks_indirect_nv)(
            command_buffer,
            buffer,
            offset,
            draw_count,
            stride,
        );
    }
    /// Records a direct mesh-task draw of `task_count` workgroups starting at
    /// `first_task`.
    #[inline]
    fn cmd_draw_mesh_tasks_nv(
        &self,
        command_buffer: CommandBuffer,
        task_count: u32,
        first_task: u32,
    ) {
        let _ = (self.commands().cmd_draw_mesh_tasks_nv)(command_buffer, task_count, first_task);
    }
}
impl NvMeshShaderExtension for crate::Device {}
pub trait NvRayTracingExtension: DeviceV1_0 {
#[inline]
fn bind_acceleration_structure_memory_nv(
&self,
bind_infos: &[impl Cast<Target = BindAccelerationStructureMemoryInfoKHR>],
) -> crate::VkResult<()> {
let __result = (self.commands().bind_acceleration_structure_memory_nv)(
self.handle(),
bind_infos.len() as u32,
bind_infos.as_ptr().cast(),
);
if __result == Result::SUCCESS {
Ok(())
} else {
Err(__result)
}
}
    /// Records a build of acceleration structure `dst` (from `src` when
    /// `update` is true) using geometry described by `info`.
    #[inline]
    fn cmd_build_acceleration_structure_nv(
        &self,
        command_buffer: CommandBuffer,
        info: &AccelerationStructureInfoNV,
        instance_data: Buffer,
        instance_offset: DeviceSize,
        update: bool,
        dst: AccelerationStructureKHR,
        src: AccelerationStructureKHR,
        scratch: Buffer,
        scratch_offset: DeviceSize,
    ) {
        let __result = (self.commands().cmd_build_acceleration_structure_nv)(
            command_buffer,
            info,
            instance_data,
            instance_offset,
            // Convert the Rust bool to a raw Bool32.
            if update { TRUE } else { FALSE },
            dst,
            src,
            scratch,
            scratch_offset,
        );
    }
#[inline]
fn cmd_copy_acceleration_structure_nv(
&self,
command_buffer: CommandBuffer,
dst: AccelerationStructureKHR,
src: AccelerationStructureKHR,
mode: CopyAccelerationStructureModeKHR,
) {
let __result =
(self.commands().cmd_copy_acceleration_structure_nv)(command_buffer, dst, src, mode);
}
    /// Records a ray tracing dispatch of `width` x `height` x `depth` rays,
    /// using the given shader binding table regions (raygen, miss, hit,
    /// callable), each described by a buffer, offset, and stride.
    #[inline]
    fn cmd_trace_rays_nv(
        &self,
        command_buffer: CommandBuffer,
        raygen_shader_binding_table_buffer: Buffer,
        raygen_shader_binding_offset: DeviceSize,
        miss_shader_binding_table_buffer: Buffer,
        miss_shader_binding_offset: DeviceSize,
        miss_shader_binding_stride: DeviceSize,
        hit_shader_binding_table_buffer: Buffer,
        hit_shader_binding_offset: DeviceSize,
        hit_shader_binding_stride: DeviceSize,
        callable_shader_binding_table_buffer: Buffer,
        callable_shader_binding_offset: DeviceSize,
        callable_shader_binding_stride: DeviceSize,
        width: u32,
        height: u32,
        depth: u32,
    ) {
        // Arguments are forwarded positionally in the exact order the raw
        // command expects; do not reorder.
        let __result = (self.commands().cmd_trace_rays_nv)(
            command_buffer,
            raygen_shader_binding_table_buffer,
            raygen_shader_binding_offset,
            miss_shader_binding_table_buffer,
            miss_shader_binding_offset,
            miss_shader_binding_stride,
            hit_shader_binding_table_buffer,
            hit_shader_binding_offset,
            hit_shader_binding_stride,
            callable_shader_binding_table_buffer,
            callable_shader_binding_offset,
            callable_shader_binding_stride,
            width,
            height,
            depth,
        );
    }
#[inline]
fn cmd_write_acceleration_structures_properties_nv(
&self,
command_buffer: CommandBuffer,
acceleration_structures: &[AccelerationStructureKHR],
query_type: QueryType,
query_pool: QueryPool,
first_query: u32,
) {
let __result = (self
.commands()
.cmd_write_acceleration_structures_properties_nv)(
command_buffer,
acceleration_structures.len() as u32,
acceleration_structures.as_ptr(),
query_type,
query_pool,
first_query,
);
}
#[inline]
fn compile_deferred_nv(&self, pipeline: Pipeline, shader: u32) -> crate::VkResult<()> {
let __result = (self.commands().compile_deferred_nv)(self.handle(), pipeline, shader);
if __result == Result::SUCCESS {
Ok(())
} else {
Err(__result)
}
}
#[inline]
fn create_acceleration_structure_nv(
&self,
create_info: &AccelerationStructureCreateInfoNV,
allocator: Option<&AllocationCallbacks>,
) -> crate::VkResult<AccelerationStructureNV> {
let mut acceleration_structure = MaybeUninit::<AccelerationStructureNV>::uninit();
let __result = (self.commands().create_acceleration_structure_nv)(
self.handle(),
create_info,
allocator.map_or(ptr::null(), |v| v),
acceleration_structure.as_mut_ptr(),
);
if __result == Result::SUCCESS {
Ok(unsafe { acceleration_structure.assume_init() })
} else {
Err(__result)
}
}
#[inline]
fn create_ray_tracing_pipelines_nv(
&self,
pipeline_cache: PipelineCache,
create_infos: &[impl Cast<Target = RayTracingPipelineCreateInfoNV>],
allocator: Option<&AllocationCallbacks>,
) -> crate::VkResult<Pipeline> {
let mut pipelines = MaybeUninit::<Pipeline>::uninit();
let __result = (self.commands().create_ray_tracing_pipelines_nv)(
self.handle(),
pipeline_cache,
create_infos.len() as u32,
create_infos.as_ptr().cast(),
allocator.map_or(ptr::null(), |v| v),
pipelines.as_mut_ptr(),
);
if __result == Result::SUCCESS {
Ok(unsafe { pipelines.assume_init() })
} else {
Err(__result)
}
}
#[inline]
fn destroy_acceleration_structure_nv(
&self,
acceleration_structure: AccelerationStructureKHR,
allocator: Option<&AllocationCallbacks>,
) {
let __result = (self.commands().destroy_acceleration_structure_nv)(
self.handle(),
acceleration_structure,
allocator.map_or(ptr::null(), |v| v),
);
}
#[inline]
fn get_acceleration_structure_handle_nv(
&self,
acceleration_structure: AccelerationStructureKHR,
data: &mut [u8],
) -> crate::VkResult<()> {
let __result = (self.commands().get_acceleration_structure_handle_nv)(
self.handle(),
acceleration_structure,
data.len() as usize,
data.as_ptr() as *mut c_void,
);
if __result == Result::SUCCESS {
Ok(())
} else {
Err(__result)
}
}
#[inline]
fn get_acceleration_structure_memory_requirements_nv(
&self,
info: &AccelerationStructureMemoryRequirementsInfoNV,
) -> MemoryRequirements2KHR {
let mut memory_requirements = MaybeUninit::<MemoryRequirements2KHR>::uninit();
let __result = (self
.commands()
.get_acceleration_structure_memory_requirements_nv)(
self.handle(),
info,
memory_requirements.as_mut_ptr(),
);
unsafe { memory_requirements.assume_init() }
}
#[inline]
fn get_ray_tracing_shader_group_handles_nv(
&self,
pipeline: Pipeline,
first_group: u32,
group_count: u32,
data: &mut [u8],
) -> crate::VkResult<()> {
let __result = (self.commands().get_ray_tracing_shader_group_handles_nv)(
self.handle(),
pipeline,
first_group,
group_count,
data.len() as usize,
data.as_ptr() as *mut c_void,
);
if __result == Result::SUCCESS {
Ok(())
} else {
Err(__result)
}
}
}
// Expose the ray-tracing extension commands on the main `Device` wrapper.
impl NvRayTracingExtension for crate::Device {}
/// Marker trait for the representative-fragment-test device extension; it
/// declares no additional commands.
pub trait NvRepresentativeFragmentTestExtension: DeviceV1_0 {}
impl NvRepresentativeFragmentTestExtension for crate::Device {}
/// Marker trait for the sample-mask-override-coverage device extension; it
/// declares no additional commands.
pub trait NvSampleMaskOverrideCoverageExtension: DeviceV1_0 {}
impl NvSampleMaskOverrideCoverageExtension for crate::Device {}
/// Wrapper trait for the exclusive-scissor device extension commands.
pub trait NvScissorExclusiveExtension: DeviceV1_0 {
    /// Records a dynamic update of the exclusive scissor rectangles starting
    /// at index `first_exclusive_scissor`.
    #[inline]
    fn cmd_set_exclusive_scissor_nv(
        &self,
        command_buffer: CommandBuffer,
        first_exclusive_scissor: u32,
        exclusive_scissors: &[impl Cast<Target = Rect2D>],
    ) {
        // Translate the slice into the (count, pointer) pair the raw
        // command expects; the unit result is deliberately discarded.
        let count = exclusive_scissors.len() as u32;
        let rects = exclusive_scissors.as_ptr().cast();
        let _ = (self.commands().cmd_set_exclusive_scissor_nv)(
            command_buffer,
            first_exclusive_scissor,
            count,
            rects,
        );
    }
}
// Expose the exclusive-scissor extension command on the main `Device` wrapper.
impl NvScissorExclusiveExtension for crate::Device {}
/// Marker trait for the shader-image-footprint device extension; it declares
/// no additional commands.
pub trait NvShaderImageFootprintExtension: DeviceV1_0 {}
impl NvShaderImageFootprintExtension for crate::Device {}
/// Marker trait for the shader-SM-builtins device extension; it declares no
/// additional commands.
pub trait NvShaderSmBuiltinsExtension: DeviceV1_0 {}
impl NvShaderSmBuiltinsExtension for crate::Device {}
/// Marker trait for the shader-subgroup-partitioned device extension; it
/// declares no additional commands.
pub trait NvShaderSubgroupPartitionedExtension: DeviceV1_0 {}
impl NvShaderSubgroupPartitionedExtension for crate::Device {}
/// Wrapper trait for the shading-rate-image device extension commands.
pub trait NvShadingRateImageExtension: DeviceV1_0 {
    /// Records a bind of `image_view` (in `image_layout`) as the current
    /// shading-rate image for `command_buffer`.
    #[inline]
    fn cmd_bind_shading_rate_image_nv(
        &self,
        command_buffer: CommandBuffer,
        image_view: ImageView,
        image_layout: ImageLayout,
    ) {
        let dispatch = self.commands().cmd_bind_shading_rate_image_nv;
        let _ = dispatch(command_buffer, image_view, image_layout);
    }
    /// Records an update of the coarse sample ordering, optionally supplying
    /// custom per-pixel sample orders.
    #[inline]
    fn cmd_set_coarse_sample_order_nv(
        &self,
        command_buffer: CommandBuffer,
        sample_order_type: CoarseSampleOrderTypeNV,
        custom_sample_orders: &[impl Cast<Target = CoarseSampleOrderCustomNV>],
    ) {
        // Translate the slice into the (count, pointer) pair the raw command
        // expects; the unit result is deliberately discarded.
        let count = custom_sample_orders.len() as u32;
        let orders = custom_sample_orders.as_ptr().cast();
        let _ = (self.commands().cmd_set_coarse_sample_order_nv)(
            command_buffer,
            sample_order_type,
            count,
            orders,
        );
    }
    /// Records an update of the shading-rate palettes for viewports starting
    /// at index `first_viewport`.
    #[inline]
    fn cmd_set_viewport_shading_rate_palette_nv(
        &self,
        command_buffer: CommandBuffer,
        first_viewport: u32,
        shading_rate_palettes: &[impl Cast<Target = ShadingRatePaletteNV>],
    ) {
        let count = shading_rate_palettes.len() as u32;
        let palettes = shading_rate_palettes.as_ptr().cast();
        let _ = (self.commands().cmd_set_viewport_shading_rate_palette_nv)(
            command_buffer,
            first_viewport,
            count,
            palettes,
        );
    }
}
// Expose the shading-rate-image extension commands on the main `Device` wrapper.
impl NvShadingRateImageExtension for crate::Device {}
/// Marker trait for the viewport-array-2 device extension; it declares no
/// additional commands.
pub trait NvViewportArray2Extension: DeviceV1_0 {}
impl NvViewportArray2Extension for crate::Device {}
/// Marker trait for the viewport-swizzle device extension; it declares no
/// additional commands.
pub trait NvViewportSwizzleExtension: DeviceV1_0 {}
impl NvViewportSwizzleExtension for crate::Device {}
/// Marker trait for the Win32 keyed-mutex device extension; it declares no
/// additional commands.
pub trait NvWin32KeyedMutexExtension: DeviceV1_0 {}
impl NvWin32KeyedMutexExtension for crate::Device {}
/// Marker trait for the render-pass shader-resolve device extension; it
/// declares no additional commands.
pub trait QcomRenderPassShaderResolveExtension: DeviceV1_0 {}
impl QcomRenderPassShaderResolveExtension for crate::Device {}
/// Marker trait for the render-pass store-ops device extension; it declares
/// no additional commands.
pub trait QcomRenderPassStoreOpsExtension: DeviceV1_0 {}
impl QcomRenderPassStoreOpsExtension for crate::Device {}
/// Marker trait for the render-pass transform device extension; it declares
/// no additional commands.
pub trait QcomRenderPassTransformExtension: DeviceV1_0 {}
impl QcomRenderPassTransformExtension for crate::Device {}