#![allow(
non_camel_case_types,
non_snake_case,
clippy::missing_safety_doc,
clippy::too_many_arguments,
clippy::type_complexity,
clippy::upper_case_acronyms
)]
use std::mem::MaybeUninit;
use std::os::raw::c_void;
use std::ptr;
use super::*;
/// Vulkan 1.0 entry-point commands — the wrappers that can be called before
/// an [`Instance`] has been created.
pub trait EntryV1_0 {
    /// Returns the loaded entry function-pointer table backing these wrappers.
    fn commands(&self) -> &EntryCommands;
    /// Creates a new Vulkan instance via the loaded `create_instance` command.
    ///
    /// Returns the new [`Instance`] on success, or the failing result code.
    #[inline]
    unsafe fn create_instance(
        &self,
        create_info: &InstanceCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<Instance> {
        // The callee writes the handle; it is only read after the SUCCESS
        // check below, so the uninitialized state never escapes.
        let mut instance = MaybeUninit::<Instance>::uninit();
        let __result = (self.commands().create_instance)(
            create_info,
            // `None` allocator maps to a null pointer (implementation default).
            allocator.map_or(ptr::null(), |v| v),
            instance.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(instance.assume_init())
        } else {
            Err(__result.into())
        }
    }
    /// Enumerates instance extension properties, optionally restricted to a
    /// single layer.
    ///
    /// `layer_name` is passed through as a raw pointer; it is presumably a
    /// NUL-terminated byte string — TODO confirm against callers.
    #[inline]
    unsafe fn enumerate_instance_extension_properties(
        &self,
        layer_name: Option<&[u8]>,
    ) -> crate::VkResult<Vec<ExtensionProperties>> {
        // First call (null output pointer) queries only the element count.
        let mut property_count = 0;
        (self.commands().enumerate_instance_extension_properties)(
            layer_name.map_or(ptr::null(), |v| v.as_ptr().cast()),
            &mut property_count,
            ptr::null_mut(),
        );
        // Second call fills a buffer sized from the queried count.
        let mut properties = Vec::with_capacity(property_count as usize);
        let __result = (self.commands().enumerate_instance_extension_properties)(
            layer_name.map_or(ptr::null(), |v| v.as_ptr().cast()),
            &mut property_count,
            properties.as_mut_ptr(),
        );
        // NOTE(review): if the available count changed between the two calls
        // this assertion can fire in debug builds; release builds trust the
        // count reported by the second call.
        debug_assert!(properties.capacity() == property_count as usize);
        properties.set_len(property_count as usize);
        if __result == Result::SUCCESS {
            Ok(properties)
        } else {
            Err(__result.into())
        }
    }
    /// Enumerates instance layer properties using the same count-then-fill
    /// pattern as the extension query above.
    #[inline]
    unsafe fn enumerate_instance_layer_properties(&self) -> crate::VkResult<Vec<LayerProperties>> {
        let mut property_count = 0;
        (self.commands().enumerate_instance_layer_properties)(&mut property_count, ptr::null_mut());
        let mut properties = Vec::with_capacity(property_count as usize);
        let __result = (self.commands().enumerate_instance_layer_properties)(
            &mut property_count,
            properties.as_mut_ptr(),
        );
        debug_assert!(properties.capacity() == property_count as usize);
        properties.set_len(property_count as usize);
        if __result == Result::SUCCESS {
            Ok(properties)
        } else {
            Err(__result.into())
        }
    }
}
/// Exposes the Vulkan 1.0 entry commands on the concrete [`crate::Entry`].
impl EntryV1_0 for crate::Entry {
    #[inline]
    fn commands(&self) -> &EntryCommands {
        // The function-pointer table stored on `Entry` at load time.
        &self.commands
    }
}
/// Vulkan 1.0 instance-level commands — wrappers that operate on an
/// [`Instance`] handle and on the physical devices it exposes.
pub trait InstanceV1_0 {
    /// Returns the loaded instance function-pointer table backing these wrappers.
    fn commands(&self) -> &InstanceCommands;
    /// Returns the raw [`Instance`] handle these commands were loaded for.
    fn handle(&self) -> Instance;
    /// Creates a logical [`Device`] for `physical_device` via the loaded
    /// `create_device` command.
    #[inline]
    unsafe fn create_device(
        &self,
        physical_device: PhysicalDevice,
        create_info: &DeviceCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<Device> {
        // Only read after the SUCCESS check; the callee fills the handle.
        let mut device = MaybeUninit::<Device>::uninit();
        let __result = (self.commands().create_device)(
            physical_device,
            create_info,
            // `None` allocator maps to a null pointer (implementation default).
            allocator.map_or(ptr::null(), |v| v),
            device.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(device.assume_init())
        } else {
            Err(__result.into())
        }
    }
    /// Destroys the instance returned by [`Self::handle`].
    #[inline]
    unsafe fn destroy_instance(&self, allocator: Option<&AllocationCallbacks>) {
        // This command produces no result code; the binding is kept only so
        // the generated wrapper shape stays uniform.
        let __result =
            (self.commands().destroy_instance)(self.handle(), allocator.map_or(ptr::null(), |v| v));
    }
    /// Enumerates device extension properties for `physical_device`,
    /// optionally restricted to a single layer.
    ///
    /// `layer_name` is passed through as a raw pointer; it is presumably a
    /// NUL-terminated byte string — TODO confirm against callers.
    #[inline]
    unsafe fn enumerate_device_extension_properties(
        &self,
        physical_device: PhysicalDevice,
        layer_name: Option<&[u8]>,
    ) -> crate::VkResult<Vec<ExtensionProperties>> {
        // Count-then-fill: first call with a null output queries the count.
        let mut property_count = 0;
        (self.commands().enumerate_device_extension_properties)(
            physical_device,
            layer_name.map_or(ptr::null(), |v| v.as_ptr().cast()),
            &mut property_count,
            ptr::null_mut(),
        );
        let mut properties = Vec::with_capacity(property_count as usize);
        let __result = (self.commands().enumerate_device_extension_properties)(
            physical_device,
            layer_name.map_or(ptr::null(), |v| v.as_ptr().cast()),
            &mut property_count,
            properties.as_mut_ptr(),
        );
        // NOTE(review): a count change between the two calls trips this
        // assertion in debug builds only.
        debug_assert!(properties.capacity() == property_count as usize);
        properties.set_len(property_count as usize);
        if __result == Result::SUCCESS {
            Ok(properties)
        } else {
            Err(__result.into())
        }
    }
    /// Enumerates layer properties for `physical_device` using the
    /// count-then-fill pattern.
    #[inline]
    unsafe fn enumerate_device_layer_properties(
        &self,
        physical_device: PhysicalDevice,
    ) -> crate::VkResult<Vec<LayerProperties>> {
        let mut property_count = 0;
        (self.commands().enumerate_device_layer_properties)(
            physical_device,
            &mut property_count,
            ptr::null_mut(),
        );
        let mut properties = Vec::with_capacity(property_count as usize);
        let __result = (self.commands().enumerate_device_layer_properties)(
            physical_device,
            &mut property_count,
            properties.as_mut_ptr(),
        );
        debug_assert!(properties.capacity() == property_count as usize);
        properties.set_len(property_count as usize);
        if __result == Result::SUCCESS {
            Ok(properties)
        } else {
            Err(__result.into())
        }
    }
    /// Enumerates the physical devices available to this instance using the
    /// count-then-fill pattern.
    #[inline]
    unsafe fn enumerate_physical_devices(&self) -> crate::VkResult<Vec<PhysicalDevice>> {
        let mut physical_device_count = 0;
        (self.commands().enumerate_physical_devices)(
            self.handle(),
            &mut physical_device_count,
            ptr::null_mut(),
        );
        let mut physical_devices = Vec::with_capacity(physical_device_count as usize);
        let __result = (self.commands().enumerate_physical_devices)(
            self.handle(),
            &mut physical_device_count,
            physical_devices.as_mut_ptr(),
        );
        debug_assert!(physical_devices.capacity() == physical_device_count as usize);
        physical_devices.set_len(physical_device_count as usize);
        if __result == Result::SUCCESS {
            Ok(physical_devices)
        } else {
            Err(__result.into())
        }
    }
    /// Queries the feature set supported by `physical_device`.
    #[inline]
    unsafe fn get_physical_device_features(
        &self,
        physical_device: PhysicalDevice,
    ) -> PhysicalDeviceFeatures {
        // No result code is checked: the output is returned unconditionally,
        // so the wrapped command is expected to always fill `features`.
        let mut features = MaybeUninit::<PhysicalDeviceFeatures>::uninit();
        let __result =
            (self.commands().get_physical_device_features)(physical_device, features.as_mut_ptr());
        features.assume_init()
    }
    /// Queries the properties of `format` on `physical_device`.
    #[inline]
    unsafe fn get_physical_device_format_properties(
        &self,
        physical_device: PhysicalDevice,
        format: Format,
    ) -> FormatProperties {
        let mut format_properties = MaybeUninit::<FormatProperties>::uninit();
        let __result = (self.commands().get_physical_device_format_properties)(
            physical_device,
            format,
            format_properties.as_mut_ptr(),
        );
        format_properties.assume_init()
    }
    /// Queries the image-format capabilities for the given combination of
    /// format, type, tiling, usage and flags; fails if the combination is
    /// unsupported.
    #[inline]
    unsafe fn get_physical_device_image_format_properties(
        &self,
        physical_device: PhysicalDevice,
        format: Format,
        type_: ImageType,
        tiling: ImageTiling,
        usage: ImageUsageFlags,
        flags: ImageCreateFlags,
    ) -> crate::VkResult<ImageFormatProperties> {
        let mut image_format_properties = MaybeUninit::<ImageFormatProperties>::uninit();
        let __result = (self.commands().get_physical_device_image_format_properties)(
            physical_device,
            format,
            type_,
            tiling,
            usage,
            flags,
            image_format_properties.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(image_format_properties.assume_init())
        } else {
            Err(__result.into())
        }
    }
    /// Queries the memory heap/type layout of `physical_device`.
    #[inline]
    unsafe fn get_physical_device_memory_properties(
        &self,
        physical_device: PhysicalDevice,
    ) -> PhysicalDeviceMemoryProperties {
        let mut memory_properties = MaybeUninit::<PhysicalDeviceMemoryProperties>::uninit();
        let __result = (self.commands().get_physical_device_memory_properties)(
            physical_device,
            memory_properties.as_mut_ptr(),
        );
        memory_properties.assume_init()
    }
    /// Queries the general properties (limits, vendor info, …) of
    /// `physical_device`.
    #[inline]
    unsafe fn get_physical_device_properties(
        &self,
        physical_device: PhysicalDevice,
    ) -> PhysicalDeviceProperties {
        let mut properties = MaybeUninit::<PhysicalDeviceProperties>::uninit();
        let __result = (self.commands().get_physical_device_properties)(
            physical_device,
            properties.as_mut_ptr(),
        );
        properties.assume_init()
    }
    /// Queries the queue families exposed by `physical_device` using the
    /// count-then-fill pattern; this command reports no result code, so the
    /// list is returned unconditionally.
    #[inline]
    unsafe fn get_physical_device_queue_family_properties(
        &self,
        physical_device: PhysicalDevice,
    ) -> Vec<QueueFamilyProperties> {
        let mut queue_family_property_count = 0;
        (self.commands().get_physical_device_queue_family_properties)(
            physical_device,
            &mut queue_family_property_count,
            ptr::null_mut(),
        );
        let mut queue_family_properties = Vec::with_capacity(queue_family_property_count as usize);
        let __result = (self.commands().get_physical_device_queue_family_properties)(
            physical_device,
            &mut queue_family_property_count,
            queue_family_properties.as_mut_ptr(),
        );
        debug_assert!(queue_family_properties.capacity() == queue_family_property_count as usize);
        queue_family_properties.set_len(queue_family_property_count as usize);
        queue_family_properties
    }
    /// Queries sparse-image format properties for the given parameter
    /// combination using the count-then-fill pattern; no result code is
    /// reported, so the list is returned unconditionally.
    #[inline]
    unsafe fn get_physical_device_sparse_image_format_properties(
        &self,
        physical_device: PhysicalDevice,
        format: Format,
        type_: ImageType,
        samples: SampleCountFlags,
        usage: ImageUsageFlags,
        tiling: ImageTiling,
    ) -> Vec<SparseImageFormatProperties> {
        let mut property_count = 0;
        (self
            .commands()
            .get_physical_device_sparse_image_format_properties)(
            physical_device,
            format,
            type_,
            samples,
            usage,
            tiling,
            &mut property_count,
            ptr::null_mut(),
        );
        let mut properties = Vec::with_capacity(property_count as usize);
        let __result = (self
            .commands()
            .get_physical_device_sparse_image_format_properties)(
            physical_device,
            format,
            type_,
            samples,
            usage,
            tiling,
            &mut property_count,
            properties.as_mut_ptr(),
        );
        debug_assert!(properties.capacity() == property_count as usize);
        properties.set_len(property_count as usize);
        properties
    }
}
/// Exposes the Vulkan 1.0 instance commands on the concrete [`crate::Instance`].
impl InstanceV1_0 for crate::Instance {
    #[inline]
    fn commands(&self) -> &InstanceCommands {
        // The function-pointer table loaded for this instance.
        &self.commands
    }
    #[inline]
    fn handle(&self) -> Instance {
        // The raw handle wrapped by this `Instance`.
        self.handle
    }
}
pub trait DeviceV1_0 {
fn commands(&self) -> &DeviceCommands;
fn handle(&self) -> Device;
#[inline]
unsafe fn allocate_command_buffers(
&self,
allocate_info: &CommandBufferAllocateInfo,
) -> crate::VkResult<Vec<CommandBuffer>> {
let mut command_buffers =
Vec::with_capacity(allocate_info.as_ref().command_buffer_count as usize);
let __result = (self.commands().allocate_command_buffers)(
self.handle(),
allocate_info,
command_buffers.as_mut_ptr(),
);
command_buffers.set_len(allocate_info.as_ref().command_buffer_count as usize);
if __result == Result::SUCCESS {
Ok(command_buffers)
} else {
Err(__result.into())
}
}
#[inline]
unsafe fn allocate_descriptor_sets(
&self,
allocate_info: &DescriptorSetAllocateInfo,
) -> crate::VkResult<Vec<DescriptorSet>> {
let mut descriptor_sets =
Vec::with_capacity(allocate_info.as_ref().descriptor_set_count as usize);
let __result = (self.commands().allocate_descriptor_sets)(
self.handle(),
allocate_info,
descriptor_sets.as_mut_ptr(),
);
descriptor_sets.set_len(allocate_info.as_ref().descriptor_set_count as usize);
if __result == Result::SUCCESS {
Ok(descriptor_sets)
} else {
Err(__result.into())
}
}
#[inline]
unsafe fn allocate_memory(
&self,
allocate_info: &MemoryAllocateInfo,
allocator: Option<&AllocationCallbacks>,
) -> crate::VkResult<DeviceMemory> {
let mut memory = MaybeUninit::<DeviceMemory>::uninit();
let __result = (self.commands().allocate_memory)(
self.handle(),
allocate_info,
allocator.map_or(ptr::null(), |v| v),
memory.as_mut_ptr(),
);
if __result == Result::SUCCESS {
Ok(memory.assume_init())
} else {
Err(__result.into())
}
}
#[inline]
unsafe fn begin_command_buffer(
&self,
command_buffer: CommandBuffer,
begin_info: &CommandBufferBeginInfo,
) -> crate::VkResult<()> {
let __result = (self.commands().begin_command_buffer)(command_buffer, begin_info);
if __result == Result::SUCCESS {
Ok(())
} else {
Err(__result.into())
}
}
#[inline]
unsafe fn bind_buffer_memory(
&self,
buffer: Buffer,
memory: DeviceMemory,
memory_offset: DeviceSize,
) -> crate::VkResult<()> {
let __result =
(self.commands().bind_buffer_memory)(self.handle(), buffer, memory, memory_offset);
if __result == Result::SUCCESS {
Ok(())
} else {
Err(__result.into())
}
}
#[inline]
unsafe fn bind_image_memory(
&self,
image: Image,
memory: DeviceMemory,
memory_offset: DeviceSize,
) -> crate::VkResult<()> {
let __result =
(self.commands().bind_image_memory)(self.handle(), image, memory, memory_offset);
if __result == Result::SUCCESS {
Ok(())
} else {
Err(__result.into())
}
}
#[inline]
unsafe fn cmd_begin_query(
&self,
command_buffer: CommandBuffer,
query_pool: QueryPool,
query: u32,
flags: QueryControlFlags,
) {
let __result = (self.commands().cmd_begin_query)(command_buffer, query_pool, query, flags);
}
#[inline]
unsafe fn cmd_begin_render_pass(
&self,
command_buffer: CommandBuffer,
render_pass_begin: &RenderPassBeginInfo,
contents: SubpassContents,
) {
let __result =
(self.commands().cmd_begin_render_pass)(command_buffer, render_pass_begin, contents);
}
#[inline]
unsafe fn cmd_bind_descriptor_sets(
&self,
command_buffer: CommandBuffer,
pipeline_bind_point: PipelineBindPoint,
layout: PipelineLayout,
first_set: u32,
descriptor_sets: &[DescriptorSet],
dynamic_offsets: &[u32],
) {
let __result = (self.commands().cmd_bind_descriptor_sets)(
command_buffer,
pipeline_bind_point,
layout,
first_set,
descriptor_sets.len() as u32,
descriptor_sets.as_ptr(),
dynamic_offsets.len() as u32,
dynamic_offsets.as_ptr(),
);
}
#[inline]
unsafe fn cmd_bind_index_buffer(
&self,
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
index_type: IndexType,
) {
let __result =
(self.commands().cmd_bind_index_buffer)(command_buffer, buffer, offset, index_type);
}
#[inline]
unsafe fn cmd_bind_pipeline(
&self,
command_buffer: CommandBuffer,
pipeline_bind_point: PipelineBindPoint,
pipeline: Pipeline,
) {
let __result =
(self.commands().cmd_bind_pipeline)(command_buffer, pipeline_bind_point, pipeline);
}
#[inline]
unsafe fn cmd_bind_vertex_buffers(
&self,
command_buffer: CommandBuffer,
first_binding: u32,
buffers: &[Buffer],
offsets: &[DeviceSize],
) {
let __result = (self.commands().cmd_bind_vertex_buffers)(
command_buffer,
first_binding,
buffers.len() as u32,
buffers.as_ptr(),
offsets.as_ptr(),
);
}
#[inline]
unsafe fn cmd_blit_image(
&self,
command_buffer: CommandBuffer,
src_image: Image,
src_image_layout: ImageLayout,
dst_image: Image,
dst_image_layout: ImageLayout,
regions: &[impl Cast<Target = ImageBlit>],
filter: Filter,
) {
let __result = (self.commands().cmd_blit_image)(
command_buffer,
src_image,
src_image_layout,
dst_image,
dst_image_layout,
regions.len() as u32,
regions.as_ptr().cast(),
filter,
);
}
#[inline]
unsafe fn cmd_clear_attachments(
&self,
command_buffer: CommandBuffer,
attachments: &[impl Cast<Target = ClearAttachment>],
rects: &[impl Cast<Target = ClearRect>],
) {
let __result = (self.commands().cmd_clear_attachments)(
command_buffer,
attachments.len() as u32,
attachments.as_ptr().cast(),
rects.len() as u32,
rects.as_ptr().cast(),
);
}
#[inline]
unsafe fn cmd_clear_color_image(
&self,
command_buffer: CommandBuffer,
image: Image,
image_layout: ImageLayout,
color: &ClearColorValue,
ranges: &[impl Cast<Target = ImageSubresourceRange>],
) {
let __result = (self.commands().cmd_clear_color_image)(
command_buffer,
image,
image_layout,
color,
ranges.len() as u32,
ranges.as_ptr().cast(),
);
}
#[inline]
unsafe fn cmd_clear_depth_stencil_image(
&self,
command_buffer: CommandBuffer,
image: Image,
image_layout: ImageLayout,
depth_stencil: &ClearDepthStencilValue,
ranges: &[impl Cast<Target = ImageSubresourceRange>],
) {
let __result = (self.commands().cmd_clear_depth_stencil_image)(
command_buffer,
image,
image_layout,
depth_stencil,
ranges.len() as u32,
ranges.as_ptr().cast(),
);
}
#[inline]
unsafe fn cmd_copy_buffer(
&self,
command_buffer: CommandBuffer,
src_buffer: Buffer,
dst_buffer: Buffer,
regions: &[impl Cast<Target = BufferCopy>],
) {
let __result = (self.commands().cmd_copy_buffer)(
command_buffer,
src_buffer,
dst_buffer,
regions.len() as u32,
regions.as_ptr().cast(),
);
}
#[inline]
unsafe fn cmd_copy_buffer_to_image(
&self,
command_buffer: CommandBuffer,
src_buffer: Buffer,
dst_image: Image,
dst_image_layout: ImageLayout,
regions: &[impl Cast<Target = BufferImageCopy>],
) {
let __result = (self.commands().cmd_copy_buffer_to_image)(
command_buffer,
src_buffer,
dst_image,
dst_image_layout,
regions.len() as u32,
regions.as_ptr().cast(),
);
}
#[inline]
unsafe fn cmd_copy_image(
&self,
command_buffer: CommandBuffer,
src_image: Image,
src_image_layout: ImageLayout,
dst_image: Image,
dst_image_layout: ImageLayout,
regions: &[impl Cast<Target = ImageCopy>],
) {
let __result = (self.commands().cmd_copy_image)(
command_buffer,
src_image,
src_image_layout,
dst_image,
dst_image_layout,
regions.len() as u32,
regions.as_ptr().cast(),
);
}
#[inline]
unsafe fn cmd_copy_image_to_buffer(
&self,
command_buffer: CommandBuffer,
src_image: Image,
src_image_layout: ImageLayout,
dst_buffer: Buffer,
regions: &[impl Cast<Target = BufferImageCopy>],
) {
let __result = (self.commands().cmd_copy_image_to_buffer)(
command_buffer,
src_image,
src_image_layout,
dst_buffer,
regions.len() as u32,
regions.as_ptr().cast(),
);
}
#[inline]
unsafe fn cmd_copy_query_pool_results(
&self,
command_buffer: CommandBuffer,
query_pool: QueryPool,
first_query: u32,
query_count: u32,
dst_buffer: Buffer,
dst_offset: DeviceSize,
stride: DeviceSize,
flags: QueryResultFlags,
) {
let __result = (self.commands().cmd_copy_query_pool_results)(
command_buffer,
query_pool,
first_query,
query_count,
dst_buffer,
dst_offset,
stride,
flags,
);
}
#[inline]
unsafe fn cmd_dispatch(
&self,
command_buffer: CommandBuffer,
group_count_x: u32,
group_count_y: u32,
group_count_z: u32,
) {
let __result = (self.commands().cmd_dispatch)(
command_buffer,
group_count_x,
group_count_y,
group_count_z,
);
}
#[inline]
unsafe fn cmd_dispatch_indirect(
&self,
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
) {
let __result = (self.commands().cmd_dispatch_indirect)(command_buffer, buffer, offset);
}
#[inline]
unsafe fn cmd_draw(
&self,
command_buffer: CommandBuffer,
vertex_count: u32,
instance_count: u32,
first_vertex: u32,
first_instance: u32,
) {
let __result = (self.commands().cmd_draw)(
command_buffer,
vertex_count,
instance_count,
first_vertex,
first_instance,
);
}
#[inline]
unsafe fn cmd_draw_indexed(
&self,
command_buffer: CommandBuffer,
index_count: u32,
instance_count: u32,
first_index: u32,
vertex_offset: i32,
first_instance: u32,
) {
let __result = (self.commands().cmd_draw_indexed)(
command_buffer,
index_count,
instance_count,
first_index,
vertex_offset,
first_instance,
);
}
#[inline]
unsafe fn cmd_draw_indexed_indirect(
&self,
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
draw_count: u32,
stride: u32,
) {
let __result = (self.commands().cmd_draw_indexed_indirect)(
command_buffer,
buffer,
offset,
draw_count,
stride,
);
}
#[inline]
unsafe fn cmd_draw_indirect(
&self,
command_buffer: CommandBuffer,
buffer: Buffer,
offset: DeviceSize,
draw_count: u32,
stride: u32,
) {
let __result =
(self.commands().cmd_draw_indirect)(command_buffer, buffer, offset, draw_count, stride);
}
#[inline]
unsafe fn cmd_end_query(
&self,
command_buffer: CommandBuffer,
query_pool: QueryPool,
query: u32,
) {
let __result = (self.commands().cmd_end_query)(command_buffer, query_pool, query);
}
#[inline]
unsafe fn cmd_end_render_pass(&self, command_buffer: CommandBuffer) {
let __result = (self.commands().cmd_end_render_pass)(command_buffer);
}
#[inline]
unsafe fn cmd_execute_commands(
&self,
command_buffer: CommandBuffer,
command_buffers: &[CommandBuffer],
) {
let __result = (self.commands().cmd_execute_commands)(
command_buffer,
command_buffers.len() as u32,
command_buffers.as_ptr(),
);
}
#[inline]
unsafe fn cmd_fill_buffer(
&self,
command_buffer: CommandBuffer,
dst_buffer: Buffer,
dst_offset: DeviceSize,
size: DeviceSize,
data: u32,
) {
let __result =
(self.commands().cmd_fill_buffer)(command_buffer, dst_buffer, dst_offset, size, data);
}
#[inline]
unsafe fn cmd_next_subpass(&self, command_buffer: CommandBuffer, contents: SubpassContents) {
let __result = (self.commands().cmd_next_subpass)(command_buffer, contents);
}
#[inline]
unsafe fn cmd_pipeline_barrier(
&self,
command_buffer: CommandBuffer,
src_stage_mask: PipelineStageFlags,
dst_stage_mask: PipelineStageFlags,
dependency_flags: DependencyFlags,
memory_barriers: &[impl Cast<Target = MemoryBarrier>],
buffer_memory_barriers: &[impl Cast<Target = BufferMemoryBarrier>],
image_memory_barriers: &[impl Cast<Target = ImageMemoryBarrier>],
) {
let __result = (self.commands().cmd_pipeline_barrier)(
command_buffer,
src_stage_mask,
dst_stage_mask,
dependency_flags,
memory_barriers.len() as u32,
memory_barriers.as_ptr().cast(),
buffer_memory_barriers.len() as u32,
buffer_memory_barriers.as_ptr().cast(),
image_memory_barriers.len() as u32,
image_memory_barriers.as_ptr().cast(),
);
}
#[inline]
unsafe fn cmd_push_constants(
&self,
command_buffer: CommandBuffer,
layout: PipelineLayout,
stage_flags: ShaderStageFlags,
offset: u32,
values: &[u8],
) {
let __result = (self.commands().cmd_push_constants)(
command_buffer,
layout,
stage_flags,
offset,
values.len() as u32,
values.as_ptr() as *const c_void,
);
}
#[inline]
unsafe fn cmd_reset_event(
&self,
command_buffer: CommandBuffer,
event: Event,
stage_mask: PipelineStageFlags,
) {
let __result = (self.commands().cmd_reset_event)(command_buffer, event, stage_mask);
}
#[inline]
unsafe fn cmd_reset_query_pool(
&self,
command_buffer: CommandBuffer,
query_pool: QueryPool,
first_query: u32,
query_count: u32,
) {
let __result = (self.commands().cmd_reset_query_pool)(
command_buffer,
query_pool,
first_query,
query_count,
);
}
#[inline]
unsafe fn cmd_resolve_image(
&self,
command_buffer: CommandBuffer,
src_image: Image,
src_image_layout: ImageLayout,
dst_image: Image,
dst_image_layout: ImageLayout,
regions: &[impl Cast<Target = ImageResolve>],
) {
let __result = (self.commands().cmd_resolve_image)(
command_buffer,
src_image,
src_image_layout,
dst_image,
dst_image_layout,
regions.len() as u32,
regions.as_ptr().cast(),
);
}
#[inline]
unsafe fn cmd_set_blend_constants(
&self,
command_buffer: CommandBuffer,
blend_constants: [f32; 4],
) {
let __result =
(self.commands().cmd_set_blend_constants)(command_buffer, blend_constants.as_ptr());
}
#[inline]
unsafe fn cmd_set_depth_bias(
&self,
command_buffer: CommandBuffer,
depth_bias_constant_factor: f32,
depth_bias_clamp: f32,
depth_bias_slope_factor: f32,
) {
let __result = (self.commands().cmd_set_depth_bias)(
command_buffer,
depth_bias_constant_factor,
depth_bias_clamp,
depth_bias_slope_factor,
);
}
#[inline]
unsafe fn cmd_set_depth_bounds(
&self,
command_buffer: CommandBuffer,
min_depth_bounds: f32,
max_depth_bounds: f32,
) {
let __result = (self.commands().cmd_set_depth_bounds)(
command_buffer,
min_depth_bounds,
max_depth_bounds,
);
}
#[inline]
unsafe fn cmd_set_event(
&self,
command_buffer: CommandBuffer,
event: Event,
stage_mask: PipelineStageFlags,
) {
let __result = (self.commands().cmd_set_event)(command_buffer, event, stage_mask);
}
#[inline]
unsafe fn cmd_set_line_width(&self, command_buffer: CommandBuffer, line_width: f32) {
let __result = (self.commands().cmd_set_line_width)(command_buffer, line_width);
}
#[inline]
unsafe fn cmd_set_scissor(
&self,
command_buffer: CommandBuffer,
first_scissor: u32,
scissors: &[impl Cast<Target = Rect2D>],
) {
let __result = (self.commands().cmd_set_scissor)(
command_buffer,
first_scissor,
scissors.len() as u32,
scissors.as_ptr().cast(),
);
}
#[inline]
unsafe fn cmd_set_stencil_compare_mask(
&self,
command_buffer: CommandBuffer,
face_mask: StencilFaceFlags,
compare_mask: u32,
) {
let __result =
(self.commands().cmd_set_stencil_compare_mask)(command_buffer, face_mask, compare_mask);
}
#[inline]
unsafe fn cmd_set_stencil_reference(
&self,
command_buffer: CommandBuffer,
face_mask: StencilFaceFlags,
reference: u32,
) {
let __result =
(self.commands().cmd_set_stencil_reference)(command_buffer, face_mask, reference);
}
#[inline]
unsafe fn cmd_set_stencil_write_mask(
&self,
command_buffer: CommandBuffer,
face_mask: StencilFaceFlags,
write_mask: u32,
) {
let __result =
(self.commands().cmd_set_stencil_write_mask)(command_buffer, face_mask, write_mask);
}
#[inline]
unsafe fn cmd_set_viewport(
&self,
command_buffer: CommandBuffer,
first_viewport: u32,
viewports: &[impl Cast<Target = Viewport>],
) {
let __result = (self.commands().cmd_set_viewport)(
command_buffer,
first_viewport,
viewports.len() as u32,
viewports.as_ptr().cast(),
);
}
#[inline]
unsafe fn cmd_update_buffer(
&self,
command_buffer: CommandBuffer,
dst_buffer: Buffer,
dst_offset: DeviceSize,
data: &[u8],
) {
let __result = (self.commands().cmd_update_buffer)(
command_buffer,
dst_buffer,
dst_offset,
data.len() as DeviceSize,
data.as_ptr() as *const c_void,
);
}
#[inline]
unsafe fn cmd_wait_events(
&self,
command_buffer: CommandBuffer,
events: &[Event],
src_stage_mask: PipelineStageFlags,
dst_stage_mask: PipelineStageFlags,
memory_barriers: &[impl Cast<Target = MemoryBarrier>],
buffer_memory_barriers: &[impl Cast<Target = BufferMemoryBarrier>],
image_memory_barriers: &[impl Cast<Target = ImageMemoryBarrier>],
) {
let __result = (self.commands().cmd_wait_events)(
command_buffer,
events.len() as u32,
events.as_ptr(),
src_stage_mask,
dst_stage_mask,
memory_barriers.len() as u32,
memory_barriers.as_ptr().cast(),
buffer_memory_barriers.len() as u32,
buffer_memory_barriers.as_ptr().cast(),
image_memory_barriers.len() as u32,
image_memory_barriers.as_ptr().cast(),
);
}
#[inline]
unsafe fn cmd_write_timestamp(
&self,
command_buffer: CommandBuffer,
pipeline_stage: PipelineStageFlags,
query_pool: QueryPool,
query: u32,
) {
let __result = (self.commands().cmd_write_timestamp)(
command_buffer,
pipeline_stage,
query_pool,
query,
);
}
#[inline]
unsafe fn create_buffer(
&self,
create_info: &BufferCreateInfo,
allocator: Option<&AllocationCallbacks>,
) -> crate::VkResult<Buffer> {
let mut buffer = MaybeUninit::<Buffer>::uninit();
let __result = (self.commands().create_buffer)(
self.handle(),
create_info,
allocator.map_or(ptr::null(), |v| v),
buffer.as_mut_ptr(),
);
if __result == Result::SUCCESS {
Ok(buffer.assume_init())
} else {
Err(__result.into())
}
}
#[inline]
unsafe fn create_buffer_view(
&self,
create_info: &BufferViewCreateInfo,
allocator: Option<&AllocationCallbacks>,
) -> crate::VkResult<BufferView> {
let mut view = MaybeUninit::<BufferView>::uninit();
let __result = (self.commands().create_buffer_view)(
self.handle(),
create_info,
allocator.map_or(ptr::null(), |v| v),
view.as_mut_ptr(),
);
if __result == Result::SUCCESS {
Ok(view.assume_init())
} else {
Err(__result.into())
}
}
#[inline]
unsafe fn create_command_pool(
&self,
create_info: &CommandPoolCreateInfo,
allocator: Option<&AllocationCallbacks>,
) -> crate::VkResult<CommandPool> {
let mut command_pool = MaybeUninit::<CommandPool>::uninit();
let __result = (self.commands().create_command_pool)(
self.handle(),
create_info,
allocator.map_or(ptr::null(), |v| v),
command_pool.as_mut_ptr(),
);
if __result == Result::SUCCESS {
Ok(command_pool.assume_init())
} else {
Err(__result.into())
}
}
#[inline]
unsafe fn create_compute_pipelines(
&self,
pipeline_cache: PipelineCache,
create_infos: &[impl Cast<Target = ComputePipelineCreateInfo>],
allocator: Option<&AllocationCallbacks>,
) -> crate::VkSuccessResult<Pipeline> {
let mut pipelines = MaybeUninit::<Pipeline>::uninit();
let __result = (self.commands().create_compute_pipelines)(
self.handle(),
pipeline_cache,
create_infos.len() as u32,
create_infos.as_ptr().cast(),
allocator.map_or(ptr::null(), |v| v),
pipelines.as_mut_ptr(),
);
if __result >= Result::SUCCESS {
Ok((pipelines.assume_init(), __result.into()))
} else {
Err(__result.into())
}
}
#[inline]
unsafe fn create_descriptor_pool(
&self,
create_info: &DescriptorPoolCreateInfo,
allocator: Option<&AllocationCallbacks>,
) -> crate::VkResult<DescriptorPool> {
let mut descriptor_pool = MaybeUninit::<DescriptorPool>::uninit();
let __result = (self.commands().create_descriptor_pool)(
self.handle(),
create_info,
allocator.map_or(ptr::null(), |v| v),
descriptor_pool.as_mut_ptr(),
);
if __result == Result::SUCCESS {
Ok(descriptor_pool.assume_init())
} else {
Err(__result.into())
}
}
#[inline]
unsafe fn create_descriptor_set_layout(
&self,
create_info: &DescriptorSetLayoutCreateInfo,
allocator: Option<&AllocationCallbacks>,
) -> crate::VkResult<DescriptorSetLayout> {
let mut set_layout = MaybeUninit::<DescriptorSetLayout>::uninit();
let __result = (self.commands().create_descriptor_set_layout)(
self.handle(),
create_info,
allocator.map_or(ptr::null(), |v| v),
set_layout.as_mut_ptr(),
);
if __result == Result::SUCCESS {
Ok(set_layout.assume_init())
} else {
Err(__result.into())
}
}
#[inline]
unsafe fn create_event(
&self,
create_info: &EventCreateInfo,
allocator: Option<&AllocationCallbacks>,
) -> crate::VkResult<Event> {
let mut event = MaybeUninit::<Event>::uninit();
let __result = (self.commands().create_event)(
self.handle(),
create_info,
allocator.map_or(ptr::null(), |v| v),
event.as_mut_ptr(),
);
if __result == Result::SUCCESS {
Ok(event.assume_init())
} else {
Err(__result.into())
}
}
#[inline]
unsafe fn create_fence(
&self,
create_info: &FenceCreateInfo,
allocator: Option<&AllocationCallbacks>,
) -> crate::VkResult<Fence> {
let mut fence = MaybeUninit::<Fence>::uninit();
let __result = (self.commands().create_fence)(
self.handle(),
create_info,
allocator.map_or(ptr::null(), |v| v),
fence.as_mut_ptr(),
);
if __result == Result::SUCCESS {
Ok(fence.assume_init())
} else {
Err(__result.into())
}
}
#[inline]
unsafe fn create_framebuffer(
&self,
create_info: &FramebufferCreateInfo,
allocator: Option<&AllocationCallbacks>,
) -> crate::VkResult<Framebuffer> {
let mut framebuffer = MaybeUninit::<Framebuffer>::uninit();
let __result = (self.commands().create_framebuffer)(
self.handle(),
create_info,
allocator.map_or(ptr::null(), |v| v),
framebuffer.as_mut_ptr(),
);
if __result == Result::SUCCESS {
Ok(framebuffer.assume_init())
} else {
Err(__result.into())
}
}
#[inline]
unsafe fn create_graphics_pipelines(
&self,
pipeline_cache: PipelineCache,
create_infos: &[impl Cast<Target = GraphicsPipelineCreateInfo>],
allocator: Option<&AllocationCallbacks>,
) -> crate::VkSuccessResult<Pipeline> {
let mut pipelines = MaybeUninit::<Pipeline>::uninit();
let __result = (self.commands().create_graphics_pipelines)(
self.handle(),
pipeline_cache,
create_infos.len() as u32,
create_infos.as_ptr().cast(),
allocator.map_or(ptr::null(), |v| v),
pipelines.as_mut_ptr(),
);
if __result >= Result::SUCCESS {
Ok((pipelines.assume_init(), __result.into()))
} else {
Err(__result.into())
}
}
#[inline]
unsafe fn create_image(
&self,
create_info: &ImageCreateInfo,
allocator: Option<&AllocationCallbacks>,
) -> crate::VkResult<Image> {
let mut image = MaybeUninit::<Image>::uninit();
let __result = (self.commands().create_image)(
self.handle(),
create_info,
allocator.map_or(ptr::null(), |v| v),
image.as_mut_ptr(),
);
if __result == Result::SUCCESS {
Ok(image.assume_init())
} else {
Err(__result.into())
}
}
#[inline]
unsafe fn create_image_view(
&self,
create_info: &ImageViewCreateInfo,
allocator: Option<&AllocationCallbacks>,
) -> crate::VkResult<ImageView> {
let mut view = MaybeUninit::<ImageView>::uninit();
let __result = (self.commands().create_image_view)(
self.handle(),
create_info,
allocator.map_or(ptr::null(), |v| v),
view.as_mut_ptr(),
);
if __result == Result::SUCCESS {
Ok(view.assume_init())
} else {
Err(__result.into())
}
}
#[inline]
unsafe fn create_pipeline_cache(
&self,
create_info: &PipelineCacheCreateInfo,
allocator: Option<&AllocationCallbacks>,
) -> crate::VkResult<PipelineCache> {
let mut pipeline_cache = MaybeUninit::<PipelineCache>::uninit();
let __result = (self.commands().create_pipeline_cache)(
self.handle(),
create_info,
allocator.map_or(ptr::null(), |v| v),
pipeline_cache.as_mut_ptr(),
);
if __result == Result::SUCCESS {
Ok(pipeline_cache.assume_init())
} else {
Err(__result.into())
}
}
#[inline]
unsafe fn create_pipeline_layout(
&self,
create_info: &PipelineLayoutCreateInfo,
allocator: Option<&AllocationCallbacks>,
) -> crate::VkResult<PipelineLayout> {
let mut pipeline_layout = MaybeUninit::<PipelineLayout>::uninit();
let __result = (self.commands().create_pipeline_layout)(
self.handle(),
create_info,
allocator.map_or(ptr::null(), |v| v),
pipeline_layout.as_mut_ptr(),
);
if __result == Result::SUCCESS {
Ok(pipeline_layout.assume_init())
} else {
Err(__result.into())
}
}
#[inline]
unsafe fn create_query_pool(
&self,
create_info: &QueryPoolCreateInfo,
allocator: Option<&AllocationCallbacks>,
) -> crate::VkResult<QueryPool> {
let mut query_pool = MaybeUninit::<QueryPool>::uninit();
let __result = (self.commands().create_query_pool)(
self.handle(),
create_info,
allocator.map_or(ptr::null(), |v| v),
query_pool.as_mut_ptr(),
);
if __result == Result::SUCCESS {
Ok(query_pool.assume_init())
} else {
Err(__result.into())
}
}
/// Creates a render pass (`vkCreateRenderPass`).
#[inline]
unsafe fn create_render_pass(
    &self,
    create_info: &RenderPassCreateInfo,
    allocator: Option<&AllocationCallbacks>,
) -> crate::VkResult<RenderPass> {
    let mut render_pass = MaybeUninit::<RenderPass>::uninit();
    let __result = (self.commands().create_render_pass)(
        self.handle(),
        create_info,
        allocator.map_or(ptr::null(), |v| v),
        render_pass.as_mut_ptr(),
    );
    if __result == Result::SUCCESS {
        Ok(render_pass.assume_init())
    } else {
        Err(__result.into())
    }
}
/// Creates a sampler object (`vkCreateSampler`).
#[inline]
unsafe fn create_sampler(
    &self,
    create_info: &SamplerCreateInfo,
    allocator: Option<&AllocationCallbacks>,
) -> crate::VkResult<Sampler> {
    let mut sampler = MaybeUninit::<Sampler>::uninit();
    let __result = (self.commands().create_sampler)(
        self.handle(),
        create_info,
        allocator.map_or(ptr::null(), |v| v),
        sampler.as_mut_ptr(),
    );
    if __result == Result::SUCCESS {
        Ok(sampler.assume_init())
    } else {
        Err(__result.into())
    }
}
/// Creates a semaphore (`vkCreateSemaphore`).
#[inline]
unsafe fn create_semaphore(
    &self,
    create_info: &SemaphoreCreateInfo,
    allocator: Option<&AllocationCallbacks>,
) -> crate::VkResult<Semaphore> {
    let mut semaphore = MaybeUninit::<Semaphore>::uninit();
    let __result = (self.commands().create_semaphore)(
        self.handle(),
        create_info,
        allocator.map_or(ptr::null(), |v| v),
        semaphore.as_mut_ptr(),
    );
    if __result == Result::SUCCESS {
        Ok(semaphore.assume_init())
    } else {
        Err(__result.into())
    }
}
/// Creates a shader module from SPIR-V code (`vkCreateShaderModule`).
#[inline]
unsafe fn create_shader_module(
    &self,
    create_info: &ShaderModuleCreateInfo,
    allocator: Option<&AllocationCallbacks>,
) -> crate::VkResult<ShaderModule> {
    let mut shader_module = MaybeUninit::<ShaderModule>::uninit();
    let __result = (self.commands().create_shader_module)(
        self.handle(),
        create_info,
        allocator.map_or(ptr::null(), |v| v),
        shader_module.as_mut_ptr(),
    );
    if __result == Result::SUCCESS {
        Ok(shader_module.assume_init())
    } else {
        Err(__result.into())
    }
}
/// Destroys a buffer handle (`vkDestroyBuffer`); pass `None` to use the
/// default allocator. The underlying command returns nothing.
#[inline]
unsafe fn destroy_buffer(&self, buffer: Buffer, allocator: Option<&AllocationCallbacks>) {
    let callbacks = match allocator {
        Some(callbacks) => callbacks as *const AllocationCallbacks,
        None => ptr::null(),
    };
    (self.commands().destroy_buffer)(self.handle(), buffer, callbacks);
}
/// Destroys a buffer view (`vkDestroyBufferView`).
#[inline]
unsafe fn destroy_buffer_view(
    &self,
    buffer_view: BufferView,
    allocator: Option<&AllocationCallbacks>,
) {
    let __result = (self.commands().destroy_buffer_view)(
        self.handle(),
        buffer_view,
        allocator.map_or(ptr::null(), |v| v),
    );
}
/// Destroys a command pool (`vkDestroyCommandPool`).
#[inline]
unsafe fn destroy_command_pool(
    &self,
    command_pool: CommandPool,
    allocator: Option<&AllocationCallbacks>,
) {
    let __result = (self.commands().destroy_command_pool)(
        self.handle(),
        command_pool,
        allocator.map_or(ptr::null(), |v| v),
    );
}
/// Destroys a descriptor pool (`vkDestroyDescriptorPool`).
#[inline]
unsafe fn destroy_descriptor_pool(
    &self,
    descriptor_pool: DescriptorPool,
    allocator: Option<&AllocationCallbacks>,
) {
    let __result = (self.commands().destroy_descriptor_pool)(
        self.handle(),
        descriptor_pool,
        allocator.map_or(ptr::null(), |v| v),
    );
}
/// Destroys a descriptor set layout (`vkDestroyDescriptorSetLayout`).
#[inline]
unsafe fn destroy_descriptor_set_layout(
    &self,
    descriptor_set_layout: DescriptorSetLayout,
    allocator: Option<&AllocationCallbacks>,
) {
    let __result = (self.commands().destroy_descriptor_set_layout)(
        self.handle(),
        descriptor_set_layout,
        allocator.map_or(ptr::null(), |v| v),
    );
}
/// Destroys this logical device (`vkDestroyDevice`); the handle must not be
/// used afterwards.
#[inline]
unsafe fn destroy_device(&self, allocator: Option<&AllocationCallbacks>) {
    let __result =
        (self.commands().destroy_device)(self.handle(), allocator.map_or(ptr::null(), |v| v));
}
/// Destroys an event object (`vkDestroyEvent`).
#[inline]
unsafe fn destroy_event(&self, event: Event, allocator: Option<&AllocationCallbacks>) {
    let __result = (self.commands().destroy_event)(
        self.handle(),
        event,
        allocator.map_or(ptr::null(), |v| v),
    );
}
/// Destroys a fence (`vkDestroyFence`).
#[inline]
unsafe fn destroy_fence(&self, fence: Fence, allocator: Option<&AllocationCallbacks>) {
    let __result = (self.commands().destroy_fence)(
        self.handle(),
        fence,
        allocator.map_or(ptr::null(), |v| v),
    );
}
/// Destroys a framebuffer (`vkDestroyFramebuffer`).
#[inline]
unsafe fn destroy_framebuffer(
    &self,
    framebuffer: Framebuffer,
    allocator: Option<&AllocationCallbacks>,
) {
    let __result = (self.commands().destroy_framebuffer)(
        self.handle(),
        framebuffer,
        allocator.map_or(ptr::null(), |v| v),
    );
}
/// Destroys an image (`vkDestroyImage`).
#[inline]
unsafe fn destroy_image(&self, image: Image, allocator: Option<&AllocationCallbacks>) {
    let __result = (self.commands().destroy_image)(
        self.handle(),
        image,
        allocator.map_or(ptr::null(), |v| v),
    );
}
/// Destroys an image view (`vkDestroyImageView`).
#[inline]
unsafe fn destroy_image_view(
    &self,
    image_view: ImageView,
    allocator: Option<&AllocationCallbacks>,
) {
    let __result = (self.commands().destroy_image_view)(
        self.handle(),
        image_view,
        allocator.map_or(ptr::null(), |v| v),
    );
}
/// Destroys a pipeline (`vkDestroyPipeline`).
#[inline]
unsafe fn destroy_pipeline(&self, pipeline: Pipeline, allocator: Option<&AllocationCallbacks>) {
    let __result = (self.commands().destroy_pipeline)(
        self.handle(),
        pipeline,
        allocator.map_or(ptr::null(), |v| v),
    );
}
/// Destroys a pipeline cache (`vkDestroyPipelineCache`).
#[inline]
unsafe fn destroy_pipeline_cache(
    &self,
    pipeline_cache: PipelineCache,
    allocator: Option<&AllocationCallbacks>,
) {
    let __result = (self.commands().destroy_pipeline_cache)(
        self.handle(),
        pipeline_cache,
        allocator.map_or(ptr::null(), |v| v),
    );
}
/// Destroys a pipeline layout (`vkDestroyPipelineLayout`).
#[inline]
unsafe fn destroy_pipeline_layout(
    &self,
    pipeline_layout: PipelineLayout,
    allocator: Option<&AllocationCallbacks>,
) {
    let __result = (self.commands().destroy_pipeline_layout)(
        self.handle(),
        pipeline_layout,
        allocator.map_or(ptr::null(), |v| v),
    );
}
/// Destroys a query pool (`vkDestroyQueryPool`).
#[inline]
unsafe fn destroy_query_pool(
    &self,
    query_pool: QueryPool,
    allocator: Option<&AllocationCallbacks>,
) {
    let __result = (self.commands().destroy_query_pool)(
        self.handle(),
        query_pool,
        allocator.map_or(ptr::null(), |v| v),
    );
}
/// Destroys a render pass (`vkDestroyRenderPass`).
#[inline]
unsafe fn destroy_render_pass(
    &self,
    render_pass: RenderPass,
    allocator: Option<&AllocationCallbacks>,
) {
    let __result = (self.commands().destroy_render_pass)(
        self.handle(),
        render_pass,
        allocator.map_or(ptr::null(), |v| v),
    );
}
/// Destroys a sampler (`vkDestroySampler`).
#[inline]
unsafe fn destroy_sampler(&self, sampler: Sampler, allocator: Option<&AllocationCallbacks>) {
    let __result = (self.commands().destroy_sampler)(
        self.handle(),
        sampler,
        allocator.map_or(ptr::null(), |v| v),
    );
}
/// Destroys a semaphore (`vkDestroySemaphore`).
#[inline]
unsafe fn destroy_semaphore(
    &self,
    semaphore: Semaphore,
    allocator: Option<&AllocationCallbacks>,
) {
    let __result = (self.commands().destroy_semaphore)(
        self.handle(),
        semaphore,
        allocator.map_or(ptr::null(), |v| v),
    );
}
/// Destroys a shader module (`vkDestroyShaderModule`).
#[inline]
unsafe fn destroy_shader_module(
    &self,
    shader_module: ShaderModule,
    allocator: Option<&AllocationCallbacks>,
) {
    let __result = (self.commands().destroy_shader_module)(
        self.handle(),
        shader_module,
        allocator.map_or(ptr::null(), |v| v),
    );
}
/// Blocks until the logical device has finished all outstanding work
/// (`vkDeviceWaitIdle`).
#[inline]
unsafe fn device_wait_idle(&self) -> crate::VkResult<()> {
    let code = (self.commands().device_wait_idle)(self.handle());
    if code != Result::SUCCESS {
        return Err(code.into());
    }
    Ok(())
}
/// Finishes recording of `command_buffer` (`vkEndCommandBuffer`).
#[inline]
unsafe fn end_command_buffer(&self, command_buffer: CommandBuffer) -> crate::VkResult<()> {
    let code = (self.commands().end_command_buffer)(command_buffer);
    if code != Result::SUCCESS {
        return Err(code.into());
    }
    Ok(())
}
/// Flushes host writes in the given mapped ranges to the device
/// (`vkFlushMappedMemoryRanges`). `Cast` lets extended structs be passed.
#[inline]
unsafe fn flush_mapped_memory_ranges(
    &self,
    memory_ranges: &[impl Cast<Target = MappedMemoryRange>],
) -> crate::VkResult<()> {
    let __result = (self.commands().flush_mapped_memory_ranges)(
        self.handle(),
        memory_ranges.len() as u32,
        memory_ranges.as_ptr().cast(),
    );
    if __result == Result::SUCCESS {
        Ok(())
    } else {
        Err(__result.into())
    }
}
/// Returns command buffers to their pool (`vkFreeCommandBuffers`).
#[inline]
unsafe fn free_command_buffers(
    &self,
    command_pool: CommandPool,
    command_buffers: &[CommandBuffer],
) {
    let __result = (self.commands().free_command_buffers)(
        self.handle(),
        command_pool,
        command_buffers.len() as u32,
        command_buffers.as_ptr(),
    );
}
/// Returns descriptor sets to their pool (`vkFreeDescriptorSets`).
#[inline]
unsafe fn free_descriptor_sets(
    &self,
    descriptor_pool: DescriptorPool,
    descriptor_sets: &[DescriptorSet],
) -> crate::VkResult<()> {
    let __result = (self.commands().free_descriptor_sets)(
        self.handle(),
        descriptor_pool,
        descriptor_sets.len() as u32,
        descriptor_sets.as_ptr(),
    );
    if __result == Result::SUCCESS {
        Ok(())
    } else {
        Err(__result.into())
    }
}
/// Frees a device memory allocation (`vkFreeMemory`).
#[inline]
unsafe fn free_memory(&self, memory: DeviceMemory, allocator: Option<&AllocationCallbacks>) {
    let __result = (self.commands().free_memory)(
        self.handle(),
        memory,
        allocator.map_or(ptr::null(), |v| v),
    );
}
/// Queries memory requirements for a buffer (`vkGetBufferMemoryRequirements`).
/// The command always fills the out-struct, so `assume_init` is sound.
#[inline]
unsafe fn get_buffer_memory_requirements(&self, buffer: Buffer) -> MemoryRequirements {
    let mut memory_requirements = MaybeUninit::<MemoryRequirements>::uninit();
    let __result = (self.commands().get_buffer_memory_requirements)(
        self.handle(),
        buffer,
        memory_requirements.as_mut_ptr(),
    );
    memory_requirements.assume_init()
}
/// Queries the current commitment for a lazily-allocated memory object
/// (`vkGetDeviceMemoryCommitment`), in bytes.
#[inline]
unsafe fn get_device_memory_commitment(&self, memory: DeviceMemory) -> DeviceSize {
    let mut committed_memory_in_bytes = MaybeUninit::<DeviceSize>::uninit();
    let __result = (self.commands().get_device_memory_commitment)(
        self.handle(),
        memory,
        committed_memory_in_bytes.as_mut_ptr(),
    );
    committed_memory_in_bytes.assume_init()
}
/// Retrieves a queue handle for the given family/index (`vkGetDeviceQueue`).
#[inline]
unsafe fn get_device_queue(&self, queue_family_index: u32, queue_index: u32) -> Queue {
    let mut queue = MaybeUninit::<Queue>::uninit();
    let __result = (self.commands().get_device_queue)(
        self.handle(),
        queue_family_index,
        queue_index,
        queue.as_mut_ptr(),
    );
    queue.assume_init()
}
/// Polls the status of an event (`vkGetEventStatus`). Non-negative codes
/// (e.g. `EVENT_SET`/`EVENT_RESET`) are returned as `SuccessCode`.
#[inline]
unsafe fn get_event_status(&self, event: Event) -> crate::VkResult<SuccessCode> {
    let __result = (self.commands().get_event_status)(self.handle(), event);
    if __result >= Result::SUCCESS {
        Ok(__result.into())
    } else {
        Err(__result.into())
    }
}
/// Polls the status of a fence (`vkGetFenceStatus`); non-negative codes
/// (e.g. `NOT_READY`) are reported via `SuccessCode`.
#[inline]
unsafe fn get_fence_status(&self, fence: Fence) -> crate::VkResult<SuccessCode> {
    let __result = (self.commands().get_fence_status)(self.handle(), fence);
    if __result >= Result::SUCCESS {
        Ok(__result.into())
    } else {
        Err(__result.into())
    }
}
/// Queries memory requirements for an image (`vkGetImageMemoryRequirements`).
#[inline]
unsafe fn get_image_memory_requirements(&self, image: Image) -> MemoryRequirements {
    let mut memory_requirements = MaybeUninit::<MemoryRequirements>::uninit();
    let __result = (self.commands().get_image_memory_requirements)(
        self.handle(),
        image,
        memory_requirements.as_mut_ptr(),
    );
    memory_requirements.assume_init()
}
/// Queries sparse memory requirements for an image
/// (`vkGetImageSparseMemoryRequirements`) using the standard two-call
/// pattern: first query the count, then fill a pre-sized `Vec`.
#[inline]
unsafe fn get_image_sparse_memory_requirements(
    &self,
    image: Image,
) -> Vec<SparseImageMemoryRequirements> {
    let mut sparse_memory_requirement_count = 0;
    (self.commands().get_image_sparse_memory_requirements)(
        self.handle(),
        image,
        &mut sparse_memory_requirement_count,
        ptr::null_mut(),
    );
    let mut sparse_memory_requirements =
        Vec::with_capacity(sparse_memory_requirement_count as usize);
    let __result = (self.commands().get_image_sparse_memory_requirements)(
        self.handle(),
        image,
        &mut sparse_memory_requirement_count,
        sparse_memory_requirements.as_mut_ptr(),
    );
    // `set_len` is sound because the driver wrote exactly `count` elements.
    debug_assert!(
        sparse_memory_requirements.capacity() == sparse_memory_requirement_count as usize
    );
    sparse_memory_requirements.set_len(sparse_memory_requirement_count as usize);
    sparse_memory_requirements
}
/// Queries the memory layout of an image subresource
/// (`vkGetImageSubresourceLayout`).
#[inline]
unsafe fn get_image_subresource_layout(
    &self,
    image: Image,
    subresource: &ImageSubresource,
) -> SubresourceLayout {
    let mut layout = MaybeUninit::<SubresourceLayout>::uninit();
    let __result = (self.commands().get_image_subresource_layout)(
        self.handle(),
        image,
        subresource,
        layout.as_mut_ptr(),
    );
    layout.assume_init()
}
/// Retrieves the serialized contents of a pipeline cache
/// (`vkGetPipelineCacheData`), two-call pattern: size query, then fill.
#[inline]
unsafe fn get_pipeline_cache_data(
    &self,
    pipeline_cache: PipelineCache,
) -> crate::VkResult<Vec<u8>> {
    let mut data_size = 0;
    (self.commands().get_pipeline_cache_data)(
        self.handle(),
        pipeline_cache,
        &mut data_size,
        ptr::null_mut(),
    );
    let mut data = Vec::with_capacity(data_size as usize);
    let __result = (self.commands().get_pipeline_cache_data)(
        self.handle(),
        pipeline_cache,
        &mut data_size,
        data.as_mut_ptr() as *mut c_void,
    );
    // Sound: the second call wrote `data_size` bytes into the buffer.
    debug_assert!(data.capacity() == data_size as usize);
    data.set_len(data_size as usize);
    if __result == Result::SUCCESS {
        Ok(data)
    } else {
        Err(__result.into())
    }
}
/// Copies results of queries `[first_query, first_query + query_count)` into
/// `data` (`vkGetQueryPoolResults`).
///
/// On success returns the success code (`SUCCESS`, or `NOT_READY` when some
/// results were unavailable and `WAIT` was not requested).
#[inline]
unsafe fn get_query_pool_results(
    &self,
    query_pool: QueryPool,
    first_query: u32,
    query_count: u32,
    data: &mut [u8],
    stride: DeviceSize,
    flags: QueryResultFlags,
) -> crate::VkResult<SuccessCode> {
    let __result = (self.commands().get_query_pool_results)(
        self.handle(),
        query_pool,
        first_query,
        query_count,
        data.len(),
        // Derive the out-pointer with `as_mut_ptr` so the driver's write goes
        // through a pointer with write provenance; the previous
        // `data.as_ptr() as *mut c_void` cast a `*const` to `*mut`, which is
        // unsound for a write.
        data.as_mut_ptr().cast(),
        stride,
        flags,
    );
    if __result >= Result::SUCCESS {
        Ok(__result.into())
    } else {
        Err(__result.into())
    }
}
/// Queries the optimal render-area granularity for a render pass
/// (`vkGetRenderAreaGranularity`).
#[inline]
unsafe fn get_render_area_granularity(&self, render_pass: RenderPass) -> Extent2D {
    let mut granularity = MaybeUninit::<Extent2D>::uninit();
    let __result = (self.commands().get_render_area_granularity)(
        self.handle(),
        render_pass,
        granularity.as_mut_ptr(),
    );
    granularity.assume_init()
}
/// Invalidates host caches for the given mapped ranges so device writes
/// become visible to the host (`vkInvalidateMappedMemoryRanges`).
#[inline]
unsafe fn invalidate_mapped_memory_ranges(
    &self,
    memory_ranges: &[impl Cast<Target = MappedMemoryRange>],
) -> crate::VkResult<()> {
    let __result = (self.commands().invalidate_mapped_memory_ranges)(
        self.handle(),
        memory_ranges.len() as u32,
        memory_ranges.as_ptr().cast(),
    );
    if __result == Result::SUCCESS {
        Ok(())
    } else {
        Err(__result.into())
    }
}
/// Maps a range of device memory into host address space (`vkMapMemory`),
/// returning the host pointer on success.
#[inline]
unsafe fn map_memory(
    &self,
    memory: DeviceMemory,
    offset: DeviceSize,
    size: DeviceSize,
    flags: MemoryMapFlags,
) -> crate::VkResult<*mut c_void> {
    let mut data = MaybeUninit::<*mut c_void>::uninit();
    let __result = (self.commands().map_memory)(
        self.handle(),
        memory,
        offset,
        size,
        flags,
        data.as_mut_ptr(),
    );
    if __result == Result::SUCCESS {
        Ok(data.assume_init())
    } else {
        Err(__result.into())
    }
}
/// Merges `src_caches` into `dst_cache` (`vkMergePipelineCaches`).
#[inline]
unsafe fn merge_pipeline_caches(
    &self,
    dst_cache: PipelineCache,
    src_caches: &[PipelineCache],
) -> crate::VkResult<()> {
    let __result = (self.commands().merge_pipeline_caches)(
        self.handle(),
        dst_cache,
        src_caches.len() as u32,
        src_caches.as_ptr(),
    );
    if __result == Result::SUCCESS {
        Ok(())
    } else {
        Err(__result.into())
    }
}
/// Submits sparse binding operations to a queue (`vkQueueBindSparse`),
/// optionally signaling `fence` on completion.
#[inline]
unsafe fn queue_bind_sparse(
    &self,
    queue: Queue,
    bind_info: &[impl Cast<Target = BindSparseInfo>],
    fence: Fence,
) -> crate::VkResult<()> {
    let __result = (self.commands().queue_bind_sparse)(
        queue,
        bind_info.len() as u32,
        bind_info.as_ptr().cast(),
        fence,
    );
    if __result == Result::SUCCESS {
        Ok(())
    } else {
        Err(__result.into())
    }
}
/// Submits command buffers to a queue (`vkQueueSubmit`), optionally
/// signaling `fence` on completion.
#[inline]
unsafe fn queue_submit(
    &self,
    queue: Queue,
    submits: &[impl Cast<Target = SubmitInfo>],
    fence: Fence,
) -> crate::VkResult<()> {
    let __result = (self.commands().queue_submit)(
        queue,
        submits.len() as u32,
        submits.as_ptr().cast(),
        fence,
    );
    if __result == Result::SUCCESS {
        Ok(())
    } else {
        Err(__result.into())
    }
}
/// Blocks until `queue` has finished all submitted work (`vkQueueWaitIdle`).
#[inline]
unsafe fn queue_wait_idle(&self, queue: Queue) -> crate::VkResult<()> {
    let code = (self.commands().queue_wait_idle)(queue);
    if code != Result::SUCCESS {
        return Err(code.into());
    }
    Ok(())
}
/// Resets a command buffer to the initial state (`vkResetCommandBuffer`).
#[inline]
unsafe fn reset_command_buffer(
    &self,
    command_buffer: CommandBuffer,
    flags: CommandBufferResetFlags,
) -> crate::VkResult<()> {
    let __result = (self.commands().reset_command_buffer)(command_buffer, flags);
    if __result == Result::SUCCESS {
        Ok(())
    } else {
        Err(__result.into())
    }
}
/// Resets a command pool and all buffers allocated from it
/// (`vkResetCommandPool`).
#[inline]
unsafe fn reset_command_pool(
    &self,
    command_pool: CommandPool,
    flags: CommandPoolResetFlags,
) -> crate::VkResult<()> {
    let __result = (self.commands().reset_command_pool)(self.handle(), command_pool, flags);
    if __result == Result::SUCCESS {
        Ok(())
    } else {
        Err(__result.into())
    }
}
/// Resets a descriptor pool, returning all its sets (`vkResetDescriptorPool`).
#[inline]
unsafe fn reset_descriptor_pool(
    &self,
    descriptor_pool: DescriptorPool,
    flags: DescriptorPoolResetFlags,
) -> crate::VkResult<()> {
    let __result =
        (self.commands().reset_descriptor_pool)(self.handle(), descriptor_pool, flags);
    if __result == Result::SUCCESS {
        Ok(())
    } else {
        Err(__result.into())
    }
}
/// Sets an event to the unsignaled state (`vkResetEvent`).
#[inline]
unsafe fn reset_event(&self, event: Event) -> crate::VkResult<()> {
    let code = (self.commands().reset_event)(self.handle(), event);
    if code != Result::SUCCESS {
        return Err(code.into());
    }
    Ok(())
}
/// Resets the given fences to the unsignaled state (`vkResetFences`).
#[inline]
unsafe fn reset_fences(&self, fences: &[Fence]) -> crate::VkResult<()> {
    let __result =
        (self.commands().reset_fences)(self.handle(), fences.len() as u32, fences.as_ptr());
    if __result == Result::SUCCESS {
        Ok(())
    } else {
        Err(__result.into())
    }
}
/// Sets an event to the signaled state (`vkSetEvent`).
#[inline]
unsafe fn set_event(&self, event: Event) -> crate::VkResult<()> {
    let __result = (self.commands().set_event)(self.handle(), event);
    if __result == Result::SUCCESS {
        Ok(())
    } else {
        Err(__result.into())
    }
}
/// Unmaps a previously mapped memory object (`vkUnmapMemory`). The command
/// returns nothing, so there is no status to propagate.
#[inline]
unsafe fn unmap_memory(&self, memory: DeviceMemory) {
    (self.commands().unmap_memory)(self.handle(), memory);
}
/// Writes and/or copies descriptor bindings (`vkUpdateDescriptorSets`).
#[inline]
unsafe fn update_descriptor_sets(
    &self,
    descriptor_writes: &[impl Cast<Target = WriteDescriptorSet>],
    descriptor_copies: &[impl Cast<Target = CopyDescriptorSet>],
) {
    let __result = (self.commands().update_descriptor_sets)(
        self.handle(),
        descriptor_writes.len() as u32,
        descriptor_writes.as_ptr().cast(),
        descriptor_copies.len() as u32,
        descriptor_copies.as_ptr().cast(),
    );
}
/// Waits for one (`wait_all == false`) or all of the fences to signal, up to
/// `timeout` nanoseconds (`vkWaitForFences`). `TIMEOUT` is reported as a
/// `SuccessCode`, not an error.
#[inline]
unsafe fn wait_for_fences(
    &self,
    fences: &[Fence],
    wait_all: bool,
    timeout: u64,
) -> crate::VkResult<SuccessCode> {
    let __result = (self.commands().wait_for_fences)(
        self.handle(),
        fences.len() as u32,
        fences.as_ptr(),
        wait_all as Bool32,
        timeout,
    );
    if __result >= Result::SUCCESS {
        Ok(__result.into())
    } else {
        Err(__result.into())
    }
}
}
// Wires the Vulkan 1.0 device-level trait to the owned `Device` wrapper by
// exposing its loaded command table and raw handle.
impl DeviceV1_0 for crate::Device {
    #[inline]
    fn commands(&self) -> &DeviceCommands {
        &self.commands
    }
    #[inline]
    fn handle(&self) -> Device {
        self.handle
    }
}
/// Vulkan 1.1 entry-level commands.
pub trait EntryV1_1: EntryV1_0 {
    /// Queries the highest instance-level Vulkan version supported
    /// (`vkEnumerateInstanceVersion`).
    #[inline]
    unsafe fn enumerate_instance_version(&self) -> crate::VkResult<u32> {
        let mut api_version = MaybeUninit::<u32>::uninit();
        let __result = (self.commands().enumerate_instance_version)(api_version.as_mut_ptr());
        if __result == Result::SUCCESS {
            Ok(api_version.assume_init())
        } else {
            Err(__result.into())
        }
    }
}
impl EntryV1_1 for crate::Entry {}
/// Vulkan 1.1 instance-level commands.
pub trait InstanceV1_1: InstanceV1_0 {
    /// Enumerates device groups (`vkEnumeratePhysicalDeviceGroups`) with the
    /// two-call count-then-fill pattern.
    #[inline]
    unsafe fn enumerate_physical_device_groups(
        &self,
    ) -> crate::VkResult<Vec<PhysicalDeviceGroupProperties>> {
        let mut physical_device_group_count = 0;
        (self.commands().enumerate_physical_device_groups)(
            self.handle(),
            &mut physical_device_group_count,
            ptr::null_mut(),
        );
        let mut physical_device_group_properties =
            Vec::with_capacity(physical_device_group_count as usize);
        let __result = (self.commands().enumerate_physical_device_groups)(
            self.handle(),
            &mut physical_device_group_count,
            physical_device_group_properties.as_mut_ptr(),
        );
        // Sound: the second call wrote exactly `count` elements.
        debug_assert!(
            physical_device_group_properties.capacity() == physical_device_group_count as usize
        );
        physical_device_group_properties.set_len(physical_device_group_count as usize);
        if __result == Result::SUCCESS {
            Ok(physical_device_group_properties)
        } else {
            Err(__result.into())
        }
    }
    /// Queries external buffer capabilities
    /// (`vkGetPhysicalDeviceExternalBufferProperties`).
    #[inline]
    unsafe fn get_physical_device_external_buffer_properties(
        &self,
        physical_device: PhysicalDevice,
        external_buffer_info: &PhysicalDeviceExternalBufferInfo,
    ) -> ExternalBufferProperties {
        let mut external_buffer_properties = MaybeUninit::<ExternalBufferProperties>::uninit();
        let __result = (self
            .commands()
            .get_physical_device_external_buffer_properties)(
            physical_device,
            external_buffer_info,
            external_buffer_properties.as_mut_ptr(),
        );
        external_buffer_properties.assume_init()
    }
    /// Queries external fence capabilities
    /// (`vkGetPhysicalDeviceExternalFenceProperties`).
    #[inline]
    unsafe fn get_physical_device_external_fence_properties(
        &self,
        physical_device: PhysicalDevice,
        external_fence_info: &PhysicalDeviceExternalFenceInfo,
    ) -> ExternalFenceProperties {
        let mut external_fence_properties = MaybeUninit::<ExternalFenceProperties>::uninit();
        let __result = (self
            .commands()
            .get_physical_device_external_fence_properties)(
            physical_device,
            external_fence_info,
            external_fence_properties.as_mut_ptr(),
        );
        external_fence_properties.assume_init()
    }
    /// Queries external semaphore capabilities
    /// (`vkGetPhysicalDeviceExternalSemaphoreProperties`).
    #[inline]
    unsafe fn get_physical_device_external_semaphore_properties(
        &self,
        physical_device: PhysicalDevice,
        external_semaphore_info: &PhysicalDeviceExternalSemaphoreInfo,
    ) -> ExternalSemaphoreProperties {
        let mut external_semaphore_properties =
            MaybeUninit::<ExternalSemaphoreProperties>::uninit();
        let __result = (self
            .commands()
            .get_physical_device_external_semaphore_properties)(
            physical_device,
            external_semaphore_info,
            external_semaphore_properties.as_mut_ptr(),
        );
        external_semaphore_properties.assume_init()
    }
    /// Queries device features via the extensible struct
    /// (`vkGetPhysicalDeviceFeatures2`).
    #[inline]
    unsafe fn get_physical_device_features2(
        &self,
        physical_device: PhysicalDevice,
    ) -> PhysicalDeviceFeatures2 {
        let mut features = MaybeUninit::<PhysicalDeviceFeatures2>::uninit();
        let __result =
            (self.commands().get_physical_device_features2)(physical_device, features.as_mut_ptr());
        features.assume_init()
    }
    /// Queries format properties (`vkGetPhysicalDeviceFormatProperties2`).
    #[inline]
    unsafe fn get_physical_device_format_properties2(
        &self,
        physical_device: PhysicalDevice,
        format: Format,
    ) -> FormatProperties2 {
        let mut format_properties = MaybeUninit::<FormatProperties2>::uninit();
        let __result = (self.commands().get_physical_device_format_properties2)(
            physical_device,
            format,
            format_properties.as_mut_ptr(),
        );
        format_properties.assume_init()
    }
    /// Queries image format capabilities
    /// (`vkGetPhysicalDeviceImageFormatProperties2`); fallible because the
    /// combination may be unsupported.
    #[inline]
    unsafe fn get_physical_device_image_format_properties2(
        &self,
        physical_device: PhysicalDevice,
        image_format_info: &PhysicalDeviceImageFormatInfo2,
    ) -> crate::VkResult<ImageFormatProperties2> {
        let mut image_format_properties = MaybeUninit::<ImageFormatProperties2>::uninit();
        let __result = (self.commands().get_physical_device_image_format_properties2)(
            physical_device,
            image_format_info,
            image_format_properties.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(image_format_properties.assume_init())
        } else {
            Err(__result.into())
        }
    }
    /// Queries memory heaps/types (`vkGetPhysicalDeviceMemoryProperties2`).
    #[inline]
    unsafe fn get_physical_device_memory_properties2(
        &self,
        physical_device: PhysicalDevice,
    ) -> PhysicalDeviceMemoryProperties2 {
        let mut memory_properties = MaybeUninit::<PhysicalDeviceMemoryProperties2>::uninit();
        let __result = (self.commands().get_physical_device_memory_properties2)(
            physical_device,
            memory_properties.as_mut_ptr(),
        );
        memory_properties.assume_init()
    }
    /// Queries device properties (`vkGetPhysicalDeviceProperties2`).
    #[inline]
    unsafe fn get_physical_device_properties2(
        &self,
        physical_device: PhysicalDevice,
    ) -> PhysicalDeviceProperties2 {
        let mut properties = MaybeUninit::<PhysicalDeviceProperties2>::uninit();
        let __result = (self.commands().get_physical_device_properties2)(
            physical_device,
            properties.as_mut_ptr(),
        );
        properties.assume_init()
    }
    /// Enumerates queue families
    /// (`vkGetPhysicalDeviceQueueFamilyProperties2`), two-call pattern.
    #[inline]
    unsafe fn get_physical_device_queue_family_properties2(
        &self,
        physical_device: PhysicalDevice,
    ) -> Vec<QueueFamilyProperties2> {
        let mut queue_family_property_count = 0;
        (self.commands().get_physical_device_queue_family_properties2)(
            physical_device,
            &mut queue_family_property_count,
            ptr::null_mut(),
        );
        let mut queue_family_properties = Vec::with_capacity(queue_family_property_count as usize);
        let __result = (self.commands().get_physical_device_queue_family_properties2)(
            physical_device,
            &mut queue_family_property_count,
            queue_family_properties.as_mut_ptr(),
        );
        debug_assert!(queue_family_properties.capacity() == queue_family_property_count as usize);
        queue_family_properties.set_len(queue_family_property_count as usize);
        queue_family_properties
    }
    /// Enumerates sparse image format properties
    /// (`vkGetPhysicalDeviceSparseImageFormatProperties2`), two-call pattern.
    #[inline]
    unsafe fn get_physical_device_sparse_image_format_properties2(
        &self,
        physical_device: PhysicalDevice,
        format_info: &PhysicalDeviceSparseImageFormatInfo2,
    ) -> Vec<SparseImageFormatProperties2> {
        let mut property_count = 0;
        (self
            .commands()
            .get_physical_device_sparse_image_format_properties2)(
            physical_device,
            format_info,
            &mut property_count,
            ptr::null_mut(),
        );
        let mut properties = Vec::with_capacity(property_count as usize);
        let __result = (self
            .commands()
            .get_physical_device_sparse_image_format_properties2)(
            physical_device,
            format_info,
            &mut property_count,
            properties.as_mut_ptr(),
        );
        debug_assert!(properties.capacity() == property_count as usize);
        properties.set_len(property_count as usize);
        properties
    }
}
impl InstanceV1_1 for crate::Instance {}
/// Vulkan 1.1 device-level commands.
pub trait DeviceV1_1: DeviceV1_0 {
    /// Binds memory to multiple buffers in one call (`vkBindBufferMemory2`).
    #[inline]
    unsafe fn bind_buffer_memory2(
        &self,
        bind_infos: &[impl Cast<Target = BindBufferMemoryInfo>],
    ) -> crate::VkResult<()> {
        let __result = (self.commands().bind_buffer_memory2)(
            self.handle(),
            bind_infos.len() as u32,
            bind_infos.as_ptr().cast(),
        );
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }
    /// Binds memory to multiple images in one call (`vkBindImageMemory2`).
    #[inline]
    unsafe fn bind_image_memory2(
        &self,
        bind_infos: &[impl Cast<Target = BindImageMemoryInfo>],
    ) -> crate::VkResult<()> {
        let __result = (self.commands().bind_image_memory2)(
            self.handle(),
            bind_infos.len() as u32,
            bind_infos.as_ptr().cast(),
        );
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }
    /// Records a dispatch with a non-zero base workgroup (`vkCmdDispatchBase`).
    #[inline]
    unsafe fn cmd_dispatch_base(
        &self,
        command_buffer: CommandBuffer,
        base_group_x: u32,
        base_group_y: u32,
        base_group_z: u32,
        group_count_x: u32,
        group_count_y: u32,
        group_count_z: u32,
    ) {
        let __result = (self.commands().cmd_dispatch_base)(
            command_buffer,
            base_group_x,
            base_group_y,
            base_group_z,
            group_count_x,
            group_count_y,
            group_count_z,
        );
    }
    /// Records the device mask for subsequent commands (`vkCmdSetDeviceMask`).
    #[inline]
    unsafe fn cmd_set_device_mask(&self, command_buffer: CommandBuffer, device_mask: u32) {
        let __result = (self.commands().cmd_set_device_mask)(command_buffer, device_mask);
    }
    /// Creates a descriptor update template
    /// (`vkCreateDescriptorUpdateTemplate`).
    #[inline]
    unsafe fn create_descriptor_update_template(
        &self,
        create_info: &DescriptorUpdateTemplateCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<DescriptorUpdateTemplate> {
        let mut descriptor_update_template = MaybeUninit::<DescriptorUpdateTemplate>::uninit();
        let __result = (self.commands().create_descriptor_update_template)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            descriptor_update_template.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(descriptor_update_template.assume_init())
        } else {
            Err(__result.into())
        }
    }
    /// Creates a sampler Y'CbCr conversion object
    /// (`vkCreateSamplerYcbcrConversion`).
    #[inline]
    unsafe fn create_sampler_ycbcr_conversion(
        &self,
        create_info: &SamplerYcbcrConversionCreateInfo,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<SamplerYcbcrConversion> {
        let mut ycbcr_conversion = MaybeUninit::<SamplerYcbcrConversion>::uninit();
        let __result = (self.commands().create_sampler_ycbcr_conversion)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            ycbcr_conversion.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(ycbcr_conversion.assume_init())
        } else {
            Err(__result.into())
        }
    }
    /// Destroys a descriptor update template
    /// (`vkDestroyDescriptorUpdateTemplate`).
    #[inline]
    unsafe fn destroy_descriptor_update_template(
        &self,
        descriptor_update_template: DescriptorUpdateTemplate,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_descriptor_update_template)(
            self.handle(),
            descriptor_update_template,
            allocator.map_or(ptr::null(), |v| v),
        );
    }
    /// Destroys a sampler Y'CbCr conversion object
    /// (`vkDestroySamplerYcbcrConversion`).
    #[inline]
    unsafe fn destroy_sampler_ycbcr_conversion(
        &self,
        ycbcr_conversion: SamplerYcbcrConversion,
        allocator: Option<&AllocationCallbacks>,
    ) {
        let __result = (self.commands().destroy_sampler_ycbcr_conversion)(
            self.handle(),
            ycbcr_conversion,
            allocator.map_or(ptr::null(), |v| v),
        );
    }
    /// Queries buffer memory requirements via the extensible struct
    /// (`vkGetBufferMemoryRequirements2`).
    #[inline]
    unsafe fn get_buffer_memory_requirements2(
        &self,
        info: &BufferMemoryRequirementsInfo2,
    ) -> MemoryRequirements2 {
        let mut memory_requirements = MaybeUninit::<MemoryRequirements2>::uninit();
        let __result = (self.commands().get_buffer_memory_requirements2)(
            self.handle(),
            info,
            memory_requirements.as_mut_ptr(),
        );
        memory_requirements.assume_init()
    }
    /// Checks whether a descriptor set layout can be created
    /// (`vkGetDescriptorSetLayoutSupport`).
    #[inline]
    unsafe fn get_descriptor_set_layout_support(
        &self,
        create_info: &DescriptorSetLayoutCreateInfo,
    ) -> DescriptorSetLayoutSupport {
        let mut support = MaybeUninit::<DescriptorSetLayoutSupport>::uninit();
        let __result = (self.commands().get_descriptor_set_layout_support)(
            self.handle(),
            create_info,
            support.as_mut_ptr(),
        );
        support.assume_init()
    }
    /// Queries peer memory capabilities between devices in a group
    /// (`vkGetDeviceGroupPeerMemoryFeatures`).
    #[inline]
    unsafe fn get_device_group_peer_memory_features(
        &self,
        heap_index: u32,
        local_device_index: u32,
        remote_device_index: u32,
    ) -> PeerMemoryFeatureFlags {
        let mut peer_memory_features = MaybeUninit::<PeerMemoryFeatureFlags>::uninit();
        let __result = (self.commands().get_device_group_peer_memory_features)(
            self.handle(),
            heap_index,
            local_device_index,
            remote_device_index,
            peer_memory_features.as_mut_ptr(),
        );
        peer_memory_features.assume_init()
    }
    /// Retrieves a queue using the extensible info struct (`vkGetDeviceQueue2`).
    #[inline]
    unsafe fn get_device_queue2(&self, queue_info: &DeviceQueueInfo2) -> Queue {
        let mut queue = MaybeUninit::<Queue>::uninit();
        let __result =
            (self.commands().get_device_queue2)(self.handle(), queue_info, queue.as_mut_ptr());
        queue.assume_init()
    }
    /// Queries image memory requirements via the extensible struct
    /// (`vkGetImageMemoryRequirements2`).
    #[inline]
    unsafe fn get_image_memory_requirements2(
        &self,
        info: &ImageMemoryRequirementsInfo2,
    ) -> MemoryRequirements2 {
        let mut memory_requirements = MaybeUninit::<MemoryRequirements2>::uninit();
        let __result = (self.commands().get_image_memory_requirements2)(
            self.handle(),
            info,
            memory_requirements.as_mut_ptr(),
        );
        memory_requirements.assume_init()
    }
    /// Queries sparse image memory requirements
    /// (`vkGetImageSparseMemoryRequirements2`), two-call count-then-fill.
    #[inline]
    unsafe fn get_image_sparse_memory_requirements2(
        &self,
        info: &ImageSparseMemoryRequirementsInfo2,
    ) -> Vec<SparseImageMemoryRequirements2> {
        let mut sparse_memory_requirement_count = 0;
        (self.commands().get_image_sparse_memory_requirements2)(
            self.handle(),
            info,
            &mut sparse_memory_requirement_count,
            ptr::null_mut(),
        );
        let mut sparse_memory_requirements =
            Vec::with_capacity(sparse_memory_requirement_count as usize);
        let __result = (self.commands().get_image_sparse_memory_requirements2)(
            self.handle(),
            info,
            &mut sparse_memory_requirement_count,
            sparse_memory_requirements.as_mut_ptr(),
        );
        // Sound: the second call wrote exactly `count` elements.
        debug_assert!(
            sparse_memory_requirements.capacity() == sparse_memory_requirement_count as usize
        );
        sparse_memory_requirements.set_len(sparse_memory_requirement_count as usize);
        sparse_memory_requirements
    }
    /// Releases unused command pool memory back to the system
    /// (`vkTrimCommandPool`).
    #[inline]
    unsafe fn trim_command_pool(&self, command_pool: CommandPool, flags: CommandPoolTrimFlags) {
        let __result = (self.commands().trim_command_pool)(self.handle(), command_pool, flags);
    }
    /// Updates a descriptor set from raw data laid out per the template
    /// (`vkUpdateDescriptorSetWithTemplate`). `data` must match the template's
    /// expected layout — the FFI cannot check this.
    #[inline]
    unsafe fn update_descriptor_set_with_template(
        &self,
        descriptor_set: DescriptorSet,
        descriptor_update_template: DescriptorUpdateTemplate,
        data: &c_void,
    ) {
        let __result = (self.commands().update_descriptor_set_with_template)(
            self.handle(),
            descriptor_set,
            descriptor_update_template,
            data,
        );
    }
}
impl DeviceV1_1 for crate::Device {}
// Empty marker traits: this file exposes no new entry- or instance-level
// commands at version 1.2, but the traits exist so version bounds compose
// uniformly (`T: EntryV1_2` implies all earlier versions).
pub trait EntryV1_2: EntryV1_1 {}
impl EntryV1_2 for crate::Entry {}
pub trait InstanceV1_2: InstanceV1_1 {}
impl InstanceV1_2 for crate::Instance {}
/// Vulkan 1.2 device-level commands.
pub trait DeviceV1_2: DeviceV1_1 {
    /// Begins a render pass with extensible begin info
    /// (`vkCmdBeginRenderPass2`).
    #[inline]
    unsafe fn cmd_begin_render_pass2(
        &self,
        command_buffer: CommandBuffer,
        render_pass_begin: &RenderPassBeginInfo,
        subpass_begin_info: &SubpassBeginInfo,
    ) {
        let __result = (self.commands().cmd_begin_render_pass2)(
            command_buffer,
            render_pass_begin,
            subpass_begin_info,
        );
    }
    /// Records an indexed indirect draw whose draw count is read from a
    /// buffer (`vkCmdDrawIndexedIndirectCount`).
    #[inline]
    unsafe fn cmd_draw_indexed_indirect_count(
        &self,
        command_buffer: CommandBuffer,
        buffer: Buffer,
        offset: DeviceSize,
        count_buffer: Buffer,
        count_buffer_offset: DeviceSize,
        max_draw_count: u32,
        stride: u32,
    ) {
        let __result = (self.commands().cmd_draw_indexed_indirect_count)(
            command_buffer,
            buffer,
            offset,
            count_buffer,
            count_buffer_offset,
            max_draw_count,
            stride,
        );
    }
    /// Records an indirect draw whose draw count is read from a buffer
    /// (`vkCmdDrawIndirectCount`).
    #[inline]
    unsafe fn cmd_draw_indirect_count(
        &self,
        command_buffer: CommandBuffer,
        buffer: Buffer,
        offset: DeviceSize,
        count_buffer: Buffer,
        count_buffer_offset: DeviceSize,
        max_draw_count: u32,
        stride: u32,
    ) {
        let __result = (self.commands().cmd_draw_indirect_count)(
            command_buffer,
            buffer,
            offset,
            count_buffer,
            count_buffer_offset,
            max_draw_count,
            stride,
        );
    }
    /// Ends the current render pass (`vkCmdEndRenderPass2`).
    #[inline]
    unsafe fn cmd_end_render_pass2(
        &self,
        command_buffer: CommandBuffer,
        subpass_end_info: &SubpassEndInfo,
    ) {
        let __result = (self.commands().cmd_end_render_pass2)(command_buffer, subpass_end_info);
    }
    /// Transitions to the next subpass (`vkCmdNextSubpass2`).
    #[inline]
    unsafe fn cmd_next_subpass2(
        &self,
        command_buffer: CommandBuffer,
        subpass_begin_info: &SubpassBeginInfo,
        subpass_end_info: &SubpassEndInfo,
    ) {
        let __result = (self.commands().cmd_next_subpass2)(
            command_buffer,
            subpass_begin_info,
            subpass_end_info,
        );
    }
    /// Creates a render pass from the extensible create-info
    /// (`vkCreateRenderPass2`).
    #[inline]
    unsafe fn create_render_pass2(
        &self,
        create_info: &RenderPassCreateInfo2,
        allocator: Option<&AllocationCallbacks>,
    ) -> crate::VkResult<RenderPass> {
        let mut render_pass = MaybeUninit::<RenderPass>::uninit();
        let __result = (self.commands().create_render_pass2)(
            self.handle(),
            create_info,
            allocator.map_or(ptr::null(), |v| v),
            render_pass.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(render_pass.assume_init())
        } else {
            Err(__result.into())
        }
    }
    /// Invokes `vkGetBufferDeviceAddress`.
    // NOTE(review): the underlying command produces a device address, but this
    // wrapper binds it to `__result` and returns `()` — likely a generator
    // oversight; confirm against the command table's fn-pointer type.
    #[inline]
    unsafe fn get_buffer_device_address(&self, info: &BufferDeviceAddressInfo) {
        let __result = (self.commands().get_buffer_device_address)(self.handle(), info);
    }
    /// Invokes `vkGetBufferOpaqueCaptureAddress`.
    // NOTE(review): result discarded, as above — confirm intended.
    #[inline]
    unsafe fn get_buffer_opaque_capture_address(&self, info: &BufferDeviceAddressInfo) {
        let __result = (self.commands().get_buffer_opaque_capture_address)(self.handle(), info);
    }
    /// Invokes `vkGetDeviceMemoryOpaqueCaptureAddress`.
    // NOTE(review): result discarded, as above — confirm intended.
    #[inline]
    unsafe fn get_device_memory_opaque_capture_address(
        &self,
        info: &DeviceMemoryOpaqueCaptureAddressInfo,
    ) {
        let __result =
            (self.commands().get_device_memory_opaque_capture_address)(self.handle(), info);
    }
    /// Reads the current counter of a timeline semaphore
    /// (`vkGetSemaphoreCounterValue`).
    #[inline]
    unsafe fn get_semaphore_counter_value(&self, semaphore: Semaphore) -> crate::VkResult<u64> {
        let mut value = MaybeUninit::<u64>::uninit();
        let __result = (self.commands().get_semaphore_counter_value)(
            self.handle(),
            semaphore,
            value.as_mut_ptr(),
        );
        if __result == Result::SUCCESS {
            Ok(value.assume_init())
        } else {
            Err(__result.into())
        }
    }
    /// Resets a range of queries from the host (`vkResetQueryPool`).
    #[inline]
    unsafe fn reset_query_pool(&self, query_pool: QueryPool, first_query: u32, query_count: u32) {
        let __result =
            (self.commands().reset_query_pool)(self.handle(), query_pool, first_query, query_count);
    }
    /// Signals a timeline semaphore from the host (`vkSignalSemaphore`).
    #[inline]
    unsafe fn signal_semaphore(&self, signal_info: &SemaphoreSignalInfo) -> crate::VkResult<()> {
        let __result = (self.commands().signal_semaphore)(self.handle(), signal_info);
        if __result == Result::SUCCESS {
            Ok(())
        } else {
            Err(__result.into())
        }
    }
    /// Waits on timeline semaphores up to `timeout` nanoseconds
    /// (`vkWaitSemaphores`); `TIMEOUT` is reported as a `SuccessCode`.
    #[inline]
    unsafe fn wait_semaphores(
        &self,
        wait_info: &SemaphoreWaitInfo,
        timeout: u64,
    ) -> crate::VkResult<SuccessCode> {
        let __result = (self.commands().wait_semaphores)(self.handle(), wait_info, timeout);
        if __result >= Result::SUCCESS {
            Ok(__result.into())
        } else {
            Err(__result.into())
        }
    }
}
impl DeviceV1_2 for crate::Device {}