mod adapter;
mod command;
mod conv;
mod device;
mod instance;
use std::{
borrow::Borrow,
collections::HashSet,
ffi::{CStr, CString},
fmt, mem,
num::NonZeroU32,
sync::Arc,
};
use arrayvec::ArrayVec;
use ash::{ext, khr, vk};
use parking_lot::{Mutex, RwLock};
use wgt::InternalCounter;
const MILLIS_TO_NANOS: u64 = 1_000_000;
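/// One color attachment plus one resolve attachment per color target, plus a single depth/stencil attachment.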
const MAX_TOTAL_ATTACHMENTS: usize = crate::MAX_COLOR_ATTACHMENTS * 2 + 1;
#[derive(Clone, Debug)]
pub struct Api;
impl crate::Api for Api {
type Instance = Instance;
type Surface = Surface;
type Adapter = Adapter;
type Device = Device;
type Queue = Queue;
type CommandEncoder = CommandEncoder;
type CommandBuffer = CommandBuffer;
type Buffer = Buffer;
type Texture = Texture;
type SurfaceTexture = SurfaceTexture;
type TextureView = TextureView;
type Sampler = Sampler;
type QuerySet = QuerySet;
type Fence = Fence;
type AccelerationStructure = AccelerationStructure;
type PipelineCache = PipelineCache;
type BindGroupLayout = BindGroupLayout;
type BindGroup = BindGroup;
type PipelineLayout = PipelineLayout;
type ShaderModule = ShaderModule;
type RenderPipeline = RenderPipeline;
type ComputePipeline = ComputePipeline;
}
crate::impl_dyn_resource!(
Adapter,
AccelerationStructure,
BindGroup,
BindGroupLayout,
Buffer,
CommandBuffer,
CommandEncoder,
ComputePipeline,
Device,
Fence,
Instance,
PipelineCache,
PipelineLayout,
QuerySet,
Queue,
RenderPipeline,
Sampler,
ShaderModule,
Surface,
SurfaceTexture,
Texture,
TextureView
);
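/// State backing the `VK_EXT_debug_utils` messenger installed on the instance.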
struct DebugUtils {
extension: ext::debug_utils::Instance,
messenger: vk::DebugUtilsMessengerEXT,
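    /// Owned here so the user data outlives the messenger that points to it.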
#[allow(dead_code)]
callback_data: Box<DebugUtilsMessengerUserData>,
}
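/// Parameters used to create the debug-utils messenger.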
pub struct DebugUtilsCreateInfo {
severity: vk::DebugUtilsMessageSeverityFlagsEXT,
message_type: vk::DebugUtilsMessageTypeFlagsEXT,
callback_data: Box<DebugUtilsMessengerUserData>,
}
#[derive(Debug)]
struct ValidationLayerProperties {
layer_description: CString,
layer_spec_version: u32,
}
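/// User data passed to the debug-utils callback, recording which validation layer (if any)
/// is active and whether the OBS layer is present.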
#[derive(Debug)]
pub struct DebugUtilsMessengerUserData {
validation_layer_properties: Option<ValidationLayerProperties>,
has_obs_layer: bool,
}
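/// Instance-level state shared between the `Instance` and everything created from it
/// (surfaces, adapters, devices).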
pub struct InstanceShared {
raw: ash::Instance,
extensions: Vec<&'static CStr>,
drop_guard: Option<crate::DropGuard>,
flags: wgt::InstanceFlags,
debug_utils: Option<DebugUtils>,
get_physical_device_properties: Option<khr::get_physical_device_properties2::Instance>,
entry: ash::Entry,
has_nv_optimus: bool,
android_sdk_version: u32,
instance_api_version: u32,
}
pub struct Instance {
shared: Arc<InstanceShared>,
}
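/// Semaphores used to synchronize access to a single swapchain image.
///
/// `acquire` is signaled by `vkAcquireNextImageKHR` and waited on by the first queue
/// submission that uses the image; `present` collects one binary semaphore per submission,
/// all of which `vkQueuePresentKHR` waits on before presenting.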
#[derive(Debug)]
struct SwapchainImageSemaphores {
acquire: vk::Semaphore,
should_wait_for_acquire: bool,
present: Vec<vk::Semaphore>,
present_index: usize,
previously_used_submission_index: crate::FenceValue,
}
impl SwapchainImageSemaphores {
fn new(device: &DeviceShared) -> Result<Self, crate::DeviceError> {
Ok(Self {
acquire: device.new_binary_semaphore()?,
should_wait_for_acquire: true,
present: Vec::new(),
present_index: 0,
previously_used_submission_index: 0,
})
}
fn set_used_fence_value(&mut self, value: crate::FenceValue) {
self.previously_used_submission_index = value;
}
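    /// Returns the acquire semaphore only for the first submission after the image was
    /// acquired; later submissions in the same frame do not wait on it again.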
fn get_acquire_wait_semaphore(&mut self) -> Option<vk::Semaphore> {
if self.should_wait_for_acquire {
self.should_wait_for_acquire = false;
Some(self.acquire)
} else {
None
}
}
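    /// Returns a binary semaphore for this submission to signal, growing the pool on demand.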
fn get_submit_signal_semaphore(
&mut self,
device: &DeviceShared,
) -> Result<vk::Semaphore, crate::DeviceError> {
let sem = match self.present.get(self.present_index) {
Some(sem) => *sem,
None => {
let sem = device.new_binary_semaphore()?;
self.present.push(sem);
sem
}
};
self.present_index += 1;
Ok(sem)
}
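    /// Returns every semaphore signaled by this frame's submissions and resets the
    /// per-frame state for the next acquire.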
fn get_present_wait_semaphores(&mut self) -> &[vk::Semaphore] {
let old_index = self.present_index;
self.present_index = 0;
self.should_wait_for_acquire = true;
&self.present[0..old_index]
}
unsafe fn destroy(&self, device: &ash::Device) {
unsafe {
device.destroy_semaphore(self.acquire, None);
for sem in &self.present {
device.destroy_semaphore(*sem, None);
}
}
}
}
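/// A configured swapchain plus its images and per-image synchronization state.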
struct Swapchain {
raw: vk::SwapchainKHR,
raw_flags: vk::SwapchainCreateFlagsKHR,
functor: khr::swapchain::Device,
device: Arc<DeviceShared>,
images: Vec<vk::Image>,
config: crate::SurfaceConfiguration,
view_formats: Vec<wgt::TextureFormat>,
surface_semaphores: Vec<Arc<Mutex<SwapchainImageSemaphores>>>,
next_semaphore_index: usize,
next_present_time: Option<vk::PresentTimeGOOGLE>,
}
impl Swapchain {
fn advance_surface_semaphores(&mut self) {
let semaphore_count = self.surface_semaphores.len();
self.next_semaphore_index = (self.next_semaphore_index + 1) % semaphore_count;
}
fn get_surface_semaphores(&self) -> Arc<Mutex<SwapchainImageSemaphores>> {
self.surface_semaphores[self.next_semaphore_index].clone()
}
}
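/// A Vulkan surface and, once configured, its current swapchain.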
pub struct Surface {
raw: vk::SurfaceKHR,
functor: khr::surface::Instance,
instance: Arc<InstanceShared>,
swapchain: RwLock<Option<Swapchain>>,
}
impl Surface {
pub fn raw_swapchain(&self) -> Option<vk::SwapchainKHR> {
let read = self.swapchain.read();
read.as_ref().map(|it| it.raw)
}
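    /// Sets `VK_GOOGLE_display_timing` information to be attached to the next present call
    /// on this surface.
    ///
    /// # Panics
    ///
    /// Panics if the surface has not been configured, or if
    /// `wgt::Features::VULKAN_GOOGLE_DISPLAY_TIMING` is not enabled on the device.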
#[track_caller]
pub fn set_next_present_time(&self, present_timing: vk::PresentTimeGOOGLE) {
let mut swapchain = self.swapchain.write();
let swapchain = swapchain
.as_mut()
.expect("Surface should have been configured");
let features = wgt::Features::VULKAN_GOOGLE_DISPLAY_TIMING;
if swapchain.device.features.contains(features) {
swapchain.next_present_time = Some(present_timing);
} else {
panic!(
concat!(
"Tried to set display timing properties ",
"without the corresponding feature ({:?}) enabled."
),
features
);
}
}
}
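/// A swapchain image acquired for rendering, together with the semaphores guarding it.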
#[derive(Debug)]
pub struct SurfaceTexture {
index: u32,
texture: Texture,
surface_semaphores: Arc<Mutex<SwapchainImageSemaphores>>,
}
impl crate::DynSurfaceTexture for SurfaceTexture {}
impl Borrow<Texture> for SurfaceTexture {
fn borrow(&self) -> &Texture {
&self.texture
}
}
impl Borrow<dyn crate::DynTexture> for SurfaceTexture {
fn borrow(&self) -> &dyn crate::DynTexture {
&self.texture
}
}
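/// A physical device, along with the capabilities and workarounds detected for it.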
pub struct Adapter {
raw: vk::PhysicalDevice,
instance: Arc<InstanceShared>,
known_memory_flags: vk::MemoryPropertyFlags,
phd_capabilities: adapter::PhysicalDeviceProperties,
downlevel_flags: wgt::DownlevelFlags,
private_caps: PrivateCapabilities,
workarounds: Workarounds,
}
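/// Function pointers for functionality that is either loaded from an extension or promoted
/// to core Vulkan.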
enum ExtensionFn<T> {
Extension(T),
Promoted,
}
struct DeviceExtensionFunctions {
debug_utils: Option<ext::debug_utils::Device>,
draw_indirect_count: Option<khr::draw_indirect_count::Device>,
timeline_semaphore: Option<ExtensionFn<khr::timeline_semaphore::Device>>,
ray_tracing: Option<RayTracingDeviceExtensionFunctions>,
}
struct RayTracingDeviceExtensionFunctions {
acceleration_structure: khr::acceleration_structure::Device,
buffer_device_address: khr::buffer_device_address::Device,
}
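/// Backend-internal capabilities detected during adapter/device creation; these steer code
/// paths inside this backend rather than being exposed as `wgt::Features`.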
#[derive(Clone, Debug)]
struct PrivateCapabilities {
flip_y_requires_shift: bool,
imageless_framebuffers: bool,
image_view_usage: bool,
timeline_semaphores: bool,
texture_d24: bool,
texture_d24_s8: bool,
texture_s8: bool,
can_present: bool,
non_coherent_map_mask: wgt::BufferAddress,
robust_buffer_access: bool,
robust_image_access: bool,
robust_buffer_access2: bool,
robust_image_access2: bool,
zero_initialize_workgroup_memory: bool,
image_format_list: bool,
#[cfg(windows)]
external_memory_win32: bool,
}
bitflags::bitflags!(
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Workarounds: u32 {
const SEPARATE_ENTRY_POINTS = 0x1;
const EMPTY_RESOLVE_ATTACHMENT_LISTS = 0x2;
const FORCE_FILL_BUFFER_WITH_SIZE_GREATER_4096_ALIGNED_OFFSET_16 = 0x4;
}
);
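/// Cache keys for the render passes stored in `DeviceShared::render_passes`.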
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
struct AttachmentKey {
format: vk::Format,
layout: vk::ImageLayout,
ops: crate::AttachmentOps,
}
impl AttachmentKey {
fn compatible(format: vk::Format, layout: vk::ImageLayout) -> Self {
Self {
format,
layout,
ops: crate::AttachmentOps::all(),
}
}
}
#[derive(Clone, Eq, Hash, PartialEq)]
struct ColorAttachmentKey {
base: AttachmentKey,
resolve: Option<AttachmentKey>,
}
#[derive(Clone, Eq, Hash, PartialEq)]
struct DepthStencilAttachmentKey {
base: AttachmentKey,
stencil_ops: crate::AttachmentOps,
}
#[derive(Clone, Eq, Default, Hash, PartialEq)]
struct RenderPassKey {
colors: ArrayVec<Option<ColorAttachmentKey>, { crate::MAX_COLOR_ATTACHMENTS }>,
depth_stencil: Option<DepthStencilAttachmentKey>,
sample_count: u32,
multiview: Option<NonZeroU32>,
}
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
struct FramebufferAttachment {
raw: vk::ImageView,
raw_image_flags: vk::ImageCreateFlags,
view_usage: crate::TextureUses,
view_format: wgt::TextureFormat,
raw_view_formats: Vec<vk::Format>,
}
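/// Cache key for the framebuffers stored in `DeviceShared::framebuffers`.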
#[derive(Clone, Eq, Hash, PartialEq)]
struct FramebufferKey {
attachments: ArrayVec<FramebufferAttachment, { MAX_TOTAL_ATTACHMENTS }>,
extent: wgt::Extent3d,
sample_count: u32,
}
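/// Device-level state shared between the `Device`, its `Queue`, and its command encoders.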
struct DeviceShared {
raw: ash::Device,
family_index: u32,
queue_index: u32,
raw_queue: vk::Queue,
drop_guard: Option<crate::DropGuard>,
instance: Arc<InstanceShared>,
physical_device: vk::PhysicalDevice,
enabled_extensions: Vec<&'static CStr>,
extension_fns: DeviceExtensionFunctions,
vendor_id: u32,
pipeline_cache_validation_key: [u8; 16],
timestamp_period: f32,
private_caps: PrivateCapabilities,
workarounds: Workarounds,
features: wgt::Features,
render_passes: Mutex<rustc_hash::FxHashMap<RenderPassKey, vk::RenderPass>>,
framebuffers: Mutex<rustc_hash::FxHashMap<FramebufferKey, vk::Framebuffer>>,
memory_allocations_counter: InternalCounter,
}
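/// The open device: shared device state plus the GPU memory and descriptor allocators.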
pub struct Device {
shared: Arc<DeviceShared>,
mem_allocator: Mutex<gpu_alloc::GpuAllocator<vk::DeviceMemory>>,
desc_allocator:
Mutex<gpu_descriptor::DescriptorAllocator<vk::DescriptorPool, vk::DescriptorSet>>,
valid_ash_memory_types: u32,
naga_options: naga::back::spv::Options<'static>,
#[cfg(feature = "renderdoc")]
render_doc: crate::auxil::renderdoc::RenderDoc,
counters: wgt::HalCounters,
}
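/// A pair of binary semaphores used to chain queue submissions: each submission waits on
/// the semaphore signaled by the previous one.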
#[derive(Clone)]
struct RelaySemaphores {
wait: Option<vk::Semaphore>,
signal: vk::Semaphore,
}
impl RelaySemaphores {
fn new(device: &DeviceShared) -> Result<Self, crate::DeviceError> {
Ok(Self {
wait: None,
signal: device.new_binary_semaphore()?,
})
}
fn advance(&mut self, device: &DeviceShared) -> Result<Self, crate::DeviceError> {
let old = self.clone();
match self.wait {
None => {
self.wait = Some(old.signal);
self.signal = device.new_binary_semaphore()?;
}
Some(ref mut wait) => {
mem::swap(wait, &mut self.signal);
}
};
Ok(old)
}
unsafe fn destroy(&self, device: &ash::Device) {
unsafe {
if let Some(wait) = self.wait {
device.destroy_semaphore(wait, None);
}
device.destroy_semaphore(self.signal, None);
}
}
}
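/// A device queue, along with the swapchain entry points and relay semaphores used during
/// submit and present.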
pub struct Queue {
raw: vk::Queue,
swapchain_fn: khr::swapchain::Device,
device: Arc<DeviceShared>,
family_index: u32,
relay_semaphores: Mutex<RelaySemaphores>,
}
#[derive(Debug)]
pub struct Buffer {
raw: vk::Buffer,
block: Option<Mutex<gpu_alloc::MemoryBlock<vk::DeviceMemory>>>,
}
impl crate::DynBuffer for Buffer {}
#[derive(Debug)]
pub struct AccelerationStructure {
raw: vk::AccelerationStructureKHR,
buffer: vk::Buffer,
block: Mutex<gpu_alloc::MemoryBlock<vk::DeviceMemory>>,
}
impl crate::DynAccelerationStructure for AccelerationStructure {}
#[derive(Debug)]
pub struct Texture {
raw: vk::Image,
drop_guard: Option<crate::DropGuard>,
external_memory: Option<vk::DeviceMemory>,
block: Option<gpu_alloc::MemoryBlock<vk::DeviceMemory>>,
usage: crate::TextureUses,
format: wgt::TextureFormat,
raw_flags: vk::ImageCreateFlags,
copy_size: crate::CopyExtent,
view_formats: Vec<wgt::TextureFormat>,
}
impl crate::DynTexture for Texture {}
impl Texture {
pub unsafe fn raw_handle(&self) -> vk::Image {
self.raw
}
}
#[derive(Debug)]
pub struct TextureView {
raw: vk::ImageView,
layers: NonZeroU32,
attachment: FramebufferAttachment,
}
impl crate::DynTextureView for TextureView {}
impl TextureView {
pub unsafe fn raw_handle(&self) -> vk::ImageView {
self.raw
}
}
#[derive(Debug)]
pub struct Sampler {
raw: vk::Sampler,
}
impl crate::DynSampler for Sampler {}
#[derive(Debug)]
pub struct BindGroupLayout {
raw: vk::DescriptorSetLayout,
desc_count: gpu_descriptor::DescriptorTotalCount,
types: Box<[(vk::DescriptorType, u32)]>,
binding_arrays: Vec<(u32, NonZeroU32)>,
}
impl crate::DynBindGroupLayout for BindGroupLayout {}
#[derive(Debug)]
pub struct PipelineLayout {
raw: vk::PipelineLayout,
binding_arrays: naga::back::spv::BindingMap,
}
impl crate::DynPipelineLayout for PipelineLayout {}
#[derive(Debug)]
pub struct BindGroup {
set: gpu_descriptor::DescriptorSet<vk::DescriptorSet>,
}
impl crate::DynBindGroup for BindGroup {}
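/// Scratch storage reused across command recording to avoid per-call allocations.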
#[derive(Default)]
struct Temp {
marker: Vec<u8>,
buffer_barriers: Vec<vk::BufferMemoryBarrier<'static>>,
image_barriers: Vec<vk::ImageMemoryBarrier<'static>>,
}
impl Temp {
fn clear(&mut self) {
self.marker.clear();
self.buffer_barriers.clear();
self.image_barriers.clear();
}
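    /// Builds a NUL-terminated copy of `name` in the reusable `marker` buffer.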
fn make_c_str(&mut self, name: &str) -> &CStr {
self.marker.clear();
self.marker.extend_from_slice(name.as_bytes());
self.marker.push(0);
unsafe { CStr::from_bytes_with_nul_unchecked(&self.marker) }
}
}
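/// A command pool plus the command buffer currently being recorded from it.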
pub struct CommandEncoder {
raw: vk::CommandPool,
device: Arc<DeviceShared>,
active: vk::CommandBuffer,
bind_point: vk::PipelineBindPoint,
temp: Temp,
free: Vec<vk::CommandBuffer>,
discarded: Vec<vk::CommandBuffer>,
rpass_debug_marker_active: bool,
end_of_pass_timer_query: Option<(vk::QueryPool, u32)>,
}
impl CommandEncoder {
pub unsafe fn raw_handle(&self) -> vk::CommandBuffer {
self.active
}
}
impl fmt::Debug for CommandEncoder {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("CommandEncoder")
.field("raw", &self.raw)
.finish()
}
}
#[derive(Debug)]
pub struct CommandBuffer {
raw: vk::CommandBuffer,
}
impl crate::DynCommandBuffer for CommandBuffer {}
#[derive(Debug)]
#[allow(clippy::large_enum_variant)]
pub enum ShaderModule {
Raw(vk::ShaderModule),
Intermediate {
naga_shader: crate::NagaShader,
runtime_checks: bool,
},
}
impl crate::DynShaderModule for ShaderModule {}
#[derive(Debug)]
pub struct RenderPipeline {
raw: vk::Pipeline,
}
impl crate::DynRenderPipeline for RenderPipeline {}
#[derive(Debug)]
pub struct ComputePipeline {
raw: vk::Pipeline,
}
impl crate::DynComputePipeline for ComputePipeline {}
#[derive(Debug)]
pub struct PipelineCache {
raw: vk::PipelineCache,
}
impl crate::DynPipelineCache for PipelineCache {}
#[derive(Debug)]
pub struct QuerySet {
raw: vk::QueryPool,
}
impl crate::DynQuerySet for QuerySet {}
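/// Fence implementation: a timeline semaphore when `VK_KHR_timeline_semaphore` is available,
/// otherwise a pool of binary `vk::Fence`s tagged with the values they were submitted with.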
#[derive(Debug)]
pub enum Fence {
TimelineSemaphore(vk::Semaphore),
FencePool {
last_completed: crate::FenceValue,
active: Vec<(crate::FenceValue, vk::Fence)>,
free: Vec<vk::Fence>,
},
}
impl crate::DynFence for Fence {}
impl Fence {
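    /// Returns the highest value in `active` whose fence has already signaled, starting
    /// from `last_completed`.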
fn check_active(
device: &ash::Device,
mut last_completed: crate::FenceValue,
active: &[(crate::FenceValue, vk::Fence)],
) -> Result<crate::FenceValue, crate::DeviceError> {
for &(value, raw) in active.iter() {
unsafe {
if value > last_completed
&& device
.get_fence_status(raw)
.map_err(map_host_device_oom_and_lost_err)?
{
last_completed = value;
}
}
}
Ok(last_completed)
}
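    /// Returns the latest completed fence value, reading the timeline semaphore counter or
    /// polling the active fence pool.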
fn get_latest(
&self,
device: &ash::Device,
extension: Option<&ExtensionFn<khr::timeline_semaphore::Device>>,
) -> Result<crate::FenceValue, crate::DeviceError> {
match *self {
Self::TimelineSemaphore(raw) => unsafe {
Ok(match *extension.unwrap() {
ExtensionFn::Extension(ref ext) => ext
.get_semaphore_counter_value(raw)
.map_err(map_host_device_oom_and_lost_err)?,
ExtensionFn::Promoted => device
.get_semaphore_counter_value(raw)
.map_err(map_host_device_oom_and_lost_err)?,
})
},
Self::FencePool {
last_completed,
ref active,
free: _,
} => Self::check_active(device, last_completed, active),
}
}
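    /// Moves signaled fences from `active` back into `free` (resetting them) and advances
    /// `last_completed`.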
fn maintain(&mut self, device: &ash::Device) -> Result<(), crate::DeviceError> {
match *self {
Self::TimelineSemaphore(_) => {}
Self::FencePool {
ref mut last_completed,
ref mut active,
ref mut free,
} => {
let latest = Self::check_active(device, *last_completed, active)?;
let base_free = free.len();
for &(value, raw) in active.iter() {
if value <= latest {
free.push(raw);
}
}
if free.len() != base_free {
active.retain(|&(value, _)| value > latest);
unsafe { device.reset_fences(&free[base_free..]) }
.map_err(map_device_oom_err)?
}
*last_completed = latest;
}
}
Ok(())
}
}
impl crate::Queue for Queue {
type A = Api;
unsafe fn submit(
&self,
command_buffers: &[&CommandBuffer],
surface_textures: &[&SurfaceTexture],
(signal_fence, signal_value): (&mut Fence, crate::FenceValue),
) -> Result<(), crate::DeviceError> {
let mut fence_raw = vk::Fence::null();
let mut wait_stage_masks = Vec::new();
let mut wait_semaphores = Vec::new();
let mut signal_semaphores = Vec::new();
let mut signal_values = Vec::new();
debug_assert!(
{
let mut check = HashSet::with_capacity(surface_textures.len());
for st in surface_textures {
check.insert(Arc::as_ptr(&st.surface_semaphores));
}
check.len() == surface_textures.len()
},
"More than one surface texture is being used from the same swapchain. This will cause a deadlock in release."
);
let locked_swapchain_semaphores = surface_textures
.iter()
.map(|st| {
st.surface_semaphores
.try_lock()
.expect("Failed to lock surface semaphore.")
})
.collect::<Vec<_>>();
for mut swapchain_semaphore in locked_swapchain_semaphores {
swapchain_semaphore.set_used_fence_value(signal_value);
if let Some(sem) = swapchain_semaphore.get_acquire_wait_semaphore() {
wait_stage_masks.push(vk::PipelineStageFlags::TOP_OF_PIPE);
wait_semaphores.push(sem);
}
let signal_semaphore = swapchain_semaphore.get_submit_signal_semaphore(&self.device)?;
signal_semaphores.push(signal_semaphore);
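            // The timeline value is ignored for binary semaphores.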
signal_values.push(!0);
}
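        // Chain this submission after the previous one on this queue via the relay semaphores.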
let semaphore_state = self.relay_semaphores.lock().advance(&self.device)?;
if let Some(sem) = semaphore_state.wait {
wait_stage_masks.push(vk::PipelineStageFlags::TOP_OF_PIPE);
wait_semaphores.push(sem);
}
signal_semaphores.push(semaphore_state.signal);
signal_values.push(!0);
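        // Recycle signaled pool fences, then hook up the signal for `signal_fence`:
        // a timeline semaphore value or a `vk::Fence` taken from the pool.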
signal_fence.maintain(&self.device.raw)?;
match *signal_fence {
Fence::TimelineSemaphore(raw) => {
signal_semaphores.push(raw);
signal_values.push(signal_value);
}
Fence::FencePool {
ref mut active,
ref mut free,
..
} => {
fence_raw = match free.pop() {
Some(raw) => raw,
None => unsafe {
self.device
.raw
.create_fence(&vk::FenceCreateInfo::default(), None)
.map_err(map_host_device_oom_err)?
},
};
active.push((signal_value, fence_raw));
}
}
let vk_cmd_buffers = command_buffers
.iter()
.map(|cmd| cmd.raw)
.collect::<Vec<_>>();
let mut vk_info = vk::SubmitInfo::default().command_buffers(&vk_cmd_buffers);
vk_info = vk_info
.wait_semaphores(&wait_semaphores)
.wait_dst_stage_mask(&wait_stage_masks)
.signal_semaphores(&signal_semaphores);
let mut vk_timeline_info;
if self.device.private_caps.timeline_semaphores {
vk_timeline_info =
vk::TimelineSemaphoreSubmitInfo::default().signal_semaphore_values(&signal_values);
vk_info = vk_info.push_next(&mut vk_timeline_info);
}
profiling::scope!("vkQueueSubmit");
unsafe {
self.device
.raw
.queue_submit(self.raw, &[vk_info], fence_raw)
.map_err(map_host_device_oom_and_lost_err)?
};
Ok(())
}
unsafe fn present(
&self,
surface: &Surface,
texture: SurfaceTexture,
) -> Result<(), crate::SurfaceError> {
let mut swapchain = surface.swapchain.write();
let ssc = swapchain.as_mut().unwrap();
let mut swapchain_semaphores = texture.surface_semaphores.lock();
let swapchains = [ssc.raw];
let image_indices = [texture.index];
let vk_info = vk::PresentInfoKHR::default()
.swapchains(&swapchains)
.image_indices(&image_indices)
.wait_semaphores(swapchain_semaphores.get_present_wait_semaphores());
let mut display_timing;
let present_times;
let vk_info = if let Some(present_time) = ssc.next_present_time.take() {
debug_assert!(
ssc.device
.features
.contains(wgt::Features::VULKAN_GOOGLE_DISPLAY_TIMING),
"`next_present_time` should only be set if `VULKAN_GOOGLE_DISPLAY_TIMING` is enabled"
);
present_times = [present_time];
display_timing = vk::PresentTimesInfoGOOGLE::default().times(&present_times);
vk_info.push_next(&mut display_timing)
} else {
vk_info
};
let suboptimal = {
profiling::scope!("vkQueuePresentKHR");
unsafe { self.swapchain_fn.queue_present(self.raw, &vk_info) }.map_err(|error| {
match error {
vk::Result::ERROR_OUT_OF_DATE_KHR => crate::SurfaceError::Outdated,
vk::Result::ERROR_SURFACE_LOST_KHR => crate::SurfaceError::Lost,
_ => map_host_device_oom_and_lost_err(error).into(),
}
})?
};
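        // On Android, presents are routinely reported as suboptimal due to pre-rotation,
        // so the warning is suppressed there to avoid log spam.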
if suboptimal {
#[cfg(not(target_os = "android"))]
log::warn!("Suboptimal present of frame {}", texture.index);
}
Ok(())
}
unsafe fn get_timestamp_period(&self) -> f32 {
self.device.timestamp_period
}
}
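/// Maps `ERROR_OUT_OF_HOST_MEMORY` and `ERROR_OUT_OF_DEVICE_MEMORY` to
/// `DeviceError::OutOfMemory`; any other result is treated as unexpected.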
fn map_host_device_oom_err(err: vk::Result) -> crate::DeviceError {
match err {
vk::Result::ERROR_OUT_OF_HOST_MEMORY | vk::Result::ERROR_OUT_OF_DEVICE_MEMORY => {
get_oom_err(err)
}
e => get_unexpected_err(e),
}
}
fn map_host_device_oom_and_lost_err(err: vk::Result) -> crate::DeviceError {
match err {
vk::Result::ERROR_DEVICE_LOST => get_lost_err(),
other => map_host_device_oom_err(other),
}
}
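/// "ioca" is `ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS`, which is currently folded into the same
/// mapping as host/device OOM.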
fn map_host_device_oom_and_ioca_err(err: vk::Result) -> crate::DeviceError {
map_host_device_oom_err(err)
}
fn map_host_oom_err(err: vk::Result) -> crate::DeviceError {
match err {
vk::Result::ERROR_OUT_OF_HOST_MEMORY => get_oom_err(err),
e => get_unexpected_err(e),
}
}
fn map_device_oom_err(err: vk::Result) -> crate::DeviceError {
match err {
vk::Result::ERROR_OUT_OF_DEVICE_MEMORY => get_oom_err(err),
e => get_unexpected_err(e),
}
}
fn map_host_oom_and_ioca_err(err: vk::Result) -> crate::DeviceError {
map_host_oom_err(err)
}
fn map_pipeline_err(err: vk::Result) -> crate::DeviceError {
map_host_device_oom_err(err)
}
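/// With the `internal_error_panic` feature enabled the unexpected result panics; otherwise it
/// is reported as `DeviceError::Unexpected`.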
fn get_unexpected_err(_err: vk::Result) -> crate::DeviceError {
#[cfg(feature = "internal_error_panic")]
panic!("Unexpected Vulkan error: {_err:?}");
#[allow(unreachable_code)]
crate::DeviceError::Unexpected
}
fn get_oom_err(_err: vk::Result) -> crate::DeviceError {
#[cfg(feature = "oom_panic")]
panic!("Out of memory ({_err:?})");
#[allow(unreachable_code)]
crate::DeviceError::OutOfMemory
}
fn get_lost_err() -> crate::DeviceError {
#[cfg(feature = "device_lost_panic")]
panic!("Device lost");
#[allow(unreachable_code)]
crate::DeviceError::Lost
}