use super::backends::{Constraint, CopyBufferImage, Layout};
use super::formats;
use super::types::{Error, Modifier, Result};
use super::utils;
use ash::vk;
use std::collections::HashMap;
use std::os::fd::{AsFd, AsRawFd, BorrowedFd, FromRawFd, IntoRawFd, OwnedFd, RawFd};
use std::sync::{atomic, Arc, Mutex};
use std::{cmp, ffi, ptr, slice, thread};
/// Minimum Vulkan API version: same major, at least this minor (see `has_api_version`).
const REQUIRED_API_VERSION: u32 = vk::API_VERSION_1_1;
/// Index of each device extension in [`EXT_TABLE`]; the order here must match
/// the table exactly (asserted in `PhysicalDevice::probe_extensions`).
#[derive(Clone, Copy)]
enum ExtId {
    KhrDriverProperties,
    KhrExternalMemoryFd,
    KhrImageFormatList,
    KhrMaintenance4,
    ExtExternalMemoryDmaBuf,
    ExtImageCompressionControl,
    ExtImageDrmFormatModifier,
    ExtPhysicalDeviceDrm,
    ExtQueueFamilyForeign,
    // Not an extension: number of entries above, used to size arrays.
    Count,
}
/// `(id, extension name, required)` for every device extension we probe.
/// Rows must appear in [`ExtId`] order; `required == true` entries make a
/// device unusable when missing.
#[rustfmt::skip]
const EXT_TABLE: [(ExtId, &ffi::CStr, bool); ExtId::Count as usize] = [
    (ExtId::KhrDriverProperties, ash::khr::driver_properties::NAME, false),
    (ExtId::KhrExternalMemoryFd, ash::khr::external_memory_fd::NAME, true),
    (ExtId::KhrImageFormatList, ash::khr::image_format_list::NAME, false),
    (ExtId::KhrMaintenance4, ash::khr::maintenance4::NAME, true),
    (ExtId::ExtExternalMemoryDmaBuf, ash::ext::external_memory_dma_buf::NAME, true),
    (ExtId::ExtImageCompressionControl, ash::ext::image_compression_control::NAME, false),
    (ExtId::ExtImageDrmFormatModifier, ash::ext::image_drm_format_modifier::NAME, false),
    (ExtId::ExtPhysicalDeviceDrm, ash::ext::physical_device_drm::NAME, false),
    (ExtId::ExtQueueFamilyForeign, ash::ext::queue_family_foreign::NAME, true),
];
fn has_api_version(ver: u32) -> Result<()> {
let req_major = vk::api_version_major(REQUIRED_API_VERSION);
let req_minor = vk::api_version_minor(REQUIRED_API_VERSION);
if vk::api_version_major(ver) == req_major && vk::api_version_minor(ver) >= req_minor {
Ok(())
} else {
Error::unsupported()
}
}
/// Checks whether the device's DRM primary or render node matches `dev_id`
/// (a `dev_t`-style id rebuilt from the reported major/minor numbers).
fn has_device_id(props: vk::PhysicalDeviceDrmPropertiesEXT, dev_id: u64) -> Result<()> {
    let nodes = [
        (props.has_primary, props.primary_major, props.primary_minor),
        (props.has_render, props.render_major, props.render_minor),
    ];
    for (present, major, minor) in nodes {
        // `present` is a VkBool32; a node that is absent carries no valid id.
        if present > 0 && utils::makedev(major as u64, minor as u64) == dev_id {
            return Ok(());
        }
    }
    Error::unsupported()
}
fn can_export_import(props: vk::ExternalMemoryProperties) -> Result<()> {
let flags =
vk::ExternalMemoryFeatureFlags::EXPORTABLE | vk::ExternalMemoryFeatureFlags::IMPORTABLE;
if props.external_memory_features.contains(flags) {
Ok(())
} else {
Error::unsupported()
}
}
/// Debug-utils messenger callback that forwards validation messages to `log`.
///
/// This runs inside the Vulkan loader/driver across an FFI boundary, so it
/// must never panic; driver-provided strings are converted lossily instead of
/// being `unwrap()`ed.
unsafe extern "system" fn debug_utils_messenger(
    severity: vk::DebugUtilsMessageSeverityFlagsEXT,
    _types: vk::DebugUtilsMessageTypeFlagsEXT,
    data: *const vk::DebugUtilsMessengerCallbackDataEXT,
    _user_data: *mut ffi::c_void,
) -> vk::Bool32 {
    let lv = match severity {
        vk::DebugUtilsMessageSeverityFlagsEXT::VERBOSE => log::Level::Debug,
        vk::DebugUtilsMessageSeverityFlagsEXT::INFO => log::Level::Info,
        vk::DebugUtilsMessageSeverityFlagsEXT::WARNING => log::Level::Warn,
        vk::DebugUtilsMessageSeverityFlagsEXT::ERROR => log::Level::Error,
        // Unknown/combined severity bits are treated as errors.
        _ => log::Level::Error,
    };
    // SAFETY: the loader guarantees `data` points to a valid callback-data
    // struct for the duration of this callback.
    let data = unsafe { &*data };
    // Fix: the previous `to_str().unwrap()` would panic on a non-UTF-8 driver
    // string, and unwinding out of an `extern "system"` callback is undefined
    // behavior (an abort on recent toolchains). Convert lossily instead.
    let msg_id = (!data.p_message_id_name.is_null())
        .then(|| unsafe { ffi::CStr::from_ptr(data.p_message_id_name) }.to_string_lossy());
    let msg = (!data.p_message.is_null())
        .then(|| unsafe { ffi::CStr::from_ptr(data.p_message) }.to_string_lossy());
    match (msg_id, msg) {
        (Some(id), Some(msg)) => log::log!(lv, "vulkan: {}: {}", id, msg),
        // When only one string is present, log it alone (id preferred).
        (Some(only), None) | (None, Some(only)) => log::log!(lv, "vulkan: {}", only),
        (None, None) => {}
    }
    // FALSE tells the implementation not to abort the triggering call.
    vk::FALSE
}
/// A loaded Vulkan entry plus the created instance.
struct Instance {
    // Kept alive because `handle`'s function pointers come from this loader.
    _entry: ash::Entry,
    handle: ash::Instance,
}
impl Instance {
    /// Loads the Vulkan library and creates an instance for `app_name`;
    /// `debug` enables the debug-utils messenger when available.
    fn new(app_name: &str, debug: bool) -> Result<Self> {
        let entry = Self::create_entry()?;
        let handle = Self::create_instance(&entry, app_name, debug)?;
        let instance = Self {
            _entry: entry,
            handle,
        };
        Ok(instance)
    }
    /// Dynamically loads the Vulkan loader library.
    fn create_entry() -> Result<ash::Entry> {
        let entry = unsafe { ash::Entry::load() }.or(Error::ctx("failed to load ash entry"))?;
        Ok(entry)
    }
    /// Returns the instance extensions to enable: just VK_EXT_debug_utils,
    /// and only when the loader actually advertises it.
    fn get_enabled_extensions(entry: &ash::Entry) -> Vec<*const ffi::c_char> {
        let exts = unsafe { entry.enumerate_instance_extension_properties(None) };
        let exts = exts.unwrap_or_default();
        let has_debug_utils = exts.iter().any(|ext| {
            // extension_name is a fixed-size NUL-terminated C char array.
            let name = unsafe { ffi::CStr::from_ptr(ext.extension_name.as_ptr()) };
            name == ash::ext::debug_utils::NAME
        });
        if has_debug_utils {
            vec![ash::ext::debug_utils::NAME.as_ptr()]
        } else {
            Vec::new()
        }
    }
    /// Creates the instance, wiring up the debug messenger create-info into
    /// the pNext chain when debugging is requested and supported.
    fn create_instance(entry: &ash::Entry, app_name: &str, debug: bool) -> Result<ash::Instance> {
        let ver = unsafe { entry.try_enumerate_instance_version() }?;
        // `None` means a Vulkan 1.0 loader.
        let ver = ver.unwrap_or(vk::API_VERSION_1_0);
        has_api_version(ver).or(Error::ctx("unsupported api version"))?;
        let c_name = ffi::CString::new(app_name)?;
        let app_info = vk::ApplicationInfo::default()
            .application_name(&c_name)
            .api_version(REQUIRED_API_VERSION);
        let mut instance_info = vk::InstanceCreateInfo::default().application_info(&app_info);
        let mut enabled_exts = Vec::new();
        if debug {
            enabled_exts = Self::get_enabled_extensions(entry);
        }
        // Declared outside the `if` below so it outlives the pointer pushed
        // into `instance_info`'s pNext chain.
        let mut msg_info = vk::DebugUtilsMessengerCreateInfoEXT::default();
        if debug && !enabled_exts.is_empty() {
            let msg_severity = vk::DebugUtilsMessageSeverityFlagsEXT::VERBOSE
                | vk::DebugUtilsMessageSeverityFlagsEXT::INFO
                | vk::DebugUtilsMessageSeverityFlagsEXT::WARNING
                | vk::DebugUtilsMessageSeverityFlagsEXT::ERROR;
            let msg_type = vk::DebugUtilsMessageTypeFlagsEXT::GENERAL
                | vk::DebugUtilsMessageTypeFlagsEXT::VALIDATION
                | vk::DebugUtilsMessageTypeFlagsEXT::PERFORMANCE;
            msg_info = msg_info
                .message_severity(msg_severity)
                .message_type(msg_type)
                .pfn_user_callback(Some(debug_utils_messenger));
            // Chaining the messenger info here also captures messages emitted
            // during instance creation/destruction itself.
            instance_info = instance_info
                .enabled_extension_names(&enabled_exts)
                .push_next(&mut msg_info);
        }
        let handle = unsafe { entry.create_instance(&instance_info, None) }
            .or(Error::ctx("failed to create instance"))?;
        Ok(handle)
    }
    /// Destroys the Vulkan instance (called from `Drop`).
    fn destroy(&self) {
        unsafe {
            self.handle.destroy_instance(None);
        }
    }
}
impl Drop for Instance {
    fn drop(&mut self) {
        // Destroy the Vulkan handle; `_entry` (the loader) drops afterwards.
        self.destroy();
    }
}
/// Per-device creation parameters gathered while probing.
#[derive(Default)]
struct DeviceCreateInfo {
    /// Availability of each [`EXT_TABLE`] extension, indexed by [`ExtId`].
    extensions: [bool; ExtId::Count as usize],
}
/// Per-Vulkan-format data cached at probe time.
struct FormatProperties {
    /// Static description (plane count, block sizes) of the format.
    format_class: &'static formats::FormatClass,
    /// Supported DRM modifiers (real or synthesized, see `get_format_properties`).
    modifiers: Vec<vk::DrmFormatModifierPropertiesEXT>,
}
/// Everything probed from a physical device that later code consults.
#[derive(Default)]
struct PhysicalDeviceProperties {
    /// Whether VK_EXT_image_drm_format_modifier is available.
    ext_image_drm_format_modifier: bool,
    driver_id: vk::DriverId,
    max_image_dimension_2d: u32,
    max_uniform_buffer_range: u32,
    max_storage_buffer_range: u32,
    /// From VK_KHR_maintenance4 (a required extension).
    max_buffer_size: vk::DeviceSize,
    protected_memory: bool,
    image_compression_control: bool,
    /// Index of the transfer-capable queue family selected at probe time.
    queue_family: u32,
    /// Property flags per memory type, indexed by memory-type index.
    memory_types: Vec<vk::MemoryPropertyFlags>,
    formats: HashMap<vk::Format, FormatProperties>,
    /// DMA_BUF_EXT when modifiers are supported, OPAQUE_FD otherwise.
    external_memory_type: vk::ExternalMemoryHandleTypeFlags,
}
/// A selected physical device together with its owning instance and the
/// properties recorded while probing it.
struct PhysicalDevice {
    instance: Instance,
    handle: vk::PhysicalDevice,
    properties: PhysicalDeviceProperties,
}
impl PhysicalDevice {
/// Takes ownership of `instance` and selects a physical device, optionally
/// restricted to a device index and/or a DRM device id.
fn new(
    instance: Instance,
    dev_idx: Option<usize>,
    dev_id: Option<u64>,
) -> Result<(Self, DeviceCreateInfo)> {
    let mut this = Self {
        instance,
        handle: Default::default(),
        properties: Default::default(),
    };
    let dev_info = this.init(dev_idx, dev_id)?;
    Ok((this, dev_info))
}
fn init(&mut self, dev_idx: Option<usize>, dev_id: Option<u64>) -> Result<DeviceCreateInfo> {
let handles = unsafe { self.instance.handle.enumerate_physical_devices() }
.or(Error::ctx("failed to enumerate devices"))?;
let dev_info = handles.into_iter().enumerate().find_map(|(idx, handle)| {
if let Some(dev_idx) = dev_idx {
if dev_idx != idx {
return None;
}
}
self.probe(handle, dev_id).ok()
});
dev_info.ok_or(Error::Context("failed to find any device"))
}
/// Probes one candidate device; returns its create-info when it satisfies
/// every requirement (extensions, properties, features, queue, formats).
fn probe(
    &mut self,
    handle: vk::PhysicalDevice,
    dev_id: Option<u64>,
) -> Result<DeviceCreateInfo> {
    self.handle = handle;
    // Reset so a previous failed probe leaves no stale state behind.
    self.properties = Default::default();
    let mut dev_info = Default::default();
    // Order matters: later steps read `ext_image_drm_format_modifier`
    // recorded by probe_extensions.
    self.probe_extensions(dev_id, &mut dev_info)?;
    self.probe_properties(dev_id)?;
    self.probe_features();
    self.probe_queue_families()?;
    self.probe_memory_types();
    self.probe_formats();
    self.probe_external_memory();
    Ok(dev_info)
}
fn probe_extensions(
&mut self,
dev_id: Option<u64>,
dev_info: &mut DeviceCreateInfo,
) -> Result<()> {
let exts = unsafe {
self.instance
.handle
.enumerate_device_extension_properties(self.handle)
}?;
for (idx, ext) in EXT_TABLE.iter().enumerate() {
let (id, name, required) = (ext.0, ext.1, ext.2);
assert_eq!(id as usize, idx);
dev_info.extensions[idx] = exts.iter().any(|ext| {
let ext_name = unsafe { ffi::CStr::from_ptr(ext.extension_name.as_ptr()) };
ext_name == name
});
if required && !dev_info.extensions[idx] {
return Error::unsupported();
}
}
if dev_id.is_some() && !dev_info.extensions[ExtId::ExtPhysicalDeviceDrm as usize] {
return Error::unsupported();
}
self.properties.ext_image_drm_format_modifier =
dev_info.extensions[ExtId::ExtImageDrmFormatModifier as usize];
Ok(())
}
/// Queries core, driver, maintenance4, and (optionally) DRM properties and
/// records the limits this backend cares about.
fn probe_properties(&mut self, dev_id: Option<u64>) -> Result<()> {
    let mut maint4_props = vk::PhysicalDeviceMaintenance4Properties::default();
    let mut drv_props = vk::PhysicalDeviceDriverProperties::default();
    let mut props = vk::PhysicalDeviceProperties2::default()
        .push_next(&mut maint4_props)
        .push_next(&mut drv_props);
    let mut drm_props = vk::PhysicalDeviceDrmPropertiesEXT::default();
    // Only chained when a device id must be matched; probe_extensions has
    // already verified VK_EXT_physical_device_drm in that case.
    if dev_id.is_some() {
        props = props.push_next(&mut drm_props);
    }
    unsafe {
        self.instance
            .handle
            .get_physical_device_properties2(self.handle, &mut props);
    }
    let props = &props.properties;
    has_api_version(props.api_version)?;
    if let Some(dev_id) = dev_id {
        has_device_id(drm_props, dev_id)?;
    }
    self.properties.driver_id = drv_props.driver_id;
    if !self.properties.ext_image_drm_format_modifier {
        if self.properties.driver_id == vk::DriverId::MESA_RADV {
            // NOTE(review): RADV alone is allowed to proceed without the
            // modifier extension — presumably a known-good fallback path;
            // every other driver is rejected.
            log::warn!("no VK_EXT_image_drm_format_modifier support");
        } else {
            return Error::unsupported();
        }
    }
    let limits = &props.limits;
    self.properties.max_image_dimension_2d = limits.max_image_dimension2_d;
    self.properties.max_uniform_buffer_range = limits.max_uniform_buffer_range;
    self.properties.max_storage_buffer_range = limits.max_storage_buffer_range;
    // From VK_KHR_maintenance4, which is a required extension.
    self.properties.max_buffer_size = maint4_props.max_buffer_size;
    Ok(())
}
/// Queries the optional features (protected memory, image compression
/// control) that `create_device` re-enables later.
fn probe_features(&mut self) {
    let mut mem_prot_feats = vk::PhysicalDeviceProtectedMemoryFeatures::default();
    let mut img_comp_feats = vk::PhysicalDeviceImageCompressionControlFeaturesEXT::default();
    let mut feats = vk::PhysicalDeviceFeatures2::default()
        .push_next(&mut mem_prot_feats)
        .push_next(&mut img_comp_feats);
    unsafe {
        self.instance
            .handle
            .get_physical_device_features2(self.handle, &mut feats);
    }
    // VkBool32 fields: nonzero means supported.
    self.properties.protected_memory = mem_prot_feats.protected_memory > 0;
    self.properties.image_compression_control = img_comp_feats.image_compression_control > 0;
}
/// Picks the first queue family that supports transfers at 1x1x1 image
/// granularity; fails when none exists.
fn probe_queue_families(&mut self) -> Result<()> {
    let families = unsafe {
        self.instance
            .handle
            .get_physical_device_queue_family_properties(self.handle)
    };
    // Copies must work at single-texel granularity.
    let granularity = vk::Extent3D {
        width: 1,
        height: 1,
        depth: 1,
    };
    let family_idx = families
        .iter()
        .position(|family| {
            family.min_image_transfer_granularity == granularity
                && family.queue_flags.contains(vk::QueueFlags::TRANSFER)
        })
        .ok_or(Error::Unsupported)?;
    self.properties.queue_family = family_idx as u32;
    Ok(())
}
/// Caches the property flags of every memory type the device exposes.
fn probe_memory_types(&mut self) {
    let props = unsafe {
        self.instance
            .handle
            .get_physical_device_memory_properties(self.handle)
    };
    let types = props.memory_types_as_slice();
    let mut flags = Vec::with_capacity(types.len());
    for mt in types {
        flags.push(mt.property_flags);
    }
    self.properties.memory_types = flags;
}
/// Returns the DRM-modifier properties supported for `fmt`.
///
/// With VK_EXT_image_drm_format_modifier the list is queried directly using
/// the two-call pattern (count, then fill). Without the extension, entries
/// for LINEAR and for a driver-chosen layout (reported as MOD_INVALID) are
/// synthesized from the classic linear/optimal tiling features.
fn get_format_properties(
    &self,
    fmt: vk::Format,
    fmt_plane_count: u32,
) -> Vec<vk::DrmFormatModifierPropertiesEXT> {
    // First query: with a NULL properties pointer this fills only the count
    // (and the plain tiling features used by the fallback path below).
    let mut mod_props_list = vk::DrmFormatModifierPropertiesListEXT::default();
    let mut props = vk::FormatProperties2::default().push_next(&mut mod_props_list);
    unsafe {
        self.instance.handle.get_physical_device_format_properties2(
            self.handle,
            fmt,
            &mut props,
        );
    }
    if self.properties.ext_image_drm_format_modifier {
        let mod_count = mod_props_list.drm_format_modifier_count as usize;
        let mut mods = vec![Default::default(); mod_count];
        if mod_count == 0 {
            return mods;
        }
        // Second query fills the allocated modifier array.
        let mut mod_props_list = vk::DrmFormatModifierPropertiesListEXT::default()
            .drm_format_modifier_properties(&mut mods);
        let mut props = vk::FormatProperties2::default().push_next(&mut mod_props_list);
        unsafe {
            self.instance.handle.get_physical_device_format_properties2(
                self.handle,
                fmt,
                &mut props,
            );
        }
        // Drop layouts with more than 4 memory planes; they are not
        // representable by the layout structures used here.
        mods.into_iter()
            .filter(|mod_props| mod_props.drm_format_modifier_plane_count <= 4)
            .collect()
    } else {
        let linear_feats = props.format_properties.linear_tiling_features;
        let optimal_feats = props.format_properties.optimal_tiling_features;
        // At most one synthesized entry per non-empty tiling feature set.
        let mod_count = !linear_feats.is_empty() as usize + !optimal_feats.is_empty() as usize;
        let mut mods = Vec::with_capacity(mod_count);
        if mod_count == 0 {
            return mods;
        }
        if !linear_feats.is_empty() {
            let linear_props = vk::DrmFormatModifierPropertiesEXT {
                drm_format_modifier: formats::MOD_LINEAR.0,
                drm_format_modifier_plane_count: fmt_plane_count,
                drm_format_modifier_tiling_features: linear_feats,
            };
            mods.push(linear_props);
        }
        // Optimal tiling (reported as MOD_INVALID, i.e. driver-chosen layout)
        // is only offered for single-plane formats here.
        if !optimal_feats.is_empty() && fmt_plane_count == 1 {
            let optimal_props = vk::DrmFormatModifierPropertiesEXT {
                drm_format_modifier: formats::MOD_INVALID.0,
                drm_format_modifier_plane_count: fmt_plane_count,
                drm_format_modifier_tiling_features: optimal_feats,
            };
            mods.push(optimal_props);
        }
        mods
    }
}
/// Builds the supported-format table from the known DRM formats.
fn probe_formats(&mut self) {
    for drm_fmt in formats::KNOWN_FORMATS {
        // Skip DRM formats that have no Vulkan equivalent.
        let Ok(converted) = formats::to_vk(drm_fmt) else {
            continue;
        };
        let fmt = converted.0;
        // Several DRM formats may map to one Vulkan format; first one wins.
        if self.properties.formats.contains_key(&fmt) {
            continue;
        }
        let fmt_class = formats::format_class(drm_fmt).unwrap();
        let mods = self.get_format_properties(fmt, fmt_class.plane_count as u32);
        // Formats with no usable modifier are left out entirely.
        if !mods.is_empty() {
            let fmt_props = FormatProperties {
                format_class: fmt_class,
                modifiers: mods,
            };
            self.properties.formats.insert(fmt, fmt_props);
        }
    }
}
/// Selects the external-memory handle type: dma-buf when DRM format
/// modifiers are supported, opaque fd otherwise.
fn probe_external_memory(&mut self) {
    let handle_type = match self.properties.ext_image_drm_format_modifier {
        true => vk::ExternalMemoryHandleTypeFlags::DMA_BUF_EXT,
        false => vk::ExternalMemoryHandleTypeFlags::OPAQUE_FD,
    };
    self.properties.external_memory_type = handle_type;
}
}
/// Caller-supplied parameters for buffer creation/queries.
pub struct BufferInfo {
    pub flags: vk::BufferCreateFlags,
    pub usage: vk::BufferUsageFlags,
    /// Whether the buffer memory will be exported/imported externally.
    pub external: bool,
}
/// Result of [`Device::buffer_properties`].
pub struct BufferProperties {
    /// Largest supported buffer size for the requested flags/usage.
    pub max_size: vk::DeviceSize,
}
/// Caller-supplied parameters for image creation/queries.
pub struct ImageInfo {
    pub flags: vk::ImageCreateFlags,
    pub usage: vk::ImageUsageFlags,
    pub format: vk::Format,
    /// Whether the image memory will be exported/imported externally.
    pub external: bool,
    /// Request that fixed-rate/lossless compression be disabled.
    pub no_compression: bool,
    /// Chain Mesa's WSI scanout hint when modifiers are unavailable.
    pub scanout_hack: bool,
}
/// Result of [`Device::image_properties`].
pub struct ImageProperties {
    /// Maximum width/height for 2D images.
    pub max_extent: u32,
    /// Modifiers usable for the requested image parameters.
    pub modifiers: Vec<Modifier>,
}
/// Hand-rolled mirror of Mesa's internal `wsi_image_create_info` struct,
/// chained to hint that the image will be scanned out.
// NOTE(review): the layout (Rust `bool` for C `bool`, trailing pad) is
// assumed to match Mesa's internal ABI — confirm against the Mesa source
// this was copied from.
#[repr(C)]
struct WsiImageCreateInfoMESA {
    s_type: vk::StructureType,
    p_next: *const ffi::c_void,
    scanout: bool,
    blit_src: bool,
    pad: [u64; 4],
}
impl Default for WsiImageCreateInfoMESA {
    fn default() -> Self {
        Self {
            // 1000001002 is presumably Mesa's private
            // VK_STRUCTURE_TYPE_WSI_IMAGE_CREATE_INFO_MESA value — verify
            // against the Mesa headers this mirrors.
            s_type: vk::StructureType::from_raw(1000001002),
            p_next: ptr::null(),
            scanout: true,
            blit_src: false,
            pad: Default::default(),
        }
    }
}
// SAFETY: the struct is #[repr(C)] and begins with s_type/p_next, matching
// the Vulkan base-structure layout these pNext-chain traits require.
unsafe impl vk::ExtendsPhysicalDeviceImageFormatInfo2 for WsiImageCreateInfoMESA {}
unsafe impl vk::ExtendsImageCreateInfo for WsiImageCreateInfoMESA {}
/// Pre-loaded function-pointer tables for the device extensions used at runtime.
struct DeviceDispatch {
    memory: ash::khr::external_memory_fd::Device,
    modifier: ash::ext::image_drm_format_modifier::Device,
}
/// A logical Vulkan device plus the physical device (and instance) it was
/// created from; destroyed on drop.
pub struct Device {
    physical_device: PhysicalDevice,
    handle: ash::Device,
    dispatch: DeviceDispatch,
}
impl Device {
/// Creates a shared logical [`Device`], selecting the physical device by an
/// optional index and/or DRM device id; `debug` enables validation logging.
pub fn build(
    name: &str,
    dev_idx: Option<usize>,
    dev_id: Option<u64>,
    debug: bool,
) -> Result<Arc<Device>> {
    let instance = Instance::new(name, debug)?;
    let (physical_dev, dev_info) = PhysicalDevice::new(instance, dev_idx, dev_id)?;
    Self::new(physical_dev, dev_info).map(Arc::new)
}
/// Creates the logical device and loads its extension dispatch tables.
fn new(physical_device: PhysicalDevice, dev_info: DeviceCreateInfo) -> Result<Self> {
    let handle = Self::create_device(&physical_device, dev_info)?;
    let dispatch = Self::create_dispatch(&handle, &physical_device);
    Ok(Self {
        physical_device,
        handle,
        dispatch,
    })
}
/// Creates the logical device with one queue from the probed transfer family
/// and every extension that probing found available.
fn create_device(
    physical_dev: &PhysicalDevice,
    dev_info: DeviceCreateInfo,
) -> Result<ash::Device> {
    let props = &physical_dev.properties;
    let queue_prio = 1.0;
    let queue_info = vk::DeviceQueueCreateInfo::default()
        .queue_family_index(props.queue_family)
        .queue_priorities(slice::from_ref(&queue_prio));
    // Enable every EXT_TABLE extension marked available during probing
    // (required ones are guaranteed present at this point).
    let enabled_exts: Vec<*const ffi::c_char> = dev_info
        .extensions
        .into_iter()
        .enumerate()
        .filter_map(|(idx, avail)| {
            if avail {
                Some(EXT_TABLE[idx].1.as_ptr())
            } else {
                None
            }
        })
        .collect();
    // Re-enable exactly the features observed in probe_features.
    let mut mem_prot_feats = vk::PhysicalDeviceProtectedMemoryFeatures::default()
        .protected_memory(props.protected_memory);
    let mut img_comp_feats = vk::PhysicalDeviceImageCompressionControlFeaturesEXT::default()
        .image_compression_control(props.image_compression_control);
    let mut feats = vk::PhysicalDeviceFeatures2::default()
        .push_next(&mut mem_prot_feats)
        .push_next(&mut img_comp_feats);
    // Shadow `dev_info`: from here on it is the Vulkan create-info struct.
    let dev_info = vk::DeviceCreateInfo::default()
        .queue_create_infos(slice::from_ref(&queue_info))
        .enabled_extension_names(&enabled_exts)
        .push_next(&mut feats);
    let handle = unsafe {
        physical_dev
            .instance
            .handle
            .create_device(physical_dev.handle, &dev_info, None)
    }
    .or(Error::ctx("failed to create device"))?;
    Ok(handle)
}
/// Loads the function-pointer tables for the extensions used after init.
fn create_dispatch(handle: &ash::Device, physical_dev: &PhysicalDevice) -> DeviceDispatch {
    let instance = &physical_dev.instance.handle;
    let memory = ash::khr::external_memory_fd::Device::new(instance, handle);
    let modifier = ash::ext::image_drm_format_modifier::Device::new(instance, handle);
    DeviceDispatch { memory, modifier }
}
/// Destroys the logical device (called from `Drop`).
fn destroy(&self) {
    unsafe {
        self.handle.destroy_device(None);
    }
}
/// Shorthand for the owning instance's dispatch handle.
fn instance_handle(&self) -> &ash::Instance {
    &self.physical_device.instance.handle
}
/// Shorthand for the probed physical-device properties.
fn properties(&self) -> &PhysicalDeviceProperties {
    &self.physical_device.properties
}
/// Returns queue 0 of the probed transfer-capable queue family.
fn get_queue(&self) -> vk::Queue {
    unsafe {
        self.handle
            .get_device_queue(self.properties().queue_family, 0)
    }
}
/// Returns the format-plane count of `fmt`; panics when `fmt` was not probed.
fn format_plane_count(&self, fmt: vk::Format) -> u32 {
    let class = self.properties().formats[&fmt].format_class;
    class.plane_count as u32
}
/// Returns the block size (bytes) of `plane` of `fmt`; panics when `fmt` was
/// not probed or `plane` is out of range.
fn format_block_size(&self, fmt: vk::Format, plane: u32) -> u32 {
    let class = self.properties().formats[&fmt].format_class;
    class.block_size[plane as usize] as u32
}
/// Returns the number of memory planes of `fmt` under `modifier`, or
/// `Unsupported` when the pair was not probed as usable.
pub fn memory_plane_count(&self, fmt: vk::Format, modifier: Modifier) -> Result<u32> {
    let fmt_props = self
        .properties()
        .formats
        .get(&fmt)
        .ok_or(Error::Unsupported)?;
    let mod_props = fmt_props
        .modifiers
        .iter()
        .find(|props| props.drm_format_modifier == modifier.0)
        .ok_or(Error::Unsupported)?;
    Ok(mod_props.drm_format_modifier_plane_count)
}
/// Validates `buf_info` against device capabilities and returns the maximum
/// buffer size usable with the requested flags/usage.
pub fn buffer_properties(&self, buf_info: BufferInfo) -> Result<BufferProperties> {
    // Protected buffers need the protectedMemory feature.
    if buf_info.flags.contains(vk::BufferCreateFlags::PROTECTED)
        && !self.properties().protected_memory
    {
        return Error::unsupported();
    }
    if buf_info.external {
        // External buffers must support both export and import for the
        // chosen handle type.
        let external_info = vk::PhysicalDeviceExternalBufferInfo::default()
            .flags(buf_info.flags)
            .usage(buf_info.usage)
            .handle_type(self.properties().external_memory_type);
        let mut external_props = vk::ExternalBufferProperties::default();
        unsafe {
            self.instance_handle()
                .get_physical_device_external_buffer_properties(
                    self.physical_device.handle,
                    &external_info,
                    &mut external_props,
                );
        }
        can_export_import(external_props.external_memory_properties)?;
    }
    // Start from maintenance4's maxBufferSize and clamp by the tighter
    // per-usage limits below.
    let mut max_size = self.properties().max_buffer_size;
    if buf_info
        .usage
        .contains(vk::BufferUsageFlags::UNIFORM_BUFFER)
    {
        max_size = cmp::min(max_size, self.properties().max_uniform_buffer_range as _);
    }
    if buf_info
        .usage
        .contains(vk::BufferUsageFlags::STORAGE_BUFFER)
    {
        max_size = cmp::min(max_size, self.properties().max_storage_buffer_range as _);
    }
    let props = BufferProperties { max_size };
    Ok(props)
}
/// Maps a DRM modifier to the image tiling used on this device.
fn get_image_tiling(&self, modifier: Modifier) -> vk::ImageTiling {
    if self.properties().ext_image_drm_format_modifier {
        return vk::ImageTiling::DRM_FORMAT_MODIFIER_EXT;
    }
    // Without the modifier extension only the classic tilings exist.
    match modifier == formats::MOD_LINEAR {
        true => vk::ImageTiling::LINEAR,
        false => vk::ImageTiling::OPTIMAL,
    }
}
/// Checks whether an image with `img_info`, `compression`, and `modifier`
/// is supported, including external-memory export/import when requested.
fn has_image_support(
    &self,
    img_info: &ImageInfo,
    compression: vk::ImageCompressionFlagsEXT,
    modifier: Modifier,
) -> Result<()> {
    let tiling = self.get_image_tiling(modifier);
    let mut comp_info = vk::ImageCompressionControlEXT::default().flags(compression);
    let mut fmt_info = vk::PhysicalDeviceImageFormatInfo2::default()
        .format(img_info.format)
        .ty(vk::ImageType::TYPE_2D)
        .tiling(tiling)
        .usage(img_info.usage)
        .flags(img_info.flags)
        .push_next(&mut comp_info);
    // Each optional struct below lives outside its `if` so the pointer
    // pushed into the pNext chain stays valid until the query.
    let mut external_info = vk::PhysicalDeviceExternalImageFormatInfo::default();
    if img_info.external {
        external_info = external_info.handle_type(self.properties().external_memory_type);
        fmt_info = fmt_info.push_next(&mut external_info);
    }
    let mut mod_info = vk::PhysicalDeviceImageDrmFormatModifierInfoEXT::default();
    if tiling == vk::ImageTiling::DRM_FORMAT_MODIFIER_EXT {
        mod_info = mod_info.drm_format_modifier(modifier.0);
        fmt_info = fmt_info.push_next(&mut mod_info);
    }
    // Mesa scanout hint: only used on the non-modifier (fallback) path.
    let mut wsi_info = WsiImageCreateInfoMESA::default();
    if img_info.scanout_hack && tiling != vk::ImageTiling::DRM_FORMAT_MODIFIER_EXT {
        fmt_info = fmt_info.push_next(&mut wsi_info);
    }
    let mut comp_props = vk::ImageCompressionPropertiesEXT::default();
    let mut fmt_props = vk::ImageFormatProperties2::default().push_next(&mut comp_props);
    let mut external_props = vk::ExternalImageFormatProperties::default();
    if img_info.external {
        fmt_props = fmt_props.push_next(&mut external_props)
    }
    unsafe {
        self.instance_handle()
            .get_physical_device_image_format_properties2(
                self.physical_device.handle,
                &fmt_info,
                &mut fmt_props,
            )
    }?;
    if img_info.external {
        can_export_import(external_props.external_memory_properties)?;
    }
    // The driver must actually honor the requested compression mode.
    if !comp_props.image_compression_flags.contains(compression) {
        return Error::unsupported();
    }
    Ok(())
}
/// Validates `img_info` and returns the usable modifiers (filtered from the
/// probed format table) plus the maximum 2D extent.
///
/// `modifier` narrows the query to one modifier unless it is MOD_INVALID,
/// which means "any".
pub fn image_properties(
    &self,
    img_info: ImageInfo,
    modifier: Modifier,
) -> Result<ImageProperties> {
    // Protected images need the protectedMemory feature.
    if img_info.flags.contains(vk::ImageCreateFlags::PROTECTED)
        && !self.properties().protected_memory
    {
        return Error::unsupported();
    }
    let mut modifier = modifier;
    let mut compression = vk::ImageCompressionFlagsEXT::DEFAULT;
    if img_info.no_compression {
        if self.properties().image_compression_control {
            // Preferred path: ask the driver to disable compression.
            compression = vk::ImageCompressionFlagsEXT::DISABLED;
        } else if modifier.is_invalid() {
            // Fallback: force the (uncompressed) linear layout.
            modifier = formats::MOD_LINEAR;
        } else {
            // A specific non-linear modifier cannot be guaranteed uncompressed.
            return Error::unsupported();
        }
    }
    // Translate the requested usages into required tiling features.
    let mut required_feats = vk::FormatFeatureFlags::empty();
    if img_info.usage.contains(vk::ImageUsageFlags::SAMPLED) {
        required_feats |= vk::FormatFeatureFlags::SAMPLED_IMAGE;
    }
    if img_info.usage.contains(vk::ImageUsageFlags::STORAGE) {
        required_feats |= vk::FormatFeatureFlags::STORAGE_IMAGE;
    }
    if img_info
        .usage
        .contains(vk::ImageUsageFlags::COLOR_ATTACHMENT)
    {
        required_feats |= vk::FormatFeatureFlags::COLOR_ATTACHMENT;
    }
    let fmt_props = self
        .properties()
        .formats
        .get(&img_info.format)
        .ok_or(Error::Unsupported)?;
    // Keep every probed modifier that matches the filter, supports the
    // required features, and passes the full image-support query.
    let mut mods: Vec<Modifier> = fmt_props
        .modifiers
        .iter()
        .filter_map(|mod_props| {
            let candidate = Modifier(mod_props.drm_format_modifier);
            if !modifier.is_invalid() && candidate != modifier {
                return None;
            }
            if !mod_props
                .drm_format_modifier_tiling_features
                .contains(required_feats)
            {
                return None;
            }
            if self
                .has_image_support(&img_info, compression, candidate)
                .is_ok()
            {
                Some(candidate)
            } else {
                None
            }
        })
        .collect();
    if mods.is_empty() {
        return Error::unsupported();
    }
    // Without the modifier extension a concrete multi-modifier list cannot
    // be honored; report "driver-chosen" instead.
    if !self.properties().ext_image_drm_format_modifier && mods.len() > 1 {
        mods = vec![formats::MOD_INVALID];
    }
    let props = ImageProperties {
        max_extent: self.properties().max_image_dimension_2d,
        modifiers: mods,
    };
    Ok(props)
}
/// Returns the memory-type mask the given dma-buf can be imported into.
///
/// The query error is deliberately ignored: on failure `memory_type_bits`
/// stays 0, which callers treat as "no usable memory type".
fn get_dma_buf_mt_mask(&self, dmabuf: BorrowedFd) -> u32 {
    let external_memory_type = vk::ExternalMemoryHandleTypeFlags::DMA_BUF_EXT;
    let mut fd_props = vk::MemoryFdPropertiesKHR::default();
    let _ = unsafe {
        self.dispatch.memory.get_memory_fd_properties(
            external_memory_type,
            dmabuf.as_raw_fd(),
            &mut fd_props,
        )
    };
    fd_props.memory_type_bits
}
/// Lists `(index, flags)` of the memory types selected by `mt_mask` that
/// also carry all of `required_flags`.
pub fn memory_types(
    &self,
    mt_mask: u32,
    required_flags: vk::MemoryPropertyFlags,
) -> Vec<(u32, vk::MemoryPropertyFlags)> {
    let mut matching = Vec::new();
    for (mt_idx, &mt_flags) in self.properties().memory_types.iter().enumerate() {
        let in_mask = mt_mask & (1 << mt_idx) != 0;
        if in_mask && mt_flags.contains(required_flags) {
            matching.push((mt_idx as u32, mt_flags));
        }
    }
    matching
}
}
impl Drop for Device {
    fn drop(&mut self) {
        // Destroy the logical device before the physical device / instance.
        self.destroy();
    }
}
/// A device-memory allocation; freed on drop.
pub struct Memory {
    device: Arc<Device>,
    handle: vk::DeviceMemory,
}
impl Memory {
/// Allocates device memory of type `mt_idx` and wraps it together with its
/// device; `dmabuf`, when given, is imported into the allocation.
fn new(
    device: Arc<Device>,
    size: vk::DeviceSize,
    mt_idx: u32,
    dedicated_info: vk::MemoryDedicatedAllocateInfo,
    external: bool,
    dmabuf: Option<OwnedFd>,
) -> Result<Self> {
    let handle = Self::allocate_memory(&device, size, mt_idx, dedicated_info, external, dmabuf)?;
    Ok(Self { device, handle })
}
/// Allocates dedicated memory for `buf` from memory type `mt_idx`.
fn with_buffer(buf: &Buffer, mt_idx: u32, dmabuf: Option<OwnedFd>) -> Result<Self> {
    Self::new(
        buf.device.clone(),
        buf.size,
        mt_idx,
        vk::MemoryDedicatedAllocateInfo::default().buffer(buf.handle),
        buf.external,
        dmabuf,
    )
}
/// Allocates dedicated memory for `img` from memory type `mt_idx`.
fn with_image(img: &Image, mt_idx: u32, dmabuf: Option<OwnedFd>) -> Result<Self> {
    Self::new(
        img.device.clone(),
        img.size,
        mt_idx,
        vk::MemoryDedicatedAllocateInfo::default().image(img.handle),
        img.external,
        dmabuf,
    )
}
/// Allocates (dedicated) device memory, optionally exportable and/or
/// imported from a dma-buf.
///
/// On import, the fd is handed to Vulkan; if the allocation fails, the raw
/// fd is re-wrapped so it gets closed here instead of leaking.
fn allocate_memory(
    dev: &Device,
    size: vk::DeviceSize,
    mt_idx: u32,
    mut dedicated_info: vk::MemoryDedicatedAllocateInfo,
    external: bool,
    dmabuf: Option<OwnedFd>,
) -> Result<vk::DeviceMemory> {
    let mut mem_info = vk::MemoryAllocateInfo::default()
        .allocation_size(size)
        .memory_type_index(mt_idx)
        .push_next(&mut dedicated_info);
    // Lives outside the `if` so the pNext pointer stays valid.
    let mut export_info = vk::ExportMemoryAllocateInfo::default();
    if external {
        export_info = export_info.handle_types(dev.properties().external_memory_type);
        mem_info = mem_info.push_next(&mut export_info);
    }
    // -1 marks "no fd handed over"; used by the error path below.
    let mut raw_fd: RawFd = -1;
    let mut import_info = vk::ImportMemoryFdInfoKHR::default();
    if let Some(dmabuf) = dmabuf {
        // Reject memory types the dma-buf cannot be imported into.
        let mt_mask = dev.get_dma_buf_mt_mask(dmabuf.as_fd());
        if mt_mask & (1 << mt_idx) == 0 {
            return Error::user();
        }
        // Give up ownership: on success the driver owns the fd.
        raw_fd = dmabuf.into_raw_fd();
        import_info = import_info
            .handle_type(dev.properties().external_memory_type)
            .fd(raw_fd);
        mem_info = mem_info.push_next(&mut import_info);
    }
    let handle = unsafe { dev.handle.allocate_memory(&mem_info, None) };
    let handle = handle.map_err(|err| {
        if raw_fd >= 0 {
            // The failed allocation did not consume the fd; re-wrap it so it
            // is closed when the temporary OwnedFd drops.
            unsafe {
                OwnedFd::from_raw_fd(raw_fd);
            }
        }
        err
    })?;
    Ok(handle)
}
/// Frees the allocation (called from `Drop`).
fn destroy(&self) {
    unsafe {
        self.device.handle.free_memory(self.handle, None);
    }
}
/// Exports the allocation as a dma-buf (or opaque) fd owned by the caller.
pub fn export_dma_buf(&self) -> Result<OwnedFd> {
    let fd_info = vk::MemoryGetFdInfoKHR::default()
        .memory(self.handle)
        .handle_type(self.device.properties().external_memory_type);
    let raw_fd = unsafe { self.device.dispatch.memory.get_memory_fd(&fd_info) }?;
    // SAFETY: vkGetMemoryFdKHR returns a fresh fd owned by the application.
    let dmabuf = unsafe { OwnedFd::from_raw_fd(raw_fd) };
    Ok(dmabuf)
}
/// Maps `size` bytes at `offset` into host address space.
pub fn map(&self, offset: vk::DeviceSize, size: vk::DeviceSize) -> Result<*mut ffi::c_void> {
    let mapped = unsafe {
        self.device.handle.map_memory(
            self.handle,
            offset,
            size,
            vk::MemoryMapFlags::empty(),
        )
    }?;
    Ok(mapped)
}
/// Unmaps the allocation from host address space.
pub fn unmap(&self) {
    unsafe { self.device.handle.unmap_memory(self.handle) };
}
/// Flushes a host-written range to the device; the result is best-effort
/// and any error is ignored.
pub fn flush(&self, offset: vk::DeviceSize, size: vk::DeviceSize) {
    let ranges = [vk::MappedMemoryRange::default()
        .memory(self.handle)
        .offset(offset)
        .size(size)];
    let _ = unsafe { self.device.handle.flush_mapped_memory_ranges(&ranges) };
}
/// Invalidates a range before host reads; the result is best-effort and any
/// error is ignored.
pub fn invalidate(&self, offset: vk::DeviceSize, size: vk::DeviceSize) {
    let ranges = [vk::MappedMemoryRange::default()
        .memory(self.handle)
        .offset(offset)
        .size(size)];
    let _ = unsafe { self.device.handle.invalidate_mapped_memory_ranges(&ranges) };
}
}
impl Drop for Memory {
    fn drop(&mut self) {
        // Free the device memory; the Arc<Device> keeps the device alive.
        self.destroy();
    }
}
/// A Vulkan buffer plus its cached memory requirements and, once bound,
/// its backing [`Memory`]; destroyed on drop.
pub struct Buffer {
    device: Arc<Device>,
    handle: vk::Buffer,
    /// Required allocation size (possibly rounded up by a constraint).
    size: vk::DeviceSize,
    /// Memory-type mask from the driver, possibly narrowed by a dma-buf.
    mt_mask: u32,
    external: bool,
    /// Bound memory; `None` until `bind_memory` succeeds.
    memory: Option<Memory>,
}
impl Buffer {
/// Creates the buffer handle and caches its memory requirements; no memory
/// is bound yet.
fn new(device: Arc<Device>, buf_info: BufferInfo, size: vk::DeviceSize) -> Result<Self> {
    let external = buf_info.external;
    let handle = Self::create_buffer(&device, &buf_info, size)?;
    let mut buf = Self {
        device,
        handle,
        // Filled in from the driver's requirements just below.
        size: 0,
        mt_mask: 0,
        external,
        memory: None,
    };
    buf.init_memory_requirements();
    Ok(buf)
}
/// Like `Buffer::new`, but rounds the required size up to the constraint's
/// alignment when one is given.
pub fn with_constraint(
    dev: Arc<Device>,
    buf_info: BufferInfo,
    size: vk::DeviceSize,
    con: Option<Constraint>,
) -> Result<Self> {
    let mut buf = Self::new(dev, buf_info, size)?;
    if let Some(con) = con {
        buf.size = buf.size.next_multiple_of(con.size_align);
    }
    Ok(buf)
}
/// Like `Buffer::new`, but validates the driver's requirements against a
/// caller-provided layout and, optionally, a dma-buf to import from.
pub fn with_layout(
    dev: Arc<Device>,
    buf_info: BufferInfo,
    size: vk::DeviceSize,
    layout: Layout,
    dmabuf: Option<BorrowedFd>,
) -> Result<Self> {
    let mut buf = Self::new(dev, buf_info, size)?;
    // The provided layout must cover the driver's required size.
    if buf.size > layout.size {
        return Error::user();
    }
    let Some(dmabuf) = dmabuf else {
        return Ok(buf);
    };
    // Only memory types the dma-buf can be imported into remain usable.
    buf.mt_mask &= buf.device.get_dma_buf_mt_mask(dmabuf);
    if buf.mt_mask == 0 {
        return Error::user();
    }
    Ok(buf)
}
/// Creates the vk::Buffer handle (no memory yet), chaining external-memory
/// info when the buffer will be exported/imported.
fn create_buffer(
    dev: &Device,
    buf_info: &BufferInfo,
    size: vk::DeviceSize,
) -> Result<vk::Buffer> {
    let external = buf_info.external;
    // Shadow `buf_info`: from here on it is the Vulkan create-info struct.
    let mut buf_info = vk::BufferCreateInfo::default()
        .flags(buf_info.flags)
        .size(size)
        .usage(buf_info.usage);
    // Lives outside the `if` so the pNext pointer stays valid.
    let mut external_info = vk::ExternalMemoryBufferCreateInfo::default();
    if external {
        external_info = external_info.handle_types(dev.properties().external_memory_type);
        buf_info = buf_info.push_next(&mut external_info);
    }
    let handle = unsafe { dev.handle.create_buffer(&buf_info, None) }?;
    Ok(handle)
}
/// Queries and caches the buffer's required size and memory-type mask.
fn init_memory_requirements(&mut self) {
    let info = vk::BufferMemoryRequirementsInfo2::default().buffer(self.handle);
    let mut reqs = vk::MemoryRequirements2::default();
    unsafe {
        self.device
            .handle
            .get_buffer_memory_requirements2(&info, &mut reqs);
    }
    self.size = reqs.memory_requirements.size;
    self.mt_mask = reqs.memory_requirements.memory_type_bits;
}
/// Destroys the buffer handle (called from `Drop`).
fn destroy(&self) {
    unsafe {
        self.device.handle.destroy_buffer(self.handle, None);
    }
}
/// Required allocation size (after any constraint rounding).
pub fn size(&self) -> vk::DeviceSize {
    self.size
}
/// Returns a layout describing just the buffer's size.
pub fn layout(&self) -> Layout {
    Layout::new().size(self.size)
}
/// Memory types compatible with this buffer that carry `required_flags`.
pub fn memory_types(
    &self,
    required_flags: vk::MemoryPropertyFlags,
) -> Vec<(u32, vk::MemoryPropertyFlags)> {
    self.device.memory_types(self.mt_mask, required_flags)
}
pub fn bind_memory(&mut self, mt_idx: u32, dmabuf: Option<OwnedFd>) -> Result<()> {
let mem = Memory::with_buffer(self, mt_idx, dmabuf)?;
let bind_info = vk::BindBufferMemoryInfo::default()
.buffer(self.handle)
.memory(mem.handle);
unsafe {
self.device
.handle
.bind_buffer_memory2(slice::from_ref(&bind_info))
}
.map_err(Error::from)?;
self.memory = Some(mem);
Ok(())
}
/// Bound memory; panics when `bind_memory` has not succeeded yet.
pub fn memory(&self) -> &Memory {
    self.memory.as_ref().unwrap()
}
}
impl Drop for Buffer {
    fn drop(&mut self) {
        // Destroy the buffer handle; bound memory (if any) drops afterwards.
        self.destroy();
    }
}
/// A Vulkan image plus its resolved modifier, cached memory requirements
/// and, once bound, its backing [`Memory`]; destroyed on drop.
pub struct Image {
    device: Arc<Device>,
    handle: vk::Image,
    tiling: vk::ImageTiling,
    format: vk::Format,
    format_plane_count: u32,
    /// Resolved by `init_modifier`; MOD_INVALID until then.
    modifier: Modifier,
    /// Required allocation size (possibly rounded up by a constraint).
    size: vk::DeviceSize,
    /// Memory-type mask from the driver, possibly narrowed by a dma-buf.
    mt_mask: u32,
    external: bool,
    /// Bound memory; `None` until `bind_memory` succeeds.
    memory: Option<Memory>,
}
impl Image {
/// Wraps a freshly created image handle and resolves its modifier and
/// memory requirements.
fn new(
    device: Arc<Device>,
    handle: vk::Image,
    tiling: vk::ImageTiling,
    format: vk::Format,
    external: bool,
) -> Result<Self> {
    let format_plane_count = device.format_plane_count(format);
    let mut img = Self {
        device,
        handle,
        tiling,
        format,
        format_plane_count,
        // Placeholder; replaced by init_modifier (defined outside this chunk).
        modifier: formats::MOD_INVALID,
        size: 0,
        mt_mask: 0,
        external,
        memory: None,
    };
    img.init_modifier()?;
    img.init_memory_requirements();
    Ok(img)
}
/// Creates an image whose concrete modifier is chosen by the driver from
/// `mods`, optionally narrowed and size-aligned by an allocation constraint.
pub fn with_constraint(
    dev: Arc<Device>,
    img_info: ImageInfo,
    width: u32,
    height: u32,
    mods: &[Modifier],
    con: Option<Constraint>,
) -> Result<Self> {
    let mut mods = mods;
    if let Some(con) = &con {
        // A constraint with an explicit modifier list overrides the caller's.
        if !con.modifiers.is_empty() {
            mods = &con.modifiers;
        }
    }
    // All entries in `mods` map to the same tiling, so the first one decides.
    let tiling = dev.get_image_tiling(mods[0]);
    let handle = Self::create_implicit_image(&dev, tiling, &img_info, width, height, mods)?;
    let mut img = Self::new(dev, handle, tiling, img_info.format, img_info.external)?;
    if let Some(con) = con {
        img.size = img.size.next_multiple_of(con.size_align);
        // Fix: removed an empty `if tiling == DRM_FORMAT_MODIFIER_EXT {}`
        // branch that had no effect — dead code, apparently a leftover stub.
    }
    Ok(img)
}
/// Creates an image matching a caller-provided layout, using the explicit
/// modifier path when VK_EXT_image_drm_format_modifier is available and the
/// implicit single-modifier path otherwise.
pub fn with_layout(
    dev: Arc<Device>,
    img_info: ImageInfo,
    width: u32,
    height: u32,
    layout: Layout,
    dmabuf: Option<BorrowedFd>,
) -> Result<Self> {
    let tiling = dev.get_image_tiling(layout.modifier);
    let handle = if tiling == vk::ImageTiling::DRM_FORMAT_MODIFIER_EXT {
        Self::create_explicit_image(&dev, tiling, &img_info, width, height, &layout)?
    } else {
        Self::create_implicit_image(
            &dev,
            tiling,
            &img_info,
            width,
            height,
            slice::from_ref(&layout.modifier),
        )?
    };
    let mut img = Self::new(dev, handle, tiling, img_info.format, img_info.external)?;
    // The caller's layout must cover the driver's required size.
    if img.size > layout.size {
        return Error::user();
    }
    if let Some(dmabuf) = dmabuf {
        // Only memory types the dma-buf can be imported into remain usable.
        img.mt_mask &= img.device.get_dma_buf_mt_mask(dmabuf);
        if img.mt_mask == 0 {
            return Error::user();
        }
    }
    Ok(img)
}
fn create_implicit_image(
dev: &Device,
tiling: vk::ImageTiling,
img_info: &ImageInfo,
width: u32,
height: u32,
mods: &[Modifier],
) -> Result<vk::Image> {
let mods: Vec<u64> = mods.iter().map(|m| m.0).collect();
let mod_info =
vk::ImageDrmFormatModifierListCreateInfoEXT::default().drm_format_modifiers(&mods);
Self::create_image(dev, tiling, img_info, width, height, mod_info)
}
fn create_explicit_image(
dev: &Device,
tiling: vk::ImageTiling,
img_info: &ImageInfo,
width: u32,
height: u32,
layout: &Layout,
) -> Result<vk::Image> {
let count = layout.plane_count as usize;
let mut plane_layouts = Vec::with_capacity(count);
for plane in 0..count {
let subres_layout = vk::SubresourceLayout::default()
.offset(layout.offsets[plane])
.row_pitch(layout.strides[plane]);
plane_layouts.push(subres_layout);
}
let mod_info = vk::ImageDrmFormatModifierExplicitCreateInfoEXT::default()
.drm_format_modifier(layout.modifier.0)
.plane_layouts(&plane_layouts);
Self::create_image(dev, tiling, img_info, width, height, mod_info)
}
    /// Shared image-creation path: builds the `VkImageCreateInfo` pNext chain
    /// (modifier info, optional external-memory / compression / WSI structs)
    /// and creates the image.
    fn create_image<T: vk::ExtendsImageCreateInfo>(
        dev: &Device,
        tiling: vk::ImageTiling,
        img_info: &ImageInfo,
        width: u32,
        height: u32,
        mut mod_info: T,
    ) -> Result<vk::Image> {
        let external = img_info.external;
        // Compression can only be disabled for OPTIMAL tiling; modifier-based
        // tilings encode compression in the modifier itself.
        let compression = if tiling == vk::ImageTiling::OPTIMAL && img_info.no_compression {
            vk::ImageCompressionFlagsEXT::DISABLED
        } else {
            vk::ImageCompressionFlagsEXT::DEFAULT
        };
        let scanout_hack = img_info.scanout_hack;
        let extent = vk::Extent3D {
            width,
            height,
            depth: 1,
        };
        let mut img_info = vk::ImageCreateInfo::default()
            .flags(img_info.flags)
            .image_type(vk::ImageType::TYPE_2D)
            .format(img_info.format)
            .extent(extent)
            .mip_levels(1)
            .array_layers(1)
            .samples(vk::SampleCountFlags::TYPE_1)
            .tiling(tiling)
            .usage(img_info.usage)
            .initial_layout(vk::ImageLayout::UNDEFINED)
            .push_next(&mut mod_info);
        // The chained structs below are declared before the conditional
        // `push_next` calls so they outlive the borrow held by `img_info`.
        let mut external_info = vk::ExternalMemoryImageCreateInfo::default();
        if external {
            external_info = external_info.handle_types(dev.properties().external_memory_type);
            img_info = img_info.push_next(&mut external_info);
        }
        let mut comp_info = vk::ImageCompressionControlEXT::default();
        if compression != vk::ImageCompressionFlagsEXT::DEFAULT {
            comp_info = comp_info.flags(compression);
            img_info = img_info.push_next(&mut comp_info);
        }
        // NOTE(review): WsiImageCreateInfoMESA appears to be a Mesa-specific
        // hint to make non-modifier images scanout-capable — confirm against
        // its declaration elsewhere in this crate.
        let mut wsi_info = WsiImageCreateInfoMESA::default();
        if scanout_hack && tiling != vk::ImageTiling::DRM_FORMAT_MODIFIER_EXT {
            img_info = img_info.push_next(&mut wsi_info);
        }
        let handle = unsafe { dev.handle.create_image(&img_info, None) }?;
        Ok(handle)
    }
    /// Resolves `self.modifier` from the created image: queried from the
    /// driver for modifier tiling, LINEAR maps to the linear modifier, and
    /// OPTIMAL has no meaningful modifier.
    fn init_modifier(&mut self) -> Result<()> {
        let modifier = match self.tiling {
            vk::ImageTiling::DRM_FORMAT_MODIFIER_EXT => {
                // The driver picked one modifier from the list we supplied;
                // ask which one it chose.
                let mut mod_props = vk::ImageDrmFormatModifierPropertiesEXT::default();
                unsafe {
                    self.device
                        .dispatch
                        .modifier
                        .get_image_drm_format_modifier_properties(self.handle, &mut mod_props)
                }?;
                Modifier(mod_props.drm_format_modifier)
            }
            vk::ImageTiling::LINEAR => formats::MOD_LINEAR,
            vk::ImageTiling::OPTIMAL => formats::MOD_INVALID,
            // Images are only ever created with the three tilings above.
            _ => unreachable!(),
        };
        self.modifier = modifier;
        Ok(())
    }
    /// Queries and caches the image's allocation size and usable
    /// memory-type mask.
    fn init_memory_requirements(&mut self) {
        let reqs_info = vk::ImageMemoryRequirementsInfo2::default().image(self.handle);
        // Out-parameter pattern: Vulkan fills `reqs` in place.
        let mut reqs = vk::MemoryRequirements2::default();
        unsafe {
            self.device
                .handle
                .get_image_memory_requirements2(&reqs_info, &mut reqs);
        }
        let reqs = reqs.memory_requirements;
        self.size = reqs.size;
        self.mt_mask = reqs.memory_type_bits;
    }
    /// Destroys the Vulkan image handle (called from `Drop`).
    fn destroy(&self) {
        unsafe {
            self.device.handle.destroy_image(self.handle, None);
        }
    }
    /// Returns the image's required allocation size in bytes.
    pub fn size(&self) -> vk::DeviceSize {
        self.size
    }
    /// Maps a memory-plane index to the aspect flag used for subresource
    /// queries: MEMORY_PLANE_*_EXT for modifier tiling, PLANE_*/COLOR for
    /// LINEAR and OPTIMAL.
    fn get_image_subresource_aspect(
        &self,
        mem_plane_count: u32,
        plane: u32,
    ) -> vk::ImageAspectFlags {
        match self.tiling {
            // Modifier tiling addresses memory planes (up to 4) directly.
            vk::ImageTiling::DRM_FORMAT_MODIFIER_EXT => match plane {
                0 => vk::ImageAspectFlags::MEMORY_PLANE_0_EXT,
                1 => vk::ImageAspectFlags::MEMORY_PLANE_1_EXT,
                2 => vk::ImageAspectFlags::MEMORY_PLANE_2_EXT,
                3 => vk::ImageAspectFlags::MEMORY_PLANE_3_EXT,
                _ => unreachable!(),
            },
            vk::ImageTiling::LINEAR | vk::ImageTiling::OPTIMAL => match plane {
                0 => {
                    // Single-plane formats use COLOR; multi-planar formats
                    // address plane 0 explicitly.
                    if mem_plane_count > 1 {
                        vk::ImageAspectFlags::PLANE_0
                    } else {
                        vk::ImageAspectFlags::COLOR
                    }
                }
                1 => vk::ImageAspectFlags::PLANE_1,
                2 => vk::ImageAspectFlags::PLANE_2,
                _ => unreachable!(),
            },
            // Only the three tilings above are ever created.
            _ => unreachable!(),
        }
    }
    /// Builds the exported `Layout` (size, modifier, per-plane offsets and
    /// strides) by querying each memory plane's subresource layout.
    ///
    /// Panics if the device cannot report a memory plane count for this
    /// format/modifier pair.
    pub fn layout(&self) -> Layout {
        let mem_plane_count = self
            .device
            .memory_plane_count(self.format, self.modifier)
            .unwrap();
        let mut layout = Layout::new()
            .size(self.size)
            .modifier(self.modifier)
            .plane_count(mem_plane_count);
        for plane in 0..mem_plane_count {
            let aspect = self.get_image_subresource_aspect(mem_plane_count, plane);
            let subres = vk::ImageSubresource::default().aspect_mask(aspect);
            let subres_layout = unsafe {
                self.device
                    .handle
                    .get_image_subresource_layout(self.handle, subres)
            };
            layout.offsets[plane as usize] = subres_layout.offset;
            layout.strides[plane as usize] = subres_layout.row_pitch;
        }
        layout
    }
    /// Lists the memory types usable for this image that also carry
    /// `required_flags`, as `(index, property_flags)` pairs.
    pub fn memory_types(
        &self,
        required_flags: vk::MemoryPropertyFlags,
    ) -> Vec<(u32, vk::MemoryPropertyFlags)> {
        self.device.memory_types(self.mt_mask, required_flags)
    }
pub fn bind_memory(&mut self, mt_idx: u32, dmabuf: Option<OwnedFd>) -> Result<()> {
let mem = Memory::with_image(self, mt_idx, dmabuf)?;
let bind_info = vk::BindImageMemoryInfo::default()
.image(self.handle)
.memory(mem.handle);
unsafe {
self.device
.handle
.bind_image_memory2(slice::from_ref(&bind_info))
}
.map_err(Error::from)?;
self.memory = Some(mem);
Ok(())
}
    /// Returns the memory bound to this image.
    ///
    /// Panics if `bind_memory` has not been called successfully first.
    pub fn memory(&self) -> &Memory {
        self.memory.as_ref().unwrap()
    }
    /// Translates a `CopyBufferImage` request into a `vk::BufferImageCopy`
    /// for a single format plane.
    pub fn get_copy_region(&self, copy: CopyBufferImage) -> vk::BufferImageCopy {
        let aspect = match copy.plane {
            0 => {
                // Single-plane formats use COLOR; multi-planar address PLANE_0.
                if self.format_plane_count > 1 {
                    vk::ImageAspectFlags::PLANE_0
                } else {
                    vk::ImageAspectFlags::COLOR
                }
            }
            1 => vk::ImageAspectFlags::PLANE_1,
            2 => vk::ImageAspectFlags::PLANE_2,
            _ => unreachable!(),
        };
        // buffer_row_length is measured in texels, so convert the byte stride.
        // NOTE(review): integer division assumes `copy.stride` is a multiple
        // of the plane's texel block size — confirm callers guarantee this.
        let bpp = self.device.format_block_size(self.format, copy.plane);
        let row_len = copy.stride as u32 / bpp;
        let subres = vk::ImageSubresourceLayers::default()
            .aspect_mask(aspect)
            .layer_count(1);
        let offset = vk::Offset3D::default().x(copy.x as i32).y(copy.y as i32);
        let extent = vk::Extent3D::default()
            .width(copy.width)
            .height(copy.height)
            .depth(1);
        vk::BufferImageCopy::default()
            .buffer_offset(copy.offset)
            .buffer_row_length(row_len)
            .image_subresource(subres)
            .image_offset(offset)
            .image_extent(extent)
    }
}
impl Drop for Image {
    // Releases the Vulkan image handle; any bound `Memory` is dropped by its
    // own field destructor afterwards.
    fn drop(&mut self) {
        self.destroy();
    }
}
/// A one-shot command buffer with its own pool and fence, used for
/// synchronous copy submissions (one instance per submitting thread).
struct SimpleCommandBuffer {
    device: Arc<Device>,
    pool: vk::CommandPool,
    handle: vk::CommandBuffer,
    fence: vk::Fence,
    // True while a submission that failed to be waited on may still be
    // in flight; checked before reuse/teardown.
    pending: atomic::AtomicBool,
}
impl SimpleCommandBuffer {
fn new(device: Arc<Device>) -> Result<Self> {
let mut cmd = Self {
device,
pool: Default::default(),
handle: Default::default(),
fence: Default::default(),
pending: atomic::AtomicBool::new(false),
};
cmd.init()?;
Ok(cmd)
}
    /// Initializes pool, buffer, and fence in dependency order (the command
    /// buffer is allocated from the pool created first).
    fn init(&mut self) -> Result<()> {
        self.init_command_pool()?;
        self.init_command_buffer()?;
        self.init_fence()?;
        Ok(())
    }
fn init_command_pool(&mut self) -> Result<()> {
let pool_info = vk::CommandPoolCreateInfo::default()
.flags(vk::CommandPoolCreateFlags::RESET_COMMAND_BUFFER)
.queue_family_index(self.device.properties().queue_family);
self.pool = unsafe { self.device.handle.create_command_pool(&pool_info, None) }
.map_err(Error::from)?;
Ok(())
}
    /// Allocates the single primary command buffer from the pool.
    fn init_command_buffer(&mut self) -> Result<()> {
        let alloc_info = vk::CommandBufferAllocateInfo::default()
            .command_pool(self.pool)
            .level(vk::CommandBufferLevel::PRIMARY)
            .command_buffer_count(1);
        let cmds = unsafe { self.device.handle.allocate_command_buffers(&alloc_info) }?;
        // Exactly one buffer was requested, so indexing [0] cannot fail.
        self.handle = cmds[0];
        Ok(())
    }
    /// Creates the (initially unsignaled) fence used to wait on submissions.
    fn init_fence(&mut self) -> Result<()> {
        let fence_info = vk::FenceCreateInfo::default();
        self.fence =
            unsafe { self.device.handle.create_fence(&fence_info, None) }.map_err(Error::from)?;
        Ok(())
    }
fn destroy(&self) {
let _ = self.ensure_idle_fence();
unsafe {
self.device.handle.destroy_command_pool(self.pool, None);
}
unsafe {
self.device.handle.destroy_fence(self.fence, None);
}
}
    /// If an earlier submission is still marked pending (its wait failed),
    /// waits for the fence before the command buffer is reused or destroyed.
    fn ensure_idle_fence(&self) -> Result<()> {
        if self.pending.load(atomic::Ordering::Relaxed) {
            if self.wait_fence().is_ok() {
                // The outstanding work finished; clear the flag.
                self.pending.store(false, atomic::Ordering::Relaxed);
                Ok(())
            } else {
                // Still cannot synchronize with the GPU — report device error.
                Error::device()
            }
        } else {
            Ok(())
        }
    }
fn reset_fence(&self) -> Result<()> {
self.ensure_idle_fence()?;
unsafe {
self.device
.handle
.reset_fences(slice::from_ref(&self.fence))
}
.map_err(Error::from)
}
    /// Begins recording; ONE_TIME_SUBMIT because the buffer is re-recorded
    /// for every copy.
    fn begin(&self) -> Result<()> {
        let begin_info = vk::CommandBufferBeginInfo::default()
            .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT);
        unsafe {
            self.device
                .handle
                .begin_command_buffer(self.handle, &begin_info)
        }
        .map_err(Error::from)
    }
    /// Finishes recording the command buffer.
    fn end(&self) -> Result<()> {
        unsafe { self.device.handle.end_command_buffer(self.handle) }.map_err(Error::from)
    }
    /// Blocks until the fence signals. On failure (other than device loss)
    /// marks the submission as still pending so `ensure_idle_fence` retries
    /// later; after device loss the work can never complete, so no retry.
    fn wait_fence(&self) -> Result<()> {
        unsafe {
            self.device
                .handle
                .wait_for_fences(slice::from_ref(&self.fence), true, u64::MAX)
        }
        .map_err(|res| {
            if res != vk::Result::ERROR_DEVICE_LOST {
                self.pending.store(true, atomic::Ordering::Relaxed);
            }
            Error::from(res)
        })
    }
}
impl Drop for SimpleCommandBuffer {
    // `destroy` waits for any pending submission before freeing the pool
    // and fence.
    fn drop(&mut self) {
        self.destroy();
    }
}
/// The four ownership-transfer barriers used around a copy: acquiring the
/// source/destination resource from the foreign queue family before the copy,
/// and releasing it back afterwards.
#[derive(PartialEq)]
enum PipelineBarrierType {
    AcquireSrc,
    AcquireDst,
    ReleaseSrc,
    ReleaseDst,
}
/// Fully-resolved barrier parameters shared between the buffer and image
/// barrier recording helpers.
struct PipelineBarrierScope {
    dependency_flags: vk::DependencyFlags,
    // Source half of the dependency (queue family, stage, access, layout).
    src_queue_family: u32,
    src_stage_mask: vk::PipelineStageFlags,
    src_access_mask: vk::AccessFlags,
    src_image_layout: vk::ImageLayout,
    // Destination half of the dependency.
    dst_queue_family: u32,
    dst_stage_mask: vk::PipelineStageFlags,
    dst_access_mask: vk::AccessFlags,
    dst_image_layout: vk::ImageLayout,
}
/// Synchronous copy engine: serializes access to one `vk::Queue` and keeps a
/// dedicated `SimpleCommandBuffer` per submitting thread.
pub struct CopyQueue {
    device: Arc<Device>,
    // Mutex serializes queue submission across threads.
    handle: Mutex<vk::Queue>,
    per_thread_cmds: Mutex<HashMap<thread::ThreadId, Arc<SimpleCommandBuffer>>>,
}
impl CopyQueue {
pub fn new(device: Arc<Device>) -> Self {
let handle = device.get_queue();
let queue = Self {
device,
handle: Mutex::new(handle),
per_thread_cmds: Default::default(),
};
queue
}
fn lookup_per_thread_cmd(&self) -> Option<Arc<SimpleCommandBuffer>> {
let tid = thread::current().id();
let cmds = self.per_thread_cmds.lock().unwrap();
cmds.get(&tid).map(|cmd| cmd.clone())
}
fn create_per_thread_cmd(&self) -> Result<Arc<SimpleCommandBuffer>> {
let cmd = SimpleCommandBuffer::new(self.device.clone())?;
let cmd = Arc::new(cmd);
let tid = thread::current().id();
let mut cmds = self.per_thread_cmds.lock().unwrap();
cmds.insert(tid, cmd.clone());
Ok(cmd)
}
fn get_per_thread_cmd(&self) -> Result<Arc<SimpleCommandBuffer>> {
let cmd = match self.lookup_per_thread_cmd() {
Some(cmd) => cmd,
None => self.create_per_thread_cmd()?,
};
cmd.reset_fence()?;
cmd.begin()?;
Ok(cmd)
}
    /// Submits the recorded command buffer, signaling its fence on completion.
    // NOTE(review): the queue lock guard is dropped before `queue_submit`
    // because the handle is copied out — confirm external synchronization of
    // the vk::Queue is guaranteed elsewhere if concurrent submits can occur.
    fn submit_cmd(&self, cmd: &SimpleCommandBuffer) -> Result<()> {
        let submit_info = vk::SubmitInfo::default().command_buffers(slice::from_ref(&cmd.handle));
        let handle = *self.handle.lock().unwrap();
        unsafe {
            self.device
                .handle
                .queue_submit(handle, slice::from_ref(&submit_info), cmd.fence)
        }
        .map_err(Error::from)
    }
    /// Ends recording, submits, and blocks until the GPU finishes — copies
    /// through `CopyQueue` are fully synchronous.
    fn execute_per_thread_cmd(&self, cmd: Arc<SimpleCommandBuffer>) -> Result<()> {
        cmd.end()?;
        self.submit_cmd(&cmd)?;
        cmd.wait_fence()
    }
    /// Computes the barrier parameters for one of the four ownership
    /// transfers: Acquire* moves a resource from the foreign queue family
    /// (dma-buf producer/consumer) into our transfer queue; Release* hands it
    /// back. Src/Dst variants differ only in transfer access/layout.
    fn get_pipeline_barrier_scope(&self, ty: PipelineBarrierType) -> PipelineBarrierScope {
        let src_queue_family;
        let src_stage_mask;
        let src_access_mask;
        let src_image_layout;
        let dst_queue_family;
        let dst_stage_mask;
        let dst_access_mask;
        let dst_image_layout;
        match ty {
            PipelineBarrierType::AcquireSrc | PipelineBarrierType::AcquireDst => {
                // Foreign -> our queue family; the external side keeps images
                // in the GENERAL layout.
                src_queue_family = vk::QUEUE_FAMILY_FOREIGN_EXT;
                src_stage_mask = vk::PipelineStageFlags::NONE;
                src_access_mask = vk::AccessFlags::NONE;
                src_image_layout = vk::ImageLayout::GENERAL;
                dst_queue_family = self.device.properties().queue_family;
                dst_stage_mask = vk::PipelineStageFlags::TRANSFER;
                if ty == PipelineBarrierType::AcquireSrc {
                    // Copy source: read access, TRANSFER_SRC layout.
                    dst_access_mask = vk::AccessFlags::TRANSFER_READ;
                    dst_image_layout = vk::ImageLayout::TRANSFER_SRC_OPTIMAL;
                } else {
                    // Copy destination: write access, TRANSFER_DST layout.
                    dst_access_mask = vk::AccessFlags::TRANSFER_WRITE;
                    dst_image_layout = vk::ImageLayout::TRANSFER_DST_OPTIMAL;
                }
            }
            PipelineBarrierType::ReleaseSrc | PipelineBarrierType::ReleaseDst => {
                // Our queue family -> foreign; mirror of the acquire case.
                src_queue_family = self.device.properties().queue_family;
                if ty == PipelineBarrierType::ReleaseSrc {
                    src_stage_mask = vk::PipelineStageFlags::NONE;
                    src_access_mask = vk::AccessFlags::NONE;
                    src_image_layout = vk::ImageLayout::TRANSFER_SRC_OPTIMAL;
                } else {
                    src_stage_mask = vk::PipelineStageFlags::TRANSFER;
                    src_access_mask = vk::AccessFlags::TRANSFER_WRITE;
                    src_image_layout = vk::ImageLayout::TRANSFER_DST_OPTIMAL;
                }
                dst_queue_family = vk::QUEUE_FAMILY_FOREIGN_EXT;
                dst_stage_mask = vk::PipelineStageFlags::NONE;
                dst_access_mask = vk::AccessFlags::NONE;
                dst_image_layout = vk::ImageLayout::GENERAL;
            }
        }
        PipelineBarrierScope {
            dependency_flags: vk::DependencyFlags::empty(),
            src_queue_family,
            src_stage_mask,
            src_access_mask,
            src_image_layout,
            dst_queue_family,
            dst_stage_mask,
            dst_access_mask,
            dst_image_layout,
        }
    }
    /// Records a whole-buffer memory barrier (including queue family
    /// ownership transfer) described by `scope`.
    fn cmd_buffer_barrier(
        &self,
        cmd: vk::CommandBuffer,
        buf: vk::Buffer,
        scope: PipelineBarrierScope,
    ) {
        let buf_barrier = vk::BufferMemoryBarrier::default()
            .src_access_mask(scope.src_access_mask)
            .dst_access_mask(scope.dst_access_mask)
            .src_queue_family_index(scope.src_queue_family)
            .dst_queue_family_index(scope.dst_queue_family)
            .buffer(buf)
            // Cover the entire buffer range.
            .size(vk::WHOLE_SIZE);
        unsafe {
            self.device.handle.cmd_pipeline_barrier(
                cmd,
                scope.src_stage_mask,
                scope.dst_stage_mask,
                scope.dependency_flags,
                &[],
                slice::from_ref(&buf_barrier),
                &[],
            );
        }
    }
    /// Records an image memory barrier (layout transition plus queue family
    /// ownership transfer) for the given aspect, covering mip 0 / layer 0.
    fn cmd_image_barrier(
        &self,
        cmd: vk::CommandBuffer,
        img: vk::Image,
        aspect: vk::ImageAspectFlags,
        scope: PipelineBarrierScope,
    ) {
        // Images here are single-mip, single-layer (see create_image).
        let img_subres = vk::ImageSubresourceRange::default()
            .aspect_mask(aspect)
            .level_count(1)
            .layer_count(1);
        let img_barrier = vk::ImageMemoryBarrier::default()
            .src_access_mask(scope.src_access_mask)
            .dst_access_mask(scope.dst_access_mask)
            .old_layout(scope.src_image_layout)
            .new_layout(scope.dst_image_layout)
            .src_queue_family_index(scope.src_queue_family)
            .dst_queue_family_index(scope.dst_queue_family)
            .image(img)
            .subresource_range(img_subres);
        unsafe {
            self.device.handle.cmd_pipeline_barrier(
                cmd,
                scope.src_stage_mask,
                scope.dst_stage_mask,
                scope.dependency_flags,
                &[],
                &[],
                slice::from_ref(&img_barrier),
            );
        }
    }
    /// Synchronously copies `region` from `src` to `dst`, acquiring both
    /// buffers from the foreign queue family around the copy and releasing
    /// them afterwards. Blocks until the GPU finishes.
    pub fn copy_buffer(&self, src: &Buffer, dst: &Buffer, region: vk::BufferCopy) -> Result<()> {
        let cmd = self.get_per_thread_cmd()?;
        let src_acquire = self.get_pipeline_barrier_scope(PipelineBarrierType::AcquireSrc);
        let dst_acquire = self.get_pipeline_barrier_scope(PipelineBarrierType::AcquireDst);
        let src_release = self.get_pipeline_barrier_scope(PipelineBarrierType::ReleaseSrc);
        let dst_release = self.get_pipeline_barrier_scope(PipelineBarrierType::ReleaseDst);
        self.cmd_buffer_barrier(cmd.handle, src.handle, src_acquire);
        self.cmd_buffer_barrier(cmd.handle, dst.handle, dst_acquire);
        unsafe {
            self.device.handle.cmd_copy_buffer(
                cmd.handle,
                src.handle,
                dst.handle,
                slice::from_ref(&region),
            );
        }
        self.cmd_buffer_barrier(cmd.handle, src.handle, src_release);
        self.cmd_buffer_barrier(cmd.handle, dst.handle, dst_release);
        self.execute_per_thread_cmd(cmd)
    }
    /// Synchronously copies `region` from `img` to `buf` (readback path):
    /// acquires the image as transfer source and the buffer as transfer
    /// destination, copies, then releases both. Blocks until done.
    pub fn copy_image_to_buffer(
        &self,
        img: &Image,
        buf: &Buffer,
        region: vk::BufferImageCopy,
    ) -> Result<()> {
        let cmd = self.get_per_thread_cmd()?;
        let img_acquire = self.get_pipeline_barrier_scope(PipelineBarrierType::AcquireSrc);
        let buf_acquire = self.get_pipeline_barrier_scope(PipelineBarrierType::AcquireDst);
        let img_release = self.get_pipeline_barrier_scope(PipelineBarrierType::ReleaseSrc);
        let buf_release = self.get_pipeline_barrier_scope(PipelineBarrierType::ReleaseDst);
        let img_aspect = region.image_subresource.aspect_mask;
        // The copy must use the layout the acquire barrier transitions into.
        let img_layout = img_acquire.dst_image_layout;
        self.cmd_image_barrier(cmd.handle, img.handle, img_aspect, img_acquire);
        self.cmd_buffer_barrier(cmd.handle, buf.handle, buf_acquire);
        unsafe {
            self.device.handle.cmd_copy_image_to_buffer(
                cmd.handle,
                img.handle,
                img_layout,
                buf.handle,
                slice::from_ref(&region),
            );
        }
        self.cmd_image_barrier(cmd.handle, img.handle, img_aspect, img_release);
        self.cmd_buffer_barrier(cmd.handle, buf.handle, buf_release);
        self.execute_per_thread_cmd(cmd)
    }
    /// Synchronously copies `region` from `buf` to `img` (upload path):
    /// acquires the buffer as transfer source and the image as transfer
    /// destination, copies, then releases both. Blocks until done.
    pub fn copy_buffer_to_image(
        &self,
        buf: &Buffer,
        img: &Image,
        region: vk::BufferImageCopy,
    ) -> Result<()> {
        let cmd = self.get_per_thread_cmd()?;
        let buf_acquire = self.get_pipeline_barrier_scope(PipelineBarrierType::AcquireSrc);
        let img_acquire = self.get_pipeline_barrier_scope(PipelineBarrierType::AcquireDst);
        let buf_release = self.get_pipeline_barrier_scope(PipelineBarrierType::ReleaseSrc);
        let img_release = self.get_pipeline_barrier_scope(PipelineBarrierType::ReleaseDst);
        let img_aspect = region.image_subresource.aspect_mask;
        // The copy must use the layout the acquire barrier transitions into.
        let img_layout = img_acquire.dst_image_layout;
        self.cmd_buffer_barrier(cmd.handle, buf.handle, buf_acquire);
        self.cmd_image_barrier(cmd.handle, img.handle, img_aspect, img_acquire);
        unsafe {
            self.device.handle.cmd_copy_buffer_to_image(
                cmd.handle,
                buf.handle,
                img.handle,
                img_layout,
                slice::from_ref(&region),
            );
        }
        self.cmd_buffer_barrier(cmd.handle, buf.handle, buf_release);
        self.cmd_image_barrier(cmd.handle, img.handle, img_aspect, img_release);
        self.execute_per_thread_cmd(cmd)
    }
}