use self::sorted_map::SortedMap;
use super::{
layout::DescriptorSetLayout,
pool::{
DescriptorPool, DescriptorPoolAlloc, DescriptorPoolCreateFlags, DescriptorPoolCreateInfo,
DescriptorSetAllocateInfo,
},
};
use crate::{
descriptor_set::layout::DescriptorType,
device::{Device, DeviceOwned},
instance::InstanceOwnedDebugWrapper,
Validated, VulkanError,
};
use crossbeam_queue::ArrayQueue;
use std::{cell::UnsafeCell, mem::ManuallyDrop, num::NonZeroU64, sync::Arc, thread};
use thread_local::ThreadLocal;
/// Maximum number of exhausted-then-reset `DescriptorPool`s kept in a
/// `VariableEntry`'s reserve queue for reuse; pools pushed beyond this
/// capacity are dropped instead (see `VariablePool::drop`).
const MAX_POOLS: usize = 32;
/// Types that are able to suballocate descriptor sets for a device.
///
/// # Safety
///
/// NOTE(review): the trait is `unsafe` to implement; implementors presumably
/// must hand out allocations that stay valid for as long as the returned
/// `Alloc` lives — confirm the exact contract against the crate-level docs.
pub unsafe trait DescriptorSetAllocator: DeviceOwned {
    /// The type of allocation handed out by [`Self::allocate`].
    type Alloc: DescriptorSetAlloc;
    /// Allocates one descriptor set matching `layout`.
    ///
    /// `variable_descriptor_count` gives the descriptor count for the
    /// layout's variable-count binding; it is ignored for layouts whose
    /// `variable_descriptor_count()` is zero (see the standard impl below).
    fn allocate(
        &self,
        layout: &Arc<DescriptorSetLayout>,
        variable_descriptor_count: u32,
    ) -> Result<Self::Alloc, Validated<VulkanError>>;
}
/// A descriptor set allocation produced by a [`DescriptorSetAllocator`].
pub trait DescriptorSetAlloc: Send + Sync {
    /// Returns the underlying pool allocation.
    fn inner(&self) -> &DescriptorPoolAlloc;
    /// Returns the descriptor pool the set was allocated from.
    fn pool(&self) -> &DescriptorPool;
}
/// Standard implementation of [`DescriptorSetAllocator`].
///
/// Maintains a separate set of pools per thread (via [`ThreadLocal`]); each
/// thread keeps one [`Entry`] per descriptor set layout, keyed by the
/// layout's id.
#[derive(Debug)]
pub struct StandardDescriptorSetAllocator {
    device: InstanceOwnedDebugWrapper<Arc<Device>>,
    // Per-thread map from layout id to that layout's pool entry. The
    // `UnsafeCell` is only ever dereferenced through the current thread's
    // slot (`self.pools.get_or(...)`), so access is single-threaded.
    pools: ThreadLocal<UnsafeCell<SortedMap<NonZeroU64, Entry>>>,
    create_info: StandardDescriptorSetAllocatorCreateInfo,
}
/// Per-layout allocation state: layouts without a variable descriptor count
/// use the pre-allocating `FixedEntry`; layouts with one use `VariableEntry`,
/// which must allocate per-request with the requested count.
#[derive(Debug)]
enum Entry {
    Fixed(FixedEntry),
    Variable(VariableEntry),
}
// SAFETY: NOTE(review): `Entry` lives inside a `ThreadLocal` slot and is only
// mutated by its owning thread; `Send` is presumably needed so the container
// can move/drop entries from another thread. Confirm that no field holds
// state that is unsound to move across threads.
unsafe impl Send for Entry {}
impl StandardDescriptorSetAllocator {
    /// Creates a new `StandardDescriptorSetAllocator` for `device` with the
    /// given tuning parameters. No pools are created until the first
    /// allocation on each thread.
    #[inline]
    pub fn new(
        device: Arc<Device>,
        create_info: StandardDescriptorSetAllocatorCreateInfo,
    ) -> StandardDescriptorSetAllocator {
        StandardDescriptorSetAllocator {
            device: InstanceOwnedDebugWrapper(device),
            pools: ThreadLocal::new(),
            create_info,
        }
    }
    /// Removes the calling thread's entry for `layout`, releasing its pools
    /// (pools are `Arc`-shared with outstanding allocations, so they are
    /// destroyed only once those allocations are dropped too).
    #[inline]
    pub fn clear(&self, layout: &Arc<DescriptorSetLayout>) {
        // SAFETY: the `UnsafeCell` belongs to the current thread's slot of
        // the `ThreadLocal`, so no other reference to it can exist here.
        unsafe { &mut *self.pools.get_or(Default::default).get() }.remove(layout.id())
    }
    /// Removes all of the calling thread's entries by replacing the map
    /// wholesale. Entries of other threads are unaffected.
    #[inline]
    pub fn clear_all(&self) {
        // SAFETY: as in `clear` — only the current thread can reach its own
        // thread-local slot.
        unsafe { *self.pools.get_or(Default::default).get() = SortedMap::default() };
    }
}
unsafe impl DescriptorSetAllocator for StandardDescriptorSetAllocator {
    type Alloc = StandardDescriptorSetAlloc;
    /// Allocates a descriptor set from the calling thread's entry for
    /// `layout`, lazily creating the entry (and its first pool) on first
    /// use of that layout on this thread.
    #[inline]
    fn allocate(
        &self,
        layout: &Arc<DescriptorSetLayout>,
        variable_descriptor_count: u32,
    ) -> Result<StandardDescriptorSetAlloc, Validated<VulkanError>> {
        let max_count = layout.variable_descriptor_count();
        let pools = self.pools.get_or(Default::default);
        // SAFETY: `pools` is the current thread's slot; no other thread can
        // observe this `UnsafeCell`, so the unique borrow cannot alias.
        let entry = unsafe { &mut *pools.get() }.get_or_try_insert(layout.id(), || {
            // A layout without a variable-count binding gets a fixed entry
            // (sets can be pre-allocated); otherwise each allocation must
            // specify its own descriptor count.
            if max_count == 0 {
                FixedEntry::new(layout.clone(), &self.create_info).map(Entry::Fixed)
            } else {
                VariableEntry::new(layout.clone(), &self.create_info).map(Entry::Variable)
            }
        })?;
        match entry {
            Entry::Fixed(entry) => entry.allocate(&self.create_info),
            Entry::Variable(entry) => entry.allocate(variable_descriptor_count, &self.create_info),
        }
    }
}
// A shared allocator allocates exactly like the allocator it wraps.
unsafe impl<T: DescriptorSetAllocator> DescriptorSetAllocator for Arc<T> {
    type Alloc = T::Alloc;

    /// Delegates straight to the inner allocator.
    #[inline]
    fn allocate(
        &self,
        layout: &Arc<DescriptorSetLayout>,
        variable_descriptor_count: u32,
    ) -> Result<Self::Alloc, Validated<VulkanError>> {
        T::allocate(&**self, layout, variable_descriptor_count)
    }
}
unsafe impl DeviceOwned for StandardDescriptorSetAllocator {
    /// Returns the device this allocator was created for.
    #[inline]
    fn device(&self) -> &Arc<Device> {
        // Coerces through the debug wrapper's `Deref` to `Arc<Device>`.
        &self.device
    }
}
/// Allocation state for a layout without a variable descriptor count.
#[derive(Debug)]
struct FixedEntry {
    // The pool currently being handed out from.
    pool: Arc<FixedPool>,
    // Number of sets to pre-allocate per pool.
    set_count: usize,
    // The layout this entry serves; needed to create replacement pools.
    layout: Arc<DescriptorSetLayout>,
}
impl FixedEntry {
fn new(
layout: Arc<DescriptorSetLayout>,
create_info: &StandardDescriptorSetAllocatorCreateInfo,
) -> Result<Self, Validated<VulkanError>> {
Ok(FixedEntry {
pool: FixedPool::new(&layout, create_info)?,
set_count: create_info.set_count,
layout,
})
}
fn allocate(
&mut self,
create_info: &StandardDescriptorSetAllocatorCreateInfo,
) -> Result<StandardDescriptorSetAlloc, Validated<VulkanError>> {
let inner = if let Some(inner) = self.pool.reserve.pop() {
inner
} else {
self.set_count *= 2;
self.pool = FixedPool::new(&self.layout, create_info)?;
self.pool.reserve.pop().unwrap()
};
Ok(StandardDescriptorSetAlloc {
inner: ManuallyDrop::new(inner),
parent: AllocParent::Fixed(self.pool.clone()),
})
}
}
/// A descriptor pool whose entire capacity is allocated up front.
#[derive(Debug)]
struct FixedPool {
    // The Vulkan descriptor pool itself; outlives every allocation because
    // each allocation holds an `Arc<FixedPool>`.
    inner: DescriptorPool,
    // Pre-allocated sets waiting to be handed out. Allocations are popped
    // from here and pushed back when the allocation is dropped.
    reserve: ArrayQueue<DescriptorPoolAlloc>,
}
impl FixedPool {
    /// Creates a pool sized for exactly `create_info.set_count` sets of
    /// `layout`, allocates all of them immediately, and fills the reserve
    /// queue with the results.
    fn new(
        layout: &Arc<DescriptorSetLayout>,
        create_info: &StandardDescriptorSetAllocatorCreateInfo,
    ) -> Result<Arc<Self>, Validated<VulkanError>> {
        let inner = DescriptorPool::new(
            layout.device().clone(),
            DescriptorPoolCreateInfo {
                flags: create_info
                    .update_after_bind
                    .then_some(DescriptorPoolCreateFlags::UPDATE_AFTER_BIND)
                    .unwrap_or_default(),
                max_sets: create_info.set_count as u32,
                // Scale each descriptor type's per-set count by the number
                // of sets, so the pool can hold `set_count` complete sets.
                pool_sizes: layout
                    .descriptor_counts()
                    .iter()
                    .map(|(&ty, &count)| {
                        // Inline uniform blocks size pools in bytes, not
                        // descriptors; this allocator does not support them.
                        assert!(ty != DescriptorType::InlineUniformBlock);
                        (ty, count * create_info.set_count as u32)
                    })
                    .collect(),
                ..Default::default()
            },
        )
        .map_err(Validated::unwrap)?;
        let allocate_infos =
            (0..create_info.set_count).map(|_| DescriptorSetAllocateInfo::new(layout.clone()));
        // SAFETY: NOTE(review): `allocate_descriptor_sets` is unsafe;
        // presumably its contract is that the pool has sufficient capacity
        // and is externally synchronized — both hold for this fresh,
        // exactly-sized, thread-local pool. Confirm against its docs.
        let allocs = unsafe {
            inner
                .allocate_descriptor_sets(allocate_infos)
                .map_err(|err| match err {
                    Validated::ValidationError(_) => err,
                    Validated::Error(vk_err) => match vk_err {
                        // Genuine out-of-memory conditions are propagated.
                        VulkanError::OutOfHostMemory | VulkanError::OutOfDeviceMemory => err,
                        VulkanError::FragmentedPool => {
                            // Cannot happen: no set was ever freed from this
                            // brand-new pool, so it cannot be fragmented.
                            unreachable!();
                        }
                        VulkanError::OutOfPoolMemory => {
                            // Cannot happen: the pool was created above with
                            // capacity for exactly this many sets.
                            unreachable!();
                        }
                        _ => {
                            // No other error codes apply to descriptor set
                            // allocation.
                            unreachable!();
                        }
                    },
                })?
        };
        let reserve = ArrayQueue::new(create_info.set_count);
        for alloc in allocs {
            // The queue capacity equals the number of allocs, so every push
            // succeeds; the result is ignored deliberately.
            let _ = reserve.push(alloc);
        }
        Ok(Arc::new(FixedPool { inner, reserve }))
    }
}
/// Allocation state for a layout with a variable descriptor count.
#[derive(Debug)]
struct VariableEntry {
    // The pool currently being allocated from.
    pool: Arc<VariablePool>,
    // Reset pools awaiting reuse; filled by `VariablePool::drop`, with
    // capacity `MAX_POOLS`.
    reserve: Arc<ArrayQueue<DescriptorPool>>,
    // The layout this entry serves.
    layout: Arc<DescriptorSetLayout>,
    // Number of sets allocated from the current pool so far; compared
    // against `create_info.set_count` to decide when to rotate pools.
    allocations: usize,
}
impl VariableEntry {
    /// Creates an entry for `layout` with an empty reuse queue and a fresh
    /// first pool.
    fn new(
        layout: Arc<DescriptorSetLayout>,
        create_info: &StandardDescriptorSetAllocatorCreateInfo,
    ) -> Result<Self, Validated<VulkanError>> {
        let reserve = Arc::new(ArrayQueue::new(MAX_POOLS));
        Ok(VariableEntry {
            pool: VariablePool::new(&layout, reserve.clone(), create_info)?,
            reserve,
            layout,
            allocations: 0,
        })
    }
    /// Allocates one set with `variable_descriptor_count` descriptors in
    /// the variable-count binding, rotating to a recycled or new pool after
    /// `create_info.set_count` allocations from the current one.
    fn allocate(
        &mut self,
        variable_descriptor_count: u32,
        create_info: &StandardDescriptorSetAllocatorCreateInfo,
    ) -> Result<StandardDescriptorSetAlloc, Validated<VulkanError>> {
        if self.allocations >= create_info.set_count {
            // Prefer a previously reset pool from the reserve; fall back to
            // creating a brand-new one. The retired pool is kept alive by
            // its outstanding allocations and recycled via its `Drop`.
            self.pool = if let Some(inner) = self.reserve.pop() {
                Arc::new(VariablePool {
                    inner: ManuallyDrop::new(inner),
                    reserve: self.reserve.clone(),
                })
            } else {
                VariablePool::new(&self.layout, self.reserve.clone(), create_info)?
            };
            self.allocations = 0;
        }
        let allocate_info = DescriptorSetAllocateInfo {
            variable_descriptor_count,
            ..DescriptorSetAllocateInfo::new(self.layout.clone())
        };
        // SAFETY: NOTE(review): same contract as in `FixedPool::new` —
        // presumably requires capacity and external synchronization; the
        // pool is thread-local and rotated before it can run out of sets.
        let mut sets = unsafe {
            self.pool
                .inner
                .allocate_descriptor_sets([allocate_info])
                .map_err(|err| match err {
                    Validated::ValidationError(_) => err,
                    Validated::Error(vk_err) => match vk_err {
                        // Genuine out-of-memory conditions are propagated.
                        VulkanError::OutOfHostMemory | VulkanError::OutOfDeviceMemory => err,
                        VulkanError::FragmentedPool => {
                            // Cannot happen: sets are never freed
                            // individually; whole pools are reset at once.
                            unreachable!();
                        }
                        VulkanError::OutOfPoolMemory => {
                            // NOTE(review): this assumes a pool sized for
                            // `set_count` sets at maximum descriptor counts
                            // can never run out within `set_count`
                            // allocations — verify the pool sizing math.
                            unreachable!();
                        }
                        _ => {
                            // No other error codes apply to descriptor set
                            // allocation.
                            unreachable!();
                        }
                    },
                })?
        };
        self.allocations += 1;
        Ok(StandardDescriptorSetAlloc {
            // Exactly one allocate-info was passed, so one set comes back.
            inner: ManuallyDrop::new(sets.next().unwrap()),
            parent: AllocParent::Variable(self.pool.clone()),
        })
    }
}
/// A descriptor pool for variable-count allocations; recycled wholesale
/// (reset and queued) when the last reference to it is dropped.
#[derive(Debug)]
struct VariablePool {
    // `ManuallyDrop` so `Drop` can take the pool out to reset and recycle
    // it instead of destroying it.
    inner: ManuallyDrop<DescriptorPool>,
    // The owning entry's reuse queue, shared so `Drop` can push into it.
    reserve: Arc<ArrayQueue<DescriptorPool>>,
}
impl VariablePool {
    /// Creates a pool sized for `create_info.set_count` sets of `layout`,
    /// wired to push itself into `reserve` when it is dropped.
    fn new(
        layout: &Arc<DescriptorSetLayout>,
        reserve: Arc<ArrayQueue<DescriptorPool>>,
        create_info: &StandardDescriptorSetAllocatorCreateInfo,
    ) -> Result<Arc<Self>, VulkanError> {
        DescriptorPool::new(
            layout.device().clone(),
            DescriptorPoolCreateInfo {
                flags: create_info
                    .update_after_bind
                    .then_some(DescriptorPoolCreateFlags::UPDATE_AFTER_BIND)
                    .unwrap_or_default(),
                max_sets: create_info.set_count as u32,
                // Sized as if every set used the layout's full descriptor
                // counts, so any variable count up to the maximum fits.
                pool_sizes: layout
                    .descriptor_counts()
                    .iter()
                    .map(|(&ty, &count)| {
                        // Inline uniform blocks are not supported by this
                        // allocator (they size pools in bytes).
                        assert!(ty != DescriptorType::InlineUniformBlock);
                        (ty, count * create_info.set_count as u32)
                    })
                    .collect(),
                ..Default::default()
            },
        )
        .map(|inner| {
            Arc::new(Self {
                inner: ManuallyDrop::new(inner),
                reserve,
            })
        })
        .map_err(Validated::unwrap)
    }
}
impl Drop for VariablePool {
    fn drop(&mut self) {
        // SAFETY: `self` is being dropped and `inner` is taken nowhere
        // else, so the `ManuallyDrop` still contains its value; it is not
        // used again after this take.
        let inner = unsafe { ManuallyDrop::take(&mut self.inner) };
        // While panicking, skip the reset/unwrap below to avoid a double
        // panic; `inner` falls out of scope here and the pool is destroyed.
        if thread::panicking() {
            return;
        }
        // SAFETY: NOTE(review): resetting presumably requires that no set
        // from this pool is still in use — this `Drop` only runs once the
        // last `Arc<VariablePool>` (one per live allocation) is gone.
        unsafe { inner.reset() }.unwrap();
        // Queue the now-empty pool for reuse; if the reserve is already at
        // capacity (`MAX_POOLS`), the pool is dropped and destroyed.
        let _ = self.reserve.push(inner);
    }
}
/// Parameters to create a new [`StandardDescriptorSetAllocator`].
#[derive(Clone, Debug)]
pub struct StandardDescriptorSetAllocatorCreateInfo {
    /// How many descriptor sets (and, scaled by it, descriptors) each new
    /// pool is created with.
    pub set_count: usize,
    /// Whether pools are created with the `UPDATE_AFTER_BIND` flag set.
    pub update_after_bind: bool,
    pub _ne: crate::NonExhaustive,
}
impl Default for StandardDescriptorSetAllocatorCreateInfo {
    /// Defaults: 32 sets per pool, no update-after-bind.
    #[inline]
    fn default() -> Self {
        Self {
            set_count: 32,
            update_after_bind: false,
            _ne: crate::NonExhaustive(()),
        }
    }
}
/// A descriptor set allocated from a [`StandardDescriptorSetAllocator`].
#[derive(Debug)]
pub struct StandardDescriptorSetAlloc {
    // `ManuallyDrop` so `Drop` can move the allocation back into a fixed
    // pool's reserve instead of destroying it.
    inner: ManuallyDrop<DescriptorPoolAlloc>,
    // Keeps the originating pool alive for the allocation's lifetime.
    parent: AllocParent,
}
/// Which kind of pool an allocation came from; holds an `Arc` so the pool
/// outlives the allocation.
#[derive(Debug)]
enum AllocParent {
    Fixed(Arc<FixedPool>),
    Variable(Arc<VariablePool>),
}
impl AllocParent {
    /// Returns the underlying Vulkan descriptor pool, whichever variant
    /// this is.
    #[inline]
    fn pool(&self) -> &DescriptorPool {
        match self {
            Self::Fixed(pool) => &pool.inner,
            Self::Variable(pool) => &pool.inner,
        }
    }
}
// SAFETY: NOTE(review): the allocation's shared state is reached only via
// `Arc`-held pools whose queues (`ArrayQueue`) are concurrent; confirm that
// `DescriptorPoolAlloc`/`DescriptorPool` are themselves sound to move and
// share across threads, which these impls assert.
unsafe impl Send for StandardDescriptorSetAlloc {}
unsafe impl Sync for StandardDescriptorSetAlloc {}
impl DescriptorSetAlloc for StandardDescriptorSetAlloc {
    /// Returns the underlying pool allocation.
    #[inline]
    fn inner(&self) -> &DescriptorPoolAlloc {
        &self.inner
    }
    /// Returns the pool this set was allocated from.
    #[inline]
    fn pool(&self) -> &DescriptorPool {
        self.parent.pool()
    }
}
impl Drop for StandardDescriptorSetAlloc {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: `inner` is only ever taken here, during drop, and is not
        // used again afterwards.
        let inner = unsafe { ManuallyDrop::take(&mut self.inner) };
        match &self.parent {
            // Fixed pools recycle the set into the reserve for reuse; if
            // the queue is somehow full, the returned value is dropped.
            AllocParent::Fixed(pool) => {
                let _ = pool.reserve.push(inner);
            }
            // Variable pools don't reuse individual sets; dropping the
            // `Arc<VariablePool>` in `parent` may trigger the pool-wide
            // reset in `VariablePool::drop` once it's the last reference.
            AllocParent::Variable(_) => {}
        }
    }
}
mod sorted_map {
    use smallvec::SmallVec;

    /// A minimal sorted association list keyed by `K`, kept inline for up
    /// to 8 entries before spilling to the heap.
    #[derive(Debug)]
    pub(super) struct SortedMap<K, V> {
        inner: SmallVec<[(K, V); 8]>,
    }

    impl<K, V> Default for SortedMap<K, V> {
        fn default() -> Self {
            SortedMap {
                inner: SmallVec::default(),
            }
        }
    }

    impl<K: Ord + Copy, V> SortedMap<K, V> {
        /// Returns a mutable reference to the value for `key`, inserting
        /// `f()?` at the correct sorted position when the key is absent.
        /// An error from `f` leaves the map unchanged.
        pub fn get_or_try_insert<E>(
            &mut self,
            key: K,
            f: impl FnOnce() -> Result<V, E>,
        ) -> Result<&mut V, E> {
            let index = match self.inner.binary_search_by_key(&key, |&(k, _)| k) {
                Ok(found) => found,
                Err(insert_at) => {
                    let value = f()?;
                    self.inner.insert(insert_at, (key, value));
                    insert_at
                }
            };
            Ok(&mut self.inner[index].1)
        }

        /// Removes `key`'s entry if present; does nothing otherwise.
        pub fn remove(&mut self, key: K) {
            if let Ok(found) = self.inner.binary_search_by_key(&key, |&(k, _)| k) {
                self.inner.remove(found);
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{
        descriptor_set::layout::{
            DescriptorSetLayoutBinding, DescriptorSetLayoutCreateInfo, DescriptorType,
        },
        shader::ShaderStages,
        VulkanObject,
    };
    use std::thread;
    // Verifies the thread-local design: two threads allocating from the
    // same allocator and layout must end up with distinct Vulkan pools.
    #[test]
    fn threads_use_different_pools() {
        let (device, _) = gfx_dev_and_queue!();
        // A minimal fixed-size layout: one uniform buffer at binding 0.
        let layout = DescriptorSetLayout::new(
            device.clone(),
            DescriptorSetLayoutCreateInfo {
                bindings: [(
                    0,
                    DescriptorSetLayoutBinding {
                        stages: ShaderStages::all_graphics(),
                        ..DescriptorSetLayoutBinding::descriptor_type(DescriptorType::UniformBuffer)
                    },
                )]
                .into(),
                ..Default::default()
            },
        )
        .unwrap();
        let allocator = StandardDescriptorSetAllocator::new(device, Default::default());
        // Allocate on the main thread and record its pool's raw handle.
        let pool1 =
            if let AllocParent::Fixed(pool) = &allocator.allocate(&layout, 0).unwrap().parent {
                pool.inner.handle()
            } else {
                unreachable!()
            };
        // Allocate on a second thread; a fresh thread-local slot must have
        // produced a different pool.
        thread::spawn(move || {
            let pool2 =
                if let AllocParent::Fixed(pool) = &allocator.allocate(&layout, 0).unwrap().parent {
                    pool.inner.handle()
                } else {
                    unreachable!()
                };
            assert_ne!(pool1, pool2);
        })
        .join()
        .unwrap();
    }
}