use crate::buffer::BufferAccess;
use crate::command_buffer::pool::UnsafeCommandPoolAlloc;
use crate::command_buffer::sys::UnsafeCommandBuffer;
use crate::command_buffer::sys::UnsafeCommandBufferBuilder;
use crate::command_buffer::sys::UnsafeCommandBufferBuilderPipelineBarrier;
use crate::command_buffer::CommandBufferExecError;
use crate::command_buffer::CommandBufferLevel;
use crate::command_buffer::CommandBufferUsage;
use crate::device::Device;
use crate::device::DeviceOwned;
use crate::device::Queue;
use crate::image::ImageAccess;
use crate::image::ImageLayout;
use crate::render_pass::FramebufferAbstract;
use crate::sync::AccessCheckError;
use crate::sync::AccessError;
use crate::sync::AccessFlags;
use crate::sync::GpuFuture;
use crate::sync::PipelineMemoryAccess;
use crate::sync::PipelineStages;
use crate::OomError;
use fnv::FnvHashMap;
use std::borrow::Cow;
use std::cell::RefCell;
use std::collections::hash_map::Entry;
use std::error;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::sync::Arc;
use std::sync::Mutex;
/// Wrapper around an `UnsafeCommandBufferBuilder` that tracks which resources
/// every recorded command uses and lazily inserts pipeline barriers between
/// conflicting accesses before flushing commands to the raw builder.
pub struct SyncCommandBufferBuilder {
    // The raw builder; recorded commands are sent into it lazily, in batches
    // separated by pipeline barriers.
    inner: UnsafeCommandBufferBuilder,
    // Every buffer use recorded so far: where it was used and with what access.
    buffers: Vec<(ResourceLocation, PipelineMemoryAccess)>,
    // Every image use recorded so far: location, access, start layout, end layout.
    images: Vec<(
        ResourceLocation,
        PipelineMemoryAccess,
        ImageLayout,
        ImageLayout,
    )>,
    // Current synchronization state of each distinct resource, keyed by
    // conflict-based identity (see `BuilderKey`'s `PartialEq`/`Hash`).
    resources: FnvHashMap<BuilderKey, ResourceState>,
    // Barrier accumulated since the last flush; applied to `inner` right
    // before the next batch of commands is sent.
    pending_barrier: UnsafeCommandBufferBuilderPipelineBarrier,
    // Commands recorded so far (possibly not yet flushed). Shared with the
    // `BuilderKey`s stored in `resources`, hence the `Arc<Mutex<_>>`.
    commands: Arc<Mutex<Commands>>,
    // For each flush, the index of the first command of that batch; i.e. the
    // positions at which a pipeline barrier was emitted.
    barriers: Vec<usize>,
    // Set when an error left internal state inconsistent; every public
    // operation asserts this is false.
    is_poisoned: bool,
    // Whether this builds a secondary command buffer (skips the automatic
    // initial image layout transitions, see `append_command`).
    is_secondary: bool,
}
impl fmt::Debug for SyncCommandBufferBuilder {
    /// Debug output simply delegates to the wrapped raw builder.
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self.inner)
    }
}
/// Error that can happen while recording into a `SyncCommandBufferBuilder`.
#[derive(Debug, Clone)]
pub enum SyncCommandBufferBuilderError {
    /// Two commands access the same resource in a way that cannot be
    /// resolved by inserting a pipeline barrier at this point.
    Conflict {
        // Name (Vulkan entry point) of the earlier conflicting command.
        command1_name: &'static str,
        // Human-readable name of the resource as used by the first command.
        command1_param: Cow<'static, str>,
        // Index of the first command in the command list.
        command1_offset: usize,
        // Name of the later command that triggered the conflict.
        command2_name: &'static str,
        // Resource name as used by the second command.
        command2_param: Cow<'static, str>,
        // Index of the second command in the command list.
        command2_offset: usize,
    },
    /// Forwarded error from executing a secondary command buffer.
    ExecError(CommandBufferExecError),
}
impl error::Error for SyncCommandBufferBuilderError {}
impl fmt::Display for SyncCommandBufferBuilderError {
#[inline]
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match self {
SyncCommandBufferBuilderError::Conflict { .. } => write!(fmt, "unsolvable conflict"),
SyncCommandBufferBuilderError::ExecError(err) => err.fmt(fmt),
}
}
}
impl From<CommandBufferExecError> for SyncCommandBufferBuilderError {
#[inline]
fn from(val: CommandBufferExecError) -> Self {
SyncCommandBufferBuilderError::ExecError(val)
}
}
// List of commands recorded so far, shared between the builder and the
// `BuilderKey`s stored in its resource map.
struct Commands {
    // Index of the first command that has not yet been sent to the
    // underlying `UnsafeCommandBufferBuilder`.
    first_unflushed: usize,
    // Index of the latest "enter render pass" command, if we are currently
    // inside one. While set, flushes stop at this index — presumably
    // because pipeline barriers cannot be inserted inside a render pass
    // (TODO confirm against the Vulkan spec).
    latest_render_pass_enter: Option<usize>,
    // The recorded commands, in order.
    commands: Vec<Box<dyn Command + Send + Sync>>,
}
/// A single recorded command in its builder-side (mutable) form.
pub trait Command {
    /// Name of the Vulkan entry point this command corresponds to.
    fn name(&self) -> &'static str;
    /// Writes the command into the raw builder.
    ///
    /// # Safety
    ///
    /// The caller must uphold the invariants of the underlying Vulkan call;
    /// `out` must be the builder this command was recorded for.
    unsafe fn send(&mut self, out: &mut UnsafeCommandBufferBuilder);
    /// Consumes the command and returns its immutable, finalized form kept
    /// alive by the finished `SyncCommandBuffer`.
    fn into_final_command(self: Box<Self>) -> Box<dyn FinalCommand + Send + Sync>;
    /// Returns the `_num`-th buffer used by this command.
    /// Default panics: commands that use buffers must override this.
    fn buffer(&self, _num: usize) -> &dyn BufferAccess {
        panic!()
    }
    /// Returns the `_num`-th image used by this command.
    /// Default panics: commands that use images must override this.
    fn image(&self, _num: usize) -> &dyn ImageAccess {
        panic!()
    }
    /// Human-readable name of the `_num`-th buffer, for error reporting.
    fn buffer_name(&self, _num: usize) -> Cow<'static, str> {
        panic!()
    }
    /// Human-readable name of the `_num`-th image, for error reporting.
    fn image_name(&self, _num: usize) -> Cow<'static, str> {
        panic!()
    }
}
/// Marker command representing a pipeline barrier in the command stream.
/// `send` is a no-op — from what is visible here the barrier itself is
/// recorded directly on the raw builder (see `pipeline_barrier` calls in
/// `append_command`/`build`), so this entry only occupies a slot in the list.
struct CmdPipelineBarrier;
impl Command for CmdPipelineBarrier {
    fn name(&self) -> &'static str {
        "vkCmdPipelineBarrier"
    }
    // Intentionally empty; `_out` is underscored to silence the
    // `unused_variables` warning the original parameter name triggered.
    unsafe fn send(&mut self, _out: &mut UnsafeCommandBufferBuilder) {}
    fn into_final_command(self: Box<Self>) -> Box<dyn FinalCommand + Send + Sync> {
        // The finalized form only needs to report its name.
        struct Fin;
        impl FinalCommand for Fin {
            fn name(&self) -> &'static str {
                "vkCmdPipelineBarrier"
            }
        }
        Box::new(Fin)
    }
}
/// Discriminates the two kinds of trackable resources.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum KeyTy {
    /// The resource is a buffer.
    Buffer,
    /// The resource is an image.
    Image,
}
// Identifies one resource use: the `resource_index`-th buffer (or image)
// of the command at index `command_id` in the command list.
#[derive(Clone, Copy, Debug)]
struct ResourceLocation {
    // Index of the command within `Commands::commands`.
    command_id: usize,
    // Index of the buffer/image within that command (per-kind numbering).
    resource_index: usize,
}
// Key of the builder's resource map. Identity is defined by *conflict*:
// two keys compare equal when their underlying resources overlap (see the
// `PartialEq`/`Hash` impls below), so all overlapping uses share one entry.
struct BuilderKey {
    // Shared command list, needed to resolve command ids into resources.
    commands: Arc<Mutex<Commands>>,
    // Ids of every command that uses this resource so far. `RefCell` allows
    // appending new ids through a shared `&BuilderKey` held by the map.
    command_ids: RefCell<Vec<usize>>,
    // Whether this key refers to a buffer or an image.
    resource_ty: KeyTy,
    // Index of the resource within each of the referenced commands.
    resource_index: usize,
}
impl BuilderKey {
    // Converts this builder-side key into the `'static` key stored in the
    // finished command buffer, rebinding it to the finalized command list
    // (no mutex needed once building is done).
    fn into_cb_key(
        self,
        final_commands: Arc<Vec<Box<dyn FinalCommand + Send + Sync>>>,
    ) -> CbKey<'static> {
        CbKey::Command {
            commands: final_commands,
            command_ids: self.command_ids.borrow().clone(),
            resource_ty: self.resource_ty,
            resource_index: self.resource_index,
        }
    }
    // True if any command referenced by this key uses a resource that
    // conflicts (overlaps in memory) with `buf`. `commands_lock` is the
    // already-locked contents of `self.commands`, passed in to avoid
    // re-locking (and deadlocking on) the mutex.
    #[inline]
    fn conflicts_buffer(&self, commands_lock: &Commands, buf: &dyn BufferAccess) -> bool {
        match self.resource_ty {
            KeyTy::Buffer => self.command_ids.borrow().iter().any(|command_id| {
                let c = &commands_lock.commands[*command_id];
                c.buffer(self.resource_index).conflicts_buffer(buf)
            }),
            KeyTy::Image => self.command_ids.borrow().iter().any(|command_id| {
                let c = &commands_lock.commands[*command_id];
                c.image(self.resource_index).conflicts_buffer(buf)
            }),
        }
    }
    // Same as `conflicts_buffer`, but tests overlap against an image.
    #[inline]
    fn conflicts_image(&self, commands_lock: &Commands, img: &dyn ImageAccess) -> bool {
        match self.resource_ty {
            KeyTy::Buffer => self.command_ids.borrow().iter().any(|command_id| {
                let c = &commands_lock.commands[*command_id];
                c.buffer(self.resource_index).conflicts_image(img)
            }),
            KeyTy::Image => self.command_ids.borrow().iter().any(|command_id| {
                let c = &commands_lock.commands[*command_id];
                c.image(self.resource_index).conflicts_image(img)
            }),
        }
    }
}
// Equality is deliberately non-structural: two keys are "equal" when their
// resources conflict (overlap). This collapses all overlapping uses into a
// single map entry.
// NOTE(review): conflict-based equality is not obviously transitive, which
// the `Eq`/`Hash` contract of `HashMap` formally requires — this appears to
// rely on `conflict_key()` grouping all mutually-conflicting resources
// under one hash (see the `Hash` impl); confirm before changing either.
impl PartialEq for BuilderKey {
    #[inline]
    fn eq(&self, other: &BuilderKey) -> bool {
        // Both keys must belong to the same builder, otherwise locking
        // `self.commands` would not cover `other`'s commands.
        debug_assert!(Arc::ptr_eq(&self.commands, &other.commands));
        let commands_lock = self.commands.lock().unwrap();
        match other.resource_ty {
            KeyTy::Buffer => other.command_ids.borrow().iter().any(|command_id| {
                let c = &commands_lock.commands[*command_id];
                self.conflicts_buffer(&commands_lock, c.buffer(other.resource_index))
            }),
            KeyTy::Image => other.command_ids.borrow().iter().any(|command_id| {
                let c = &commands_lock.commands[*command_id];
                self.conflicts_image(&commands_lock, c.image(other.resource_index))
            }),
        }
    }
}
impl Eq for BuilderKey {}
// Hashes only the *first* recorded use of the resource. For this to be
// consistent with the conflict-based `PartialEq` above, any two conflicting
// resources must produce the same `conflict_key()` — presumably guaranteed
// by the `BufferAccess`/`ImageAccess` implementations (TODO confirm).
impl Hash for BuilderKey {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        let commands_lock = self.commands.lock().unwrap();
        match self.resource_ty {
            KeyTy::Buffer => {
                let c = &commands_lock.commands[self.command_ids.borrow()[0]];
                c.buffer(self.resource_index).conflict_key().hash(state)
            }
            KeyTy::Image => {
                // Images additionally hash the accessed mip/layer ranges.
                let c = &commands_lock.commands[self.command_ids.borrow()[0]];
                c.image(self.resource_index).conflict_key().hash(state);
                c.image(self.resource_index)
                    .current_miplevels_access()
                    .hash(state);
                c.image(self.resource_index)
                    .current_layer_levels_access()
                    .hash(state);
            }
        }
    }
}
// Mutable synchronization state of one resource while building.
#[derive(Debug, Clone)]
struct ResourceState {
    // Stages/access of the most recent (or merged read-only) uses.
    memory: PipelineMemoryAccess,
    // True if the resource was ever used exclusively (written) or had its
    // layout transitioned; determines locking behavior after finalization.
    exclusive_any: bool,
    // Layout the resource must be in when the command buffer starts.
    initial_layout: ImageLayout,
    // Layout the resource is in after the commands recorded so far.
    current_layout: ImageLayout,
}
impl ResourceState {
#[inline]
fn finalize(self) -> ResourceFinalState {
ResourceFinalState {
final_stages: self.memory.stages,
final_access: self.memory.access,
exclusive: self.exclusive_any,
initial_layout: self.initial_layout,
final_layout: self.current_layout,
}
}
}
impl SyncCommandBufferBuilder {
    /// Builds a new `SyncCommandBufferBuilder` on top of a fresh raw builder.
    ///
    /// # Safety
    ///
    /// Same contract as `UnsafeCommandBufferBuilder::new` for `pool_alloc`,
    /// `level` and `usage`.
    pub unsafe fn new<F>(
        pool_alloc: &UnsafeCommandPoolAlloc,
        level: CommandBufferLevel<F>,
        usage: CommandBufferUsage,
    ) -> Result<SyncCommandBufferBuilder, OomError>
    where
        F: FramebufferAbstract,
    {
        // A secondary command buffer may inherit a render pass, in which
        // case recording starts "inside" that render pass.
        let (is_secondary, inside_render_pass) = match level {
            CommandBufferLevel::Primary => (false, false),
            CommandBufferLevel::Secondary(ref inheritance) => {
                (true, inheritance.render_pass.is_some())
            }
        };
        let cmd = UnsafeCommandBufferBuilder::new(pool_alloc, level, usage)?;
        Ok(SyncCommandBufferBuilder::from_unsafe_cmd(
            cmd,
            is_secondary,
            inside_render_pass,
        ))
    }

    /// Wraps an existing raw builder in a fresh synchronization layer.
    ///
    /// # Safety
    ///
    /// `is_secondary` and `inside_render_pass` must accurately describe
    /// `cmd`; no commands should have been recorded into `cmd` yet.
    #[inline]
    pub unsafe fn from_unsafe_cmd(
        cmd: UnsafeCommandBufferBuilder,
        is_secondary: bool,
        inside_render_pass: bool,
    ) -> SyncCommandBufferBuilder {
        // Starting inside a render pass pins the flush boundary at index 0
        // until the render pass is left.
        let latest_render_pass_enter = if inside_render_pass { Some(0) } else { None };
        SyncCommandBufferBuilder {
            inner: cmd,
            buffers: Vec::new(),
            images: Vec::new(),
            resources: FnvHashMap::default(),
            pending_barrier: UnsafeCommandBufferBuilderPipelineBarrier::new(),
            commands: Arc::new(Mutex::new(Commands {
                first_unflushed: 0,
                latest_render_pass_enter,
                commands: Vec::new(),
            })),
            barriers: Vec::new(),
            is_poisoned: false,
            is_secondary,
        }
    }

    /// Records `command` together with the list of resources it uses.
    ///
    /// Each entry of `resources` is `(kind, Some((access, start_layout,
    /// end_layout)))` for a tracked use, or `(kind, None)` for a use that
    /// needs no synchronization (it still consumes a resource index).
    /// Buffer entries must use `ImageLayout::Undefined` for both layouts.
    ///
    /// Returns `Err(Conflict { .. })` — and poisons the builder — if the
    /// command conflicts with an earlier one and no barrier can be inserted
    /// between them (e.g. both are inside the current render pass).
    #[inline]
    pub(super) fn append_command<C>(
        &mut self,
        command: C,
        resources: &[(
            KeyTy,
            Option<(PipelineMemoryAccess, ImageLayout, ImageLayout)>,
        )],
    ) -> Result<(), SyncCommandBufferBuilderError>
    where
        C: Command + Send + Sync + 'static,
    {
        assert!(
            !self.is_poisoned,
            "The builder has been put in an inconsistent state by a previous error"
        );
        // Push the command and compute `end`: the first index that must NOT
        // be flushed yet (the render pass entry if we are inside one,
        // otherwise this very command).
        let (latest_command_id, end) = {
            let mut commands_lock = self.commands.lock().unwrap();
            commands_lock.commands.push(Box::new(command));
            let latest_command_id = commands_lock.commands.len() - 1;
            let end = commands_lock
                .latest_render_pass_enter
                .unwrap_or(latest_command_id);
            (latest_command_id, end)
        };
        // Per-kind counters giving each resource its index within the command.
        let mut last_cmd_buffer = 0;
        let mut last_cmd_image = 0;
        for &(resource_ty, resource) in resources {
            if let Some((memory, start_layout, end_layout)) = resource {
                // Sanity-check the declared access: layout transitions require
                // exclusivity, and buffers never carry image layouts.
                debug_assert!(memory.exclusive || start_layout == end_layout);
                debug_assert!(memory.access.is_compatible_with(&memory.stages));
                debug_assert!(resource_ty != KeyTy::Image || end_layout != ImageLayout::Undefined);
                debug_assert!(
                    resource_ty != KeyTy::Buffer || start_layout == ImageLayout::Undefined
                );
                debug_assert!(resource_ty != KeyTy::Buffer || end_layout == ImageLayout::Undefined);
                debug_assert_ne!(end_layout, ImageLayout::Preinitialized);
                let resource_index = match resource_ty {
                    KeyTy::Buffer => last_cmd_buffer,
                    KeyTy::Image => last_cmd_image,
                };
                // Conflict-based key: looks up any previous overlapping use.
                let key = BuilderKey {
                    commands: self.commands.clone(),
                    command_ids: RefCell::new(vec![latest_command_id]),
                    resource_ty,
                    resource_index,
                };
                match self.resources.entry(key) {
                    // The resource (or something overlapping it) was already used.
                    Entry::Occupied(entry) => {
                        let collision_cmd_ids = entry.key().command_ids.borrow().clone();
                        debug_assert!(collision_cmd_ids.iter().all(|id| *id <= latest_command_id));
                        let entry_key_resource_index = entry.key().resource_index;
                        let entry_key_resource_ty = entry.key().resource_ty;
                        // A barrier is needed if either access is exclusive or
                        // the image must change layout. Otherwise (read+read)
                        // the accesses are simply merged below.
                        if memory.exclusive
                            || entry.get().memory.exclusive
                            || entry.get().current_layout != start_layout
                        {
                            let first_unflushed_cmd_id = {
                                let commands_lock = self.commands.lock().unwrap();
                                commands_lock.first_unflushed
                            };
                            // Flush only if a conflicting command is still
                            // unflushed, or a layout transition forces it.
                            if collision_cmd_ids
                                .iter()
                                .any(|command_id| *command_id >= first_unflushed_cmd_id)
                                || entry.get().current_layout != start_layout
                            {
                                unsafe {
                                    // Emit the accumulated barrier, then send
                                    // all commands up to (not including) `end`.
                                    self.inner.pipeline_barrier(&self.pending_barrier);
                                    self.pending_barrier =
                                        UnsafeCommandBufferBuilderPipelineBarrier::new();
                                    {
                                        let mut commands_lock = self.commands.lock().unwrap();
                                        let start = commands_lock.first_unflushed;
                                        self.barriers.push(start);
                                        // If a conflicting command sits at or past
                                        // `end`, no barrier can separate it from
                                        // the new command: unrecoverable conflict.
                                        if let Some(collision_cmd_id) = collision_cmd_ids
                                            .iter()
                                            .find(|command_id| **command_id >= end)
                                        {
                                            // Internal state is now inconsistent;
                                            // refuse all further operations.
                                            self.is_poisoned = true;
                                            let cmd1 = &commands_lock.commands[*collision_cmd_id];
                                            let cmd2 = &commands_lock.commands[latest_command_id];
                                            return Err(SyncCommandBufferBuilderError::Conflict {
                                                command1_name: cmd1.name(),
                                                command1_param: match entry_key_resource_ty {
                                                    KeyTy::Buffer => {
                                                        cmd1.buffer_name(entry_key_resource_index)
                                                    }
                                                    KeyTy::Image => {
                                                        cmd1.image_name(entry_key_resource_index)
                                                    }
                                                },
                                                command1_offset: *collision_cmd_id,
                                                command2_name: cmd2.name(),
                                                command2_param: match resource_ty {
                                                    KeyTy::Buffer => {
                                                        cmd2.buffer_name(resource_index)
                                                    }
                                                    KeyTy::Image => cmd2.image_name(resource_index),
                                                },
                                                command2_offset: latest_command_id,
                                            });
                                        }
                                        for command in &mut commands_lock.commands[start..end] {
                                            command.send(&mut self.inner);
                                        }
                                        commands_lock.first_unflushed = end;
                                    }
                                }
                            }
                            // Record this command as another user of the resource.
                            entry.key().command_ids.borrow_mut().push(latest_command_id);
                            let entry = entry.into_mut();
                            // Queue a barrier from the previous access (and
                            // layout) to the new one; it is applied before the
                            // next flush.
                            unsafe {
                                let commands_lock = self.commands.lock().unwrap();
                                match resource_ty {
                                    KeyTy::Buffer => {
                                        let buf = commands_lock.commands[latest_command_id]
                                            .buffer(resource_index);
                                        let b = &mut self.pending_barrier;
                                        b.add_buffer_memory_barrier(
                                            buf,
                                            entry.memory.stages,
                                            entry.memory.access,
                                            memory.stages,
                                            memory.access,
                                            true,
                                            None,
                                            0,
                                            buf.size(),
                                        );
                                    }
                                    KeyTy::Image => {
                                        let img = commands_lock.commands[latest_command_id]
                                            .image(resource_index);
                                        let b = &mut self.pending_barrier;
                                        b.add_image_memory_barrier(
                                            img,
                                            img.current_miplevels_access(),
                                            img.current_layer_levels_access(),
                                            entry.memory.stages,
                                            entry.memory.access,
                                            memory.stages,
                                            memory.access,
                                            true,
                                            None,
                                            entry.current_layout,
                                            start_layout,
                                        );
                                    }
                                };
                            }
                            entry.memory = memory;
                            entry.exclusive_any = true;
                            // `Undefined` end layout means "leave unchanged".
                            if memory.exclusive || end_layout != ImageLayout::Undefined {
                                entry.current_layout = end_layout;
                            }
                        } else {
                            // Read-after-read: no barrier, just merge the
                            // stages/access flags of both uses.
                            let entry = entry.into_mut();
                            entry.memory.stages |= memory.stages;
                            entry.memory.access |= memory.access;
                        }
                    }
                    // First use of this resource in the command buffer.
                    Entry::Vacant(entry) => {
                        let mut actually_exclusive = memory.exclusive;
                        let mut actual_start_layout = start_layout;
                        // Primary command buffers transition images into their
                        // expected starting layout automatically; secondary
                        // ones inherit whatever layout the primary set up.
                        if !self.is_secondary
                            && resource_ty == KeyTy::Image
                            && start_layout != ImageLayout::Undefined
                            && start_layout != ImageLayout::Preinitialized
                        {
                            let commands_lock = self.commands.lock().unwrap();
                            let img =
                                commands_lock.commands[latest_command_id].image(resource_index);
                            let initial_layout_requirement = img.initial_layout_requirement();
                            let is_layout_initialized = img.is_layout_initialized();
                            if initial_layout_requirement != start_layout || !is_layout_initialized
                            {
                                unsafe {
                                    // Uninitialized images transition from
                                    // Undefined/Preinitialized; initialized
                                    // ones from their required initial layout
                                    // (which makes this use exclusive).
                                    let from_layout = if is_layout_initialized {
                                        actually_exclusive = true;
                                        initial_layout_requirement
                                    } else {
                                        if img.preinitialized_layout() {
                                            ImageLayout::Preinitialized
                                        } else {
                                            ImageLayout::Undefined
                                        }
                                    };
                                    if initial_layout_requirement != start_layout {
                                        actual_start_layout = initial_layout_requirement;
                                    }
                                    let b = &mut self.pending_barrier;
                                    b.add_image_memory_barrier(
                                        img,
                                        img.current_miplevels_access(),
                                        img.current_layer_levels_access(),
                                        PipelineStages {
                                            bottom_of_pipe: true,
                                            ..PipelineStages::none()
                                        },
                                        AccessFlags::none(),
                                        memory.stages,
                                        memory.access,
                                        true,
                                        None,
                                        from_layout,
                                        start_layout,
                                    );
                                    img.layout_initialized();
                                }
                            }
                        }
                        entry.insert(ResourceState {
                            memory: PipelineMemoryAccess {
                                stages: memory.stages,
                                access: memory.access,
                                exclusive: actually_exclusive,
                            },
                            exclusive_any: actually_exclusive,
                            initial_layout: actual_start_layout,
                            current_layout: end_layout,
                        });
                    }
                }
                // Record the use in the flat per-kind lists exposed through
                // `SyncCommandBuffer::buffer`/`image`.
                let location = ResourceLocation {
                    command_id: latest_command_id,
                    resource_index,
                };
                match resource_ty {
                    KeyTy::Buffer => {
                        self.buffers.push((location, memory));
                        last_cmd_buffer += 1;
                    }
                    KeyTy::Image => {
                        self.images
                            .push((location, memory, start_layout, end_layout));
                        last_cmd_image += 1;
                    }
                }
            } else {
                // Untracked use: still consumes an index so later resources
                // of the same kind keep their positions.
                match resource_ty {
                    KeyTy::Buffer => {
                        last_cmd_buffer += 1;
                    }
                    KeyTy::Image => {
                        last_cmd_image += 1;
                    }
                }
            }
        }
        Ok(())
    }

    /// Notifies the sync layer that the command just recorded enters a
    /// render pass; flushes will stop before it until the pass is left.
    #[inline]
    pub(super) fn prev_cmd_entered_render_pass(&mut self) {
        assert!(
            !self.is_poisoned,
            "The builder has been put in an inconsistent state by a previous error"
        );
        let mut cmd_lock = self.commands.lock().unwrap();
        cmd_lock.latest_render_pass_enter = Some(cmd_lock.commands.len() - 1);
    }

    /// Notifies the sync layer that the command just recorded leaves the
    /// current render pass, re-enabling flushes past it.
    #[inline]
    pub(super) fn prev_cmd_left_render_pass(&mut self) {
        assert!(
            !self.is_poisoned,
            "The builder has been put in an inconsistent state by a previous error"
        );
        let mut cmd_lock = self.commands.lock().unwrap();
        debug_assert!(cmd_lock.latest_render_pass_enter.is_some());
        cmd_lock.latest_render_pass_enter = None;
    }

    /// Finishes building: flushes the remaining commands (preceded by any
    /// pending barrier), transitions images back to their required final
    /// layouts (primary buffers only), and freezes commands and resource
    /// states into a `SyncCommandBuffer`.
    #[inline]
    pub fn build(mut self) -> Result<SyncCommandBuffer, OomError> {
        assert!(
            !self.is_poisoned,
            "The builder has been put in an inconsistent state by a previous error"
        );
        let mut commands_lock = self.commands.lock().unwrap();
        debug_assert!(
            commands_lock.latest_render_pass_enter.is_none() || self.pending_barrier.is_empty()
        );
        // Flush everything still pending.
        unsafe {
            self.inner.pipeline_barrier(&self.pending_barrier);
            let start = commands_lock.first_unflushed;
            self.barriers.push(start);
            for command in &mut commands_lock.commands[start..] {
                command.send(&mut self.inner);
            }
        }
        // Primary buffers restore each image to its required final layout.
        if !self.is_secondary {
            unsafe {
                let mut barrier = UnsafeCommandBufferBuilderPipelineBarrier::new();
                for (key, state) in &mut self.resources {
                    if key.resource_ty != KeyTy::Image {
                        continue;
                    }
                    let img = commands_lock.commands[key.command_ids.borrow()[0]]
                        .image(key.resource_index);
                    let requested_layout = img.final_layout_requirement();
                    if requested_layout == state.current_layout {
                        continue;
                    }
                    barrier.add_image_memory_barrier(
                        img,
                        img.current_miplevels_access(),
                        img.current_layer_levels_access(),
                        state.memory.stages,
                        state.memory.access,
                        PipelineStages {
                            top_of_pipe: true,
                            ..PipelineStages::none()
                        },
                        AccessFlags::none(),
                        true,
                        None,
                        state.current_layout,
                        requested_layout,
                    );
                    state.exclusive_any = true;
                    state.current_layout = requested_layout;
                }
                self.inner.pipeline_barrier(&barrier);
            }
        }
        // Freeze the command list into its immutable form.
        let final_commands = {
            let mut final_commands = Vec::with_capacity(commands_lock.commands.len());
            for command in commands_lock.commands.drain(..) {
                final_commands.push(command.into_final_command());
            }
            Arc::new(final_commands)
        };
        // Rebind every resource key to the finalized command list.
        let final_resources_states: FnvHashMap<_, _> = {
            self.resources
                .into_iter()
                .map(|(resource, state)| {
                    (
                        resource.into_cb_key(final_commands.clone()),
                        state.finalize(),
                    )
                })
                .collect()
        };
        Ok(SyncCommandBuffer {
            inner: self.inner.build()?,
            buffers: self.buffers,
            images: self.images,
            resources: final_resources_states,
            commands: final_commands,
            barriers: self.barriers,
        })
    }
}
unsafe impl DeviceOwned for SyncCommandBufferBuilder {
    /// The builder lives on the same device as its raw builder.
    #[inline]
    fn device(&self) -> &Arc<Device> {
        let raw = &self.inner;
        raw.device()
    }
}
/// Finished command buffer together with the synchronization metadata needed
/// to lock its resources at submission time.
pub struct SyncCommandBuffer {
    // The built Vulkan command buffer.
    inner: UnsafeCommandBuffer,
    // Flat list of every buffer use, in recording order.
    buffers: Vec<(ResourceLocation, PipelineMemoryAccess)>,
    // Flat list of every image use: location, access, start and end layouts.
    images: Vec<(
        ResourceLocation,
        PipelineMemoryAccess,
        ImageLayout,
        ImageLayout,
    )>,
    // Final per-resource state, keyed by conflict-based identity; lookups
    // with `CbKey::BufferRef`/`ImageRef` find entries for overlapping
    // resources (see `check_buffer_access`/`check_image_access`).
    resources: FnvHashMap<CbKey<'static>, ResourceFinalState>,
    // The finalized commands, kept alive so keys can resolve resources.
    commands: Arc<Vec<Box<dyn FinalCommand + Send + Sync>>>,
    // Indices of the first command of each flushed batch (i.e. positions
    // where a pipeline barrier was emitted during building).
    barriers: Vec<usize>,
}
impl SyncCommandBuffer {
    /// Acquires the GPU locks of every resource before submission to `queue`.
    ///
    /// For each resource, first asks `future` whether the access is already
    /// granted; if not, tries to take the resource's own GPU lock. On the
    /// first failure, every lock acquired so far is rolled back and the
    /// error is returned, so this either locks everything or nothing.
    pub fn lock_submit(
        &self,
        future: &dyn GpuFuture,
        queue: &Queue,
    ) -> Result<(), CommandBufferExecError> {
        // Number of map entries fully processed; used to roll back on error.
        let mut locked_resources = 0;
        let mut ret_value = Ok(());
        for (key, entry) in self.resources.iter() {
            // Keys in this map are always `CbKey::Command` (the `Ref`
            // variants only exist for lookups).
            let (command_ids, resource_ty, resource_index) = match *key {
                CbKey::Command {
                    ref command_ids,
                    resource_ty,
                    resource_index,
                    ..
                } => (command_ids, resource_ty, resource_index),
                _ => unreachable!(),
            };
            // Resolve the resource through its first recorded use.
            let command = &self.commands[command_ids[0]];
            match resource_ty {
                KeyTy::Buffer => {
                    let buf = command.buffer(resource_index);
                    // If the future already grants access, just bump the
                    // lock count; otherwise remember the error and fall
                    // through to trying the resource's own lock.
                    let prev_err = match future.check_buffer_access(&buf, entry.exclusive, queue) {
                        Ok(_) => {
                            unsafe {
                                buf.increase_gpu_lock();
                            }
                            locked_resources += 1;
                            continue;
                        }
                        Err(err) => err,
                    };
                    // `Unknown` from the future means "not my resource":
                    // the direct lock decides. A `Denied` always wins.
                    match (buf.try_gpu_lock(entry.exclusive, queue), prev_err) {
                        (Ok(_), _) => (),
                        (Err(err), AccessCheckError::Unknown)
                        | (_, AccessCheckError::Denied(err)) => {
                            ret_value = Err(CommandBufferExecError::AccessError {
                                error: err,
                                command_name: command.name().into(),
                                command_param: command.buffer_name(resource_index),
                                command_offset: command_ids[0],
                            });
                            break;
                        }
                    };
                }
                KeyTy::Image => {
                    let img = command.image(resource_index);
                    // Same two-step scheme as buffers, with the extra
                    // requirement that the image be in its initial layout.
                    let prev_err = match future.check_image_access(
                        img,
                        entry.initial_layout,
                        entry.exclusive,
                        queue,
                    ) {
                        Ok(_) => {
                            unsafe {
                                img.increase_gpu_lock();
                            }
                            locked_resources += 1;
                            continue;
                        }
                        Err(err) => err,
                    };
                    match (
                        img.try_gpu_lock(entry.exclusive, entry.initial_layout),
                        prev_err,
                    ) {
                        (Ok(_), _) => (),
                        (Err(err), AccessCheckError::Unknown)
                        | (_, AccessCheckError::Denied(err)) => {
                            ret_value = Err(CommandBufferExecError::AccessError {
                                error: err,
                                command_name: command.name().into(),
                                command_param: command.image_name(resource_index),
                                command_offset: command_ids[0],
                            });
                            break;
                        }
                    };
                }
            }
            // Reached only when `try_gpu_lock` succeeded above.
            locked_resources += 1;
        }
        // Roll back: unlock exactly the resources locked before the failure.
        if let Err(_) = ret_value {
            for (key, val) in self.resources.iter().take(locked_resources) {
                let (command_ids, resource_ty, resource_index) = match *key {
                    CbKey::Command {
                        ref command_ids,
                        resource_ty,
                        resource_index,
                        ..
                    } => (command_ids, resource_ty, resource_index),
                    _ => unreachable!(),
                };
                let command = &self.commands[command_ids[0]];
                match resource_ty {
                    KeyTy::Buffer => {
                        let buf = command.buffer(resource_index);
                        unsafe {
                            buf.unlock();
                        }
                    }
                    KeyTy::Image => {
                        let command = &self.commands[command_ids[0]];
                        let img = command.image(resource_index);
                        // Report the layout transition the command buffer
                        // performs, if any, so the image's tracked layout
                        // stays correct.
                        let trans = if val.final_layout != val.initial_layout {
                            Some(val.final_layout)
                        } else {
                            None
                        };
                        unsafe {
                            img.unlock(trans);
                        }
                    }
                }
            }
        }
        ret_value
    }

    /// Releases the GPU locks taken by `lock_submit`.
    ///
    /// # Safety
    ///
    /// Must only be called after a successful `lock_submit`, once per lock —
    /// presumably after the GPU has finished executing the command buffer
    /// (TODO confirm against callers).
    pub unsafe fn unlock(&self) {
        for (key, val) in self.resources.iter() {
            let (command_ids, resource_ty, resource_index) = match *key {
                CbKey::Command {
                    ref command_ids,
                    resource_ty,
                    resource_index,
                    ..
                } => (command_ids, resource_ty, resource_index),
                _ => unreachable!(),
            };
            let command = &self.commands[command_ids[0]];
            match resource_ty {
                KeyTy::Buffer => {
                    let buf = command.buffer(resource_index);
                    buf.unlock();
                }
                KeyTy::Image => {
                    let img = command.image(resource_index);
                    // Tell the image about the layout it was left in.
                    let trans = if val.final_layout != val.initial_layout {
                        Some(val.final_layout)
                    } else {
                        None
                    };
                    img.unlock(trans);
                }
            }
        }
    }

    /// Checks whether `buffer` can be accessed after this command buffer
    /// runs. Returns the final stages/access on success, `Unknown` if the
    /// buffer is not used here or the requested exclusivity exceeds ours.
    // NOTE(review): `queue` is currently unused here — confirm intentional.
    #[inline]
    pub fn check_buffer_access(
        &self,
        buffer: &dyn BufferAccess,
        exclusive: bool,
        queue: &Queue,
    ) -> Result<Option<(PipelineStages, AccessFlags)>, AccessCheckError> {
        // The `BufferRef` key matches any stored entry whose resource
        // conflicts with `buffer` (conflict-based map equality).
        if let Some(value) = self.resources.get(&CbKey::BufferRef(buffer)) {
            if !value.exclusive && exclusive {
                return Err(AccessCheckError::Unknown);
            }
            return Ok(Some((value.final_stages, value.final_access)));
        }
        Err(AccessCheckError::Unknown)
    }

    /// Image counterpart of `check_buffer_access`; additionally verifies
    /// that the requested `layout` matches the image's final layout.
    // NOTE(review): `queue` is currently unused here — confirm intentional.
    #[inline]
    pub fn check_image_access(
        &self,
        image: &dyn ImageAccess,
        layout: ImageLayout,
        exclusive: bool,
        queue: &Queue,
    ) -> Result<Option<(PipelineStages, AccessFlags)>, AccessCheckError> {
        if let Some(value) = self.resources.get(&CbKey::ImageRef(image)) {
            // `Undefined` means the caller does not care about the layout.
            if layout != ImageLayout::Undefined && value.final_layout != layout {
                return Err(AccessCheckError::Denied(
                    AccessError::UnexpectedImageLayout {
                        allowed: value.final_layout,
                        requested: layout,
                    },
                ));
            }
            if !value.exclusive && exclusive {
                return Err(AccessCheckError::Unknown);
            }
            return Ok(Some((value.final_stages, value.final_access)));
        }
        Err(AccessCheckError::Unknown)
    }

    /// Number of buffer uses recorded in this command buffer.
    #[inline]
    pub fn num_buffers(&self) -> usize {
        self.buffers.len()
    }

    /// Returns the `index`-th buffer use and its access, in recording order.
    #[inline]
    pub fn buffer(&self, index: usize) -> Option<(&dyn BufferAccess, PipelineMemoryAccess)> {
        self.buffers.get(index).map(|(location, memory)| {
            let cmd = &self.commands[location.command_id];
            (cmd.buffer(location.resource_index), *memory)
        })
    }

    /// Number of image uses recorded in this command buffer.
    #[inline]
    pub fn num_images(&self) -> usize {
        self.images.len()
    }

    /// Returns the `index`-th image use: the image, its access, and its
    /// start/end layouts, in recording order.
    #[inline]
    pub fn image(
        &self,
        index: usize,
    ) -> Option<(
        &dyn ImageAccess,
        PipelineMemoryAccess,
        ImageLayout,
        ImageLayout,
    )> {
        self.images
            .get(index)
            .map(|(location, memory, start_layout, end_layout)| {
                let cmd = &self.commands[location.command_id];
                (
                    cmd.image(location.resource_index),
                    *memory,
                    *start_layout,
                    *end_layout,
                )
            })
    }
}
impl AsRef<UnsafeCommandBuffer> for SyncCommandBuffer {
#[inline]
fn as_ref(&self) -> &UnsafeCommandBuffer {
&self.inner
}
}
unsafe impl DeviceOwned for SyncCommandBuffer {
    /// The command buffer lives on the same device as its raw counterpart.
    #[inline]
    fn device(&self) -> &Arc<Device> {
        let raw = &self.inner;
        raw.device()
    }
}
// Frozen synchronization state of one resource in a built command buffer.
#[derive(Debug, Clone)]
struct ResourceFinalState {
    // Pipeline stages of the last use within the command buffer.
    final_stages: PipelineStages,
    // Access flags of the last use within the command buffer.
    final_access: AccessFlags,
    // True if the command buffer ever accessed the resource exclusively
    // (write or layout transition).
    exclusive: bool,
    // Layout the resource must be in when execution starts.
    initial_layout: ImageLayout,
    // Layout the resource is left in when execution ends.
    final_layout: ImageLayout,
}
/// Immutable, finalized form of a recorded command; only used to resolve
/// resources and produce names for error messages after building.
pub trait FinalCommand {
    /// Name of the Vulkan entry point this command corresponds to.
    fn name(&self) -> &'static str;
    /// Returns the `_num`-th buffer used by the command.
    /// Default panics: commands that use buffers must override this.
    fn buffer(&self, _num: usize) -> &dyn BufferAccess {
        panic!()
    }
    /// Returns the `_num`-th image used by the command.
    /// Default panics: commands that use images must override this.
    fn image(&self, _num: usize) -> &dyn ImageAccess {
        panic!()
    }
    /// Human-readable name of the `_num`-th buffer, for error reporting.
    fn buffer_name(&self, _num: usize) -> Cow<'static, str> {
        panic!()
    }
    /// Human-readable name of the `_num`-th image, for error reporting.
    fn image_name(&self, _num: usize) -> Cow<'static, str> {
        panic!()
    }
}
// Convenience impl: a plain string can serve as a finalized command that
// uses no resources (only its name is ever queried).
impl FinalCommand for &'static str {
    fn name(&self) -> &'static str {
        *self
    }
}
// Key of a finished command buffer's resource map. `Command` is what is
// actually stored; `BufferRef`/`ImageRef` are lookup-only probes that match
// (via the conflict-based `PartialEq` below) any stored entry whose
// resource overlaps the referenced one.
enum CbKey<'a> {
    Command {
        // The finalized command list, used to resolve `command_ids`.
        commands: Arc<Vec<Box<dyn FinalCommand + Send + Sync>>>,
        // Every command that used this resource.
        command_ids: Vec<usize>,
        // Buffer or image.
        resource_ty: KeyTy,
        // Index of the resource within each referenced command.
        resource_index: usize,
    },
    // Lookup probe for a buffer (never stored in the map).
    BufferRef(&'a dyn BufferAccess),
    // Lookup probe for an image (never stored in the map).
    ImageRef(&'a dyn ImageAccess),
}
// SAFETY(review): these assert that the borrowed `&dyn BufferAccess` /
// `&dyn ImageAccess` in the `Ref` variants are safe to share across
// threads — presumably those traits require `Send + Sync` implementors;
// confirm against their definitions before relying on this.
unsafe impl<'a> Send for CbKey<'a> {}
unsafe impl<'a> Sync for CbKey<'a> {}
impl<'a> CbKey<'a> {
    // True if this key's resource conflicts (overlaps) with `buf`.
    // `commands_external` supplies the command list when the caller is
    // already iterating it (avoids relying on `self`'s own `Arc`).
    #[inline]
    fn conflicts_buffer(
        &self,
        commands_external: Option<&Vec<Box<dyn FinalCommand + Send + Sync>>>,
        buf: &dyn BufferAccess,
    ) -> bool {
        match *self {
            CbKey::Command {
                ref commands,
                ref command_ids,
                resource_ty,
                resource_index,
            } => {
                let commands = commands_external.unwrap_or(commands);
                // Any of the recorded uses overlapping `buf` is a conflict.
                match resource_ty {
                    KeyTy::Buffer => command_ids.iter().any(|command_id| {
                        let c = &commands[*command_id];
                        c.buffer(resource_index).conflicts_buffer(buf)
                    }),
                    KeyTy::Image => command_ids.iter().any(|command_id| {
                        let c = &commands[*command_id];
                        c.image(resource_index).conflicts_buffer(buf)
                    }),
                }
            }
            CbKey::BufferRef(b) => b.conflicts_buffer(buf),
            CbKey::ImageRef(i) => i.conflicts_buffer(buf),
        }
    }
    // Same as `conflicts_buffer`, but tests overlap against an image.
    #[inline]
    fn conflicts_image(
        &self,
        commands_external: Option<&Vec<Box<dyn FinalCommand + Send + Sync>>>,
        img: &dyn ImageAccess,
    ) -> bool {
        match *self {
            CbKey::Command {
                ref commands,
                ref command_ids,
                resource_ty,
                resource_index,
            } => {
                let commands = commands_external.unwrap_or(commands);
                match resource_ty {
                    KeyTy::Buffer => command_ids.iter().any(|command_id| {
                        let c = &commands[*command_id];
                        c.buffer(resource_index).conflicts_image(img)
                    }),
                    KeyTy::Image => command_ids.iter().any(|command_id| {
                        let c = &commands[*command_id];
                        c.image(resource_index).conflicts_image(img)
                    }),
                }
            }
            CbKey::BufferRef(b) => b.conflicts_image(img),
            CbKey::ImageRef(i) => i.conflicts_image(img),
        }
    }
}
// As with `BuilderKey`, equality means "the resources conflict". This is
// what lets `resources.get(&CbKey::BufferRef(buf))` find the entry of any
// stored resource overlapping `buf`.
// NOTE(review): like `BuilderKey`, this bends the `Eq`/`Hash` contract and
// relies on conflicting resources sharing a `conflict_key()`.
impl<'a> PartialEq for CbKey<'a> {
    #[inline]
    fn eq(&self, other: &CbKey) -> bool {
        match *self {
            CbKey::BufferRef(a) => other.conflicts_buffer(None, a),
            CbKey::ImageRef(a) => other.conflicts_image(None, a),
            CbKey::Command {
                ref commands,
                ref command_ids,
                resource_ty,
                resource_index,
            } => {
                let commands_lock = commands;
                // Check every use on `self`'s side against `other`.
                match resource_ty {
                    KeyTy::Buffer => command_ids.iter().any(|command_id| {
                        let c = &commands_lock[*command_id];
                        other.conflicts_buffer(Some(&commands_lock), c.buffer(resource_index))
                    }),
                    KeyTy::Image => command_ids.iter().any(|command_id| {
                        let c = &commands_lock[*command_id];
                        other.conflicts_image(Some(&commands_lock), c.image(resource_index))
                    }),
                }
            }
        }
    }
}
impl<'a> Eq for CbKey<'a> {}
// Hashes the resource's `conflict_key()` so that a `BufferRef`/`ImageRef`
// probe lands in the same bucket as the stored `Command` entry it
// conflicts with. Only the first recorded use is hashed — consistent with
// the conflict-based equality above.
impl<'a> Hash for CbKey<'a> {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        match *self {
            CbKey::Command {
                ref commands,
                ref command_ids,
                resource_ty,
                resource_index,
            } => match resource_ty {
                KeyTy::Buffer => {
                    let c = &commands[command_ids[0]];
                    c.buffer(resource_index).conflict_key().hash(state)
                }
                KeyTy::Image => {
                    let c = &commands[command_ids[0]];
                    c.image(resource_index).conflict_key().hash(state)
                }
            },
            CbKey::BufferRef(buf) => buf.conflict_key().hash(state),
            CbKey::ImageRef(img) => img.conflict_key().hash(state),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::SyncCommandBufferBuilder;
    use super::SyncCommandBufferBuilderError;
    use crate::buffer::BufferUsage;
    use crate::buffer::CpuAccessibleBuffer;
    use crate::buffer::ImmutableBuffer;
    use crate::command_buffer::pool::CommandPool;
    use crate::command_buffer::pool::CommandPoolBuilderAlloc;
    use crate::command_buffer::AutoCommandBufferBuilder;
    use crate::command_buffer::CommandBufferLevel;
    use crate::command_buffer::CommandBufferUsage;
    use crate::device::Device;
    use crate::sync::GpuFuture;
    use std::sync::Arc;

    // Smoke test: a fresh primary builder can be created on a real device.
    #[test]
    fn basic_creation() {
        unsafe {
            let (device, queue) = gfx_dev_and_queue!();
            let pool = Device::standard_command_pool(&device, queue.family());
            let pool_builder_alloc = pool.alloc(false, 1).unwrap().next().unwrap();
            assert!(matches!(
                SyncCommandBufferBuilder::new(
                    &pool_builder_alloc.inner(),
                    CommandBufferLevel::primary(),
                    CommandBufferUsage::MultipleSubmit,
                ),
                Ok(_)
            ));
        }
    }

    // Copying a buffer onto itself must be reported as an unresolvable
    // conflict (same resource read and written by one command).
    #[test]
    fn basic_conflict() {
        unsafe {
            let (device, queue) = gfx_dev_and_queue!();
            let pool = Device::standard_command_pool(&device, queue.family());
            let pool_builder_alloc = pool.alloc(false, 1).unwrap().next().unwrap();
            let mut sync = SyncCommandBufferBuilder::new(
                &pool_builder_alloc.inner(),
                CommandBufferLevel::primary(),
                CommandBufferUsage::MultipleSubmit,
            )
            .unwrap();
            let buf =
                CpuAccessibleBuffer::from_data(device, BufferUsage::all(), false, 0u32).unwrap();
            assert!(matches!(
                sync.copy_buffer(buf.clone(), buf.clone(), std::iter::once((0, 0, 4))),
                Err(SyncCommandBufferBuilderError::Conflict { .. })
            ));
        }
    }

    // Two secondary command buffers writing the same buffer: executing them
    // as separate commands must insert a barrier between them, while adding
    // both to a single execute_commands must fail with a conflict.
    #[test]
    fn secondary_conflicting_writes() {
        unsafe {
            let (device, queue) = gfx_dev_and_queue!();
            // A buffer that both secondary command buffers will fill.
            let (buf, future) = ImmutableBuffer::from_data(
                0u32,
                BufferUsage::transfer_destination(),
                queue.clone(),
            )
            .unwrap();
            // Wait for the initial upload so the buffer is ready for use.
            future
                .then_signal_fence_and_flush()
                .unwrap()
                .wait(None)
                .unwrap();
            let secondary = (0..2)
                .map(|_| {
                    let mut builder = AutoCommandBufferBuilder::secondary_compute(
                        device.clone(),
                        queue.family(),
                        CommandBufferUsage::SimultaneousUse,
                    )
                    .unwrap();
                    builder.fill_buffer(buf.clone(), 42u32).unwrap();
                    Arc::new(builder.build().unwrap())
                })
                .collect::<Vec<_>>();
            let pool = Device::standard_command_pool(&device, queue.family());
            let allocs = pool.alloc(false, 2).unwrap().collect::<Vec<_>>();
            // Case 1: one execute_commands per secondary — the sync layer
            // must separate the two writes with a barrier (hence the two
            // flush points recorded in `barriers`).
            {
                let mut builder = SyncCommandBufferBuilder::new(
                    allocs[0].inner(),
                    CommandBufferLevel::primary(),
                    CommandBufferUsage::SimultaneousUse,
                )
                .unwrap();
                secondary.iter().cloned().for_each(|secondary| {
                    let mut ec = builder.execute_commands();
                    ec.add(secondary);
                    ec.submit().unwrap();
                });
                let primary = builder.build().unwrap();
                let names = primary
                    .commands
                    .iter()
                    .map(|c| c.name())
                    .collect::<Vec<_>>();
                assert_eq!(&names, &["vkCmdExecuteCommands", "vkCmdExecuteCommands"]);
                assert_eq!(&primary.barriers, &[0, 1]);
            }
            // Case 2: both secondaries in a single execute_commands — no
            // barrier can go between them, so submission must fail.
            {
                let mut builder = SyncCommandBufferBuilder::new(
                    allocs[1].inner(),
                    CommandBufferLevel::primary(),
                    CommandBufferUsage::SimultaneousUse,
                )
                .unwrap();
                let mut ec = builder.execute_commands();
                secondary.into_iter().for_each(|secondary| {
                    ec.add(secondary);
                });
                assert!(matches!(
                    ec.submit(),
                    Err(SyncCommandBufferBuilderError::Conflict { .. })
                ));
            }
        }
    }
}