use crate::buffer::BufferAccess;
use crate::command_buffer::DynamicState;
use crate::descriptor::DescriptorSet;
use crate::pipeline::input_assembly::IndexType;
use crate::pipeline::ComputePipelineAbstract;
use crate::pipeline::GraphicsPipelineAbstract;
use smallvec::SmallVec;
use std::ops::Range;
use crate::vk;
use crate::VulkanObject;
/// Cache of the state most recently bound on a command buffer, used to skip
/// redundant re-bind commands. Compare incoming state with the `bind_*` /
/// `dynamic_state` methods; they report whether a real bind is needed and
/// update the cache at the same time.
pub struct StateCacher {
    // Dynamic state last written through `dynamic_state`.
    dynamic_state: DynamicState,
    // Raw handle of the last compute pipeline compared, or 0 if none yet.
    compute_pipeline: vk::Pipeline,
    // Raw handle of the last graphics pipeline compared, or 0 if none yet.
    graphics_pipeline: vk::Pipeline,
    // Cached descriptor sets (raw handle + dynamic offsets) for the compute bind point.
    compute_descriptor_sets: SmallVec<[(vk::DescriptorSet, SmallVec<[u32; 32]>); 12]>,
    // Cached descriptor sets (raw handle + dynamic offsets) for the graphics bind point.
    graphics_descriptor_sets: SmallVec<[(vk::DescriptorSet, SmallVec<[u32; 32]>); 12]>,
    // Set by `bind_descriptor_sets`, cleared by `StateCacherDescriptorSets::compare`.
    // If still set on the next `bind_descriptor_sets` call, a builder was dropped
    // mid-comparison and both descriptor-set caches are wiped.
    poisoned_descriptor_sets: bool,
    // Cached vertex buffers as (raw handle, offset) pairs, indexed by binding slot.
    vertex_buffers: SmallVec<[(vk::Buffer, vk::DeviceSize); 12]>,
    // Same poisoning scheme as `poisoned_descriptor_sets`, for `vertex_buffers`.
    poisoned_vertex_buffers: bool,
    // Last index buffer compared: (raw handle, offset, index type), or `None` if none yet.
    index_buffer: Option<(vk::Buffer, usize, IndexType)>,
}
/// Outcome of comparing an incoming binding against the cached state.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum StateCacherOutcome {
    /// The cached state differs; the caller must record the bind command.
    NeedChange,
    /// The state is already bound; the bind command can be skipped.
    AlreadyOk,
}
impl StateCacher {
    /// Builds a new `StateCacher` with an entirely empty cache.
    #[inline]
    pub fn new() -> StateCacher {
        StateCacher {
            index_buffer: None,
            vertex_buffers: SmallVec::new(),
            poisoned_vertex_buffers: false,
            graphics_descriptor_sets: SmallVec::new(),
            compute_descriptor_sets: SmallVec::new(),
            poisoned_descriptor_sets: false,
            graphics_pipeline: 0,
            compute_pipeline: 0,
            dynamic_state: DynamicState::none(),
        }
    }

    /// Forgets all cached state, so that every subsequent comparison reports a
    /// change.
    #[inline]
    pub fn invalidate(&mut self) {
        // The `poisoned_*` flags are deliberately left untouched: a set flag
        // merely forces the next builder to start from an empty cache, which is
        // already what this method produces.
        self.index_buffer = None;
        self.vertex_buffers = SmallVec::new();
        self.graphics_descriptor_sets = SmallVec::new();
        self.compute_descriptor_sets = SmallVec::new();
        self.graphics_pipeline = 0;
        self.compute_pipeline = 0;
        self.dynamic_state = DynamicState::none();
    }

    /// Compares `incoming` with the cached dynamic state and returns a
    /// `DynamicState` holding only the fields the caller must re-record.
    /// A field that is `None` in `incoming` never updates the cache.
    pub fn dynamic_state(&mut self, incoming: &DynamicState) -> DynamicState {
        let mut delta = DynamicState::none();

        // For each field: if the incoming value differs from the cached one,
        // report it in `delta`; only a `Some` value is written back to the cache.
        macro_rules! sync {
            ($field:ident) => {
                if incoming.$field != self.dynamic_state.$field {
                    delta.$field = incoming.$field.clone();
                    if incoming.$field.is_some() {
                        self.dynamic_state.$field = incoming.$field.clone();
                    }
                }
            };
        }

        sync!(line_width);
        sync!(viewports);
        sync!(scissors);
        sync!(compare_mask);
        sync!(reference);
        sync!(write_mask);

        delta
    }

    /// Starts comparing a list of descriptor sets against the cached ones.
    /// Call `add` on the returned builder for every set, then finish with
    /// `compare`. Dropping the builder without calling `compare` poisons the
    /// cache; the next call to this method then starts from scratch.
    #[inline]
    pub fn bind_descriptor_sets(&mut self, graphics: bool) -> StateCacherDescriptorSets {
        // A still-set poison flag means a previous builder never ran `compare`,
        // so the cached contents cannot be trusted.
        if self.poisoned_descriptor_sets {
            self.compute_descriptor_sets = SmallVec::new();
            self.graphics_descriptor_sets = SmallVec::new();
        }

        // Poison immediately; `compare` un-poisons on completion.
        self.poisoned_descriptor_sets = true;

        let state = if graphics {
            &mut self.graphics_descriptor_sets
        } else {
            &mut self.compute_descriptor_sets
        };

        StateCacherDescriptorSets {
            poisoned: &mut self.poisoned_descriptor_sets,
            state,
            offset: 0,
            found_diff: None,
        }
    }

    /// Compares `pipeline` with the graphics pipeline currently cached.
    pub fn bind_graphics_pipeline<P>(&mut self, pipeline: &P) -> StateCacherOutcome
    where
        P: GraphicsPipelineAbstract,
    {
        let raw = GraphicsPipelineAbstract::inner(pipeline).internal_object();
        if self.graphics_pipeline == raw {
            return StateCacherOutcome::AlreadyOk;
        }
        self.graphics_pipeline = raw;
        StateCacherOutcome::NeedChange
    }

    /// Compares `pipeline` with the compute pipeline currently cached.
    pub fn bind_compute_pipeline<P>(&mut self, pipeline: &P) -> StateCacherOutcome
    where
        P: ComputePipelineAbstract,
    {
        let raw = pipeline.inner().internal_object();
        if self.compute_pipeline == raw {
            return StateCacherOutcome::AlreadyOk;
        }
        self.compute_pipeline = raw;
        StateCacherOutcome::NeedChange
    }

    /// Starts comparing a list of vertex buffers against the cached ones.
    /// Call `add` on the returned builder for every buffer, then finish with
    /// `compare`. Dropping the builder without calling `compare` poisons the
    /// cache; the next call to this method then starts from scratch.
    #[inline]
    pub fn bind_vertex_buffers(&mut self) -> StateCacherVertexBuffers {
        if self.poisoned_vertex_buffers {
            self.vertex_buffers = SmallVec::new();
        }

        // Poison immediately; `compare` un-poisons on completion.
        self.poisoned_vertex_buffers = true;

        StateCacherVertexBuffers {
            poisoned: &mut self.poisoned_vertex_buffers,
            state: &mut self.vertex_buffers,
            offset: 0,
            first_diff: None,
            last_diff: 0,
        }
    }

    /// Compares `index_buffer` (interpreted with index type `ty`) against the
    /// cached index buffer.
    pub fn bind_index_buffer<B>(&mut self, index_buffer: &B, ty: IndexType) -> StateCacherOutcome
    where
        B: ?Sized + BufferAccess,
    {
        let inner = index_buffer.inner();
        let value = (inner.buffer.internal_object(), inner.offset, ty);

        match self.index_buffer {
            Some(cached) if cached == value => StateCacherOutcome::AlreadyOk,
            _ => {
                self.index_buffer = Some(value);
                StateCacherOutcome::NeedChange
            }
        }
    }
}
/// Builder returned by `StateCacher::bind_descriptor_sets`, used to compare a
/// list of descriptor sets against the cache one set at a time.
pub struct StateCacherDescriptorSets<'s> {
    // Cleared by `compare`; stays set if the builder is dropped mid-comparison.
    poisoned: &'s mut bool,
    // The cached set list being compared/updated (graphics or compute).
    state: &'s mut SmallVec<[(vk::DescriptorSet, SmallVec<[u32; 32]>); 12]>,
    // Index of the next set that `add` will look at.
    offset: usize,
    // Index of the first set that differed from the cache, if any.
    found_diff: Option<u32>,
}
impl<'s> StateCacherDescriptorSets<'s> {
    /// Adds the next descriptor set (with its dynamic offsets) to the comparison.
    #[inline]
    pub fn add<S>(&mut self, set: &S, dynamic_offsets: &SmallVec<[u32; 32]>)
    where
        S: ?Sized + DescriptorSet,
    {
        let raw = set.inner().internal_object();
        let idx = self.offset;
        self.offset += 1;

        match self.state.get_mut(idx) {
            // Same handle with the same offsets: nothing to record.
            Some(entry) if entry.0 == raw && &entry.1 == dynamic_offsets => return,
            // A cached entry exists but differs: overwrite it in place.
            Some(entry) => *entry = (raw, dynamic_offsets.clone()),
            // Past the end of the cache: append.
            None => self.state.push((raw, dynamic_offsets.clone())),
        }

        // Remember the first index at which a difference was seen.
        self.found_diff.get_or_insert(idx as u32);
    }

    /// Finishes the comparison. Returns the index of the first descriptor set
    /// that must be re-bound, or `None` if everything already matched. Cached
    /// entries past the last `add` are dropped.
    #[inline]
    pub fn compare(self) -> Option<u32> {
        // Comparison completed, so the cache is trustworthy again.
        *self.poisoned = false;
        self.state.truncate(self.offset);
        self.found_diff
    }
}
/// Builder returned by `StateCacher::bind_vertex_buffers`, used to compare a
/// list of vertex buffers against the cache one binding slot at a time.
pub struct StateCacherVertexBuffers<'s> {
    // Cleared by `compare`; stays set if the builder is dropped mid-comparison.
    poisoned: &'s mut bool,
    // The cached (buffer handle, offset) list being compared/updated.
    state: &'s mut SmallVec<[(vk::Buffer, vk::DeviceSize); 12]>,
    // Index of the next slot that `add` will look at.
    offset: usize,
    // First slot that differed from the cache, if any.
    first_diff: Option<u32>,
    // Last slot that differed; only meaningful when `first_diff` is `Some`.
    last_diff: u32,
}
impl<'s> StateCacherVertexBuffers<'s> {
    /// Adds the next vertex buffer to the comparison.
    #[inline]
    pub fn add<B>(&mut self, buffer: &B)
    where
        B: ?Sized + BufferAccess,
    {
        let inner = buffer.inner();
        let entry = (
            inner.buffer.internal_object(),
            inner.offset as vk::DeviceSize,
        );

        let idx = self.offset;
        self.offset += 1;

        match self.state.get_mut(idx) {
            // Identical to what is cached for this slot: nothing to record.
            Some(cached) if *cached == entry => return,
            // Slot exists but holds a different buffer/offset: overwrite.
            Some(cached) => *cached = entry,
            // Past the end of the cache: append.
            None => self.state.push(entry),
        }

        // Track the inclusive range of slots that changed.
        self.last_diff = idx as u32;
        self.first_diff.get_or_insert(idx as u32);
    }

    /// Finishes the comparison. Returns the half-open range of binding slots
    /// that must be re-bound, or `None` if nothing changed. Cached entries past
    /// the last `add` are dropped.
    #[inline]
    pub fn compare(self) -> Option<Range<u32>> {
        // Comparison completed, so the cache is trustworthy again.
        *self.poisoned = false;
        self.state.truncate(self.offset);

        let last = self.last_diff;
        self.first_diff.map(move |first| {
            debug_assert!(first <= last);
            first..last + 1
        })
    }
}
#[cfg(test)]
mod tests {
    use crate::buffer::BufferUsage;
    use crate::buffer::CpuAccessibleBuffer;
    use crate::command_buffer::state_cacher::StateCacher;

    /// Zero-length vertex data; these tests only care about buffer identity.
    const EMPTY: [i32; 0] = [];

    #[test]
    fn vb_caching_single() {
        let (device, _queue) = gfx_dev_and_queue!();

        let buffer =
            CpuAccessibleBuffer::from_data(device, BufferUsage::vertex_buffer(), false, EMPTY)
                .unwrap();

        let mut cacher = StateCacher::new();

        // First bind: the buffer is new, so slot 0 must be bound.
        {
            let mut vb = cacher.bind_vertex_buffers();
            vb.add(&buffer);
            assert_eq!(vb.compare(), Some(0..1));
        }

        // Re-binding the identical buffer is a no-op, however often it happens.
        for _ in 0..3 {
            let mut vb = cacher.bind_vertex_buffers();
            vb.add(&buffer);
            assert_eq!(vb.compare(), None);
        }
    }

    #[test]
    fn vb_caching_invalidated() {
        let (device, _queue) = gfx_dev_and_queue!();

        let buffer =
            CpuAccessibleBuffer::from_data(device, BufferUsage::vertex_buffer(), false, EMPTY)
                .unwrap();

        let mut cacher = StateCacher::new();

        // Initial bind is reported.
        {
            let mut vb = cacher.bind_vertex_buffers();
            vb.add(&buffer);
            assert_eq!(vb.compare(), Some(0..1));
        }

        // Same buffer again: cached, nothing to do.
        {
            let mut vb = cacher.bind_vertex_buffers();
            vb.add(&buffer);
            assert_eq!(vb.compare(), None);
        }

        // After invalidation the cache forgets the buffer, so it is reported again.
        cacher.invalidate();
        {
            let mut vb = cacher.bind_vertex_buffers();
            vb.add(&buffer);
            assert_eq!(vb.compare(), Some(0..1));
        }
    }

    #[test]
    fn vb_caching_multi() {
        let (device, _queue) = gfx_dev_and_queue!();

        let buf1 = CpuAccessibleBuffer::from_data(
            device.clone(),
            BufferUsage::vertex_buffer(),
            false,
            EMPTY,
        )
        .unwrap();
        let buf2 = CpuAccessibleBuffer::from_data(
            device.clone(),
            BufferUsage::vertex_buffer(),
            false,
            EMPTY,
        )
        .unwrap();
        let buf3 =
            CpuAccessibleBuffer::from_data(device, BufferUsage::vertex_buffer(), false, EMPTY)
                .unwrap();

        let mut cacher = StateCacher::new();

        // Two new buffers: slots 0..2 change.
        {
            let mut vb = cacher.bind_vertex_buffers();
            vb.add(&buf1);
            vb.add(&buf2);
            assert_eq!(vb.compare(), Some(0..2));
        }

        // Appending a third buffer only touches slot 2.
        {
            let mut vb = cacher.bind_vertex_buffers();
            vb.add(&buf1);
            vb.add(&buf2);
            vb.add(&buf3);
            assert_eq!(vb.compare(), Some(2..3));
        }

        // A matching prefix reports no change; trailing slots are truncated.
        {
            let mut vb = cacher.bind_vertex_buffers();
            vb.add(&buf1);
            assert_eq!(vb.compare(), None);
        }

        // buf3 now lands in slot 1, which previously held nothing (truncated).
        {
            let mut vb = cacher.bind_vertex_buffers();
            vb.add(&buf1);
            vb.add(&buf3);
            assert_eq!(vb.compare(), Some(1..2));
        }

        // buf2 replaces buf1 in slot 0; slot 1 already holds buf3.
        {
            let mut vb = cacher.bind_vertex_buffers();
            vb.add(&buf2);
            vb.add(&buf3);
            assert_eq!(vb.compare(), Some(0..1));
        }
    }
}