use crate::{
buffer::{BufferUsage, IndexBuffer, Subbuffer},
device::{Device, DeviceOwned},
format::{Format, FormatFeatures},
instance::InstanceOwnedDebugWrapper,
macros::{impl_id_counter, vulkan_bitflags, vulkan_enum},
DeviceAddress, DeviceSize, NonNullDeviceAddress, Packed24_8, Requires, RequiresAllOf,
RequiresOneOf, Validated, ValidationError, VulkanError, VulkanObject,
};
use bytemuck::{Pod, Zeroable};
use std::{fmt::Debug, hash::Hash, mem::MaybeUninit, num::NonZeroU64, ptr, sync::Arc};
/// An opaque data structure used by the device to speed up ray-geometry
/// intersection queries in ray tracing.
///
/// The structure's storage lives in `buffer`; the raw Vulkan handle is
/// destroyed when this object is dropped.
#[derive(Debug)]
pub struct AccelerationStructure {
    // Keeps the creating device alive for as long as the handle exists.
    device: InstanceOwnedDebugWrapper<Arc<Device>>,
    // Raw Vulkan handle; owned by this object (destroyed in `Drop`).
    handle: ash::vk::AccelerationStructureKHR,
    // Unique object id allocated via `Self::next_id()` (see `impl_id_counter!`).
    id: NonZeroU64,
    // Flags the structure was created with; returned by `create_flags()`.
    create_flags: AccelerationStructureCreateFlags,
    // Subrange of a buffer that backs the acceleration structure data.
    buffer: Subbuffer<[u8]>,
    // The acceleration structure type; returned by `ty()`.
    ty: AccelerationStructureType,
}
impl AccelerationStructure {
    /// Creates a new `AccelerationStructure`.
    ///
    /// # Safety
    ///
    /// Only extension/feature availability and `create_info` are validated
    /// here (see [`Self::validate_new`]); the caller must uphold the remaining
    /// Vulkan validity rules for `vkCreateAccelerationStructureKHR`.
    #[inline]
    pub unsafe fn new(
        device: Arc<Device>,
        create_info: AccelerationStructureCreateInfo,
    ) -> Result<Arc<Self>, Validated<VulkanError>> {
        Self::validate_new(&device, &create_info)?;

        Ok(Self::new_unchecked(device, create_info)?)
    }

    /// Checks that the `khr_acceleration_structure` extension and the
    /// `acceleration_structure` feature are enabled on `device`, then
    /// validates `create_info`.
    fn validate_new(
        device: &Device,
        create_info: &AccelerationStructureCreateInfo,
    ) -> Result<(), Box<ValidationError>> {
        if !device.enabled_extensions().khr_acceleration_structure {
            return Err(Box::new(ValidationError {
                requires_one_of: RequiresOneOf(&[RequiresAllOf(&[Requires::DeviceExtension(
                    "khr_acceleration_structure",
                )])]),
                ..Default::default()
            }));
        }

        if !device.enabled_features().acceleration_structure {
            return Err(Box::new(ValidationError {
                requires_one_of: RequiresOneOf(&[RequiresAllOf(&[Requires::Feature(
                    "acceleration_structure",
                )])]),
                vuids: &["VUID-vkCreateAccelerationStructureKHR-accelerationStructure-03611"],
                ..Default::default()
            }));
        }

        create_info
            .validate(device)
            .map_err(|err| err.add_context("create_info"))?;

        Ok(())
    }

    /// Creates the Vulkan object without performing any validation.
    ///
    /// # Safety
    ///
    /// Same contract as [`Self::new`], except that nothing at all is checked.
    #[cfg_attr(not(feature = "document_unchecked"), doc(hidden))]
    pub unsafe fn new_unchecked(
        device: Arc<Device>,
        create_info: AccelerationStructureCreateInfo,
    ) -> Result<Arc<Self>, VulkanError> {
        let &AccelerationStructureCreateInfo {
            create_flags,
            ref buffer,
            ty,
            _ne: _,
        } = &create_info;

        let create_info_vk = ash::vk::AccelerationStructureCreateInfoKHR {
            create_flags: create_flags.into(),
            buffer: buffer.buffer().handle(),
            offset: buffer.offset(),
            size: buffer.size(),
            ty: ty.into(),
            // NOTE(review): zero presumably means "no capture/replay device
            // address requested" — confirm against the Vulkan spec if that
            // feature is ever exposed.
            device_address: 0,
            ..Default::default()
        };

        let handle = {
            let fns = device.fns();
            let mut output = MaybeUninit::uninit();
            (fns.khr_acceleration_structure
                .create_acceleration_structure_khr)(
                device.handle(),
                &create_info_vk,
                ptr::null(),
                output.as_mut_ptr(),
            )
            .result()
            .map_err(VulkanError::from)?;
            // `output` was written by the call above, which returned success.
            output.assume_init()
        };

        Ok(Self::from_handle(device, handle, create_info))
    }

    /// Creates a new `AccelerationStructure` from a raw object handle.
    ///
    /// # Safety
    ///
    /// `handle` must be a valid handle created from `device` with
    /// `create_info`. Ownership of the handle is transferred: it is destroyed
    /// when the returned object is dropped.
    pub unsafe fn from_handle(
        device: Arc<Device>,
        handle: ash::vk::AccelerationStructureKHR,
        create_info: AccelerationStructureCreateInfo,
    ) -> Arc<Self> {
        let AccelerationStructureCreateInfo {
            create_flags,
            buffer,
            ty,
            _ne: _,
        } = create_info;

        Arc::new(Self {
            device: InstanceOwnedDebugWrapper(device),
            handle,
            id: Self::next_id(),
            create_flags,
            buffer,
            ty,
        })
    }

    /// Returns the flags the acceleration structure was created with.
    #[inline]
    pub fn create_flags(&self) -> AccelerationStructureCreateFlags {
        self.create_flags
    }

    /// Returns the subbuffer that backs this acceleration structure.
    #[inline]
    pub fn buffer(&self) -> &Subbuffer<[u8]> {
        &self.buffer
    }

    /// Returns the size of the backing subbuffer.
    #[inline]
    pub fn size(&self) -> DeviceSize {
        self.buffer.size()
    }

    /// Returns the type the acceleration structure was created with.
    #[inline]
    pub fn ty(&self) -> AccelerationStructureType {
        self.ty
    }

    /// Queries `vkGetAccelerationStructureDeviceAddressKHR` and returns the
    /// device address of this acceleration structure.
    ///
    /// # Panics
    ///
    /// Panics if the driver returns a null address.
    pub fn device_address(&self) -> NonNullDeviceAddress {
        let info_vk = ash::vk::AccelerationStructureDeviceAddressInfoKHR {
            acceleration_structure: self.handle,
            ..Default::default()
        };

        let ptr = unsafe {
            let fns = self.device.fns();
            (fns.khr_acceleration_structure
                .get_acceleration_structure_device_address_khr)(
                self.device.handle(), &info_vk
            )
        };

        NonNullDeviceAddress::new(ptr).unwrap()
    }
}
impl Drop for AccelerationStructure {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: this object owns `handle` (see `from_handle`), it is
        // destroyed exactly once here, and no allocation callbacks were used
        // at creation (`ptr::null()` both times).
        unsafe {
            let fns = self.device.fns();
            (fns.khr_acceleration_structure
                .destroy_acceleration_structure_khr)(
                self.device.handle(), self.handle, ptr::null()
            )
        }
    }
}
unsafe impl VulkanObject for AccelerationStructure {
    type Handle = ash::vk::AccelerationStructureKHR;

    /// Returns the raw Vulkan handle. The handle stays owned by `self`.
    #[inline]
    fn handle(&self) -> Self::Handle {
        self.handle
    }
}
unsafe impl DeviceOwned for AccelerationStructure {
    /// Returns the device this acceleration structure was created from.
    #[inline]
    fn device(&self) -> &Arc<Device> {
        &self.device
    }
}
// NOTE(review): presumably generates the id-based `PartialEq`/`Eq`/`Hash`
// boilerplate and the `next_id()` used above — see `crate::macros`.
impl_id_counter!(AccelerationStructure);
vulkan_enum! {
    #[non_exhaustive]

    // The type of an acceleration structure.
    // Wraps `ash::vk::AccelerationStructureTypeKHR`.
    AccelerationStructureType = AccelerationStructureTypeKHR(i32);

    // Maps to `VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR`.
    TopLevel = TOP_LEVEL,

    // Maps to `VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR`.
    BottomLevel = BOTTOM_LEVEL,

    // Maps to `VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR`.
    Generic = GENERIC,
}
/// Parameters to create a new [`AccelerationStructure`].
#[derive(Clone, Debug)]
pub struct AccelerationStructureCreateInfo {
    /// Flags to create the acceleration structure with.
    ///
    /// The default value (see [`Self::new`]) is empty.
    pub create_flags: AccelerationStructureCreateFlags,

    /// The subbuffer that will hold the acceleration structure data.
    pub buffer: Subbuffer<[u8]>,

    /// The acceleration structure type.
    ///
    /// The default value (see [`Self::new`]) is
    /// [`AccelerationStructureType::Generic`].
    pub ty: AccelerationStructureType,

    pub _ne: crate::NonExhaustive,
}
impl AccelerationStructureCreateInfo {
    /// Returns a default `AccelerationStructureCreateInfo` backed by `buffer`:
    /// empty create flags and the `Generic` acceleration structure type.
    #[inline]
    pub fn new(buffer: Subbuffer<[u8]>) -> Self {
        Self {
            ty: AccelerationStructureType::Generic,
            create_flags: AccelerationStructureCreateFlags::empty(),
            buffer,
            _ne: crate::NonExhaustive(()),
        }
    }

    /// Validates this create-info against `device`.
    ///
    /// Checks run in order: `create_flags`, `ty`, buffer usage, buffer offset
    /// alignment; the first failure is returned.
    pub(crate) fn validate(&self, device: &Device) -> Result<(), Box<ValidationError>> {
        self.create_flags.validate_device(device).map_err(|err| {
            err.add_context("create_flags")
                .set_vuids(&["VUID-VkAccelerationStructureCreateInfoKHR-createFlags-parameter"])
        })?;

        self.ty.validate_device(device).map_err(|err| {
            err.add_context("ty")
                .set_vuids(&["VUID-VkAccelerationStructureCreateInfoKHR-type-parameter"])
        })?;

        // The backing buffer must have been created for acceleration
        // structure storage.
        let usage = self.buffer.buffer().usage();
        if !usage.intersects(BufferUsage::ACCELERATION_STRUCTURE_STORAGE) {
            return Err(Box::new(ValidationError {
                context: "buffer".into(),
                problem: "the buffer was not created with the `ACCELERATION_STRUCTURE_STORAGE` \
                    usage"
                    .into(),
                vuids: &["VUID-VkAccelerationStructureCreateInfoKHR-buffer-03614"],
                ..Default::default()
            }));
        }

        // Vulkan requires the structure's start offset to be 256-byte aligned.
        if self.buffer.offset() % 256 != 0 {
            return Err(Box::new(ValidationError {
                context: "buffer".into(),
                problem: "the offset of the buffer is not a multiple of 256".into(),
                vuids: &["VUID-VkAccelerationStructureCreateInfoKHR-offset-03734"],
                ..Default::default()
            }));
        }

        Ok(())
    }
}
vulkan_bitflags! {
    #[non_exhaustive]

    // Flags that control how an acceleration structure is created.
    // Wraps `ash::vk::AccelerationStructureCreateFlagsKHR`; no individual
    // flags are currently exposed here.
    AccelerationStructureCreateFlags = AccelerationStructureCreateFlagsKHR(u32);
}
/// Parameters for an acceleration structure build operation.
#[derive(Clone, Debug)]
pub struct AccelerationStructureBuildGeometryInfo {
    /// Flags controlling how the build is performed.
    ///
    /// The default value (see [`Self::new`]) is empty.
    pub flags: BuildAccelerationStructureFlags,

    /// Whether to build a new structure or update an existing one.
    ///
    /// The default value (see [`Self::new`]) is
    /// [`BuildAccelerationStructureMode::Build`].
    pub mode: BuildAccelerationStructureMode,

    /// The acceleration structure to build into, if already chosen.
    ///
    /// The default value (see [`Self::new`]) is `None`.
    pub dst_acceleration_structure: Option<Arc<AccelerationStructure>>,

    /// The geometry data the structure is built from.
    pub geometries: AccelerationStructureGeometries,

    /// Scratch memory used during the build, if already chosen.
    ///
    /// The default value (see [`Self::new`]) is `None`.
    pub scratch_data: Option<Subbuffer<[u8]>>,

    pub _ne: crate::NonExhaustive,
}
impl AccelerationStructureBuildGeometryInfo {
    /// Returns a default `AccelerationStructureBuildGeometryInfo` with the
    /// specified `geometries`: empty flags, `Build` mode, and no destination
    /// structure or scratch buffer chosen yet.
    #[inline]
    pub fn new(geometries: AccelerationStructureGeometries) -> Self {
        Self {
            flags: BuildAccelerationStructureFlags::empty(),
            mode: BuildAccelerationStructureMode::Build,
            dst_acceleration_structure: None,
            geometries,
            scratch_data: None,
            _ne: crate::NonExhaustive(()),
        }
    }

    /// Validates the build info against `device`.
    ///
    /// Panics (rather than erroring) if `dst_acceleration_structure` or the
    /// `Update` source structure belongs to a different device.
    pub(crate) fn validate(&self, device: &Device) -> Result<(), Box<ValidationError>> {
        let &Self {
            flags,
            ref mode,
            ref dst_acceleration_structure,
            ref geometries,
            scratch_data: _,
            _ne: _,
        } = self;

        flags.validate_device(device).map_err(|err| {
            err.add_context("flags")
                .set_vuids(&["VUID-VkAccelerationStructureBuildGeometryInfoKHR-flags-parameter"])
        })?;

        // NOTE(review): assumed to be `Some` because the
        // acceleration-structure extension is enabled on this device — confirm.
        let max_geometry_count = device
            .physical_device()
            .properties()
            .max_geometry_count
            .unwrap();

        match geometries {
            // Triangle geometries: validate each one, then the total count.
            AccelerationStructureGeometries::Triangles(geometries) => {
                for (index, triangles_data) in geometries.iter().enumerate() {
                    triangles_data
                        .validate(device)
                        .map_err(|err| err.add_context(format!("geometries[{}]", index)))?;
                }

                if geometries.len() as u64 > max_geometry_count {
                    return Err(Box::new(ValidationError {
                        context: "geometries".into(),
                        problem: "the length exceeds the `max_geometry_count` limit".into(),
                        vuids: &["VUID-VkAccelerationStructureBuildGeometryInfoKHR-type-03793"],
                        ..Default::default()
                    }));
                }
            }
            // AABB geometries: same scheme as the triangle case.
            AccelerationStructureGeometries::Aabbs(geometries) => {
                for (index, aabbs_data) in geometries.iter().enumerate() {
                    aabbs_data
                        .validate(device)
                        .map_err(|err| err.add_context(format!("geometries[{}]", index)))?;
                }

                if geometries.len() as u64 > max_geometry_count {
                    return Err(Box::new(ValidationError {
                        context: "geometries".into(),
                        problem: "the length exceeds the `max_geometry_count` limit".into(),
                        vuids: &["VUID-VkAccelerationStructureBuildGeometryInfoKHR-type-03793"],
                        ..Default::default()
                    }));
                }
            }
            // Instance geometry: always a single element, no count check.
            AccelerationStructureGeometries::Instances(instances_data) => {
                instances_data
                    .validate(device)
                    .map_err(|err| err.add_context("geometries"))?;
            }
        }

        if let Some(dst_acceleration_structure) = dst_acceleration_structure {
            assert_eq!(device, dst_acceleration_structure.device().as_ref());
        }

        if let BuildAccelerationStructureMode::Update(src_acceleration_structure) = mode {
            assert_eq!(device, src_acceleration_structure.device().as_ref());
        }

        // The two "prefer" flags are mutually exclusive.
        if flags.contains(
            BuildAccelerationStructureFlags::PREFER_FAST_TRACE
                | BuildAccelerationStructureFlags::PREFER_FAST_BUILD,
        ) {
            return Err(Box::new(ValidationError {
                context: "flags".into(),
                problem: "contains both `BuildAccelerationStructureFlags::PREFER_FAST_TRACE` and \
                    `BuildAccelerationStructureFlags::PREFER_FAST_BUILD`"
                    .into(),
                vuids: &["VUID-VkAccelerationStructureBuildGeometryInfoKHR-flags-03796"],
                ..Default::default()
            }));
        }

        Ok(())
    }

    /// Converts this build info into the raw ash structs.
    ///
    /// `geometry_count`/`p_geometries` are intentionally left empty in the
    /// returned info struct; the caller is expected to point them at the
    /// returned `Vec` of geometries once it has a stable address. A buffer
    /// that is `None` translates to device address `0`.
    pub(crate) fn to_vulkan(
        &self,
    ) -> (
        ash::vk::AccelerationStructureBuildGeometryInfoKHR,
        Vec<ash::vk::AccelerationStructureGeometryKHR>,
    ) {
        let &Self {
            flags,
            ref mode,
            ref dst_acceleration_structure,
            ref geometries,
            ref scratch_data,
            _ne: _,
        } = self;

        // The geometry variant also fixes the acceleration structure type:
        // triangles/AABBs are bottom-level, instances are top-level.
        let (ty, geometries_vk): (_, Vec<_>) = match geometries {
            AccelerationStructureGeometries::Triangles(geometries) => (
                ash::vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL,
                geometries
                    .iter()
                    .map(|triangles_data| {
                        let &AccelerationStructureGeometryTrianglesData {
                            flags,
                            vertex_format,
                            ref vertex_data,
                            vertex_stride,
                            max_vertex,
                            ref index_data,
                            ref transform_data,
                            _ne,
                        } = triangles_data;

                        ash::vk::AccelerationStructureGeometryKHR {
                            geometry_type: ash::vk::GeometryTypeKHR::TRIANGLES,
                            geometry: ash::vk::AccelerationStructureGeometryDataKHR {
                                triangles: ash::vk::AccelerationStructureGeometryTrianglesDataKHR {
                                    vertex_format: vertex_format.into(),
                                    vertex_data: ash::vk::DeviceOrHostAddressConstKHR {
                                        device_address: vertex_data.as_ref().map_or(
                                            0,
                                            |vertex_data| {
                                                vertex_data.device_address().unwrap().into()
                                            },
                                        ),
                                    },
                                    vertex_stride: vertex_stride as DeviceSize,
                                    max_vertex,
                                    // No index buffer means non-indexed geometry.
                                    index_type: index_data
                                        .as_ref()
                                        .map_or(ash::vk::IndexType::NONE_KHR, |index_data| {
                                            index_data.index_type().into()
                                        }),
                                    index_data: ash::vk::DeviceOrHostAddressConstKHR {
                                        device_address: index_data.as_ref().map_or(
                                            0,
                                            |index_data| {
                                                index_data
                                                    .as_bytes()
                                                    .device_address()
                                                    .unwrap()
                                                    .get()
                                            },
                                        ),
                                    },
                                    transform_data: ash::vk::DeviceOrHostAddressConstKHR {
                                        device_address: transform_data.as_ref().map_or(
                                            0,
                                            |transform_data| {
                                                transform_data.device_address().unwrap().get()
                                            },
                                        ),
                                    },
                                    ..Default::default()
                                },
                            },
                            flags: flags.into(),
                            ..Default::default()
                        }
                    })
                    .collect(),
            ),
            AccelerationStructureGeometries::Aabbs(geometries) => (
                ash::vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL,
                geometries
                    .iter()
                    .map(|aabbs_data| {
                        let &AccelerationStructureGeometryAabbsData {
                            flags,
                            ref data,
                            stride,
                            _ne: _,
                        } = aabbs_data;

                        ash::vk::AccelerationStructureGeometryKHR {
                            geometry_type: ash::vk::GeometryTypeKHR::AABBS,
                            geometry: ash::vk::AccelerationStructureGeometryDataKHR {
                                aabbs: ash::vk::AccelerationStructureGeometryAabbsDataKHR {
                                    data: ash::vk::DeviceOrHostAddressConstKHR {
                                        device_address: data.as_ref().map_or(0, |data| {
                                            data.device_address().unwrap().into()
                                        }),
                                    },
                                    stride: stride as DeviceSize,
                                    ..Default::default()
                                },
                            },
                            flags: flags.into(),
                            ..Default::default()
                        }
                    })
                    .collect(),
            ),
            AccelerationStructureGeometries::Instances(instances_data) => {
                (ash::vk::AccelerationStructureTypeKHR::TOP_LEVEL, {
                    let &AccelerationStructureGeometryInstancesData {
                        flags,
                        ref data,
                        _ne: _,
                    } = instances_data;

                    // `Pointers` sets `array_of_pointers`, `Values` clears it;
                    // both resolve to a (possibly zero) device address.
                    let (array_of_pointers, data) = match data {
                        AccelerationStructureGeometryInstancesDataType::Values(data) => (
                            ash::vk::FALSE,
                            ash::vk::DeviceOrHostAddressConstKHR {
                                device_address: data
                                    .as_ref()
                                    .map_or(0, |data| data.device_address().unwrap().into()),
                            },
                        ),
                        AccelerationStructureGeometryInstancesDataType::Pointers(data) => (
                            ash::vk::TRUE,
                            ash::vk::DeviceOrHostAddressConstKHR {
                                device_address: data
                                    .as_ref()
                                    .map_or(0, |data| data.device_address().unwrap().into()),
                            },
                        ),
                    };

                    // A top-level build always has exactly one geometry entry.
                    [ash::vk::AccelerationStructureGeometryKHR {
                        geometry_type: ash::vk::GeometryTypeKHR::INSTANCES,
                        geometry: ash::vk::AccelerationStructureGeometryDataKHR {
                            instances: ash::vk::AccelerationStructureGeometryInstancesDataKHR {
                                array_of_pointers,
                                data,
                                ..Default::default()
                            },
                        },
                        flags: flags.into(),
                        ..Default::default()
                    }]
                    .into_iter()
                    .collect()
                })
            }
        };

        (
            ash::vk::AccelerationStructureBuildGeometryInfoKHR {
                ty,
                flags: flags.into(),
                mode: mode.into(),
                // Only `Update` carries a source structure; `Build` uses the
                // null handle.
                src_acceleration_structure: match mode {
                    BuildAccelerationStructureMode::Build => Default::default(),
                    BuildAccelerationStructureMode::Update(src_acceleration_structure) => {
                        src_acceleration_structure.handle()
                    }
                },
                dst_acceleration_structure: dst_acceleration_structure
                    .as_ref()
                    .map_or_else(Default::default, VulkanObject::handle),
                // Filled in by the caller from `geometries_vk` (see above).
                geometry_count: 0,
                p_geometries: ptr::null(),
                pp_geometries: ptr::null(),
                scratch_data: ash::vk::DeviceOrHostAddressKHR {
                    device_address: scratch_data.as_ref().map_or(0, |scratch_data| {
                        scratch_data.device_address().unwrap().into()
                    }),
                },
                ..Default::default()
            },
            geometries_vk,
        )
    }
}
vulkan_bitflags! {
    #[non_exhaustive]

    // Flags that control how an acceleration structure is built.
    // Wraps `ash::vk::BuildAccelerationStructureFlagsKHR`.
    BuildAccelerationStructureFlags = BuildAccelerationStructureFlagsKHR(u32);

    // `VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR`
    ALLOW_UPDATE = ALLOW_UPDATE,

    // `VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR`
    ALLOW_COMPACTION = ALLOW_COMPACTION,

    // `VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR`
    // (mutually exclusive with PREFER_FAST_BUILD — see `validate`)
    PREFER_FAST_TRACE = PREFER_FAST_TRACE,

    // `VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR`
    PREFER_FAST_BUILD = PREFER_FAST_BUILD,

    // `VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR`
    LOW_MEMORY = LOW_MEMORY,
}
/// What mode an acceleration structure build command operates in.
#[derive(Clone, Debug)]
#[repr(i32)]
pub enum BuildAccelerationStructureMode {
    /// Build a new acceleration structure from scratch.
    Build = ash::vk::BuildAccelerationStructureModeKHR::BUILD.as_raw(),

    /// Update an existing acceleration structure, using the given structure
    /// as the source.
    Update(Arc<AccelerationStructure>) =
        ash::vk::BuildAccelerationStructureModeKHR::UPDATE.as_raw(),
}
impl From<&BuildAccelerationStructureMode> for ash::vk::BuildAccelerationStructureModeKHR {
    /// Converts to the raw Vulkan mode, discarding the `Update` payload.
    #[inline]
    fn from(val: &BuildAccelerationStructureMode) -> Self {
        if matches!(val, BuildAccelerationStructureMode::Build) {
            ash::vk::BuildAccelerationStructureModeKHR::BUILD
        } else {
            ash::vk::BuildAccelerationStructureModeKHR::UPDATE
        }
    }
}
/// The geometries to build an acceleration structure from.
#[derive(Clone, Debug)]
pub enum AccelerationStructureGeometries {
    /// Triangle geometries (bottom-level structures).
    Triangles(Vec<AccelerationStructureGeometryTrianglesData>),

    /// Axis-aligned bounding box geometries (bottom-level structures).
    Aabbs(Vec<AccelerationStructureGeometryAabbsData>),

    /// Instance geometry (top-level structures); always a single geometry.
    Instances(AccelerationStructureGeometryInstancesData),
}
impl AccelerationStructureGeometries {
    /// Returns the number of geometries.
    ///
    /// `Instances` always counts as exactly one geometry.
    #[inline]
    pub fn len(&self) -> usize {
        match self {
            AccelerationStructureGeometries::Triangles(geometries) => geometries.len(),
            AccelerationStructureGeometries::Aabbs(geometries) => geometries.len(),
            AccelerationStructureGeometries::Instances(_) => 1,
        }
    }

    /// Returns whether there are no geometries.
    ///
    /// Added to pair with [`Self::len`] (clippy `len_without_is_empty`); only
    /// the `Triangles` and `Aabbs` variants can be empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
}
impl From<Vec<AccelerationStructureGeometryTrianglesData>> for AccelerationStructureGeometries {
    /// Wraps the data in [`AccelerationStructureGeometries::Triangles`].
    #[inline]
    fn from(value: Vec<AccelerationStructureGeometryTrianglesData>) -> Self {
        Self::Triangles(value)
    }
}

impl From<Vec<AccelerationStructureGeometryAabbsData>> for AccelerationStructureGeometries {
    /// Wraps the data in [`AccelerationStructureGeometries::Aabbs`].
    #[inline]
    fn from(value: Vec<AccelerationStructureGeometryAabbsData>) -> Self {
        Self::Aabbs(value)
    }
}

impl From<AccelerationStructureGeometryInstancesData> for AccelerationStructureGeometries {
    /// Wraps the data in [`AccelerationStructureGeometries::Instances`].
    #[inline]
    fn from(value: AccelerationStructureGeometryInstancesData) -> Self {
        Self::Instances(value)
    }
}
vulkan_bitflags! {
    #[non_exhaustive]

    // Flags that describe a geometry within an acceleration structure.
    // Wraps `ash::vk::GeometryFlagsKHR`.
    GeometryFlags = GeometryFlagsKHR(u32);

    // `VK_GEOMETRY_OPAQUE_BIT_KHR`
    OPAQUE = OPAQUE,

    // `VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR`
    NO_DUPLICATE_ANY_HIT_INVOCATION = NO_DUPLICATE_ANY_HIT_INVOCATION,
}
/// A triangle geometry for a bottom-level acceleration structure build.
#[derive(Clone, Debug)]
pub struct AccelerationStructureGeometryTrianglesData {
    /// Flags for this geometry. The default value (see [`Self::new`]) is empty.
    pub flags: GeometryFlags,

    /// The format of each vertex in `vertex_data`.
    pub vertex_format: Format,

    /// The vertex buffer. The default value (see [`Self::new`]) is `None`.
    pub vertex_data: Option<Subbuffer<[u8]>>,

    /// Byte stride between consecutive vertices.
    /// The default value (see [`Self::new`]) is 0.
    pub vertex_stride: u32,

    /// The highest vertex index that may be read from `vertex_data`.
    /// The default value (see [`Self::new`]) is 0.
    pub max_vertex: u32,

    /// An optional index buffer; must be `U16` or `U32` (see `validate`).
    /// The default value (see [`Self::new`]) is `None`.
    pub index_data: Option<IndexBuffer>,

    /// An optional buffer holding a transform to apply to the geometry.
    /// The default value (see [`Self::new`]) is `None`.
    pub transform_data: Option<Subbuffer<TransformMatrix>>,

    pub _ne: crate::NonExhaustive,
}
impl AccelerationStructureGeometryTrianglesData {
    /// Returns a default `AccelerationStructureGeometryTrianglesData` with the
    /// specified `vertex_format`: empty flags, no buffers, zero stride and
    /// zero `max_vertex`.
    #[inline]
    pub fn new(vertex_format: Format) -> Self {
        Self {
            flags: GeometryFlags::empty(),
            vertex_format,
            vertex_data: None,
            vertex_stride: 0,
            max_vertex: 0,
            index_data: None,
            transform_data: None,
            _ne: crate::NonExhaustive(()),
        }
    }

    /// Validates the geometry data against `device`: flags, vertex format and
    /// its format features, vertex stride alignment, and index type.
    pub(crate) fn validate(&self, device: &Device) -> Result<(), Box<ValidationError>> {
        let &Self {
            flags,
            vertex_format,
            vertex_data: _,
            vertex_stride,
            max_vertex: _,
            ref index_data,
            transform_data: _,
            _ne: _,
        } = self;

        flags.validate_device(device).map_err(|err| {
            err.add_context("flags")
                .set_vuids(&["VUID-VkAccelerationStructureGeometryKHR-flags-parameter"])
        })?;

        vertex_format.validate_device(device).map_err(|err| {
            err.add_context("vertex_format").set_vuids(&[
                "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-vertexFormat-parameter",
            ])
        })?;

        // The format must be usable as an acceleration structure vertex
        // buffer. NOTE(review): the `_unchecked` query is presumably sound
        // because `vertex_format` was validated for this device above —
        // confirm against `format_properties_unchecked`'s contract.
        if unsafe {
            !device
                .physical_device()
                .format_properties_unchecked(vertex_format)
                .buffer_features
                .intersects(FormatFeatures::ACCELERATION_STRUCTURE_VERTEX_BUFFER)
        } {
            return Err(Box::new(ValidationError {
                context: "vertex_format".into(),
                problem: "format features do not contain \
                    `FormatFeature::ACCELERATION_STRUCTURE_VERTEX_BUFFER`"
                    .into(),
                vuids: &["VUID-VkAccelerationStructureGeometryTrianglesDataKHR-vertexFormat-03797"],
                ..Default::default()
            }));
        }

        // Size in bits of the smallest non-zero component of the vertex
        // format...
        let smallest_component_bits = vertex_format
            .components()
            .into_iter()
            .filter(|&c| c != 0)
            .min()
            .unwrap() as u32;
        // ...rounded up to a whole number of bytes. The stride must be a
        // multiple of that component size.
        let smallest_component_bytes = ((smallest_component_bits + 7) & !7) / 8;

        if vertex_stride % smallest_component_bytes != 0 {
            return Err(Box::new(ValidationError {
                problem: "`vertex_stride` is not a multiple of the byte size of the \
                    smallest component of `vertex_format`"
                    .into(),
                vuids: &["VUID-VkAccelerationStructureGeometryTrianglesDataKHR-vertexStride-03735"],
                ..Default::default()
            }));
        }

        // Only 16- and 32-bit indices are allowed for triangle geometry.
        if let Some(index_data) = index_data.as_ref() {
            if !matches!(index_data, IndexBuffer::U16(_) | IndexBuffer::U32(_)) {
                return Err(Box::new(ValidationError {
                    context: "index_data".into(),
                    problem: "is not `IndexBuffer::U16` or `IndexBuffer::U32`".into(),
                    vuids: &[
                        "VUID-VkAccelerationStructureGeometryTrianglesDataKHR-indexType-03798",
                    ],
                    ..Default::default()
                }));
            }
        }

        Ok(())
    }
}
/// A 3x4 transformation matrix: three rows of four `f32` values.
pub type TransformMatrix = [[f32; 4]; 3];
/// An AABB geometry for a bottom-level acceleration structure build.
#[derive(Clone, Debug)]
pub struct AccelerationStructureGeometryAabbsData {
    /// Flags for this geometry. The default value is empty.
    pub flags: GeometryFlags,

    /// The buffer holding the AABB data. The default value is `None`.
    pub data: Option<Subbuffer<[u8]>>,

    /// Byte stride between consecutive AABBs; must be a multiple of 8
    /// (see `validate`). The default value is 0.
    pub stride: u32,

    pub _ne: crate::NonExhaustive,
}
impl Default for AccelerationStructureGeometryAabbsData {
    /// An empty AABB geometry description: no flags, no data buffer, zero
    /// stride.
    #[inline]
    fn default() -> Self {
        Self {
            _ne: crate::NonExhaustive(()),
            stride: 0,
            data: None,
            flags: GeometryFlags::empty(),
        }
    }
}
impl AccelerationStructureGeometryAabbsData {
    /// Validates the geometry description against `device`: first the flags,
    /// then the stride alignment.
    pub(crate) fn validate(&self, device: &Device) -> Result<(), Box<ValidationError>> {
        self.flags.validate_device(device).map_err(|err| {
            err.add_context("flags")
                .set_vuids(&["VUID-VkAccelerationStructureGeometryKHR-flags-parameter"])
        })?;

        // Vulkan requires the AABB stride to be 8-byte aligned.
        match self.stride % 8 {
            0 => Ok(()),
            _ => Err(Box::new(ValidationError {
                context: "stride".into(),
                problem: "is not a multiple of 8".into(),
                vuids: &["VUID-VkAccelerationStructureGeometryAabbsDataKHR-stride-03545"],
                ..Default::default()
            })),
        }
    }
}
/// One axis-aligned bounding box, stored as a min/max corner pair.
#[derive(Clone, Copy, Debug, Default, PartialEq, Zeroable, Pod)]
#[repr(C)]
pub struct AabbPositions {
    /// The minimum corner, as `[x, y, z]`.
    pub min: [f32; 3],

    /// The maximum corner, as `[x, y, z]`.
    pub max: [f32; 3],
}
/// The instance geometry of a top-level acceleration structure build.
#[derive(Clone, Debug)]
pub struct AccelerationStructureGeometryInstancesData {
    /// Flags for this geometry. The default value (see [`Self::new`]) is empty.
    pub flags: GeometryFlags,

    /// The instance data, either inline values or pointers to values.
    pub data: AccelerationStructureGeometryInstancesDataType,

    pub _ne: crate::NonExhaustive,
}
impl AccelerationStructureGeometryInstancesData {
    /// Returns a default `AccelerationStructureGeometryInstancesData` with
    /// empty flags and the provided instance `data`.
    #[inline]
    pub fn new(data: AccelerationStructureGeometryInstancesDataType) -> Self {
        Self {
            data,
            flags: GeometryFlags::empty(),
            _ne: crate::NonExhaustive(()),
        }
    }

    /// Validates the flags against `device`; `data` itself has nothing to
    /// check here.
    pub(crate) fn validate(&self, device: &Device) -> Result<(), Box<ValidationError>> {
        self.flags
            .validate_device(device)
            .map_err(|err| {
                err.add_context("flags")
                    .set_vuids(&["VUID-VkAccelerationStructureGeometryKHR-flags-parameter"])
            })
            .map(|_| ())
    }
}
/// The two ways instance data can be supplied to a top-level build.
#[derive(Clone, Debug)]
pub enum AccelerationStructureGeometryInstancesDataType {
    /// A buffer of instance structures, stored inline.
    Values(Option<Subbuffer<[AccelerationStructureInstance]>>),

    /// A buffer of device addresses, each pointing to an instance structure.
    Pointers(Option<Subbuffer<[DeviceSize]>>),
}
impl From<Subbuffer<[AccelerationStructureInstance]>>
    for AccelerationStructureGeometryInstancesDataType
{
    /// Wraps the buffer in `Values(Some(...))`.
    #[inline]
    fn from(value: Subbuffer<[AccelerationStructureInstance]>) -> Self {
        Self::Values(Some(value))
    }
}

impl From<Subbuffer<[DeviceSize]>> for AccelerationStructureGeometryInstancesDataType {
    /// Wraps the buffer in `Pointers(Some(...))`.
    #[inline]
    fn from(value: Subbuffer<[DeviceSize]>) -> Self {
        Self::Pointers(Some(value))
    }
}
/// One instance within a top-level acceleration structure.
///
/// NOTE(review): the `#[repr(C)]` + `Pod` layout presumably matches Vulkan's
/// `VkAccelerationStructureInstanceKHR` — confirm against the spec.
#[derive(Clone, Copy, Debug, PartialEq, Zeroable, Pod)]
#[repr(C)]
pub struct AccelerationStructureInstance {
    /// The 3x4 transform applied to this instance.
    pub transform: TransformMatrix,

    /// Packed pair: a 24-bit custom index and an 8-bit visibility mask
    /// (the default is index 0, mask `0xff` — see `Default`).
    pub instance_custom_index_and_mask: Packed24_8,

    /// Packed pair: a 24-bit shader binding table record offset and 8 bits of
    /// instance flags (convertible from `GeometryInstanceFlags` via `u8`).
    pub instance_shader_binding_table_record_offset_and_flags: Packed24_8,

    /// The device address of the bottom-level structure this instance refers
    /// to; 0 by default.
    pub acceleration_structure_reference: DeviceAddress,
}
impl Default for AccelerationStructureInstance {
#[inline]
fn default() -> Self {
Self {
transform: [
[1.0, 0.0, 0.0, 0.0],
[0.0, 1.0, 0.0, 0.0],
[0.0, 0.0, 1.0, 0.0],
],
instance_custom_index_and_mask: Packed24_8::new(0, 0xff),
instance_shader_binding_table_record_offset_and_flags: Packed24_8::new(0, 0),
acceleration_structure_reference: 0,
}
}
}
vulkan_bitflags! {
    #[non_exhaustive]

    // Flags for an instance in a top-level acceleration structure.
    // Wraps `ash::vk::GeometryInstanceFlagsKHR`.
    GeometryInstanceFlags = GeometryInstanceFlagsKHR(u32);

    // `VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR`
    TRIANGLE_FACING_CULL_DISABLE = TRIANGLE_FACING_CULL_DISABLE,

    // `VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR`
    TRIANGLE_FLIP_FACING = TRIANGLE_FLIP_FACING,

    // `VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR`
    FORCE_OPAQUE = FORCE_OPAQUE,

    // `VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR`
    FORCE_NO_OPAQUE = FORCE_NO_OPAQUE,
}
impl From<GeometryInstanceFlags> for u8 {
    /// Packs the flags into the 8-bit field of
    /// `instance_shader_binding_table_record_offset_and_flags`.
    #[inline]
    fn from(value: GeometryInstanceFlags) -> Self {
        // NOTE(review): the `as` cast silently truncates to the low 8 bits —
        // assumes all defined flag bits fit in one byte; confirm against the
        // ash flag values.
        value.0 as u8
    }
}
/// Counts and offsets for an acceleration structure build operation.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Zeroable, Pod)]
#[repr(C)]
pub struct AccelerationStructureBuildRangeInfo {
    /// The number of primitives to build.
    pub primitive_count: u32,

    /// Byte offset into the geometry data where the primitives start.
    pub primitive_offset: u32,

    /// Index of the first vertex to build from (triangle geometry).
    pub first_vertex: u32,

    /// Byte offset into the transform data (triangle geometry).
    pub transform_offset: u32,
}
/// Parameters for copying one acceleration structure into another.
#[derive(Clone, Debug)]
pub struct CopyAccelerationStructureInfo {
    /// The structure to copy from.
    pub src: Arc<AccelerationStructure>,

    /// The structure to copy into; must not share `src`'s buffer
    /// (see `validate`).
    pub dst: Arc<AccelerationStructure>,

    /// How the copy is performed; must be `Clone` or `Compact`
    /// (see `validate`). The default value (see [`Self::new`]) is
    /// [`CopyAccelerationStructureMode::Clone`].
    pub mode: CopyAccelerationStructureMode,

    pub _ne: crate::NonExhaustive,
}
impl CopyAccelerationStructureInfo {
    /// Returns a default `CopyAccelerationStructureInfo` with the specified
    /// `src` and `dst`, using `Clone` mode.
    #[inline]
    pub fn new(src: Arc<AccelerationStructure>, dst: Arc<AccelerationStructure>) -> Self {
        Self {
            src,
            dst,
            mode: CopyAccelerationStructureMode::Clone,
            _ne: crate::NonExhaustive(()),
        }
    }

    /// Validates the copy info against `device`.
    ///
    /// Panics (rather than erroring) if `src` or `dst` belongs to a different
    /// device.
    pub(crate) fn validate(&self, device: &Device) -> Result<(), Box<ValidationError>> {
        let &Self {
            ref src,
            ref dst,
            mode,
            _ne: _,
        } = self;

        assert_eq!(device, src.device().as_ref());
        assert_eq!(device, dst.device().as_ref());

        mode.validate_device(device).map_err(|err| {
            err.add_context("mode")
                .set_vuids(&["VUID-VkCopyAccelerationStructureInfoKHR-mode-parameter"])
        })?;

        // Structure-to-structure copies only support `Clone` and `Compact`.
        if !matches!(
            mode,
            CopyAccelerationStructureMode::Compact | CopyAccelerationStructureMode::Clone
        ) {
            return Err(Box::new(ValidationError {
                context: "mode".into(),
                problem: "is not `CopyAccelerationStructureMode::Compact` or \
                    `CopyAccelerationStructureMode::Clone`"
                    .into(),
                vuids: &["VUID-VkCopyAccelerationStructureInfoKHR-mode-03410"],
                ..Default::default()
            }));
        }

        // Source and destination storage must not alias.
        if src.buffer() == dst.buffer() {
            return Err(Box::new(ValidationError {
                problem: "`src` and `dst` share the same buffer".into(),
                vuids: &["VUID-VkCopyAccelerationStructureInfoKHR-dst-07791"],
                ..Default::default()
            }));
        }

        Ok(())
    }
}
/// Parameters for serializing an acceleration structure into a buffer.
#[derive(Clone, Debug)]
pub struct CopyAccelerationStructureToMemoryInfo {
    /// The structure to serialize.
    pub src: Arc<AccelerationStructure>,

    /// The buffer to write the serialized data into.
    pub dst: Subbuffer<[u8]>,

    /// How the copy is performed; must be `Serialize` (see `validate`).
    /// The default value (see [`Self::new`]) is
    /// [`CopyAccelerationStructureMode::Serialize`].
    pub mode: CopyAccelerationStructureMode,

    pub _ne: crate::NonExhaustive,
}
impl CopyAccelerationStructureToMemoryInfo {
    /// Returns a default `CopyAccelerationStructureToMemoryInfo` with the
    /// specified `src` and `dst`, using `Serialize` mode.
    #[inline]
    pub fn new(src: Arc<AccelerationStructure>, dst: Subbuffer<[u8]>) -> Self {
        Self {
            mode: CopyAccelerationStructureMode::Serialize,
            src,
            dst,
            _ne: crate::NonExhaustive(()),
        }
    }

    /// Validates the copy info against `device`.
    ///
    /// Panics (rather than erroring) if `src` or `dst` belongs to a different
    /// device.
    pub(crate) fn validate(&self, device: &Device) -> Result<(), Box<ValidationError>> {
        assert_eq!(device, self.src.device().as_ref());
        assert_eq!(device, self.dst.device().as_ref());

        self.mode.validate_device(device).map_err(|err| {
            err.add_context("mode")
                .set_vuids(&["VUID-VkCopyAccelerationStructureToMemoryInfoKHR-mode-parameter"])
        })?;

        // Structure-to-memory copies only support serialization.
        match self.mode {
            CopyAccelerationStructureMode::Serialize => Ok(()),
            _ => Err(Box::new(ValidationError {
                context: "mode".into(),
                problem: "is not `CopyAccelerationStructureMode::Serialize`".into(),
                vuids: &["VUID-VkCopyAccelerationStructureToMemoryInfoKHR-mode-03412"],
                ..Default::default()
            })),
        }
    }
}
/// Parameters for deserializing a buffer into an acceleration structure.
#[derive(Clone, Debug)]
pub struct CopyMemoryToAccelerationStructureInfo {
    /// The buffer holding the serialized data.
    pub src: Subbuffer<[u8]>,

    /// The structure to deserialize into.
    pub dst: Arc<AccelerationStructure>,

    /// How the copy is performed; must be `Deserialize` (see `validate`).
    /// The default value (see [`Self::new`]) is
    /// [`CopyAccelerationStructureMode::Deserialize`].
    pub mode: CopyAccelerationStructureMode,

    pub _ne: crate::NonExhaustive,
}
impl CopyMemoryToAccelerationStructureInfo {
    /// Returns a default `CopyMemoryToAccelerationStructureInfo` with the
    /// specified `src` and `dst`, using `Deserialize` mode.
    #[inline]
    pub fn new(src: Subbuffer<[u8]>, dst: Arc<AccelerationStructure>) -> Self {
        Self {
            src,
            dst,
            mode: CopyAccelerationStructureMode::Deserialize,
            _ne: crate::NonExhaustive(()),
        }
    }

    /// Validates the copy info against `device`.
    ///
    /// Panics (rather than erroring) if `src` or `dst` belongs to a different
    /// device.
    pub(crate) fn validate(&self, device: &Device) -> Result<(), Box<ValidationError>> {
        let &Self {
            ref src,
            ref dst,
            mode,
            _ne: _,
        } = self;

        assert_eq!(device, src.device().as_ref());
        assert_eq!(device, dst.device().as_ref());

        mode.validate_device(device).map_err(|err| {
            err.add_context("mode")
                .set_vuids(&["VUID-VkCopyMemoryToAccelerationStructureInfoKHR-mode-parameter"])
        })?;

        // Memory-to-structure copies only support deserialization.
        if !matches!(mode, CopyAccelerationStructureMode::Deserialize) {
            return Err(Box::new(ValidationError {
                context: "mode".into(),
                problem: "is not `CopyAccelerationStructureMode::Deserialize`".into(),
                vuids: &["VUID-VkCopyMemoryToAccelerationStructureInfoKHR-mode-03413"],
                ..Default::default()
            }));
        }

        Ok(())
    }
}
vulkan_enum! {
    #[non_exhaustive]

    // How an acceleration structure copy operation is performed.
    // Wraps `ash::vk::CopyAccelerationStructureModeKHR`.
    CopyAccelerationStructureMode = CopyAccelerationStructureModeKHR(i32);

    // `VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR`
    Clone = CLONE,

    // `VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR`
    Compact = COMPACT,

    // `VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR`
    Serialize = SERIALIZE,

    // `VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR`
    Deserialize = DESERIALIZE,
}
vulkan_enum! {
    #[non_exhaustive]

    // Where an acceleration structure build will take place.
    // Wraps `ash::vk::AccelerationStructureBuildTypeKHR`.
    AccelerationStructureBuildType = AccelerationStructureBuildTypeKHR(i32);

    // `VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR`
    Host = HOST,

    // `VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR`
    Device = DEVICE,

    // `VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR`
    HostOrDevice = HOST_OR_DEVICE,
}
/// The memory requirements reported for an acceleration structure build.
#[derive(Clone, Debug)]
pub struct AccelerationStructureBuildSizesInfo {
    /// The required size of the acceleration structure itself.
    pub acceleration_structure_size: DeviceSize,

    /// The required size of the scratch buffer for an update operation.
    pub update_scratch_size: DeviceSize,

    /// The required size of the scratch buffer for a build operation.
    pub build_scratch_size: DeviceSize,

    pub _ne: crate::NonExhaustive,
}