1use ash::{khr, vk};
2use std::{mem, num::NonZeroU32, path::PathBuf, ptr, sync::Mutex};
3
mod command;
mod descriptor;
mod init;
mod pipeline;
mod resource;
mod surface;

/// Timestamp queries allocated per command buffer: one per pass plus one
/// extra (presumably the trailing end-of-buffer timestamp — confirm in `command`).
const QUERY_POOL_SIZE: usize = crate::limits::PASS_COUNT + 1;
12
/// Errors that can occur while bringing up the Vulkan backend.
#[derive(Debug)]
pub enum PlatformError {
    /// The Vulkan loader library could not be loaded.
    Loading(ash::LoadingError),
    /// Instance/device initialization returned a Vulkan error code.
    Init(vk::Result),
}
18
/// The Vulkan instance plus the instance-level extension entry points
/// resolved on it.
struct Instance {
    core: ash::Instance,
    /// Held to keep the debug-utils instance functions alive; not read here.
    _debug_utils: ash::ext::debug_utils::Instance,
    get_physical_device_properties2: khr::get_physical_device_properties2::Instance,
    get_surface_capabilities2: khr::get_surface_capabilities2::Instance,
    /// `None` when the instance was created without surface/presentation support.
    surface: Option<khr::surface::Instance>,
}
26
/// Entry points of `VK_KHR_acceleration_structure`; present only when the
/// device supports ray tracing (see `Device::ray_tracing`).
#[derive(Clone)]
struct RayTracingDevice {
    acceleration_structure: khr::acceleration_structure::Device,
}

/// Zero-sized marker: its presence signals support for a command-scope
/// feature (semantics defined elsewhere — see `init`/`command`).
#[derive(Clone, Default)]
struct CommandScopeDevice {}
/// Timestamp-query support. `period` is presumably nanoseconds per timestamp
/// tick (`VkPhysicalDeviceLimits::timestampPeriod`) — confirm in `init`.
#[derive(Clone, Default)]
struct TimingDevice {
    period: f32,
}
38
/// Driver-specific workarounds applied on top of standard behavior.
#[derive(Clone)]
struct Workarounds {
    /// Presumably OR-ed into the source access mask of barriers — see `command`.
    extra_sync_src_access: vk::AccessFlags,
    /// Presumably OR-ed into the destination access mask of barriers.
    extra_sync_dst_access: vk::AccessFlags,
    /// Extra flags applied when creating descriptor pools.
    extra_descriptor_pool_create_flags: vk::DescriptorPoolCreateFlags,
}
45
/// The logical device plus all device-level extension entry points.
///
/// Cloned into every `CommandEncoder`, so the struct must stay cheap to clone.
#[derive(Clone)]
struct Device {
    core: ash::Device,
    device_information: crate::DeviceInformation,
    /// `VK_KHR_swapchain`; `None` for headless use.
    swapchain: Option<khr::swapchain::Device>,
    debug_utils: ash::ext::debug_utils::Device,
    timeline_semaphore: khr::timeline_semaphore::Device,
    dynamic_rendering: khr::dynamic_rendering::Device,
    /// `Some` when ray tracing extensions are available.
    ray_tracing: Option<RayTracingDevice>,
    /// `VK_AMD_buffer_marker`; enables the GPU crash handler when present.
    buffer_marker: Option<ash::amd::buffer_marker::Device>,
    shader_info: Option<ash::amd::shader_info::Device>,
    full_screen_exclusive: Option<ash::ext::full_screen_exclusive::Device>,
    command_scope: Option<CommandScopeDevice>,
    /// `Some` when timestamp queries are supported; gates query-pool creation.
    timing: Option<TimingDevice>,
    workarounds: Workarounds,
}
62
/// GPU memory allocation state; guarded by the `Mutex` in `Context`.
struct MemoryManager {
    allocator: gpu_alloc::GpuAllocator<vk::DeviceMemory>,
    /// Live memory blocks, presumably addressed by the `memory_handle`
    /// stored in `Buffer`/`Texture` — confirm in `resource`.
    slab: slab::Slab<gpu_alloc::MemoryBlock<vk::DeviceMemory>>,
    valid_ash_memory_types: u32,
}

/// The single queue used for both submission and presentation.
struct Queue {
    raw: vk::Queue,
    /// Timeline semaphore signaled with `last_progress` on each submit.
    timeline_semaphore: vk::Semaphore,
    /// Binary semaphore chaining a submit to the following present.
    present_semaphore: vk::Semaphore,
    /// Monotonically increasing submission counter (see `submit`).
    last_progress: u64,
}
75
/// Per-image state of a swapchain.
#[derive(Clone, Copy, Debug, Default, PartialEq)]
struct InternalFrame {
    /// Semaphore signaled when this image is acquired from the swapchain.
    acquire_semaphore: vk::Semaphore,
    image: vk::Image,
    view: vk::ImageView,
}

/// Immutable description of the currently configured swapchain.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Swapchain {
    raw: vk::SwapchainKHR,
    format: crate::TextureFormat,
    alpha: crate::AlphaMode,
    target_size: [u16; 2],
}

/// A presentable surface with its swapchain and per-image frame state.
pub struct Surface {
    device: khr::swapchain::Device,
    raw: vk::SurfaceKHR,
    frames: Vec<InternalFrame>,
    /// Semaphore to use for the next image acquisition.
    next_semaphore: vk::Semaphore,
    swapchain: Swapchain,
    /// Whether `VK_EXT_full_screen_exclusive` is in effect for this surface.
    full_screen_exclusive: bool,
}
99
/// Pending presentation recorded on a `CommandEncoder`; consumed by `submit`.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Presentation {
    swapchain: vk::SwapchainKHR,
    image_index: u32,
    acquire_semaphore: vk::Semaphore,
}

/// A swapchain image acquired for rendering, exposing texture/view handles.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct Frame {
    swapchain: Swapchain,
    image_index: u32,
    internal: InternalFrame,
}
113
114impl Frame {
115 pub fn texture(&self) -> Texture {
116 Texture {
117 raw: self.internal.image,
118 memory_handle: !0,
119 target_size: self.swapchain.target_size,
120 format: self.swapchain.format,
121 }
122 }
123
124 pub fn texture_view(&self) -> TextureView {
125 TextureView {
126 raw: self.internal.view,
127 target_size: self.swapchain.target_size,
128 aspects: crate::TexelAspects::COLOR,
129 }
130 }
131}
132
/// Converts a millisecond timeout to the nanosecond value Vulkan expects.
/// The all-ones sentinel (`u32::MAX`, "wait forever") maps to `u64::MAX`.
fn map_timeout(millis: u32) -> u64 {
    match millis {
        u32::MAX => u64::MAX,
        ms => u64::from(ms) * 1_000_000,
    }
}
140
/// The Vulkan backend context: instance, device, queue, and allocator state.
pub struct Context {
    memory: Mutex<MemoryManager>,
    device: Device,
    queue_family_index: u32,
    queue: Mutex<Queue>,
    physical_device: vk::PhysicalDevice,
    /// Flags handed to the naga SPIR-V writer when compiling shaders.
    naga_flags: naga::back::spv::WriterFlags,
    /// When set, presumably a directory to dump generated shaders into — confirm in `pipeline`.
    shader_debug_path: Option<PathBuf>,
    // NOTE(review): field order looks deliberate — Rust drops fields in
    // declaration order, so device state is released before `instance`/`entry`.
    instance: Instance,
    entry: ash::Entry,
}

/// A GPU buffer handle plus its allocation slot and optional host mapping.
#[derive(Clone, Copy, Debug, Hash, PartialEq)]
pub struct Buffer {
    raw: vk::Buffer,
    /// Presumably an index into `MemoryManager::slab`; `!0` means "no owned allocation".
    memory_handle: usize,
    /// Host pointer for mapped memory; null when the buffer is not host-visible.
    mapped_data: *mut u8,
}
159
160impl Default for Buffer {
161 fn default() -> Self {
162 Self {
163 raw: vk::Buffer::null(),
164 memory_handle: !0,
165 mapped_data: ptr::null_mut(),
166 }
167 }
168}
169
impl Buffer {
    /// Raw host pointer to the buffer's mapped memory; null when the buffer
    /// has no host mapping (the `Default` value).
    pub fn data(&self) -> *mut u8 {
        self.mapped_data
    }
}

// SAFETY: `Buffer` only carries a Vulkan handle, an allocation index, and a
// raw mapped pointer; none are dereferenced through `&self` here, so the
// struct itself is safe to move/share across threads. Synchronizing access
// to the mapped memory behind `mapped_data` remains the caller's job.
unsafe impl Send for Buffer {}
unsafe impl Sync for Buffer {}
178
/// A GPU image handle plus the metadata needed to make views of it.
#[derive(Clone, Copy, Debug, Hash, PartialEq)]
pub struct Texture {
    raw: vk::Image,
    /// Allocation slot; `!0` for externally owned images (e.g. swapchain frames).
    memory_handle: usize,
    target_size: [u16; 2],
    format: crate::TextureFormat,
}
186
187impl Default for Texture {
188 fn default() -> Self {
189 Self {
190 raw: vk::Image::default(),
191 memory_handle: !0,
192 target_size: [0; 2],
193 format: crate::TextureFormat::Rgba8Unorm,
194 }
195 }
196}
197
/// A view into a texture, carrying the aspects it covers.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq)]
pub struct TextureView {
    raw: vk::ImageView,
    target_size: [u16; 2],
    aspects: crate::TexelAspects,
}

/// A plain sampler handle.
#[derive(Clone, Copy, Debug, Hash, PartialEq)]
pub struct Sampler {
    raw: vk::Sampler,
}

/// A ray-tracing acceleration structure plus the buffer backing its storage.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq)]
pub struct AccelerationStructure {
    raw: vk::AccelerationStructureKHR,
    buffer: vk::Buffer,
    memory_handle: usize,
}
216
/// A descriptor set layout plus the update template used to fill sets from
/// a flat byte payload.
#[derive(Debug, Default)]
struct DescriptorSetLayout {
    raw: vk::DescriptorSetLayout,
    update_template: vk::DescriptorUpdateTemplate,
    /// Byte size of the update-template payload; 0 marks an empty layout
    /// (see `is_empty`).
    template_size: u32,
    /// Byte offsets of each binding within the template payload.
    template_offsets: Box<[u32]>,
}
224
225impl DescriptorSetLayout {
226 fn is_empty(&self) -> bool {
227 self.template_size == 0
228 }
229}
230
/// A pipeline layout plus the per-set layouts it was created from.
#[derive(Debug)]
struct PipelineLayout {
    raw: vk::PipelineLayout,
    descriptor_set_layouts: Vec<DescriptorSetLayout>,
}

/// Borrowed scratch state handed to binding code: the mutable
/// update-template payload and the binding offsets into it.
pub struct PipelineContext<'a> {
    update_data: &'a mut [u8],
    template_offsets: &'a [u32],
}

/// A compiled compute pipeline, its layout, and its workgroup size.
#[derive(Debug)]
pub struct ComputePipeline {
    raw: vk::Pipeline,
    layout: PipelineLayout,
    /// Workgroup size as `[x, y, z]` — presumably baked in at shader
    /// compilation; confirm in `pipeline`.
    wg_size: [u32; 3],
}
248
249impl ComputePipeline {
250 pub fn get_workgroup_size(&self) -> [u32; 3] {
251 self.wg_size
252 }
253}
254
/// A compiled graphics pipeline and its layout.
pub struct RenderPipeline {
    raw: vk::Pipeline,
    layout: PipelineLayout,
}

/// One command buffer with its per-buffer descriptor pool and (optional)
/// timestamp query pool.
#[derive(Debug)]
struct CommandBuffer {
    raw: vk::CommandBuffer,
    descriptor_pool: descriptor::DescriptorPool,
    /// Null when timings are disabled (see `create_command_encoder`).
    query_pool: vk::QueryPool,
    /// Names of the passes timed in this buffer, in query order.
    timed_pass_names: Vec<String>,
}

/// State for GPU crash diagnosis via `VK_AMD_buffer_marker`.
struct CrashHandler {
    name: String,
    /// Small shared-memory buffer the GPU writes progress markers into.
    marker_buf: Buffer,
    raw_string: Box<[u8]>,
    next_offset: usize,
}

/// Multi-buffered command encoder: owns the command pool, the per-buffer
/// state, and scratch storage reused across passes.
pub struct CommandEncoder {
    pool: vk::CommandPool,
    buffers: Box<[CommandBuffer]>,
    device: Device,
    /// Scratch storage for descriptor update-template payloads.
    update_data: Vec<u8>,
    /// Presentation request recorded this frame; taken by `submit`.
    present: Option<Presentation>,
    /// `Some` only when `VK_AMD_buffer_marker` is available.
    crash_handler: Option<CrashHandler>,
    temp_label: Vec<u8>,
    timings: crate::Timings,
}
/// Encoder for transfer (copy) commands on a single command buffer.
pub struct TransferCommandEncoder<'a> {
    raw: vk::CommandBuffer,
    device: &'a Device,
}
/// Encoder for acceleration-structure build commands.
pub struct AccelerationStructureCommandEncoder<'a> {
    raw: vk::CommandBuffer,
    device: &'a Device,
}
/// Encoder for a compute pass.
pub struct ComputeCommandEncoder<'a> {
    cmd_buf: &'a mut CommandBuffer,
    device: &'a Device,
    update_data: &'a mut Vec<u8>,
}
/// Encoder for a render pass.
pub struct RenderCommandEncoder<'a> {
    cmd_buf: &'a mut CommandBuffer,
    device: &'a Device,
    update_data: &'a mut Vec<u8>,
}
/// Encoder with a pipeline bound; carries the layout and bind point needed
/// to write descriptor data for that pipeline.
pub struct PipelineEncoder<'a, 'p> {
    cmd_buf: &'a mut CommandBuffer,
    layout: &'p PipelineLayout,
    bind_point: vk::PipelineBindPoint,
    device: &'a Device,
    update_data: &'a mut Vec<u8>,
}
310
/// An opaque point on the queue's timeline; `wait_for` blocks until the GPU
/// has signaled the timeline semaphore past this value.
#[derive(Clone, Debug)]
pub struct SyncPoint {
    progress: u64,
}
315
#[hidden_trait::expose]
impl crate::traits::CommandDevice for Context {
    type CommandEncoder = CommandEncoder;
    type SyncPoint = SyncPoint;

    /// Creates a command pool with `desc.buffer_count` command buffers,
    /// each with its own descriptor pool and (when timing is enabled) a
    /// timestamp query pool.
    fn create_command_encoder(&self, desc: super::CommandEncoderDesc) -> CommandEncoder {
        // Heuristic sizing for the descriptor pools; NOTE(review): the exact
        // figure looks arbitrary — confirm against the `descriptor` module.
        const ROUGH_SET_COUNT: u32 = 60000;
        let mut descriptor_sizes = vec![
            vk::DescriptorPoolSize {
                ty: vk::DescriptorType::INLINE_UNIFORM_BLOCK_EXT,
                descriptor_count: ROUGH_SET_COUNT * crate::limits::PLAIN_DATA_SIZE,
            },
            vk::DescriptorPoolSize {
                ty: vk::DescriptorType::STORAGE_BUFFER,
                descriptor_count: ROUGH_SET_COUNT,
            },
            vk::DescriptorPoolSize {
                ty: vk::DescriptorType::SAMPLED_IMAGE,
                descriptor_count: 2 * ROUGH_SET_COUNT,
            },
            vk::DescriptorPoolSize {
                ty: vk::DescriptorType::SAMPLER,
                descriptor_count: ROUGH_SET_COUNT,
            },
            vk::DescriptorPoolSize {
                ty: vk::DescriptorType::STORAGE_IMAGE,
                descriptor_count: ROUGH_SET_COUNT,
            },
        ];
        // Acceleration-structure descriptors only exist with ray tracing.
        if self.device.ray_tracing.is_some() {
            descriptor_sizes.push(vk::DescriptorPoolSize {
                ty: vk::DescriptorType::ACCELERATION_STRUCTURE_KHR,
                descriptor_count: ROUGH_SET_COUNT,
            });
        }

        // Buffers are re-recorded each frame, hence RESET_COMMAND_BUFFER.
        let pool_info = vk::CommandPoolCreateInfo {
            flags: vk::CommandPoolCreateFlags::RESET_COMMAND_BUFFER,
            ..Default::default()
        };
        let pool = unsafe {
            self.device
                .core
                .create_command_pool(&pool_info, None)
                .unwrap()
        };
        let cmd_buf_info = vk::CommandBufferAllocateInfo {
            command_pool: pool,
            command_buffer_count: desc.buffer_count,
            ..Default::default()
        };
        let cmd_buffers = unsafe {
            self.device
                .core
                .allocate_command_buffers(&cmd_buf_info)
                .unwrap()
        };

        let buffers = cmd_buffers
            .into_iter()
            .map(|raw| {
                // Label each buffer with the encoder name for debug tooling.
                if !desc.name.is_empty() {
                    self.set_object_name(raw, desc.name);
                };
                let descriptor_pool = self.device.create_descriptor_pool();
                // A timestamp pool per buffer, only when timing is supported.
                let query_pool = if self.device.timing.is_some() {
                    let query_pool_info = vk::QueryPoolCreateInfo::default()
                        .query_type(vk::QueryType::TIMESTAMP)
                        .query_count(QUERY_POOL_SIZE as u32);
                    unsafe {
                        self.device
                            .core
                            .create_query_pool(&query_pool_info, None)
                            .unwrap()
                    }
                } else {
                    vk::QueryPool::null()
                };
                CommandBuffer {
                    raw,
                    descriptor_pool,
                    query_pool,
                    timed_pass_names: Vec::new(),
                }
            })
            .collect();

        // Crash diagnostics need VK_AMD_buffer_marker: a tiny shared buffer
        // the GPU writes progress markers into.
        let crash_handler = if self.device.buffer_marker.is_some() {
            Some(CrashHandler {
                name: desc.name.to_string(),
                marker_buf: self.create_buffer(crate::BufferDesc {
                    name: "_marker",
                    size: 4,
                    memory: crate::Memory::Shared,
                }),
                raw_string: vec![0; 0x1000].into_boxed_slice(),
                next_offset: 0,
            })
        } else {
            None
        };

        CommandEncoder {
            pool,
            buffers,
            device: self.device.clone(),
            update_data: Vec::new(),
            present: None,
            crash_handler,
            temp_label: Vec::new(),
            timings: Default::default(),
        }
    }

    /// Frees every command buffer, its descriptor/query pools, the command
    /// pool itself, and the crash-handler marker buffer if any.
    fn destroy_command_encoder(&self, command_encoder: &mut CommandEncoder) {
        for cmd_buf in command_encoder.buffers.iter_mut() {
            let raw_cmd_buffers = [cmd_buf.raw];
            unsafe {
                self.device
                    .core
                    .free_command_buffers(command_encoder.pool, &raw_cmd_buffers);
            }
            self.device
                .destroy_descriptor_pool(&mut cmd_buf.descriptor_pool);
            // Query pools only exist when timing was enabled at creation.
            if self.device.timing.is_some() {
                unsafe {
                    self.device
                        .core
                        .destroy_query_pool(cmd_buf.query_pool, None);
                }
            }
        }
        // `mem::take` nulls out the pool handle so a double destroy is benign.
        unsafe {
            self.device
                .core
                .destroy_command_pool(mem::take(&mut command_encoder.pool), None)
        };
        if let Some(crash_handler) = command_encoder.crash_handler.take() {
            self.destroy_buffer(crash_handler.marker_buf);
        };
    }

    /// Submits the encoder's recorded work, advancing the queue timeline.
    /// If a presentation was recorded, waits on the swapchain acquire
    /// semaphore, signals the present semaphore, and queues the present.
    fn submit(&self, encoder: &mut CommandEncoder) -> SyncPoint {
        let raw_cmd_buf = encoder.finish();
        let mut queue = self.queue.lock().unwrap();
        queue.last_progress += 1;
        let progress = queue.last_progress;
        let command_buffers = [raw_cmd_buf];
        // Fixed-size arrays sliced by `num_*` below: index 0 is the (binary)
        // acquire semaphore, signal index 1 is the (binary) present semaphore.
        // Binary semaphores use a timeline value of 0 in the timeline info.
        let wait_values_all = [0];
        let mut wait_semaphores_all = [vk::Semaphore::null()];
        let wait_stages = [vk::PipelineStageFlags::ALL_COMMANDS];
        let signal_semaphores_all = [queue.timeline_semaphore, queue.present_semaphore];
        let signal_values_all = [progress, 0];
        let (num_wait_semaphores, num_signal_sepahores) = match encoder.present {
            Some(ref presentation) => {
                wait_semaphores_all[0] = presentation.acquire_semaphore;
                (1, 2)
            }
            None => (0, 1),
        };
        let mut timeline_info = vk::TimelineSemaphoreSubmitInfo::default()
            .wait_semaphore_values(&wait_values_all[..num_wait_semaphores])
            .signal_semaphore_values(&signal_values_all[..num_signal_sepahores]);
        let vk_info = vk::SubmitInfo::default()
            .command_buffers(&command_buffers)
            .wait_semaphores(&wait_semaphores_all[..num_wait_semaphores])
            .wait_dst_stage_mask(&wait_stages[..num_wait_semaphores])
            .signal_semaphores(&signal_semaphores_all[..num_signal_sepahores])
            .push_next(&mut timeline_info);
        let ret = unsafe {
            self.device
                .core
                .queue_submit(queue.raw, &[vk_info], vk::Fence::null())
        };
        // Inspect the marker buffer on device-lost before panicking.
        encoder.check_gpu_crash(ret);

        if let Some(presentation) = encoder.present.take() {
            let khr_swapchain = self.device.swapchain.as_ref().unwrap();
            let swapchains = [presentation.swapchain];
            let image_indices = [presentation.image_index];
            // Present waits on the semaphore the submit above just signaled.
            let wait_semaphores = [queue.present_semaphore];
            let present_info = vk::PresentInfoKHR::default()
                .swapchains(&swapchains)
                .image_indices(&image_indices)
                .wait_semaphores(&wait_semaphores);
            let ret = unsafe { khr_swapchain.queue_present(queue.raw, &present_info) };
            let _ = encoder.check_gpu_crash(ret);
        }

        SyncPoint { progress }
    }

    /// Blocks until the queue timeline reaches `sp.progress`, or the timeout
    /// (milliseconds; `!0` = infinite) elapses. Returns `true` on success.
    fn wait_for(&self, sp: &SyncPoint, timeout_ms: u32) -> bool {
        // Copy the handle out so the queue lock is not held while waiting.
        let timeline_semaphore = self.queue.lock().unwrap().timeline_semaphore;
        let semaphores = [timeline_semaphore];
        let semaphore_values = [sp.progress];
        let wait_info = vk::SemaphoreWaitInfoKHR::default()
            .semaphores(&semaphores)
            .values(&semaphore_values);
        let timeout_ns = map_timeout(timeout_ms);
        unsafe {
            self.device
                .timeline_semaphore
                .wait_semaphores(&wait_info, timeout_ns)
                .is_ok()
        }
    }
}
528
529fn map_texture_format(format: crate::TextureFormat) -> vk::Format {
530 use crate::TextureFormat as Tf;
531 match format {
532 Tf::R8Unorm => vk::Format::R8_UNORM,
533 Tf::Rg8Unorm => vk::Format::R8G8_UNORM,
534 Tf::Rg8Snorm => vk::Format::R8G8_SNORM,
535 Tf::Rgba8Unorm => vk::Format::R8G8B8A8_UNORM,
536 Tf::Rgba8UnormSrgb => vk::Format::R8G8B8A8_SRGB,
537 Tf::Bgra8Unorm => vk::Format::B8G8R8A8_UNORM,
538 Tf::Bgra8UnormSrgb => vk::Format::B8G8R8A8_SRGB,
539 Tf::Rgba8Snorm => vk::Format::R8G8B8A8_SNORM,
540 Tf::R16Float => vk::Format::R16_SFLOAT,
541 Tf::Rg16Float => vk::Format::R16G16_SFLOAT,
542 Tf::Rgba16Float => vk::Format::R16G16B16A16_SFLOAT,
543 Tf::R32Float => vk::Format::R32_SFLOAT,
544 Tf::Rg32Float => vk::Format::R32G32_SFLOAT,
545 Tf::Rgba32Float => vk::Format::R32G32B32A32_SFLOAT,
546 Tf::R32Uint => vk::Format::R32_UINT,
547 Tf::Rg32Uint => vk::Format::R32G32_UINT,
548 Tf::Rgba32Uint => vk::Format::R32G32B32A32_UINT,
549 Tf::Depth32Float => vk::Format::D32_SFLOAT,
550 Tf::Bc1Unorm => vk::Format::BC1_RGBA_SRGB_BLOCK,
551 Tf::Bc1UnormSrgb => vk::Format::BC1_RGBA_UNORM_BLOCK,
552 Tf::Bc2Unorm => vk::Format::BC2_UNORM_BLOCK,
553 Tf::Bc2UnormSrgb => vk::Format::BC2_SRGB_BLOCK,
554 Tf::Bc3Unorm => vk::Format::BC3_UNORM_BLOCK,
555 Tf::Bc3UnormSrgb => vk::Format::BC3_SRGB_BLOCK,
556 Tf::Bc4Unorm => vk::Format::BC4_UNORM_BLOCK,
557 Tf::Bc4Snorm => vk::Format::BC4_SNORM_BLOCK,
558 Tf::Bc5Unorm => vk::Format::BC5_UNORM_BLOCK,
559 Tf::Bc5Snorm => vk::Format::BC5_SNORM_BLOCK,
560 Tf::Bc6hUfloat => vk::Format::BC6H_UFLOAT_BLOCK,
561 Tf::Bc6hFloat => vk::Format::BC6H_SFLOAT_BLOCK,
562 Tf::Bc7Unorm => vk::Format::BC7_UNORM_BLOCK,
563 Tf::Bc7UnormSrgb => vk::Format::BC7_SRGB_BLOCK,
564 Tf::Rgb10a2Unorm => vk::Format::A2B10G10R10_UNORM_PACK32,
565 Tf::Rg11b10Ufloat => vk::Format::B10G11R11_UFLOAT_PACK32,
566 Tf::Rgb9e5Ufloat => vk::Format::E5B9G9R9_UFLOAT_PACK32,
567 }
568}
569
570fn map_aspects(aspects: crate::TexelAspects) -> vk::ImageAspectFlags {
571 let mut flags = vk::ImageAspectFlags::empty();
572 if aspects.contains(crate::TexelAspects::COLOR) {
573 flags |= vk::ImageAspectFlags::COLOR;
574 }
575 if aspects.contains(crate::TexelAspects::DEPTH) {
576 flags |= vk::ImageAspectFlags::DEPTH;
577 }
578 if aspects.contains(crate::TexelAspects::STENCIL) {
579 flags |= vk::ImageAspectFlags::STENCIL;
580 }
581 flags
582}
583
584fn map_extent_3d(extent: &crate::Extent) -> vk::Extent3D {
585 vk::Extent3D {
586 width: extent.width,
587 height: extent.height,
588 depth: extent.depth,
589 }
590}
591
592fn map_subresource_range(
593 subresources: &crate::TextureSubresources,
594 aspects: crate::TexelAspects,
595) -> vk::ImageSubresourceRange {
596 vk::ImageSubresourceRange {
597 aspect_mask: map_aspects(aspects),
598 base_mip_level: subresources.base_mip_level,
599 level_count: subresources
600 .mip_level_count
601 .map_or(vk::REMAINING_MIP_LEVELS, NonZeroU32::get),
602 base_array_layer: subresources.base_array_layer,
603 layer_count: subresources
604 .array_layer_count
605 .map_or(vk::REMAINING_ARRAY_LAYERS, NonZeroU32::get),
606 }
607}
608
/// One-to-one translation of the crate's compare function into `vk::CompareOp`.
fn map_comparison(fun: crate::CompareFunction) -> vk::CompareOp {
    use crate::CompareFunction as Cf;
    match fun {
        Cf::Never => vk::CompareOp::NEVER,
        Cf::Less => vk::CompareOp::LESS,
        Cf::LessEqual => vk::CompareOp::LESS_OR_EQUAL,
        Cf::Equal => vk::CompareOp::EQUAL,
        Cf::GreaterEqual => vk::CompareOp::GREATER_OR_EQUAL,
        Cf::Greater => vk::CompareOp::GREATER,
        Cf::NotEqual => vk::CompareOp::NOT_EQUAL,
        Cf::Always => vk::CompareOp::ALWAYS,
    }
}
622
623fn map_index_type(index_type: crate::IndexType) -> vk::IndexType {
624 match index_type {
625 crate::IndexType::U16 => vk::IndexType::UINT16,
626 crate::IndexType::U32 => vk::IndexType::UINT32,
627 }
628}
629
/// One-to-one translation of the crate's vertex attribute format into
/// the matching `vk::Format`.
fn map_vertex_format(vertex_format: crate::VertexFormat) -> vk::Format {
    use crate::VertexFormat as Vf;
    match vertex_format {
        Vf::F32 => vk::Format::R32_SFLOAT,
        Vf::F32Vec2 => vk::Format::R32G32_SFLOAT,
        Vf::F32Vec3 => vk::Format::R32G32B32_SFLOAT,
        Vf::F32Vec4 => vk::Format::R32G32B32A32_SFLOAT,
        Vf::U32 => vk::Format::R32_UINT,
        Vf::U32Vec2 => vk::Format::R32G32_UINT,
        Vf::U32Vec3 => vk::Format::R32G32B32_UINT,
        Vf::U32Vec4 => vk::Format::R32G32B32A32_UINT,
        Vf::I32 => vk::Format::R32_SINT,
        Vf::I32Vec2 => vk::Format::R32G32_SINT,
        Vf::I32Vec3 => vk::Format::R32G32B32_SINT,
        Vf::I32Vec4 => vk::Format::R32G32B32A32_SINT,
    }
}
647
/// Prepared inputs for a bottom-level acceleration structure build.
struct BottomLevelAccelerationStructureInput<'a> {
    max_primitive_counts: Box<[u32]>,
    build_range_infos: Box<[vk::AccelerationStructureBuildRangeInfoKHR]>,
    /// Never read directly: kept alive because `build_info.p_geometries`
    /// is a raw pointer into this allocation.
    _geometries: Box<[vk::AccelerationStructureGeometryKHR<'a>]>,
    build_info: vk::AccelerationStructureBuildGeometryInfoKHR<'a>,
}
654
impl Device {
    /// Returns the GPU virtual address of `piece`: the buffer's base device
    /// address plus the piece's byte offset. Assumes the buffer was created
    /// with device-address usage — TODO confirm in `resource`.
    fn get_device_address(&self, piece: &crate::BufferPiece) -> u64 {
        let vk_info = vk::BufferDeviceAddressInfo {
            buffer: piece.buffer.raw,
            ..Default::default()
        };
        let base = unsafe { self.core.get_buffer_device_address(&vk_info) };
        base + piece.offset
    }

    /// Translates mesh descriptions into the geometry arrays and build info
    /// for a bottom-level acceleration structure (triangles only).
    fn map_acceleration_structure_meshes(
        &self,
        meshes: &[crate::AccelerationStructureMesh],
    ) -> BottomLevelAccelerationStructureInput {
        let mut total_primitive_count = 0;
        let mut max_primitive_counts = Vec::with_capacity(meshes.len());
        let mut build_range_infos = Vec::with_capacity(meshes.len());
        // Sized exactly and pushed once per mesh, so len == capacity at the
        // end: `into_boxed_slice` below cannot reallocate, keeping the raw
        // `p_geometries` pointer captured in `build_info` valid.
        let mut geometries = Vec::with_capacity(meshes.len());
        for mesh in meshes {
            total_primitive_count += mesh.triangle_count;
            max_primitive_counts.push(mesh.triangle_count);
            build_range_infos.push(vk::AccelerationStructureBuildRangeInfoKHR {
                primitive_count: mesh.triangle_count,
                primitive_offset: 0,
                first_vertex: 0,
                transform_offset: 0,
            });

            let mut triangles = vk::AccelerationStructureGeometryTrianglesDataKHR {
                vertex_format: map_vertex_format(mesh.vertex_format),
                vertex_data: {
                    // Vertex data must be 4-byte aligned for the AS build.
                    let device_address = self.get_device_address(&mesh.vertex_data);
                    assert!(
                        device_address & 0x3 == 0,
                        "Vertex data address {device_address} is not aligned"
                    );
                    vk::DeviceOrHostAddressConstKHR { device_address }
                },
                vertex_stride: mesh.vertex_stride as u64,
                // Highest vertex index addressable; 0 for an empty mesh.
                max_vertex: mesh.vertex_count.saturating_sub(1),
                ..Default::default()
            };
            // Indices are optional; absent means non-indexed triangles.
            if let Some(index_type) = mesh.index_type {
                let device_address = self.get_device_address(&mesh.index_data);
                assert!(
                    device_address & 0x3 == 0,
                    "Index data address {device_address} is not aligned"
                );
                triangles.index_type = map_index_type(index_type);
                triangles.index_data = vk::DeviceOrHostAddressConstKHR { device_address };
            }
            // A null transform buffer means "no per-geometry transform".
            if mesh.transform_data.buffer.raw != vk::Buffer::null() {
                let device_address = self.get_device_address(&mesh.transform_data);
                // Transform matrices must be 16-byte aligned.
                assert!(
                    device_address & 0xF == 0,
                    "Transform data address {device_address} is not aligned"
                );
                triangles.transform_data = vk::DeviceOrHostAddressConstKHR { device_address };
            }

            let geometry = vk::AccelerationStructureGeometryKHR {
                geometry_type: vk::GeometryTypeKHR::TRIANGLES,
                geometry: vk::AccelerationStructureGeometryDataKHR { triangles },
                // OPAQUE skips any-hit shader invocations for this geometry.
                flags: if mesh.is_opaque {
                    vk::GeometryFlagsKHR::OPAQUE
                } else {
                    vk::GeometryFlagsKHR::empty()
                },
                ..Default::default()
            };
            geometries.push(geometry);
        }
        let build_info = vk::AccelerationStructureBuildGeometryInfoKHR {
            ty: vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL,
            flags: vk::BuildAccelerationStructureFlagsKHR::PREFER_FAST_TRACE,
            mode: vk::BuildAccelerationStructureModeKHR::BUILD,
            geometry_count: geometries.len() as u32,
            // Raw pointer into `geometries`; stays valid per the note above.
            p_geometries: geometries.as_ptr(),
            ..Default::default()
        };

        log::debug!(
            "BLAS total {} primitives in {} geometries",
            total_primitive_count,
            geometries.len()
        );
        BottomLevelAccelerationStructureInput {
            max_primitive_counts: max_primitive_counts.into_boxed_slice(),
            build_range_infos: build_range_infos.into_boxed_slice(),
            _geometries: geometries.into_boxed_slice(),
            build_info,
        }
    }
}
748}