use crate::RawPtr;
use crate::prelude::*;
use crate::vk;
use alloc::vec::Vec;
use core::mem;

impl crate::nv::ray_tracing::Device {
    #[inline]
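    /// Wraps `vkCreateAccelerationStructureNV`; returns the new [`vk::AccelerationStructureNV`] on success.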
    pub unsafe fn create_acceleration_structure(
        &self,
        create_info: &vk::AccelerationStructureCreateInfoNV<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::AccelerationStructureNV> {
        unsafe {
            let mut accel_struct = mem::MaybeUninit::uninit();
            (self.fp.create_acceleration_structure_nv)(
                self.handle,
                create_info,
                allocation_callbacks.as_raw_ptr(),
                accel_struct.as_mut_ptr(),
            )
            .assume_init_on_success(accel_struct)
        }
    }

    #[inline]
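    /// Wraps `vkDestroyAccelerationStructureNV`.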
    pub unsafe fn destroy_acceleration_structure(
        &self,
        accel_struct: vk::AccelerationStructureNV,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        unsafe {
            (self.fp.destroy_acceleration_structure_nv)(
                self.handle,
                accel_struct,
                allocation_callbacks.as_raw_ptr(),
            );
        }
    }

    #[inline]
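    /// Wraps `vkGetAccelerationStructureMemoryRequirementsNV`.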
    pub unsafe fn get_acceleration_structure_memory_requirements(
        &self,
        info: &vk::AccelerationStructureMemoryRequirementsInfoNV<'_>,
    ) -> vk::MemoryRequirements2KHR<'_> {
        unsafe {
            let mut requirements = Default::default();
            (self.fp.get_acceleration_structure_memory_requirements_nv)(
                self.handle,
                info,
                &mut requirements,
            );
            requirements
        }
    }

    #[inline]
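    /// Wraps `vkBindAccelerationStructureMemoryNV`.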
    pub unsafe fn bind_acceleration_structure_memory(
        &self,
        bind_info: &[vk::BindAccelerationStructureMemoryInfoNV<'_>],
    ) -> VkResult<()> {
        unsafe {
            (self.fp.bind_acceleration_structure_memory_nv)(
                self.handle,
                bind_info.len() as u32,
                bind_info.as_ptr(),
            )
            .result()
        }
    }

    #[inline]
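    /// Wraps `vkCmdBuildAccelerationStructureNV`.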
    pub unsafe fn cmd_build_acceleration_structure(
        &self,
        command_buffer: vk::CommandBuffer,
        info: &vk::AccelerationStructureInfoNV<'_>,
        instance_data: vk::Buffer,
        instance_offset: vk::DeviceSize,
        update: bool,
        dst: vk::AccelerationStructureNV,
        src: vk::AccelerationStructureNV,
        scratch: vk::Buffer,
        scratch_offset: vk::DeviceSize,
    ) {
        unsafe {
            (self.fp.cmd_build_acceleration_structure_nv)(
                command_buffer,
                info,
                instance_data,
                instance_offset,
                if update { vk::TRUE } else { vk::FALSE },
                dst,
                src,
                scratch,
                scratch_offset,
            );
        }
    }

    #[inline]
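    /// Wraps `vkCmdCopyAccelerationStructureNV`.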
    pub unsafe fn cmd_copy_acceleration_structure(
        &self,
        command_buffer: vk::CommandBuffer,
        dst: vk::AccelerationStructureNV,
        src: vk::AccelerationStructureNV,
        mode: vk::CopyAccelerationStructureModeNV,
    ) {
        unsafe {
            (self.fp.cmd_copy_acceleration_structure_nv)(command_buffer, dst, src, mode);
        }
    }

    #[inline]
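    /// Wraps `vkCmdTraceRaysNV`.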
    pub unsafe fn cmd_trace_rays(
        &self,
        command_buffer: vk::CommandBuffer,
        raygen_shader_binding_table_buffer: vk::Buffer,
        raygen_shader_binding_offset: vk::DeviceSize,
        miss_shader_binding_table_buffer: vk::Buffer,
        miss_shader_binding_offset: vk::DeviceSize,
        miss_shader_binding_stride: vk::DeviceSize,
        hit_shader_binding_table_buffer: vk::Buffer,
        hit_shader_binding_offset: vk::DeviceSize,
        hit_shader_binding_stride: vk::DeviceSize,
        callable_shader_binding_table_buffer: vk::Buffer,
        callable_shader_binding_offset: vk::DeviceSize,
        callable_shader_binding_stride: vk::DeviceSize,
        width: u32,
        height: u32,
        depth: u32,
    ) {
        unsafe {
            (self.fp.cmd_trace_rays_nv)(
                command_buffer,
                raygen_shader_binding_table_buffer,
                raygen_shader_binding_offset,
                miss_shader_binding_table_buffer,
                miss_shader_binding_offset,
                miss_shader_binding_stride,
                hit_shader_binding_table_buffer,
                hit_shader_binding_offset,
                hit_shader_binding_stride,
                callable_shader_binding_table_buffer,
                callable_shader_binding_offset,
                callable_shader_binding_stride,
                width,
                height,
                depth,
            );
        }
    }

    #[inline]
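    /// Wraps `vkCreateRayTracingPipelinesNV`.
    ///
    /// On failure the output vector is still returned in the error variant alongside the
    /// [`vk::Result`], since the driver may have successfully created a subset of the
    /// requested pipelines; failed entries are set to a null handle.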
    pub unsafe fn create_ray_tracing_pipelines(
        &self,
        pipeline_cache: vk::PipelineCache,
        create_infos: &[vk::RayTracingPipelineCreateInfoNV<'_>],
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> Result<Vec<vk::Pipeline>, (Vec<vk::Pipeline>, vk::Result)> {
        unsafe {
            let mut pipelines = Vec::with_capacity(create_infos.len());
            let err_code = (self.fp.create_ray_tracing_pipelines_nv)(
                self.handle,
                pipeline_cache,
                create_infos.len() as u32,
                create_infos.as_ptr(),
                allocation_callbacks.as_raw_ptr(),
                pipelines.as_mut_ptr(),
            );
            pipelines.set_len(create_infos.len());
            match err_code {
                vk::Result::SUCCESS => Ok(pipelines),
                _ => Err((pipelines, err_code)),
            }
        }
    }

    #[inline]
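    /// Wraps `vkGetRayTracingShaderGroupHandlesNV`; `data` must be large enough to hold
    /// the handles of `group_count` shader groups.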
    pub unsafe fn get_ray_tracing_shader_group_handles(
        &self,
        pipeline: vk::Pipeline,
        first_group: u32,
        group_count: u32,
        data: &mut [u8],
    ) -> VkResult<()> {
        unsafe {
            (self.fp.get_ray_tracing_shader_group_handles_nv)(
                self.handle,
                pipeline,
                first_group,
                group_count,
                data.len(),
                data.as_mut_ptr().cast(),
            )
            .result()
        }
    }

    #[inline]
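    /// Wraps `vkGetAccelerationStructureHandleNV`; returns the opaque 64-bit handle used
    /// to reference the acceleration structure from instance data.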
    pub unsafe fn get_acceleration_structure_handle(
        &self,
        accel_struct: vk::AccelerationStructureNV,
    ) -> VkResult<u64> {
        unsafe {
            let mut handle = mem::MaybeUninit::<u64>::uninit();
            (self.fp.get_acceleration_structure_handle_nv)(
                self.handle,
                accel_struct,
                size_of_val(&handle),
                handle.as_mut_ptr().cast(),
            )
            .assume_init_on_success(handle)
        }
    }

    #[inline]
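    /// Wraps `vkCmdWriteAccelerationStructuresPropertiesNV`.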
    pub unsafe fn cmd_write_acceleration_structures_properties(
        &self,
        command_buffer: vk::CommandBuffer,
        structures: &[vk::AccelerationStructureNV],
        query_type: vk::QueryType,
        query_pool: vk::QueryPool,
        first_query: u32,
    ) {
        unsafe {
            (self.fp.cmd_write_acceleration_structures_properties_nv)(
                command_buffer,
                structures.len() as u32,
                structures.as_ptr(),
                query_type,
                query_pool,
                first_query,
            );
        }
    }

    #[inline]
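    /// Wraps `vkCompileDeferredNV`.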
    pub unsafe fn compile_deferred(&self, pipeline: vk::Pipeline, shader: u32) -> VkResult<()> {
        unsafe { (self.fp.compile_deferred_nv)(self.handle, pipeline, shader).result() }
    }
}