use ash::vk;
use std::{str, time::Duration};

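// Crash markers are written into a byte ring buffer; each marker's location is
// packed into a `u32` (start offset in the low 16 bits, end offset in the high
// 16 bits) and recorded on the GPU timeline through the buffer-marker
// extension used by `CommandEncoder::add_marker` below.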
impl super::CrashHandler {
    fn add_marker(&mut self, marker: &str) -> u32 {
        // Separate consecutive markers with '|' so the history reads as a trail.
        if self.next_offset < self.raw_string.len() {
            self.raw_string[self.next_offset] = b'|';
            self.next_offset += 1;
        }
        let len = marker.len().min(self.raw_string.len());
        // Wrap to the start of the ring buffer if the marker doesn't fit.
        if self.next_offset + len > self.raw_string.len() {
            self.next_offset = 0;
        }
        let start = self.next_offset;
        self.next_offset += len;
        let end = self.next_offset;
        self.raw_string[start..end].copy_from_slice(&marker.as_bytes()[..len]);
        start as u32 | (end << 16) as u32
    }

    pub(super) fn extract(&self, id: u32) -> (&str, &str) {
        let start = id as usize & 0xFFFF;
        let end = (id >> 16) as usize;
        let history = str::from_utf8(&self.raw_string[..start]).unwrap_or_default();
        let marker = str::from_utf8(&self.raw_string[start..end]).unwrap();
        (history, marker)
    }
}

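// A minimal, standalone check of the marker-id encoding used above: the low
// 16 bits carry the start offset and the high 16 bits the end offset, which
// also caps the marker ring buffer at 64 KiB.
#[cfg(test)]
mod marker_id_tests {
    #[test]
    fn marker_id_round_trip() {
        let (start, end) = (5usize, 12usize);
        // Pack the offsets the way `add_marker` does...
        let id = start as u32 | (end << 16) as u32;
        // ...and unpack them the way `extract` does.
        assert_eq!(id as usize & 0xFFFF, start);
        assert_eq!((id >> 16) as usize, end);
    }
}
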
impl super::PipelineContext<'_> {
    /// Writes a single descriptor payload into the update-template data blob
    /// at the offset registered for binding `index`.
    #[inline]
    fn write<T>(&mut self, index: u32, value: T) {
        let offset = self.template_offsets[index as usize];
        unsafe {
            std::ptr::write(
                self.update_data.as_mut_ptr().offset(offset as isize) as *mut T,
                value,
            )
        };
    }

    /// Writes a contiguous run of descriptor payloads (e.g. a texture or
    /// buffer array) starting at the offset registered for binding `index`.
    #[inline]
    fn write_array<I: Iterator>(&mut self, index: u32, iter: I) {
        let base_offset = self.template_offsets[index as usize];
        let base_ptr =
            unsafe { self.update_data.as_mut_ptr().offset(base_offset as isize) as *mut I::Item };
        for (i, value) in iter.enumerate() {
            unsafe { std::ptr::write(base_ptr.add(i), value) };
        }
    }
}

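// The impls below serialize each bindable resource into the raw form expected
// by `vkUpdateDescriptorSetWithTemplate`: plain-old-data is copied verbatim,
// images and samplers become `VkDescriptorImageInfo`, buffers become
// `VkDescriptorBufferInfo`, and acceleration structures are raw handles.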
impl<T: bytemuck::Pod> crate::ShaderBindable for T {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write(index, *self);
    }
}
impl crate::ShaderBindable for super::TextureView {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write(
            index,
            vk::DescriptorImageInfo {
                sampler: vk::Sampler::null(),
                image_view: self.raw,
                image_layout: vk::ImageLayout::GENERAL,
            },
        );
    }
}
impl<'a, const N: crate::ResourceIndex> crate::ShaderBindable for &'a crate::TextureArray<N> {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write_array(
            index,
            self.data.iter().map(|view| vk::DescriptorImageInfo {
                sampler: vk::Sampler::null(),
                image_view: view.raw,
                image_layout: vk::ImageLayout::GENERAL,
            }),
        );
    }
}
impl crate::ShaderBindable for super::Sampler {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write(
            index,
            vk::DescriptorImageInfo {
                sampler: self.raw,
                image_view: vk::ImageView::null(),
                image_layout: vk::ImageLayout::UNDEFINED,
            },
        );
    }
}
impl crate::ShaderBindable for crate::BufferPiece {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write(
            index,
            vk::DescriptorBufferInfo {
                buffer: self.buffer.raw,
                offset: self.offset,
                range: vk::WHOLE_SIZE,
            },
        );
    }
}
impl<'a, const N: crate::ResourceIndex> crate::ShaderBindable for &'a crate::BufferArray<N> {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write_array(
            index,
            self.data.iter().map(|piece| vk::DescriptorBufferInfo {
                buffer: piece.buffer.raw,
                offset: piece.offset,
                range: vk::WHOLE_SIZE,
            }),
        );
    }
}
impl crate::ShaderBindable for super::AccelerationStructure {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write(index, self.raw);
    }
}

impl crate::TexturePiece {
    fn subresource_layers(&self) -> vk::ImageSubresourceLayers {
        vk::ImageSubresourceLayers {
            aspect_mask: super::map_aspects(self.texture.format.aspects()),
            mip_level: self.mip_level,
            base_array_layer: self.array_layer,
            layer_count: 1,
        }
    }
}

fn map_origin(origin: &[u32; 3]) -> vk::Offset3D {
    vk::Offset3D {
        x: origin[0] as i32,
        y: origin[1] as i32,
        z: origin[2] as i32,
    }
}

fn make_buffer_image_copy(
    buffer: &crate::BufferPiece,
    bytes_per_row: u32,
    texture: &crate::TexturePiece,
    size: &crate::Extent,
) -> vk::BufferImageCopy {
    let block_info = texture.texture.format.block_info();
    vk::BufferImageCopy {
        buffer_offset: buffer.offset,
        // Vulkan expects the row length in texels, not bytes: convert the byte
        // stride into whole blocks, then scale by the block width in texels.
        buffer_row_length: block_info.dimensions.0 as u32
            * (bytes_per_row / block_info.size as u32),
        buffer_image_height: 0,
        image_subresource: texture.subresource_layers(),
        image_offset: map_origin(&texture.origin),
        image_extent: super::map_extent_3d(size),
    }
}

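// Worked example: for an uncompressed RGBA8 texture (1x1-texel blocks, 4 bytes
// each; format assumed for illustration) with bytes_per_row = 1024, the
// `buffer_row_length` above comes out to 1 * (1024 / 4) = 256 texels.
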
fn map_render_target(rt: &crate::RenderTarget) -> vk::RenderingAttachmentInfo<'static> {
    let mut vk_info = vk::RenderingAttachmentInfo::default()
        .image_view(rt.view.raw)
        .image_layout(vk::ImageLayout::GENERAL);

    match rt.init_op {
        crate::InitOp::Load => vk_info = vk_info.load_op(vk::AttachmentLoadOp::LOAD),
        crate::InitOp::DontCare => vk_info = vk_info.load_op(vk::AttachmentLoadOp::DONT_CARE),
        crate::InitOp::Clear(color) => {
            // Interpret the clear color against the attachment's aspects:
            // color attachments take a float clear, while depth/stencil
            // attachments take both depth and stencil values.
            let cv = if rt.view.aspects.contains(crate::TexelAspects::COLOR) {
                vk::ClearValue {
                    color: match color {
                        crate::TextureColor::TransparentBlack => {
                            vk::ClearColorValue { float32: [0.0; 4] }
                        }
                        crate::TextureColor::OpaqueBlack => vk::ClearColorValue {
                            float32: [0.0, 0.0, 0.0, 1.0],
                        },
                        crate::TextureColor::White => vk::ClearColorValue { float32: [1.0; 4] },
                    },
                }
            } else {
                vk::ClearValue {
                    depth_stencil: vk::ClearDepthStencilValue {
                        depth: color.depth_clear_value(),
                        stencil: color.stencil_clear_value(),
                    },
                }
            };

            vk_info = vk_info.load_op(vk::AttachmentLoadOp::CLEAR).clear_value(cv);
        }
    }

    if let crate::FinishOp::ResolveTo(resolve_view) = rt.finish_op {
        vk_info = vk_info
            .resolve_image_view(resolve_view.raw)
            .resolve_image_layout(vk::ImageLayout::GENERAL)
            .resolve_mode(vk::ResolveModeFlags::AVERAGE);
    }

    vk_info.store_op = match rt.finish_op {
        crate::FinishOp::Store => vk::AttachmentStoreOp::STORE,
        crate::FinishOp::Discard => vk::AttachmentStoreOp::DONT_CARE,
        crate::FinishOp::Ignore => vk::AttachmentStoreOp::DONT_CARE,
        // The multisampled source can be discarded once it is resolved.
        crate::FinishOp::ResolveTo(..) => vk::AttachmentStoreOp::DONT_CARE,
    };

    vk_info
}

fn end_pass(device: &super::Device, cmd_buf: vk::CommandBuffer) {
    if device.command_scope.is_some() {
        unsafe {
            device.debug_utils.cmd_end_debug_utils_label(cmd_buf);
        }
    }
}

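// `CommandEncoder` records into the first buffer of a small ring of command
// buffers; every pass opens with a global barrier, a crash marker, a timestamp
// query, and (when command scopes are enabled) a debug-utils label.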
impl super::CommandEncoder {
    fn add_marker(&mut self, marker: &str) {
        if let Some(ref mut ch) = self.crash_handler {
            let id = ch.add_marker(marker);
            unsafe {
                self.device
                    .buffer_marker
                    .as_ref()
                    .unwrap()
                    .cmd_write_buffer_marker(
                        self.buffers[0].raw,
                        vk::PipelineStageFlags::ALL_COMMANDS,
                        ch.marker_buf.raw,
                        0,
                        id,
                    );
            }
        }
    }

    fn add_timestamp(&mut self, label: &str) {
        if self.device.timing.is_some() {
            let cmd_buf = self.buffers.first_mut().unwrap();
            if cmd_buf.timed_pass_names.len() == crate::limits::PASS_COUNT {
                log::warn!("Reached the maximum for `limits::PASS_COUNT`, skipping the timer");
                return;
            }
            let index = cmd_buf.timed_pass_names.len() as u32;
            unsafe {
                self.device.core.cmd_write_timestamp(
                    cmd_buf.raw,
                    vk::PipelineStageFlags::TOP_OF_PIPE,
                    cmd_buf.query_pool,
                    index,
                );
            }
            cmd_buf.timed_pass_names.push(label.to_string());
        }
    }

    fn begin_pass(&mut self, label: &str) {
        self.barrier();
        self.add_marker(label);
        self.add_timestamp(label);

        if self.device.command_scope.is_some() {
            // Debug-utils labels take a NUL-terminated C string.
            self.temp_label.clear();
            self.temp_label.extend_from_slice(label.as_bytes());
            self.temp_label.push(0);
            unsafe {
                self.device.debug_utils.cmd_begin_debug_utils_label(
                    self.buffers[0].raw,
                    &vk::DebugUtilsLabelEXT {
                        p_label_name: self.temp_label.as_ptr() as *const _,
                        ..Default::default()
                    },
                )
            }
        }
    }

    pub(super) fn finish(&mut self) -> vk::CommandBuffer {
        self.barrier();
        self.add_marker("finish");
        let cmd_buf = self.buffers.first_mut().unwrap();
        unsafe {
            if self.device.timing.is_some() {
                // Write the closing timestamp that bounds the last pass.
                let index = cmd_buf.timed_pass_names.len() as u32;
                self.device.core.cmd_write_timestamp(
                    cmd_buf.raw,
                    vk::PipelineStageFlags::TOP_OF_PIPE,
                    cmd_buf.query_pool,
                    index,
                );
            }
            self.device.core.end_command_buffer(cmd_buf.raw).unwrap();
        }
        cmd_buf.raw
    }

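    // No fine-grained resource-state tracking is done; pass boundaries rely
    // on this single conservative ALL_COMMANDS-to-ALL_COMMANDS memory barrier.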
    fn barrier(&mut self) {
        let wa = &self.device.workarounds;
        let barrier = vk::MemoryBarrier {
            src_access_mask: vk::AccessFlags::MEMORY_WRITE | wa.extra_sync_src_access,
            dst_access_mask: vk::AccessFlags::MEMORY_READ
                | vk::AccessFlags::MEMORY_WRITE
                | wa.extra_sync_dst_access,
            ..Default::default()
        };
        unsafe {
            self.device.core.cmd_pipeline_barrier(
                self.buffers[0].raw,
                vk::PipelineStageFlags::ALL_COMMANDS,
                vk::PipelineStageFlags::ALL_COMMANDS,
                vk::DependencyFlags::empty(),
                &[barrier],
                &[],
                &[],
            );
        }
    }

    pub fn transfer(&mut self, label: &str) -> super::TransferCommandEncoder {
        self.begin_pass(label);
        super::TransferCommandEncoder {
            raw: self.buffers[0].raw,
            device: &self.device,
        }
    }

    pub fn acceleration_structure(
        &mut self,
        label: &str,
    ) -> super::AccelerationStructureCommandEncoder {
        self.begin_pass(label);
        super::AccelerationStructureCommandEncoder {
            raw: self.buffers[0].raw,
            device: &self.device,
        }
    }

    pub fn compute(&mut self, label: &str) -> super::ComputeCommandEncoder {
        self.begin_pass(label);
        super::ComputeCommandEncoder {
            cmd_buf: self.buffers.first_mut().unwrap(),
            device: &self.device,
            update_data: &mut self.update_data,
        }
    }

    pub fn render(
        &mut self,
        label: &str,
        targets: crate::RenderTargetSet,
    ) -> super::RenderCommandEncoder {
        self.begin_pass(label);

        let mut target_size = [0u16; 2];
        let mut color_attachments = Vec::with_capacity(targets.colors.len());
        let depth_stencil_attachment;
        for rt in targets.colors {
            target_size = rt.view.target_size;
            color_attachments.push(map_render_target(rt));
        }

        let mut rendering_info = vk::RenderingInfoKHR::default()
            .layer_count(1)
            .color_attachments(&color_attachments);

        if let Some(rt) = targets.depth_stencil {
            target_size = rt.view.target_size;
            depth_stencil_attachment = map_render_target(&rt);
            if rt.view.aspects.contains(crate::TexelAspects::DEPTH) {
                rendering_info = rendering_info.depth_attachment(&depth_stencil_attachment);
            }
            if rt.view.aspects.contains(crate::TexelAspects::STENCIL) {
                rendering_info = rendering_info.stencil_attachment(&depth_stencil_attachment);
            }
        }

        // All attachments are expected to share one size; the full target is
        // used as the initial render area, viewport, and scissor.
        let render_area = crate::ScissorRect {
            x: 0,
            y: 0,
            w: target_size[0] as u32,
            h: target_size[1] as u32,
        }
        .to_vk();
        let viewport = crate::Viewport {
            x: 0.0,
            y: 0.0,
            w: target_size[0] as f32,
            h: target_size[1] as f32,
            depth: 0.0..1.0,
        }
        .to_vk();
        rendering_info.render_area = render_area;

        let cmd_buf = self.buffers.first_mut().unwrap();
        unsafe {
            self.device
                .core
                .cmd_set_viewport(cmd_buf.raw, 0, &[viewport]);
            self.device
                .core
                .cmd_set_scissor(cmd_buf.raw, 0, &[render_area]);
            self.device
                .dynamic_rendering
                .cmd_begin_rendering(cmd_buf.raw, &rendering_info);
        };

        super::RenderCommandEncoder {
            cmd_buf,
            device: &self.device,
            update_data: &mut self.update_data,
        }
    }

    pub(super) fn check_gpu_crash<T>(&self, ret: Result<T, vk::Result>) -> Option<T> {
        match ret {
            Ok(value) => Some(value),
            Err(vk::Result::ERROR_DEVICE_LOST) => match self.crash_handler {
                Some(ref ch) => {
                    // Read back the last marker id the GPU managed to write.
                    let last_id = unsafe { *(ch.marker_buf.data() as *const u32) };
                    if last_id != 0 {
                        let (history, last_marker) = ch.extract(last_id);
                        log::error!("Last GPU executed marker is '{last_marker}'");
                        log::info!("Marker history: {history}");
                    }
                    panic!("GPU has crashed in {}", ch.name);
                }
                None => {
                    panic!("GPU has crashed, and no debug information is available.");
                }
            },
            Err(vk::Result::ERROR_OUT_OF_DATE_KHR) => {
                log::warn!("GPU frame is out of date");
                None
            }
            Err(other) => panic!("GPU error {other}"),
        }
    }
}

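// Typical frame flow: `start()` rotates the command-buffer ring and collects
// the previous use's timestamps, passes are recorded via `transfer`, `compute`,
// `render`, and `acceleration_structure`, and `finish()` closes the buffer
// for submission.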
#[hidden_trait::expose]
impl crate::traits::CommandEncoder for super::CommandEncoder {
    type Texture = super::Texture;
    type Frame = super::Frame;

    fn start(&mut self) {
        self.buffers.rotate_left(1);
        let cmd_buf = self.buffers.first_mut().unwrap();
        self.device
            .reset_descriptor_pool(&mut cmd_buf.descriptor_pool);

        let vk_info = vk::CommandBufferBeginInfo {
            flags: vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT,
            ..Default::default()
        };
        unsafe {
            self.device
                .core
                .begin_command_buffer(cmd_buf.raw, &vk_info)
                .unwrap();
        }

        if let Some(ref timing) = self.device.timing {
            self.timings.clear();
            if !cmd_buf.timed_pass_names.is_empty() {
                // Resolve the timestamps recorded the last time this command
                // buffer was used: one per pass, plus the closing timestamp.
                let mut timestamps = [0u64; super::QUERY_POOL_SIZE];
                unsafe {
                    self.device
                        .core
                        .get_query_pool_results(
                            cmd_buf.query_pool,
                            0,
                            &mut timestamps[..cmd_buf.timed_pass_names.len() + 1],
                            vk::QueryResultFlags::TYPE_64,
                        )
                        .unwrap();
                }
                let mut prev = timestamps[0];
                for (name, &ts) in cmd_buf
                    .timed_pass_names
                    .drain(..)
                    .zip(timestamps[1..].iter())
                {
                    // `period` converts device ticks into nanoseconds.
                    let diff = (ts - prev) as f32 * timing.period;
                    prev = ts;
                    self.timings.push((name, Duration::from_nanos(diff as _)));
                }
            }
            unsafe {
                self.device.core.cmd_reset_query_pool(
                    cmd_buf.raw,
                    cmd_buf.query_pool,
                    0,
                    super::QUERY_POOL_SIZE as u32,
                );
            }
        }
    }

    fn init_texture(&mut self, texture: super::Texture) {
        // One-time transition from UNDEFINED into the GENERAL layout that all
        // other commands in this backend assume.
        let barrier = vk::ImageMemoryBarrier {
            old_layout: vk::ImageLayout::UNDEFINED,
            new_layout: vk::ImageLayout::GENERAL,
            image: texture.raw,
            subresource_range: vk::ImageSubresourceRange {
                aspect_mask: super::map_aspects(texture.format.aspects()),
                base_mip_level: 0,
                level_count: vk::REMAINING_MIP_LEVELS,
                base_array_layer: 0,
                layer_count: vk::REMAINING_ARRAY_LAYERS,
            },
            ..Default::default()
        };
        unsafe {
            self.device.core.cmd_pipeline_barrier(
                self.buffers[0].raw,
                vk::PipelineStageFlags::TOP_OF_PIPE,
                vk::PipelineStageFlags::ALL_COMMANDS,
                vk::DependencyFlags::empty(),
                &[],
                &[],
                &[barrier],
            );
        }
    }

    fn present(&mut self, frame: super::Frame) {
        let image_index = match frame.image_index {
            Some(index) => index,
            None => {
                return;
            }
        };

        assert_eq!(self.present, None);
        let wa = &self.device.workarounds;
        self.present = Some(super::Presentation {
            swapchain: frame.swapchain.raw,
            image_index,
            acquire_semaphore: frame.internal.acquire_semaphore,
            present_semaphore: frame.internal.present_semaphore,
        });

        // Transition the swapchain image into the layout the presentation
        // engine requires.
        let barrier = vk::ImageMemoryBarrier {
            old_layout: vk::ImageLayout::GENERAL,
            new_layout: vk::ImageLayout::PRESENT_SRC_KHR,
            image: frame.internal.image,
            subresource_range: vk::ImageSubresourceRange {
                aspect_mask: vk::ImageAspectFlags::COLOR,
                base_mip_level: 0,
                level_count: 1,
                base_array_layer: 0,
                layer_count: 1,
            },
            src_access_mask: vk::AccessFlags::MEMORY_WRITE | wa.extra_sync_src_access,
            ..Default::default()
        };
        unsafe {
            self.device.core.cmd_pipeline_barrier(
                self.buffers[0].raw,
                vk::PipelineStageFlags::ALL_COMMANDS,
                vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                vk::DependencyFlags::empty(),
                &[],
                &[],
                &[barrier],
            );
        }
    }

    fn timings(&self) -> &crate::Timings {
        &self.timings
    }
}

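// All transfer commands below operate with images in the GENERAL layout, so
// no per-copy layout transitions are recorded.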
#[hidden_trait::expose]
impl crate::traits::TransferEncoder for super::TransferCommandEncoder<'_> {
    type BufferPiece = crate::BufferPiece;
    type TexturePiece = crate::TexturePiece;

    fn fill_buffer(&mut self, dst: crate::BufferPiece, size: u64, value: u8) {
        // Broadcast the byte into all four lanes of the `u32` that
        // `vkCmdFillBuffer` consumes.
        let value_u32 = (value as u32) * 0x0101_0101;
        unsafe {
            self.device
                .core
                .cmd_fill_buffer(self.raw, dst.buffer.raw, dst.offset, size, value_u32)
        };
    }

    fn copy_buffer_to_buffer(
        &mut self,
        src: crate::BufferPiece,
        dst: crate::BufferPiece,
        size: u64,
    ) {
        let copy = vk::BufferCopy {
            src_offset: src.offset,
            dst_offset: dst.offset,
            size,
        };
        unsafe {
            self.device
                .core
                .cmd_copy_buffer(self.raw, src.buffer.raw, dst.buffer.raw, &[copy])
        };
    }

    fn copy_texture_to_texture(
        &mut self,
        src: crate::TexturePiece,
        dst: crate::TexturePiece,
        size: crate::Extent,
    ) {
        let copy = vk::ImageCopy {
            src_subresource: src.subresource_layers(),
            src_offset: map_origin(&src.origin),
            dst_subresource: dst.subresource_layers(),
            dst_offset: map_origin(&dst.origin),
            extent: super::map_extent_3d(&size),
        };
        unsafe {
            self.device.core.cmd_copy_image(
                self.raw,
                src.texture.raw,
                vk::ImageLayout::GENERAL,
                dst.texture.raw,
                vk::ImageLayout::GENERAL,
                &[copy],
            )
        };
    }

    fn copy_buffer_to_texture(
        &mut self,
        src: crate::BufferPiece,
        bytes_per_row: u32,
        dst: crate::TexturePiece,
        size: crate::Extent,
    ) {
        let copy = make_buffer_image_copy(&src, bytes_per_row, &dst, &size);
        unsafe {
            self.device.core.cmd_copy_buffer_to_image(
                self.raw,
                src.buffer.raw,
                dst.texture.raw,
                vk::ImageLayout::GENERAL,
                &[copy],
            )
        };
    }

    fn copy_texture_to_buffer(
        &mut self,
        src: crate::TexturePiece,
        dst: crate::BufferPiece,
        bytes_per_row: u32,
        size: crate::Extent,
    ) {
        let copy = make_buffer_image_copy(&dst, bytes_per_row, &src, &size);
        unsafe {
            self.device.core.cmd_copy_image_to_buffer(
                self.raw,
                src.texture.raw,
                vk::ImageLayout::GENERAL,
                dst.buffer.raw,
                &[copy],
            )
        };
    }
}

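// A standalone check of the byte-replication trick in `fill_buffer` above.
#[cfg(test)]
mod fill_value_tests {
    #[test]
    fn broadcasts_byte_into_u32() {
        let value = 0xABu8;
        assert_eq!((value as u32) * 0x0101_0101, 0xABAB_ABABu32);
    }
}
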
impl Drop for super::TransferCommandEncoder<'_> {
    fn drop(&mut self) {
        end_pass(self.device, self.raw);
    }
}

#[hidden_trait::expose]
impl crate::traits::AccelerationStructureEncoder
    for super::AccelerationStructureCommandEncoder<'_>
{
    type AccelerationStructure = crate::AccelerationStructure;
    type AccelerationStructureMesh = crate::AccelerationStructureMesh;
    type BufferPiece = crate::BufferPiece;

    fn build_bottom_level(
        &mut self,
        acceleration_structure: super::AccelerationStructure,
        meshes: &[crate::AccelerationStructureMesh],
        scratch_data: crate::BufferPiece,
    ) {
        let rt = self.device.ray_tracing.as_ref().unwrap();
        let mut blas_input = self.device.map_acceleration_structure_meshes(meshes);
        blas_input.build_info.dst_acceleration_structure = acceleration_structure.raw;
        let scratch_address = self.device.get_device_address(&scratch_data);
        // The scratch alignment is a power of two, so masking by
        // `alignment - 1` isolates exactly the misaligned low bits.
        assert_eq!(
            scratch_address & (rt.scratch_buffer_alignment - 1),
            0,
            "BLAS scratch address {scratch_address} is not aligned"
        );
        blas_input.build_info.scratch_data = vk::DeviceOrHostAddressKHR {
            device_address: scratch_address,
        };

        unsafe {
            rt.acceleration_structure.cmd_build_acceleration_structures(
                self.raw,
                &[blas_input.build_info],
                &[&blas_input.build_range_infos],
            );
        }
    }

    fn build_top_level(
        &mut self,
        acceleration_structure: super::AccelerationStructure,
        _bottom_level: &[super::AccelerationStructure],
        instance_count: u32,
        instance_data: crate::BufferPiece,
        scratch_data: crate::BufferPiece,
    ) {
        let build_range_info = vk::AccelerationStructureBuildRangeInfoKHR {
            primitive_count: instance_count,
            primitive_offset: 0,
            first_vertex: 0,
            transform_offset: 0,
        };
        let geometry = vk::AccelerationStructureGeometryKHR {
            geometry_type: vk::GeometryTypeKHR::INSTANCES,
            geometry: vk::AccelerationStructureGeometryDataKHR {
                instances: vk::AccelerationStructureGeometryInstancesDataKHR {
                    data: vk::DeviceOrHostAddressConstKHR {
                        device_address: self.device.get_device_address(&instance_data),
                    },
                    ..Default::default()
                },
            },
            ..Default::default()
        };
        let geometries = [geometry];
        let build_info = vk::AccelerationStructureBuildGeometryInfoKHR {
            ty: vk::AccelerationStructureTypeKHR::TOP_LEVEL,
            mode: vk::BuildAccelerationStructureModeKHR::BUILD,
            scratch_data: vk::DeviceOrHostAddressKHR {
                device_address: self.device.get_device_address(&scratch_data),
            },
            dst_acceleration_structure: acceleration_structure.raw,
            ..Default::default()
        }
        .geometries(&geometries);

        let rt = self.device.ray_tracing.as_ref().unwrap();
        unsafe {
            rt.acceleration_structure.cmd_build_acceleration_structures(
                self.raw,
                &[build_info],
                &[&[build_range_info]],
            );
        }
    }
}

impl Drop for super::AccelerationStructureCommandEncoder<'_> {
    fn drop(&mut self) {
        end_pass(self.device, self.raw);
    }
}

impl<'a> super::ComputeCommandEncoder<'a> {
    pub fn with<'b, 'p>(
        &'b mut self,
        pipeline: &'p super::ComputePipeline,
    ) -> super::PipelineEncoder<'b, 'p> {
        let bind_point = vk::PipelineBindPoint::COMPUTE;
        unsafe {
            self.device
                .core
                .cmd_bind_pipeline(self.cmd_buf.raw, bind_point, pipeline.raw)
        };
        super::PipelineEncoder {
            cmd_buf: self.cmd_buf,
            layout: &pipeline.layout,
            bind_point,
            device: self.device,
            update_data: self.update_data,
        }
    }
}

impl Drop for super::ComputeCommandEncoder<'_> {
    fn drop(&mut self) {
        end_pass(self.device, self.cmd_buf.raw);
    }
}

impl<'a> super::RenderCommandEncoder<'a> {
    pub fn with<'b, 'p>(
        &'b mut self,
        pipeline: &'p super::RenderPipeline,
    ) -> super::PipelineEncoder<'b, 'p> {
        let bind_point = vk::PipelineBindPoint::GRAPHICS;
        unsafe {
            self.device
                .core
                .cmd_bind_pipeline(self.cmd_buf.raw, bind_point, pipeline.raw)
        };
        super::PipelineEncoder {
            cmd_buf: self.cmd_buf,
            layout: &pipeline.layout,
            bind_point,
            device: self.device,
            update_data: self.update_data,
        }
    }
}

impl Drop for super::RenderCommandEncoder<'_> {
    fn drop(&mut self) {
        unsafe {
            self.device
                .dynamic_rendering
                .cmd_end_rendering(self.cmd_buf.raw)
        };
        end_pass(self.device, self.cmd_buf.raw);
    }
}

impl crate::ScissorRect {
    const fn to_vk(&self) -> vk::Rect2D {
        vk::Rect2D {
            offset: vk::Offset2D {
                x: self.x,
                y: self.y,
            },
            extent: vk::Extent2D {
                width: self.w,
                height: self.h,
            },
        }
    }
}

impl crate::Viewport {
    fn to_vk(&self) -> vk::Viewport {
        vk::Viewport {
            x: self.x,
            // Flip the viewport vertically: move the origin to the bottom edge
            // and negate the height (VK_KHR_maintenance1 behavior), so the
            // result matches the conventions of the other backends.
            y: self.y + self.h,
            width: self.w,
            height: -self.h,
            min_depth: self.depth.start,
            max_depth: self.depth.end,
        }
    }
}

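// A small check of the conversion above, assuming `Viewport` keeps the public
// fields it is constructed with elsewhere in this file.
#[cfg(test)]
mod viewport_tests {
    #[test]
    fn viewport_flips_y() {
        let vp = crate::Viewport {
            x: 0.0,
            y: 0.0,
            w: 800.0,
            h: 600.0,
            depth: 0.0..1.0,
        };
        let vk_vp = vp.to_vk();
        // The origin moves to the bottom edge and the height is negated.
        assert_eq!(vk_vp.y, 600.0);
        assert_eq!(vk_vp.height, -600.0);
    }
}
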
#[hidden_trait::expose]
impl crate::traits::RenderEncoder for super::RenderCommandEncoder<'_> {
    fn set_scissor_rect(&mut self, rect: &crate::ScissorRect) {
        let vk_scissor = rect.to_vk();
        unsafe {
            self.device
                .core
                .cmd_set_scissor(self.cmd_buf.raw, 0, &[vk_scissor])
        };
    }

    fn set_viewport(&mut self, viewport: &crate::Viewport) {
        let vk_viewport = viewport.to_vk();
        unsafe {
            self.device
                .core
                .cmd_set_viewport(self.cmd_buf.raw, 0, &[vk_viewport])
        };
    }

    fn set_stencil_reference(&mut self, reference: u32) {
        unsafe {
            self.device.core.cmd_set_stencil_reference(
                self.cmd_buf.raw,
                vk::StencilFaceFlags::FRONT_AND_BACK,
                reference,
            )
        };
    }
}

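// `bind` allocates a fresh descriptor set from the command buffer's pool on
// every call and fills it through the pipeline's update template; sets are
// recycled wholesale when the pool is reset in `start()`.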
#[hidden_trait::expose]
impl crate::traits::PipelineEncoder for super::PipelineEncoder<'_, '_> {
    fn bind<D: crate::ShaderData>(&mut self, group: u32, data: &D) {
        let dsl = &self.layout.descriptor_set_layouts[group as usize];
        if !dsl.is_empty() {
            // Serialize the shader data into the template-ordered blob.
            self.update_data.clear();
            self.update_data.resize(dsl.template_size as usize, 0);
            data.fill(super::PipelineContext {
                update_data: self.update_data.as_mut_slice(),
                template_offsets: &dsl.template_offsets,
            });
        }

        let vk_set = self
            .device
            .allocate_descriptor_set(&mut self.cmd_buf.descriptor_pool, dsl);
        unsafe {
            if !dsl.is_empty() {
                self.device.core.update_descriptor_set_with_template(
                    vk_set,
                    dsl.update_template,
                    self.update_data.as_ptr() as *const _,
                );
            }
            self.device.core.cmd_bind_descriptor_sets(
                self.cmd_buf.raw,
                self.bind_point,
                self.layout.raw,
                group,
                &[vk_set],
                &[],
            );
        }
    }
}

#[hidden_trait::expose]
impl crate::traits::ComputePipelineEncoder for super::PipelineEncoder<'_, '_> {
    type BufferPiece = crate::BufferPiece;

    fn dispatch(&mut self, groups: [u32; 3]) {
        unsafe {
            self.device
                .core
                .cmd_dispatch(self.cmd_buf.raw, groups[0], groups[1], groups[2])
        };
    }

    fn dispatch_indirect(&mut self, indirect_buf: crate::BufferPiece) {
        unsafe {
            self.device.core.cmd_dispatch_indirect(
                self.cmd_buf.raw,
                indirect_buf.buffer.raw,
                indirect_buf.offset,
            )
        };
    }
}

#[hidden_trait::expose]
impl crate::traits::RenderEncoder for super::PipelineEncoder<'_, '_> {
    fn set_scissor_rect(&mut self, rect: &crate::ScissorRect) {
        let vk_scissor = rect.to_vk();
        unsafe {
            self.device
                .core
                .cmd_set_scissor(self.cmd_buf.raw, 0, &[vk_scissor])
        };
    }

    fn set_viewport(&mut self, viewport: &crate::Viewport) {
        let vk_viewport = viewport.to_vk();
        unsafe {
            self.device
                .core
                .cmd_set_viewport(self.cmd_buf.raw, 0, &[vk_viewport])
        };
    }

    fn set_stencil_reference(&mut self, reference: u32) {
        unsafe {
            self.device.core.cmd_set_stencil_reference(
                self.cmd_buf.raw,
                vk::StencilFaceFlags::FRONT_AND_BACK,
                reference,
            )
        };
    }
}

#[hidden_trait::expose]
impl crate::traits::RenderPipelineEncoder for super::PipelineEncoder<'_, '_> {
    type BufferPiece = crate::BufferPiece;

    fn bind_vertex(&mut self, index: u32, vertex_buf: crate::BufferPiece) {
        unsafe {
            self.device.core.cmd_bind_vertex_buffers(
                self.cmd_buf.raw,
                index,
                &[vertex_buf.buffer.raw],
                &[vertex_buf.offset],
            );
        }
    }

    fn draw(
        &mut self,
        start_vertex: u32,
        vertex_count: u32,
        start_instance: u32,
        instance_count: u32,
    ) {
        unsafe {
            self.device.core.cmd_draw(
                self.cmd_buf.raw,
                vertex_count,
                instance_count,
                start_vertex,
                start_instance,
            );
        }
    }

    fn draw_indexed(
        &mut self,
        index_buf: crate::BufferPiece,
        index_type: crate::IndexType,
        index_count: u32,
        base_vertex: i32,
        start_instance: u32,
        instance_count: u32,
    ) {
        let raw_index_type = super::map_index_type(index_type);
        unsafe {
            self.device.core.cmd_bind_index_buffer(
                self.cmd_buf.raw,
                index_buf.buffer.raw,
                index_buf.offset,
                raw_index_type,
            );
            // The first index is always 0; any offset is baked into `index_buf`.
            self.device.core.cmd_draw_indexed(
                self.cmd_buf.raw,
                index_count,
                instance_count,
                0,
                base_vertex,
                start_instance,
            );
        }
    }

    fn draw_indirect(&mut self, indirect_buf: crate::BufferPiece) {
        unsafe {
            // A single draw command is read from `indirect_buf`, so the stride
            // is irrelevant and passed as 0.
            self.device.core.cmd_draw_indirect(
                self.cmd_buf.raw,
                indirect_buf.buffer.raw,
                indirect_buf.offset,
                1,
                0,
            );
        }
    }

    fn draw_indexed_indirect(
        &mut self,
        index_buf: crate::BufferPiece,
        index_type: crate::IndexType,
        indirect_buf: crate::BufferPiece,
    ) {
        let raw_index_type = super::map_index_type(index_type);
        unsafe {
            self.device.core.cmd_bind_index_buffer(
                self.cmd_buf.raw,
                index_buf.buffer.raw,
                index_buf.offset,
                raw_index_type,
            );
            self.device.core.cmd_draw_indexed_indirect(
                self.cmd_buf.raw,
                indirect_buf.buffer.raw,
                indirect_buf.offset,
                1,
                0,
            );
        }
    }
}