use ash::vk;
use std::{ptr, str, time::Duration};
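
// Crash markers are packed into a single `u32`: the low 16 bits hold the start
// offset of the marker string inside `raw_string`, and the high 16 bits hold
// the end offset. Successive markers are separated by `|`, so the bytes before
// `start` form a readable history of recently recorded passes.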
impl super::CrashHandler {
    fn add_marker(&mut self, marker: &str) -> u32 {
        if self.next_offset < self.raw_string.len() {
            self.raw_string[self.next_offset] = b'|';
            self.next_offset += 1;
        }
        let len = marker.len().min(self.raw_string.len());
        if self.next_offset + len > self.raw_string.len() {
            self.next_offset = 0;
        }
        let start = self.next_offset;
        self.next_offset += len;
        let end = self.next_offset;
        self.raw_string[start..end].copy_from_slice(&marker.as_bytes()[..len]);
        start as u32 | (end << 16) as u32
    }

    pub(super) fn extract(&self, id: u32) -> (&str, &str) {
        let start = id as usize & 0xFFFF;
        let end = (id >> 16) as usize;
        let history = str::from_utf8(&self.raw_string[..start]).unwrap_or_default();
        let marker = str::from_utf8(&self.raw_string[start..end]).unwrap();
        (history, marker)
    }
}
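
// `PipelineContext` writes descriptor payloads directly into the raw byte
// buffer consumed by the descriptor update template, at the per-binding
// offsets recorded in `template_offsets`.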
impl super::PipelineContext<'_> {
    #[inline]
    fn write<T>(&mut self, index: u32, value: T) {
        let offset = self.template_offsets[index as usize];
        unsafe {
            ptr::write(
                self.update_data.as_mut_ptr().offset(offset as isize) as *mut T,
                value,
            )
        };
    }

    #[inline]
    fn write_array<I: Iterator>(&mut self, index: u32, iter: I) {
        let base_offset = self.template_offsets[index as usize];
        let base_ptr =
            unsafe { self.update_data.as_mut_ptr().offset(base_offset as isize) as *mut I::Item };
        for (i, value) in iter.enumerate() {
            unsafe { ptr::write(base_ptr.add(i), value) };
        }
    }
}
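
// Plain-old-data values are bound in one of two ways: if the command buffer
// has a scratch buffer, the bytes are copied into it (at the required
// alignment) and bound as a `VkDescriptorBufferInfo`; otherwise the value is
// written inline into the update-template data.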
impl<T: bytemuck::Pod> crate::ShaderBindable for T {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        let descriptor_buf_info = if let Some(ref mut scratch) = ctx.scratch {
            let data = bytemuck::bytes_of(self);
            let aligned_offset =
                (scratch.offset + scratch.alignment - 1) & !(scratch.alignment - 1);
            let end = aligned_offset + data.len() as u64;
            assert!(
                end <= scratch.capacity,
                "Scratch buffer overflow: needed {end}, capacity {}",
                scratch.capacity
            );
            unsafe {
                ptr::copy_nonoverlapping(
                    data.as_ptr(),
                    scratch.mapped.add(aligned_offset as usize),
                    data.len(),
                );
            }
            scratch.offset = end;
            Some(vk::DescriptorBufferInfo {
                buffer: scratch.raw,
                offset: aligned_offset,
                range: data.len() as u64,
            })
        } else {
            None
        };
        if let Some(info) = descriptor_buf_info {
            ctx.write(index, info);
        } else {
            ctx.write(index, *self);
        }
    }
}
impl crate::ShaderBindable for super::TextureView {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write(
            index,
            vk::DescriptorImageInfo {
                sampler: vk::Sampler::null(),
                image_view: self.raw,
                image_layout: vk::ImageLayout::GENERAL,
            },
        );
    }
}
impl<'a, const N: crate::ResourceIndex> crate::ShaderBindable for &'a crate::TextureArray<N> {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        assert!(self.data.len() <= N as usize);
        ctx.write_array(
            index,
            self.data
                .iter()
                .map(|view| vk::DescriptorImageInfo {
                    sampler: vk::Sampler::null(),
                    image_view: view.raw,
                    image_layout: vk::ImageLayout::GENERAL,
                })
                .cycle()
                .take(N as usize),
        );
    }
}
impl crate::ShaderBindable for super::Sampler {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write(
            index,
            vk::DescriptorImageInfo {
                sampler: self.raw,
                image_view: vk::ImageView::null(),
                image_layout: vk::ImageLayout::UNDEFINED,
            },
        );
    }
}
impl crate::ShaderBindable for crate::BufferPiece {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write(
            index,
            vk::DescriptorBufferInfo {
                buffer: self.buffer.raw,
                offset: self.offset,
                range: vk::WHOLE_SIZE,
            },
        );
    }
}
impl<'a, const N: crate::ResourceIndex> crate::ShaderBindable for &'a crate::BufferArray<N> {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        assert!(self.data.len() <= N as usize);
        ctx.write_array(
            index,
            self.data
                .iter()
                .map(|piece| vk::DescriptorBufferInfo {
                    buffer: piece.buffer.raw,
                    offset: piece.offset,
                    range: vk::WHOLE_SIZE,
                })
                .cycle()
                .take(N as usize),
        );
    }
}
impl crate::ShaderBindable for super::AccelerationStructure {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write(index, self.raw);
    }
}
impl<'a, const N: crate::ResourceIndex> crate::ShaderBindable
    for &'a crate::AccelerationStructureArray<N>
{
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        assert!(self.data.len() <= N as usize);
        ctx.write_array(
            index,
            self.data
                .iter()
                .map(|accel| accel.raw)
                .cycle()
                .take(N as usize),
        );
    }
}

impl crate::TexturePiece {
    fn subresource_layers(&self) -> vk::ImageSubresourceLayers {
        vk::ImageSubresourceLayers {
            aspect_mask: super::map_aspects(self.texture.format.aspects()),
            mip_level: self.mip_level,
            base_array_layer: self.array_layer,
            layer_count: 1,
        }
    }
}

fn map_origin(origin: &[u32; 3]) -> vk::Offset3D {
    vk::Offset3D {
        x: origin[0] as i32,
        y: origin[1] as i32,
        z: origin[2] as i32,
    }
}
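
// `buffer_row_length` is expressed in texels (in multiples of the block
// width), so the byte stride is converted via the format's block size.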
fn make_buffer_image_copy(
    buffer: &crate::BufferPiece,
    bytes_per_row: u32,
    texture: &crate::TexturePiece,
    size: &crate::Extent,
) -> vk::BufferImageCopy {
    let block_info = texture.texture.format.block_info();
    vk::BufferImageCopy {
        buffer_offset: buffer.offset,
        buffer_row_length: block_info.dimensions.0 as u32
            * (bytes_per_row / block_info.size as u32),
        buffer_image_height: 0,
        image_subresource: texture.subresource_layers(),
        image_offset: map_origin(&texture.origin),
        image_extent: super::map_extent_3d(size),
    }
}
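
// Translates a `RenderTarget` into a `VkRenderingAttachmentInfo` for dynamic
// rendering: `init_op` maps to the load op (with a clear value for the color
// or depth/stencil aspects), and `finish_op` maps to the store op and an
// optional MSAA resolve target.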
fn map_render_target(rt: &crate::RenderTarget) -> vk::RenderingAttachmentInfo<'static> {
    let mut vk_info = vk::RenderingAttachmentInfo::default()
        .image_view(rt.view.raw)
        .image_layout(vk::ImageLayout::GENERAL);

    match rt.init_op {
        crate::InitOp::Load => vk_info = vk_info.load_op(vk::AttachmentLoadOp::LOAD),
        crate::InitOp::DontCare => vk_info = vk_info.load_op(vk::AttachmentLoadOp::DONT_CARE),
        crate::InitOp::Clear(color) => {
            let cv = if rt.view.aspects.contains(crate::TexelAspects::COLOR) {
                vk::ClearValue {
                    color: match color {
                        crate::TextureColor::TransparentBlack => {
                            vk::ClearColorValue { float32: [0.0; 4] }
                        }
                        crate::TextureColor::OpaqueBlack => vk::ClearColorValue {
                            float32: [0.0, 0.0, 0.0, 1.0],
                        },
                        crate::TextureColor::White => vk::ClearColorValue { float32: [1.0; 4] },
                    },
                }
            } else {
                vk::ClearValue {
                    depth_stencil: vk::ClearDepthStencilValue {
                        depth: color.depth_clear_value(),
                        stencil: color.stencil_clear_value(),
                    },
                }
            };

            vk_info = vk_info.load_op(vk::AttachmentLoadOp::CLEAR).clear_value(cv);
        }
    }

    if let crate::FinishOp::ResolveTo(resolve_view) = rt.finish_op {
        vk_info = vk_info
            .resolve_image_view(resolve_view.raw)
            .resolve_image_layout(vk::ImageLayout::GENERAL)
            .resolve_mode(vk::ResolveModeFlags::AVERAGE);
    }

    vk_info.store_op = match rt.finish_op {
        crate::FinishOp::Store => vk::AttachmentStoreOp::STORE,
        crate::FinishOp::Discard => vk::AttachmentStoreOp::DONT_CARE,
        crate::FinishOp::Ignore => vk::AttachmentStoreOp::DONT_CARE,
        crate::FinishOp::ResolveTo(..) => vk::AttachmentStoreOp::DONT_CARE,
    };

    vk_info
}

fn end_pass(device: &super::Device, cmd_buf: vk::CommandBuffer) {
    if device.command_scope.is_some() {
        unsafe {
            device.debug_utils.cmd_end_debug_utils_label(cmd_buf);
        }
    }
}
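
// Every pass begins with a full memory barrier, a crash-handler marker, an
// optional timestamp query, and (when a command scope is configured) a debug
// label that is closed again by `end_pass` when the pass encoder is dropped.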
impl super::CommandEncoder {
    fn add_marker(&mut self, marker: &str) {
        if let Some(ref mut ch) = self.crash_handler {
            let id = ch.add_marker(marker);
            unsafe {
                self.device
                    .buffer_marker
                    .as_ref()
                    .unwrap()
                    .cmd_write_buffer_marker(
                        self.buffers[0].raw,
                        vk::PipelineStageFlags::ALL_COMMANDS,
                        ch.marker_buf.raw,
                        0,
                        id,
                    );
            }
        }
    }

    fn add_timestamp(&mut self, label: &str) {
        if self.device.timing.is_some() {
            let cmd_buf = self.buffers.first_mut().unwrap();
            if cmd_buf.timed_pass_names.len() == crate::limits::PASS_COUNT {
                log::warn!("Reached the maximum for `limits::PASS_COUNT`, skipping the timer");
                return;
            }
            let index = cmd_buf.timed_pass_names.len() as u32;
            unsafe {
                self.device.core.cmd_write_timestamp(
                    cmd_buf.raw,
                    vk::PipelineStageFlags::TOP_OF_PIPE,
                    cmd_buf.query_pool,
                    index,
                );
            }
            cmd_buf.timed_pass_names.push(label.to_string());
        }
    }

    fn begin_pass(&mut self, label: &str) {
        self.barrier();
        self.add_marker(label);
        self.add_timestamp(label);

        if self.device.command_scope.is_some() {
            self.temp_label.clear();
            self.temp_label.extend_from_slice(label.as_bytes());
            self.temp_label.push(0);
            unsafe {
                self.device.debug_utils.cmd_begin_debug_utils_label(
                    self.buffers[0].raw,
                    &vk::DebugUtilsLabelEXT {
                        p_label_name: self.temp_label.as_ptr() as *const _,
                        ..Default::default()
                    },
                )
            }
        }
    }

    pub(super) fn finish(&mut self) -> vk::CommandBuffer {
        self.barrier();
        self.add_marker("finish");
        let cmd_buf = self.buffers.first_mut().unwrap();
        unsafe {
            if self.device.timing.is_some() {
                let index = cmd_buf.timed_pass_names.len() as u32;
                self.device.core.cmd_write_timestamp(
                    cmd_buf.raw,
                    vk::PipelineStageFlags::TOP_OF_PIPE,
                    cmd_buf.query_pool,
                    index,
                );
            }
            self.device.core.end_command_buffer(cmd_buf.raw).unwrap();
        }
        cmd_buf.raw
    }
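
    // A conservative global barrier: all previous writes are made available to
    // all subsequent reads and writes, plus any extra access flags required by
    // driver workarounds.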
    fn barrier(&mut self) {
        let wa = &self.device.workarounds;
        let barrier = vk::MemoryBarrier {
            src_access_mask: vk::AccessFlags::MEMORY_WRITE | wa.extra_sync_src_access,
            dst_access_mask: vk::AccessFlags::MEMORY_READ
                | vk::AccessFlags::MEMORY_WRITE
                | wa.extra_sync_dst_access,
            ..Default::default()
        };
        unsafe {
            self.device.core.cmd_pipeline_barrier(
                self.buffers[0].raw,
                vk::PipelineStageFlags::ALL_COMMANDS,
                vk::PipelineStageFlags::ALL_COMMANDS,
                vk::DependencyFlags::empty(),
                &[barrier],
                &[],
                &[],
            );
        }
    }

    pub fn transfer(&mut self, label: &str) -> super::TransferCommandEncoder<'_> {
        self.begin_pass(label);
        super::TransferCommandEncoder {
            raw: self.buffers[0].raw,
            device: &self.device,
        }
    }

    pub fn acceleration_structure(
        &mut self,
        label: &str,
    ) -> super::AccelerationStructureCommandEncoder<'_> {
        self.begin_pass(label);
        super::AccelerationStructureCommandEncoder {
            raw: self.buffers[0].raw,
            device: &self.device,
        }
    }

    pub fn compute(&mut self, label: &str) -> super::ComputeCommandEncoder<'_> {
        self.begin_pass(label);
        super::ComputeCommandEncoder {
            cmd_buf: self.buffers.first_mut().unwrap(),
            device: &self.device,
            update_data: &mut self.update_data,
        }
    }

    pub fn render(
        &mut self,
        label: &str,
        targets: crate::RenderTargetSet,
    ) -> super::RenderCommandEncoder<'_> {
        self.begin_pass(label);

        let mut target_size = [0u16; 2];
        let mut color_attachments = Vec::with_capacity(targets.colors.len());
        let depth_stencil_attachment;
        for rt in targets.colors {
            target_size = rt.view.target_size;
            color_attachments.push(map_render_target(rt));
        }

        let mut rendering_info = vk::RenderingInfoKHR::default()
            .layer_count(1)
            .color_attachments(&color_attachments);

        if let Some(rt) = targets.depth_stencil {
            target_size = rt.view.target_size;
            depth_stencil_attachment = map_render_target(&rt);
            if rt.view.aspects.contains(crate::TexelAspects::DEPTH) {
                rendering_info = rendering_info.depth_attachment(&depth_stencil_attachment);
            }
            if rt.view.aspects.contains(crate::TexelAspects::STENCIL) {
                rendering_info = rendering_info.stencil_attachment(&depth_stencil_attachment);
            }
        }

        let render_area = crate::ScissorRect {
            x: 0,
            y: 0,
            w: target_size[0] as u32,
            h: target_size[1] as u32,
        }
        .to_vk();
        let viewport = crate::Viewport {
            x: 0.0,
            y: 0.0,
            w: target_size[0] as f32,
            h: target_size[1] as f32,
            depth: 0.0..1.0,
        }
        .to_vk();
        rendering_info.render_area = render_area;

        let cmd_buf = self.buffers.first_mut().unwrap();
        unsafe {
            self.device
                .core
                .cmd_set_viewport(cmd_buf.raw, 0, &[viewport]);
            self.device
                .core
                .cmd_set_scissor(cmd_buf.raw, 0, &[render_area]);
            self.device
                .dynamic_rendering
                .cmd_begin_rendering(cmd_buf.raw, &rendering_info);
        };

        super::RenderCommandEncoder {
            cmd_buf,
            device: &self.device,
            update_data: &mut self.update_data,
        }
    }

    pub(super) fn check_gpu_crash<T>(&self, ret: Result<T, vk::Result>) -> Option<T> {
        match ret {
            Ok(value) => Some(value),
            Err(vk::Result::ERROR_DEVICE_LOST) => match self.crash_handler {
                Some(ref ch) => {
                    let last_id = unsafe { *(ch.marker_buf.data() as *mut u32) };
                    if last_id != 0 {
                        let (history, last_marker) = ch.extract(last_id);
                        log::error!("Last GPU executed marker is '{last_marker}'");
                        log::info!("Marker history: {history}");
                    }
                    panic!("GPU has crashed in {}", ch.name);
                }
                None => {
                    panic!("GPU has crashed, and no debug information is available.");
                }
            },
            Err(vk::Result::ERROR_OUT_OF_DATE_KHR) => {
                log::warn!("GPU frame is out of date");
                None
            }
            Err(other) => panic!("GPU error {}", other),
        }
    }
}
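
// `start` rotates to the next pre-allocated command buffer, resets its
// descriptor pool and scratch offset, and reads back the timestamp queries
// written the last time this buffer was recorded, turning them into per-pass
// `Duration`s before the query pool is reset for reuse.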
#[hidden_trait::expose]
impl crate::traits::CommandEncoder for super::CommandEncoder {
    type Texture = super::Texture;
    type Frame = super::Frame;

    fn start(&mut self) {
        self.buffers.rotate_left(1);
        let cmd_buf = self.buffers.first_mut().unwrap();
        self.device
            .reset_descriptor_pool(&mut cmd_buf.descriptor_pool);
        if let Some(ref mut scratch) = cmd_buf.scratch {
            scratch.offset = 0;
        }

        let vk_info = vk::CommandBufferBeginInfo {
            flags: vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT,
            ..Default::default()
        };
        unsafe {
            self.device
                .core
                .begin_command_buffer(cmd_buf.raw, &vk_info)
                .unwrap();
        }

        if let Some(ref timing) = self.device.timing {
            self.timings.clear();
            if !cmd_buf.timed_pass_names.is_empty() {
                let mut timestamps = [0u64; super::QUERY_POOL_SIZE];
                unsafe {
                    self.device
                        .core
                        .get_query_pool_results(
                            cmd_buf.query_pool,
                            0,
                            &mut timestamps[..cmd_buf.timed_pass_names.len() + 1],
                            vk::QueryResultFlags::TYPE_64,
                        )
                        .unwrap();
                }
                let mut prev = timestamps[0];
                for (name, &ts) in cmd_buf
                    .timed_pass_names
                    .drain(..)
                    .zip(timestamps[1..].iter())
                {
                    let diff = (ts - prev) as f32 * timing.period;
                    prev = ts;
                    self.timings.push((name, Duration::from_nanos(diff as _)));
                }
            }
            unsafe {
                self.device.core.cmd_reset_query_pool(
                    cmd_buf.raw,
                    cmd_buf.query_pool,
                    0,
                    super::QUERY_POOL_SIZE as u32,
                );
            }
        }
    }

    fn init_texture(&mut self, texture: super::Texture) {
        let barrier = vk::ImageMemoryBarrier {
            old_layout: vk::ImageLayout::UNDEFINED,
            new_layout: vk::ImageLayout::GENERAL,
            image: texture.raw,
            subresource_range: vk::ImageSubresourceRange {
                aspect_mask: super::map_aspects(texture.format.aspects()),
                base_mip_level: 0,
                level_count: vk::REMAINING_MIP_LEVELS,
                base_array_layer: 0,
                layer_count: vk::REMAINING_ARRAY_LAYERS,
            },
            ..Default::default()
        };
        unsafe {
            self.device.core.cmd_pipeline_barrier(
                self.buffers[0].raw,
                vk::PipelineStageFlags::TOP_OF_PIPE,
                vk::PipelineStageFlags::ALL_COMMANDS,
                vk::DependencyFlags::empty(),
                &[],
                &[],
                &[barrier],
            );
        }
    }

    fn present(&mut self, frame: super::Frame) {
        let image_index = match frame.image_index {
            Some(index) => index,
            None => {
                return;
            }
        };

        assert!(self.present.is_none());
        let wa = &self.device.workarounds;
        self.present = Some(if frame.xr_swapchain != 0 {
            super::Presentation::Xr {
                swapchain: frame.xr_swapchain,
                view_count: frame.xr_view_count,
                target_size: frame.swapchain.target_size,
                views: frame.xr_views,
            }
        } else {
            let barrier = vk::ImageMemoryBarrier {
                old_layout: vk::ImageLayout::GENERAL,
                new_layout: vk::ImageLayout::PRESENT_SRC_KHR,
                image: frame.internal.image,
                subresource_range: vk::ImageSubresourceRange {
                    aspect_mask: vk::ImageAspectFlags::COLOR,
                    base_mip_level: 0,
                    level_count: 1,
                    base_array_layer: 0,
                    layer_count: 1,
                },
                src_access_mask: vk::AccessFlags::MEMORY_WRITE | wa.extra_sync_src_access,
                ..Default::default()
            };
            unsafe {
                self.device.core.cmd_pipeline_barrier(
                    self.buffers[0].raw,
                    vk::PipelineStageFlags::ALL_COMMANDS,
                    vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                    vk::DependencyFlags::empty(),
                    &[],
                    &[],
                    &[barrier],
                );
            }
            super::Presentation::Window {
                swapchain: frame.swapchain.raw,
                image_index,
                acquire_semaphore: frame.internal.acquire_semaphore,
                present_semaphore: frame.internal.present_semaphore,
            }
        });
    }

    fn timings(&self) -> &crate::Timings {
        &self.timings
    }
}
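
// `vkCmdFillBuffer` takes a 32-bit fill word, so the single byte value is
// replicated across all four bytes (multiplying by 0x0101_0101) before the
// fill is recorded.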
#[hidden_trait::expose]
impl crate::traits::TransferEncoder for super::TransferCommandEncoder<'_> {
    type BufferPiece = crate::BufferPiece;
    type TexturePiece = crate::TexturePiece;

    fn fill_buffer(&mut self, dst: crate::BufferPiece, size: u64, value: u8) {
        let value_u32 = (value as u32) * 0x1010101;
        unsafe {
            self.device
                .core
                .cmd_fill_buffer(self.raw, dst.buffer.raw, dst.offset, size, value_u32)
        };
    }

    fn copy_buffer_to_buffer(
        &mut self,
        src: crate::BufferPiece,
        dst: crate::BufferPiece,
        size: u64,
    ) {
        let copy = vk::BufferCopy {
            src_offset: src.offset,
            dst_offset: dst.offset,
            size,
        };
        unsafe {
            self.device
                .core
                .cmd_copy_buffer(self.raw, src.buffer.raw, dst.buffer.raw, &[copy])
        };
    }

    fn copy_texture_to_texture(
        &mut self,
        src: crate::TexturePiece,
        dst: crate::TexturePiece,
        size: crate::Extent,
    ) {
        let copy = vk::ImageCopy {
            src_subresource: src.subresource_layers(),
            src_offset: map_origin(&src.origin),
            dst_subresource: dst.subresource_layers(),
            dst_offset: map_origin(&dst.origin),
            extent: super::map_extent_3d(&size),
        };
        unsafe {
            self.device.core.cmd_copy_image(
                self.raw,
                src.texture.raw,
                vk::ImageLayout::GENERAL,
                dst.texture.raw,
                vk::ImageLayout::GENERAL,
                &[copy],
            )
        };
    }

    fn copy_buffer_to_texture(
        &mut self,
        src: crate::BufferPiece,
        bytes_per_row: u32,
        dst: crate::TexturePiece,
        size: crate::Extent,
    ) {
        let copy = make_buffer_image_copy(&src, bytes_per_row, &dst, &size);
        unsafe {
            self.device.core.cmd_copy_buffer_to_image(
                self.raw,
                src.buffer.raw,
                dst.texture.raw,
                vk::ImageLayout::GENERAL,
                &[copy],
            )
        };
    }

    fn copy_texture_to_buffer(
        &mut self,
        src: crate::TexturePiece,
        dst: crate::BufferPiece,
        bytes_per_row: u32,
        size: crate::Extent,
    ) {
        let copy = make_buffer_image_copy(&dst, bytes_per_row, &src, &size);
        unsafe {
            self.device.core.cmd_copy_image_to_buffer(
                self.raw,
                src.texture.raw,
                vk::ImageLayout::GENERAL,
                dst.buffer.raw,
                &[copy],
            )
        };
    }
}

impl Drop for super::TransferCommandEncoder<'_> {
    fn drop(&mut self) {
        end_pass(self.device, self.raw);
    }
}
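
// Acceleration-structure builds translate the mesh descriptions into Vulkan
// geometry and build-range structures and record a single
// `vkCmdBuildAccelerationStructuresKHR` call; scratch memory is provided by
// the caller, and `build_bottom_level` asserts that its device address meets
// the device's scratch alignment.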
#[hidden_trait::expose]
impl crate::traits::AccelerationStructureEncoder
    for super::AccelerationStructureCommandEncoder<'_>
{
    type AccelerationStructure = crate::AccelerationStructure;
    type AccelerationStructureMesh = crate::AccelerationStructureMesh;
    type BufferPiece = crate::BufferPiece;

    fn build_bottom_level(
        &mut self,
        acceleration_structure: super::AccelerationStructure,
        meshes: &[crate::AccelerationStructureMesh],
        scratch_data: crate::BufferPiece,
    ) {
        let rt = self.device.ray_tracing.as_ref().unwrap();
        let mut blas_input = self.device.map_acceleration_structure_meshes(meshes);
        blas_input.build_info.dst_acceleration_structure = acceleration_structure.raw;
        let scratch_address = self.device.get_device_address(&scratch_data);
        // Power-of-two alignment check: the bits below the alignment must be zero.
        assert_eq!(
            scratch_address & (rt.scratch_buffer_alignment - 1),
            0,
            "BLAS scratch address {scratch_address} is not aligned"
        );
        blas_input.build_info.scratch_data = vk::DeviceOrHostAddressKHR {
            device_address: scratch_address,
        };

        unsafe {
            rt.acceleration_structure.cmd_build_acceleration_structures(
                self.raw,
                &[blas_input.build_info],
                &[&blas_input.build_range_infos],
            );
        }
    }

    fn build_top_level(
        &mut self,
        acceleration_structure: super::AccelerationStructure,
        _bottom_level: &[super::AccelerationStructure],
        instance_count: u32,
        instance_data: crate::BufferPiece,
        scratch_data: crate::BufferPiece,
    ) {
        let build_range_info = vk::AccelerationStructureBuildRangeInfoKHR {
            primitive_count: instance_count,
            primitive_offset: 0,
            first_vertex: 0,
            transform_offset: 0,
        };
        let geometry = vk::AccelerationStructureGeometryKHR {
            geometry_type: vk::GeometryTypeKHR::INSTANCES,
            geometry: vk::AccelerationStructureGeometryDataKHR {
                instances: vk::AccelerationStructureGeometryInstancesDataKHR {
                    data: vk::DeviceOrHostAddressConstKHR {
                        device_address: self.device.get_device_address(&instance_data),
                    },
                    ..Default::default()
                },
            },
            ..Default::default()
        };
        let geometries = [geometry];
        let build_info = vk::AccelerationStructureBuildGeometryInfoKHR {
            ty: vk::AccelerationStructureTypeKHR::TOP_LEVEL,
            mode: vk::BuildAccelerationStructureModeKHR::BUILD,
            scratch_data: vk::DeviceOrHostAddressKHR {
                device_address: self.device.get_device_address(&scratch_data),
            },
            dst_acceleration_structure: acceleration_structure.raw,
            ..Default::default()
        }
        .geometries(&geometries);

        let rt = self.device.ray_tracing.as_ref().unwrap();
        unsafe {
            rt.acceleration_structure.cmd_build_acceleration_structures(
                self.raw,
                &[build_info],
                &[&[build_range_info]],
            );
        }
    }
}

impl Drop for super::AccelerationStructureCommandEncoder<'_> {
    fn drop(&mut self) {
        end_pass(self.device, self.raw);
    }
}

impl<'a> super::ComputeCommandEncoder<'a> {
    pub fn with<'b, 'p>(
        &'b mut self,
        pipeline: &'p super::ComputePipeline,
    ) -> super::PipelineEncoder<'b, 'p> {
        let bind_point = vk::PipelineBindPoint::COMPUTE;
        unsafe {
            self.device
                .core
                .cmd_bind_pipeline(self.cmd_buf.raw, bind_point, pipeline.raw)
        };
        super::PipelineEncoder {
            cmd_buf: self.cmd_buf,
            layout: &pipeline.layout,
            bind_point,
            device: self.device,
            update_data: self.update_data,
        }
    }
}

impl Drop for super::ComputeCommandEncoder<'_> {
    fn drop(&mut self) {
        end_pass(self.device, self.cmd_buf.raw);
    }
}

impl<'a> super::RenderCommandEncoder<'a> {
    pub fn with<'b, 'p>(
        &'b mut self,
        pipeline: &'p super::RenderPipeline,
    ) -> super::PipelineEncoder<'b, 'p> {
        let bind_point = vk::PipelineBindPoint::GRAPHICS;
        unsafe {
            self.device
                .core
                .cmd_bind_pipeline(self.cmd_buf.raw, bind_point, pipeline.raw)
        };
        super::PipelineEncoder {
            cmd_buf: self.cmd_buf,
            layout: &pipeline.layout,
            bind_point,
            device: self.device,
            update_data: self.update_data,
        }
    }
}

impl Drop for super::RenderCommandEncoder<'_> {
    fn drop(&mut self) {
        unsafe {
            self.device
                .dynamic_rendering
                .cmd_end_rendering(self.cmd_buf.raw)
        };
        end_pass(self.device, self.cmd_buf.raw);
    }
}

impl crate::ScissorRect {
    const fn to_vk(&self) -> vk::Rect2D {
        vk::Rect2D {
            offset: vk::Offset2D {
                x: self.x,
                y: self.y,
            },
            extent: vk::Extent2D {
                width: self.w,
                height: self.h,
            },
        }
    }
}

impl crate::Viewport {
    fn to_vk(&self) -> vk::Viewport {
        vk::Viewport {
            x: self.x,
            // A negative viewport height flips the Y axis; offsetting by
            // `self.h` keeps the viewport covering the same rows.
            y: self.y + self.h,
            width: self.w,
            height: -self.h,
            min_depth: self.depth.start,
            max_depth: self.depth.end,
        }
    }
}

#[hidden_trait::expose]
impl crate::traits::RenderEncoder for super::RenderCommandEncoder<'_> {
    fn set_scissor_rect(&mut self, rect: &crate::ScissorRect) {
        let vk_scissor = rect.to_vk();
        unsafe {
            self.device
                .core
                .cmd_set_scissor(self.cmd_buf.raw, 0, &[vk_scissor])
        };
    }

    fn set_viewport(&mut self, viewport: &crate::Viewport) {
        let vk_viewport = viewport.to_vk();
        unsafe {
            self.device
                .core
                .cmd_set_viewport(self.cmd_buf.raw, 0, &[vk_viewport])
        };
    }

    fn set_stencil_reference(&mut self, reference: u32) {
        unsafe {
            self.device.core.cmd_set_stencil_reference(
                self.cmd_buf.raw,
                vk::StencilFaceFlags::FRONT_AND_BACK,
                reference,
            )
        };
    }
}
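
// Binding shader data goes through Vulkan descriptor update templates: the
// data is serialized into the raw `update_data` blob via `PipelineContext`,
// a descriptor set is allocated from the command buffer's pool, updated with
// the template, and bound at the requested group index.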
#[hidden_trait::expose]
impl crate::traits::PipelineEncoder for super::PipelineEncoder<'_, '_> {
    fn bind<D: crate::ShaderData>(&mut self, group: u32, data: &D) {
        let dsl = &self.layout.descriptor_set_layouts[group as usize];
        if !dsl.is_empty() {
            self.update_data.clear();
            self.update_data.resize(dsl.template_size as usize, 0);
            data.fill(super::PipelineContext {
                update_data: self.update_data.as_mut_slice(),
                template_offsets: &dsl.template_offsets,
                scratch: self.cmd_buf.scratch.as_mut(),
            });
        }

        let vk_set = self
            .device
            .allocate_descriptor_set(&mut self.cmd_buf.descriptor_pool, dsl);
        unsafe {
            if !dsl.is_empty() {
                self.device.core.update_descriptor_set_with_template(
                    vk_set,
                    dsl.update_template,
                    self.update_data.as_ptr() as *const _,
                );
            }
            self.device.core.cmd_bind_descriptor_sets(
                self.cmd_buf.raw,
                self.bind_point,
                self.layout.raw,
                group,
                &[vk_set],
                &[],
            );
        }
    }
}

#[hidden_trait::expose]
impl crate::traits::ComputePipelineEncoder for super::PipelineEncoder<'_, '_> {
    type BufferPiece = crate::BufferPiece;

    fn dispatch(&mut self, groups: [u32; 3]) {
        unsafe {
            self.device
                .core
                .cmd_dispatch(self.cmd_buf.raw, groups[0], groups[1], groups[2])
        };
    }

    fn dispatch_indirect(&mut self, indirect_buf: crate::BufferPiece) {
        unsafe {
            self.device.core.cmd_dispatch_indirect(
                self.cmd_buf.raw,
                indirect_buf.buffer.raw,
                indirect_buf.offset,
            )
        };
    }
}

#[hidden_trait::expose]
impl crate::traits::RenderEncoder for super::PipelineEncoder<'_, '_> {
    fn set_scissor_rect(&mut self, rect: &crate::ScissorRect) {
        let vk_scissor = rect.to_vk();
        unsafe {
            self.device
                .core
                .cmd_set_scissor(self.cmd_buf.raw, 0, &[vk_scissor])
        };
    }

    fn set_viewport(&mut self, viewport: &crate::Viewport) {
        let vk_viewport = viewport.to_vk();
        unsafe {
            self.device
                .core
                .cmd_set_viewport(self.cmd_buf.raw, 0, &[vk_viewport])
        };
    }

    fn set_stencil_reference(&mut self, reference: u32) {
        unsafe {
            self.device.core.cmd_set_stencil_reference(
                self.cmd_buf.raw,
                vk::StencilFaceFlags::FRONT_AND_BACK,
                reference,
            )
        };
    }
}

#[hidden_trait::expose]
impl crate::traits::RenderPipelineEncoder for super::PipelineEncoder<'_, '_> {
    type BufferPiece = crate::BufferPiece;

    fn bind_vertex(&mut self, index: u32, vertex_buf: crate::BufferPiece) {
        unsafe {
            self.device.core.cmd_bind_vertex_buffers(
                self.cmd_buf.raw,
                index,
                &[vertex_buf.buffer.raw],
                &[vertex_buf.offset],
            );
        }
    }

    fn draw(
        &mut self,
        start_vertex: u32,
        vertex_count: u32,
        start_instance: u32,
        instance_count: u32,
    ) {
        unsafe {
            self.device.core.cmd_draw(
                self.cmd_buf.raw,
                vertex_count,
                instance_count,
                start_vertex,
                start_instance,
            );
        }
    }

    fn draw_indexed(
        &mut self,
        index_buf: crate::BufferPiece,
        index_type: crate::IndexType,
        index_count: u32,
        base_vertex: i32,
        start_instance: u32,
        instance_count: u32,
    ) {
        let raw_index_type = super::map_index_type(index_type);
        unsafe {
            self.device.core.cmd_bind_index_buffer(
                self.cmd_buf.raw,
                index_buf.buffer.raw,
                index_buf.offset,
                raw_index_type,
            );
            self.device.core.cmd_draw_indexed(
                self.cmd_buf.raw,
                index_count,
                instance_count,
                0,
                base_vertex,
                start_instance,
            );
        }
    }

    fn draw_indirect(&mut self, indirect_buf: crate::BufferPiece) {
        unsafe {
            self.device.core.cmd_draw_indirect(
                self.cmd_buf.raw,
                indirect_buf.buffer.raw,
                indirect_buf.offset,
                1,
                0,
            );
        }
    }

    fn draw_indexed_indirect(
        &mut self,
        index_buf: crate::BufferPiece,
        index_type: crate::IndexType,
        indirect_buf: crate::BufferPiece,
    ) {
        let raw_index_type = super::map_index_type(index_type);
        unsafe {
            self.device.core.cmd_bind_index_buffer(
                self.cmd_buf.raw,
                index_buf.buffer.raw,
                index_buf.offset,
                raw_index_type,
            );
            self.device.core.cmd_draw_indexed_indirect(
                self.cmd_buf.raw,
                indirect_buf.buffer.raw,
                indirect_buf.offset,
                1,
                0,
            );
        }
    }
}