use ash::vk;
use std::{ptr, str, time::Duration};

impl super::CrashHandler {
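    /// Appends `marker` to the ring buffer of marker bytes, separated by `|`,
    /// and returns an id packing the marker's start offset into the low 16 bits
    /// and its end offset into the high 16 bits.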
    fn add_marker(&mut self, marker: &str) -> u32 {
        if self.next_offset < self.raw_string.len() {
            self.raw_string[self.next_offset] = b'|';
            self.next_offset += 1;
        }
        let len = marker.len().min(self.raw_string.len());
        if self.next_offset + len > self.raw_string.len() {
            self.next_offset = 0;
        }
        let start = self.next_offset;
        self.next_offset += len;
        let end = self.next_offset;
        self.raw_string[start..end].copy_from_slice(&marker.as_bytes()[..len]);
        start as u32 | (end << 16) as u32
    }

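    /// Decodes an id produced by `add_marker` into the accumulated marker
    /// history (everything before `start`) and the marker string itself.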
    pub(super) fn extract(&self, id: u32) -> (&str, &str) {
        let start = id as usize & 0xFFFF;
        let end = (id >> 16) as usize;
        let history = str::from_utf8(&self.raw_string[..start]).unwrap_or_default();
        // A marker may have been truncated mid code point, so avoid
        // panicking while reporting a crash.
        let marker = str::from_utf8(&self.raw_string[start..end]).unwrap_or_default();
        (history, marker)
    }
}

impl super::PipelineContext<'_> {
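    /// Writes a single descriptor value into the update-template blob at the
    /// offset registered for binding `index`.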
    #[inline]
    fn write<T>(&mut self, index: u32, value: T) {
        let offset = self.template_offsets[index as usize];
        unsafe {
            ptr::write(
                self.update_data.as_mut_ptr().offset(offset as isize) as *mut T,
                value,
            )
        };
    }

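    /// Writes each item of `iter` into consecutive slots of the descriptor
    /// array binding at `index`.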
    #[inline]
    fn write_array<I: Iterator>(&mut self, index: u32, iter: I) {
        let base_offset = self.template_offsets[index as usize];
        let base_ptr =
            unsafe { self.update_data.as_mut_ptr().offset(base_offset as isize) as *mut I::Item };
        for (i, value) in iter.enumerate() {
            unsafe { ptr::write(base_ptr.add(i), value) };
        }
    }
}

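// Plain-old-data values are bound one of two ways: bindings marked in
// `inline_uniform_mask` are written directly into the update template as
// inline uniform data; anything else is copied into the per-command-buffer
// scratch buffer and bound as a uniform buffer range.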
impl<T: bytemuck::Pod> crate::ShaderBindable for T {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        if ctx.inline_uniform_mask & (1 << index) != 0 {
            ctx.write(index, *self);
        } else {
            let info = {
                let scratch = ctx
                    .scratch
                    .as_mut()
                    .expect("scratch buffer required for UBO binding");
                let data = bytemuck::bytes_of(self);
                let aligned_offset =
                    (scratch.offset + scratch.alignment - 1) & !(scratch.alignment - 1);
                let end = aligned_offset + data.len() as u64;
                assert!(
                    end <= scratch.capacity,
                    "Scratch buffer overflow: needed {end}, capacity {}",
                    scratch.capacity
                );
                unsafe {
                    ptr::copy_nonoverlapping(
                        data.as_ptr(),
                        scratch.mapped.add(aligned_offset as usize),
                        data.len(),
                    );
                }
                scratch.offset = end;
                vk::DescriptorBufferInfo {
                    buffer: scratch.raw,
                    offset: aligned_offset,
                    range: data.len() as u64,
                }
            };
            ctx.write(index, info);
        }
    }
}
impl crate::ShaderBindable for super::TextureView {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write(
            index,
            vk::DescriptorImageInfo {
                sampler: vk::Sampler::null(),
                image_view: self.raw,
                image_layout: vk::ImageLayout::GENERAL,
            },
        );
    }
}
impl<'a, const N: crate::ResourceIndex> crate::ShaderBindable for &'a crate::TextureArray<N> {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        assert!(self.data.len() <= N as usize);
        ctx.write_array(
            index,
            self.data
                .iter()
                .map(|view| vk::DescriptorImageInfo {
                    sampler: vk::Sampler::null(),
                    image_view: view.raw,
                    image_layout: vk::ImageLayout::GENERAL,
                })
                .cycle()
                .take(N as usize),
        );
    }
}
impl crate::ShaderBindable for super::Sampler {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write(
            index,
            vk::DescriptorImageInfo {
                sampler: self.raw,
                image_view: vk::ImageView::null(),
                image_layout: vk::ImageLayout::UNDEFINED,
            },
        );
    }
}
impl crate::ShaderBindable for crate::BufferPiece {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write(
            index,
            vk::DescriptorBufferInfo {
                buffer: self.buffer.raw,
                offset: self.offset,
                range: vk::WHOLE_SIZE,
            },
        );
    }
}
impl<'a, const N: crate::ResourceIndex> crate::ShaderBindable for &'a crate::BufferArray<N> {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        assert!(self.data.len() <= N as usize);
        ctx.write_array(
            index,
            self.data
                .iter()
                .map(|piece| vk::DescriptorBufferInfo {
                    buffer: piece.buffer.raw,
                    offset: piece.offset,
                    range: vk::WHOLE_SIZE,
                })
                .cycle()
                .take(N as usize),
        );
    }
}
impl crate::ShaderBindable for super::AccelerationStructure {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write(index, self.raw);
    }
}
impl<'a, const N: crate::ResourceIndex> crate::ShaderBindable
    for &'a crate::AccelerationStructureArray<N>
{
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        assert!(self.data.len() <= N as usize);
        ctx.write_array(
            index,
            self.data
                .iter()
                .map(|accel| accel.raw)
                .cycle()
                .take(N as usize),
        );
    }
}

impl crate::TexturePiece {
    fn subresource_layers(&self) -> vk::ImageSubresourceLayers {
        vk::ImageSubresourceLayers {
            aspect_mask: super::map_aspects(self.texture.format.aspects()),
            mip_level: self.mip_level,
            base_array_layer: self.array_layer,
            layer_count: 1,
        }
    }
}

fn map_origin(origin: &[u32; 3]) -> vk::Offset3D {
    vk::Offset3D {
        x: origin[0] as i32,
        y: origin[1] as i32,
        z: origin[2] as i32,
    }
}

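// Note: Vulkan's `buffer_row_length` is measured in texels, not bytes, so the
// byte pitch is converted via the format's block size and block dimensions
// (for block-compressed formats, a "row" of blocks spans the block height).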
fn make_buffer_image_copy(
    buffer: &crate::BufferPiece,
    bytes_per_row: u32,
    texture: &crate::TexturePiece,
    size: &crate::Extent,
) -> vk::BufferImageCopy {
    let block_info = texture.texture.format.block_info();
    vk::BufferImageCopy {
        buffer_offset: buffer.offset,
        buffer_row_length: block_info.dimensions.0 as u32
            * (bytes_per_row / block_info.size as u32),
        buffer_image_height: 0,
        image_subresource: texture.subresource_layers(),
        image_offset: map_origin(&texture.origin),
        image_extent: super::map_extent_3d(size),
    }
}

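// Translates a render target description into a dynamic-rendering attachment.
// Attachments stay in `GENERAL` layout; load/store ops and clear values are
// derived from the target's init and finish ops.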
fn map_render_target(rt: &crate::RenderTarget) -> vk::RenderingAttachmentInfo<'static> {
    let mut vk_info = vk::RenderingAttachmentInfo::default()
        .image_view(rt.view.raw)
        .image_layout(vk::ImageLayout::GENERAL);

    match rt.init_op {
        crate::InitOp::Load => vk_info = vk_info.load_op(vk::AttachmentLoadOp::LOAD),
        crate::InitOp::DontCare => vk_info = vk_info.load_op(vk::AttachmentLoadOp::DONT_CARE),
        crate::InitOp::Clear(color) => {
            let cv = if rt.view.aspects.contains(crate::TexelAspects::COLOR) {
                vk::ClearValue {
                    color: match color {
                        crate::TextureColor::TransparentBlack => {
                            vk::ClearColorValue { float32: [0.0; 4] }
                        }
                        crate::TextureColor::OpaqueBlack => vk::ClearColorValue {
                            float32: [0.0, 0.0, 0.0, 1.0],
                        },
                        crate::TextureColor::White => vk::ClearColorValue { float32: [1.0; 4] },
                    },
                }
            } else {
                vk::ClearValue {
                    depth_stencil: vk::ClearDepthStencilValue {
                        depth: color.depth_clear_value(),
                        stencil: color.stencil_clear_value(),
                    },
                }
            };

            vk_info = vk_info.load_op(vk::AttachmentLoadOp::CLEAR).clear_value(cv);
        }
    }

    if let crate::FinishOp::ResolveTo(resolve_view) = rt.finish_op {
        vk_info = vk_info
            .resolve_image_view(resolve_view.raw)
            .resolve_image_layout(vk::ImageLayout::GENERAL)
            .resolve_mode(vk::ResolveModeFlags::AVERAGE);
    }

    vk_info.store_op = match rt.finish_op {
        crate::FinishOp::Store => vk::AttachmentStoreOp::STORE,
        crate::FinishOp::Discard => vk::AttachmentStoreOp::DONT_CARE,
        crate::FinishOp::Ignore => vk::AttachmentStoreOp::DONT_CARE,
        crate::FinishOp::ResolveTo(..) => vk::AttachmentStoreOp::DONT_CARE,
    };

    vk_info
}

fn end_pass(device: &super::Device, cmd_buf: vk::CommandBuffer) {
    if device.command_scope.is_some() {
        unsafe {
            device.debug_utils.cmd_end_debug_utils_label(cmd_buf);
        }
    }
}

impl super::CommandEncoder {
    fn add_marker(&mut self, marker: &str) {
        if let Some(ref mut ch) = self.crash_handler {
            let id = ch.add_marker(marker);
            unsafe {
                self.device
                    .buffer_marker
                    .as_ref()
                    .unwrap()
                    .cmd_write_buffer_marker(
                        self.buffers[0].raw,
                        vk::PipelineStageFlags::ALL_COMMANDS,
                        ch.marker_buf.raw,
                        0,
                        id,
                    );
            }
        }
    }

    fn add_timestamp(&mut self, label: &str) {
        if self.device.timing.is_some() {
            let cmd_buf = self.buffers.first_mut().unwrap();
            if cmd_buf.timed_pass_names.len() == crate::limits::PASS_COUNT {
                log::warn!("Reached the maximum for `limits::PASS_COUNT`, skipping the timer");
                return;
            }
            let index = cmd_buf.timed_pass_names.len() as u32;
            unsafe {
                self.device.core.cmd_write_timestamp(
                    cmd_buf.raw,
                    vk::PipelineStageFlags::TOP_OF_PIPE,
                    cmd_buf.query_pool,
                    index,
                );
            }
            cmd_buf.timed_pass_names.push(label.to_string());
        }
    }

    fn begin_pass(&mut self, label: &str) {
        self.barrier();
        self.add_marker(label);
        self.add_timestamp(label);

        if self.device.command_scope.is_some() {
            self.temp_label.clear();
            self.temp_label.extend_from_slice(label.as_bytes());
            self.temp_label.push(0);
            unsafe {
                self.device.debug_utils.cmd_begin_debug_utils_label(
                    self.buffers[0].raw,
                    &vk::DebugUtilsLabelEXT {
                        p_label_name: self.temp_label.as_ptr() as *const _,
                        ..Default::default()
                    },
                )
            }
        }
    }

    pub(super) fn finish(&mut self) -> vk::CommandBuffer {
        self.barrier();
        self.add_marker("finish");
        let cmd_buf = self.buffers.first_mut().unwrap();
        unsafe {
            if self.device.timing.is_some() {
                let index = cmd_buf.timed_pass_names.len() as u32;
                self.device.core.cmd_write_timestamp(
                    cmd_buf.raw,
                    vk::PipelineStageFlags::TOP_OF_PIPE,
                    cmd_buf.query_pool,
                    index,
                );
            }
            self.device.core.end_command_buffer(cmd_buf.raw).unwrap();
        }
        cmd_buf.raw
    }

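    /// Emits a conservative ALL_COMMANDS -> ALL_COMMANDS memory barrier
    /// between passes, flushing all writes (plus any workaround access flags)
    /// and invalidating all reads.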
    fn barrier(&mut self) {
        let wa = &self.device.workarounds;
        let barrier = vk::MemoryBarrier {
            src_access_mask: vk::AccessFlags::MEMORY_WRITE | wa.extra_sync_src_access,
            dst_access_mask: vk::AccessFlags::MEMORY_READ
                | vk::AccessFlags::MEMORY_WRITE
                | wa.extra_sync_dst_access,
            ..Default::default()
        };
        unsafe {
            self.device.core.cmd_pipeline_barrier(
                self.buffers[0].raw,
                vk::PipelineStageFlags::ALL_COMMANDS,
                vk::PipelineStageFlags::ALL_COMMANDS,
                vk::DependencyFlags::empty(),
                &[barrier],
                &[],
                &[],
            );
        }
    }

    pub fn transfer(&mut self, label: &str) -> super::TransferCommandEncoder<'_> {
        self.begin_pass(label);
        super::TransferCommandEncoder {
            raw: self.buffers[0].raw,
            device: &self.device,
        }
    }

    pub fn acceleration_structure(
        &mut self,
        label: &str,
    ) -> super::AccelerationStructureCommandEncoder<'_> {
        self.begin_pass(label);
        super::AccelerationStructureCommandEncoder {
            raw: self.buffers[0].raw,
            device: &self.device,
        }
    }

    pub fn compute(&mut self, label: &str) -> super::ComputeCommandEncoder<'_> {
        self.begin_pass(label);
        super::ComputeCommandEncoder {
            cmd_buf: self.buffers.first_mut().unwrap(),
            device: &self.device,
            update_data: &mut self.update_data,
        }
    }

    pub fn render(
        &mut self,
        label: &str,
        targets: crate::RenderTargetSet,
    ) -> super::RenderCommandEncoder<'_> {
        self.begin_pass(label);

        let mut target_size = [0u16; 2];
        let mut color_attachments = Vec::with_capacity(targets.colors.len());
        let depth_stencil_attachment;
        for rt in targets.colors {
            target_size = rt.view.target_size;
            color_attachments.push(map_render_target(rt));
        }

        let mut rendering_info = vk::RenderingInfoKHR::default()
            .layer_count(1)
            .color_attachments(&color_attachments);

        if let Some(rt) = targets.depth_stencil {
            target_size = rt.view.target_size;
            depth_stencil_attachment = map_render_target(&rt);
            if rt.view.aspects.contains(crate::TexelAspects::DEPTH) {
                rendering_info = rendering_info.depth_attachment(&depth_stencil_attachment);
            }
            if rt.view.aspects.contains(crate::TexelAspects::STENCIL) {
                rendering_info = rendering_info.stencil_attachment(&depth_stencil_attachment);
            }
        }

        let render_area = crate::ScissorRect {
            x: 0,
            y: 0,
            w: target_size[0] as u32,
            h: target_size[1] as u32,
        }
        .to_vk();
        let viewport = crate::Viewport {
            x: 0.0,
            y: 0.0,
            w: target_size[0] as f32,
            h: target_size[1] as f32,
            depth: 0.0..1.0,
        }
        .to_vk();
        rendering_info.render_area = render_area;

        let cmd_buf = self.buffers.first_mut().unwrap();
        unsafe {
            self.device
                .core
                .cmd_set_viewport(cmd_buf.raw, 0, &[viewport]);
            self.device
                .core
                .cmd_set_scissor(cmd_buf.raw, 0, &[render_area]);
            self.device
                .dynamic_rendering
                .cmd_begin_rendering(cmd_buf.raw, &rendering_info);
        };

        super::RenderCommandEncoder {
            cmd_buf,
            device: &self.device,
            update_data: &mut self.update_data,
        }
    }

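    /// Unwraps a Vulkan result: on `ERROR_DEVICE_LOST`, reports the last GPU
    /// marker and its history (if a crash handler is installed) before
    /// panicking; `ERROR_OUT_OF_DATE_KHR` is downgraded to `None`.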
    pub(super) fn check_gpu_crash<T>(&self, ret: Result<T, vk::Result>) -> Option<T> {
        match ret {
            Ok(value) => Some(value),
            Err(vk::Result::ERROR_DEVICE_LOST) => match self.crash_handler {
                Some(ref ch) => {
                    let last_id = unsafe { *(ch.marker_buf.data() as *const u32) };
                    if last_id != 0 {
                        let (history, last_marker) = ch.extract(last_id);
                        log::error!("Last GPU executed marker is '{last_marker}'");
                        log::info!("Marker history: {history}");
                    }
                    panic!("GPU has crashed in {}", ch.name);
                }
                None => {
                    panic!("GPU has crashed, and no debug information is available.");
                }
            },
            Err(vk::Result::ERROR_OUT_OF_DATE_KHR) => {
                log::warn!("GPU frame is out of date");
                None
            }
            Err(other) => panic!("GPU error {other}"),
        }
    }
}

#[hidden_trait::expose]
impl crate::traits::CommandEncoder for super::CommandEncoder {
    type Texture = super::Texture;
    type Frame = super::Frame;

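    /// Begins recording: rotates to the next internal command buffer, resets
    /// its descriptor pool and scratch offset, and resolves pass timings from
    /// that buffer's previous submission before resetting the query pool.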
    fn start(&mut self) {
        self.buffers.rotate_left(1);
        let cmd_buf = self.buffers.first_mut().unwrap();
        self.device
            .reset_descriptor_pool(&mut cmd_buf.descriptor_pool);
        if let Some(ref mut scratch) = cmd_buf.scratch {
            scratch.offset = 0;
        }

        let vk_info = vk::CommandBufferBeginInfo {
            flags: vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT,
            ..Default::default()
        };
        unsafe {
            self.device
                .core
                .begin_command_buffer(cmd_buf.raw, &vk_info)
                .unwrap();
        }

        if let Some(ref timing) = self.device.timing {
            self.timings.clear();
            if !cmd_buf.timed_pass_names.is_empty() {
                let mut timestamps = [0u64; super::QUERY_POOL_SIZE];
                unsafe {
                    self.device
                        .core
                        .get_query_pool_results(
                            cmd_buf.query_pool,
                            0,
                            &mut timestamps[..cmd_buf.timed_pass_names.len() + 1],
                            vk::QueryResultFlags::TYPE_64,
                        )
                        .unwrap();
                }
                let mut prev = timestamps[0];
                for (name, &ts) in cmd_buf
                    .timed_pass_names
                    .drain(..)
                    .zip(timestamps[1..].iter())
                {
                    let diff = (ts - prev) as f32 * timing.period;
                    prev = ts;
                    self.timings.push((name, Duration::from_nanos(diff as _)));
                }
            }
            unsafe {
                self.device.core.cmd_reset_query_pool(
                    cmd_buf.raw,
                    cmd_buf.query_pool,
                    0,
                    super::QUERY_POOL_SIZE as u32,
                );
            }
        }
    }

    fn init_texture(&mut self, texture: super::Texture) {
        let barrier = vk::ImageMemoryBarrier {
            old_layout: vk::ImageLayout::UNDEFINED,
            new_layout: vk::ImageLayout::GENERAL,
            image: texture.raw,
            subresource_range: vk::ImageSubresourceRange {
                aspect_mask: super::map_aspects(texture.format.aspects()),
                base_mip_level: 0,
                level_count: vk::REMAINING_MIP_LEVELS,
                base_array_layer: 0,
                layer_count: vk::REMAINING_ARRAY_LAYERS,
            },
            ..Default::default()
        };
        unsafe {
            self.device.core.cmd_pipeline_barrier(
                self.buffers[0].raw,
                vk::PipelineStageFlags::TOP_OF_PIPE,
                vk::PipelineStageFlags::ALL_COMMANDS,
                vk::DependencyFlags::empty(),
                &[],
                &[],
                &[barrier],
            );
        }
    }

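    /// Stashes the presentation request for the submit call. Window frames
    /// also get a barrier transitioning the swapchain image to
    /// `PRESENT_SRC_KHR`; XR swapchain images are handed off as-is.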
    fn present(&mut self, frame: super::Frame) {
        let image_index = match frame.image_index {
            Some(index) => index,
            None => {
                return;
            }
        };

        assert!(self.present.is_none());
        let wa = &self.device.workarounds;
        self.present = Some(if frame.xr_swapchain != 0 {
            super::Presentation::Xr {
                swapchain: frame.xr_swapchain,
                view_count: frame.xr_view_count,
                target_size: frame.swapchain.target_size,
                views: frame.xr_views,
            }
        } else {
            let barrier = vk::ImageMemoryBarrier {
                old_layout: vk::ImageLayout::GENERAL,
                new_layout: vk::ImageLayout::PRESENT_SRC_KHR,
                image: frame.internal.image,
                subresource_range: vk::ImageSubresourceRange {
                    aspect_mask: vk::ImageAspectFlags::COLOR,
                    base_mip_level: 0,
                    level_count: 1,
                    base_array_layer: 0,
                    layer_count: 1,
                },
                src_access_mask: vk::AccessFlags::MEMORY_WRITE | wa.extra_sync_src_access,
                ..Default::default()
            };
            unsafe {
                self.device.core.cmd_pipeline_barrier(
                    self.buffers[0].raw,
                    vk::PipelineStageFlags::ALL_COMMANDS,
                    vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                    vk::DependencyFlags::empty(),
                    &[],
                    &[],
                    &[barrier],
                );
            }
            super::Presentation::Window {
                swapchain: frame.swapchain.raw,
                image_index,
                acquire_semaphore: frame.internal.acquire_semaphore,
                present_semaphore: frame.internal.present_semaphore,
            }
        });
    }

    fn timings(&self) -> &crate::Timings {
        &self.timings
    }
}

#[hidden_trait::expose]
impl crate::traits::TransferEncoder for super::TransferCommandEncoder<'_> {
    type BufferPiece = crate::BufferPiece;
    type TexturePiece = crate::TexturePiece;

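    /// `vkCmdFillBuffer` takes a repeated `u32` pattern, so the byte value is
    /// replicated into all four bytes via the `0x0101_0101` multiply.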
    fn fill_buffer(&mut self, dst: crate::BufferPiece, size: u64, value: u8) {
        let value_u32 = (value as u32) * 0x0101_0101;
        unsafe {
            self.device
                .core
                .cmd_fill_buffer(self.raw, dst.buffer.raw, dst.offset, size, value_u32)
        };
    }

    fn copy_buffer_to_buffer(
        &mut self,
        src: crate::BufferPiece,
        dst: crate::BufferPiece,
        size: u64,
    ) {
        let copy = vk::BufferCopy {
            src_offset: src.offset,
            dst_offset: dst.offset,
            size,
        };
        unsafe {
            self.device
                .core
                .cmd_copy_buffer(self.raw, src.buffer.raw, dst.buffer.raw, &[copy])
        };
    }

    fn copy_texture_to_texture(
        &mut self,
        src: crate::TexturePiece,
        dst: crate::TexturePiece,
        size: crate::Extent,
    ) {
        let copy = vk::ImageCopy {
            src_subresource: src.subresource_layers(),
            src_offset: map_origin(&src.origin),
            dst_subresource: dst.subresource_layers(),
            dst_offset: map_origin(&dst.origin),
            extent: super::map_extent_3d(&size),
        };
        unsafe {
            self.device.core.cmd_copy_image(
                self.raw,
                src.texture.raw,
                vk::ImageLayout::GENERAL,
                dst.texture.raw,
                vk::ImageLayout::GENERAL,
                &[copy],
            )
        };
    }

    fn copy_buffer_to_texture(
        &mut self,
        src: crate::BufferPiece,
        bytes_per_row: u32,
        dst: crate::TexturePiece,
        size: crate::Extent,
    ) {
        let copy = make_buffer_image_copy(&src, bytes_per_row, &dst, &size);
        unsafe {
            self.device.core.cmd_copy_buffer_to_image(
                self.raw,
                src.buffer.raw,
                dst.texture.raw,
                vk::ImageLayout::GENERAL,
                &[copy],
            )
        };
    }

    fn copy_texture_to_buffer(
        &mut self,
        src: crate::TexturePiece,
        dst: crate::BufferPiece,
        bytes_per_row: u32,
        size: crate::Extent,
    ) {
        let copy = make_buffer_image_copy(&dst, bytes_per_row, &src, &size);
        unsafe {
            self.device.core.cmd_copy_image_to_buffer(
                self.raw,
                src.texture.raw,
                vk::ImageLayout::GENERAL,
                dst.buffer.raw,
                &[copy],
            )
        };
    }
}

impl Drop for super::TransferCommandEncoder<'_> {
    fn drop(&mut self) {
        end_pass(self.device, self.raw);
    }
}

#[hidden_trait::expose]
impl crate::traits::AccelerationStructureEncoder
    for super::AccelerationStructureCommandEncoder<'_>
{
    type AccelerationStructure = crate::AccelerationStructure;
    type AccelerationStructureMesh = crate::AccelerationStructureMesh;
    type BufferPiece = crate::BufferPiece;

    fn build_bottom_level(
        &mut self,
        acceleration_structure: super::AccelerationStructure,
        meshes: &[crate::AccelerationStructureMesh],
        scratch_data: crate::BufferPiece,
    ) {
        let rt = self.device.ray_tracing.as_ref().unwrap();
        let mut blas_input = self.device.map_acceleration_structure_meshes(meshes);
        blas_input.build_info.dst_acceleration_structure = acceleration_structure.raw;
        let scratch_address = self.device.get_device_address(&scratch_data);
        // The alignment is a power of two, so mask the low bits.
        assert_eq!(
            scratch_address & (rt.scratch_buffer_alignment - 1),
            0,
            "BLAS scratch address {scratch_address} is not aligned to {}",
            rt.scratch_buffer_alignment
        );
        blas_input.build_info.scratch_data = vk::DeviceOrHostAddressKHR {
            device_address: scratch_address,
        };

        unsafe {
            rt.acceleration_structure.cmd_build_acceleration_structures(
                self.raw,
                &[blas_input.build_info],
                &[&blas_input.build_range_infos],
            );
        }
    }

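    /// Builds the TLAS over `instance_count` instances read from
    /// `instance_data`. The bottom-level structures are referenced by device
    /// address inside the instance data, so `_bottom_level` is unused here.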
    fn build_top_level(
        &mut self,
        acceleration_structure: super::AccelerationStructure,
        _bottom_level: &[super::AccelerationStructure],
        instance_count: u32,
        instance_data: crate::BufferPiece,
        scratch_data: crate::BufferPiece,
    ) {
        let build_range_info = vk::AccelerationStructureBuildRangeInfoKHR {
            primitive_count: instance_count,
            primitive_offset: 0,
            first_vertex: 0,
            transform_offset: 0,
        };
        let geometry = vk::AccelerationStructureGeometryKHR {
            geometry_type: vk::GeometryTypeKHR::INSTANCES,
            geometry: vk::AccelerationStructureGeometryDataKHR {
                instances: vk::AccelerationStructureGeometryInstancesDataKHR {
                    data: vk::DeviceOrHostAddressConstKHR {
                        device_address: self.device.get_device_address(&instance_data),
                    },
                    ..Default::default()
                },
            },
            ..Default::default()
        };
        let geometries = [geometry];
        let build_info = vk::AccelerationStructureBuildGeometryInfoKHR {
            ty: vk::AccelerationStructureTypeKHR::TOP_LEVEL,
            mode: vk::BuildAccelerationStructureModeKHR::BUILD,
            scratch_data: vk::DeviceOrHostAddressKHR {
                device_address: self.device.get_device_address(&scratch_data),
            },
            dst_acceleration_structure: acceleration_structure.raw,
            ..Default::default()
        }
        .geometries(&geometries);

        let rt = self.device.ray_tracing.as_ref().unwrap();
        unsafe {
            rt.acceleration_structure.cmd_build_acceleration_structures(
                self.raw,
                &[build_info],
                &[&[build_range_info]],
            );
        }
    }
}

impl Drop for super::AccelerationStructureCommandEncoder<'_> {
    fn drop(&mut self) {
        end_pass(self.device, self.raw);
    }
}

impl<'a> super::ComputeCommandEncoder<'a> {
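    /// Inserts a compute-to-compute memory barrier so that storage and
    /// uniform reads in later dispatches see the shader writes of earlier ones.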
    pub fn barrier(&mut self) {
        let barrier = vk::MemoryBarrier {
            src_access_mask: vk::AccessFlags::SHADER_WRITE,
            dst_access_mask: vk::AccessFlags::SHADER_READ
                | vk::AccessFlags::SHADER_WRITE
                | vk::AccessFlags::UNIFORM_READ,
            ..Default::default()
        };
        unsafe {
            self.device.core.cmd_pipeline_barrier(
                self.cmd_buf.raw,
                vk::PipelineStageFlags::COMPUTE_SHADER,
                vk::PipelineStageFlags::COMPUTE_SHADER,
                vk::DependencyFlags::empty(),
                &[barrier],
                &[],
                &[],
            );
        }
    }

    pub fn with<'b, 'p>(
        &'b mut self,
        pipeline: &'p super::ComputePipeline,
    ) -> super::PipelineEncoder<'b, 'p> {
        let bind_point = vk::PipelineBindPoint::COMPUTE;
        unsafe {
            self.device
                .core
                .cmd_bind_pipeline(self.cmd_buf.raw, bind_point, pipeline.raw)
        };
        super::PipelineEncoder {
            cmd_buf: self.cmd_buf,
            layout: &pipeline.layout,
            bind_point,
            device: self.device,
            update_data: self.update_data,
        }
    }
}

impl Drop for super::ComputeCommandEncoder<'_> {
    fn drop(&mut self) {
        end_pass(self.device, self.cmd_buf.raw);
    }
}

impl<'a> super::RenderCommandEncoder<'a> {
    pub fn with<'b, 'p>(
        &'b mut self,
        pipeline: &'p super::RenderPipeline,
    ) -> super::PipelineEncoder<'b, 'p> {
        let bind_point = vk::PipelineBindPoint::GRAPHICS;
        unsafe {
            self.device
                .core
                .cmd_bind_pipeline(self.cmd_buf.raw, bind_point, pipeline.raw)
        };
        super::PipelineEncoder {
            cmd_buf: self.cmd_buf,
            layout: &pipeline.layout,
            bind_point,
            device: self.device,
            update_data: self.update_data,
        }
    }
}

impl Drop for super::RenderCommandEncoder<'_> {
    fn drop(&mut self) {
        unsafe {
            self.device
                .dynamic_rendering
                .cmd_end_rendering(self.cmd_buf.raw)
        };
        end_pass(self.device, self.cmd_buf.raw);
    }
}

impl crate::ScissorRect {
    const fn to_vk(&self) -> vk::Rect2D {
        vk::Rect2D {
            offset: vk::Offset2D {
                x: self.x,
                y: self.y,
            },
            extent: vk::Extent2D {
                width: self.w,
                height: self.h,
            },
        }
    }
}

impl crate::Viewport {
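    /// Maps to a Vulkan viewport anchored at the bottom edge with a negative
    /// height (allowed since Vulkan 1.1 / VK_KHR_maintenance1), flipping the
    /// Y axis to a Y-up convention.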
    fn to_vk(&self) -> vk::Viewport {
        vk::Viewport {
            x: self.x,
            y: self.y + self.h,
            width: self.w,
            height: -self.h,
            min_depth: self.depth.start,
            max_depth: self.depth.end,
        }
    }
}

#[hidden_trait::expose]
impl crate::traits::RenderEncoder for super::RenderCommandEncoder<'_> {
    fn set_scissor_rect(&mut self, rect: &crate::ScissorRect) {
        let vk_scissor = rect.to_vk();
        unsafe {
            self.device
                .core
                .cmd_set_scissor(self.cmd_buf.raw, 0, &[vk_scissor])
        };
    }

    fn set_viewport(&mut self, viewport: &crate::Viewport) {
        let vk_viewport = viewport.to_vk();
        unsafe {
            self.device
                .core
                .cmd_set_viewport(self.cmd_buf.raw, 0, &[vk_viewport])
        };
    }

    fn set_stencil_reference(&mut self, reference: u32) {
        unsafe {
            self.device.core.cmd_set_stencil_reference(
                self.cmd_buf.raw,
                vk::StencilFaceFlags::FRONT_AND_BACK,
                reference,
            )
        };
    }
}

#[hidden_trait::expose]
impl crate::traits::PipelineEncoder for super::PipelineEncoder<'_, '_> {
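    /// Fills the update-template blob via `ShaderData::fill`, allocates a
    /// transient descriptor set from the command buffer's pool, updates it
    /// through the template, and binds it at `group`.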
    fn bind<D: crate::ShaderData>(&mut self, group: u32, data: &D) {
        let dsl = &self.layout.descriptor_set_layouts[group as usize];
        if !dsl.is_empty() {
            self.update_data.clear();
            self.update_data.resize(dsl.template_size as usize, 0);
            data.fill(super::PipelineContext {
                update_data: self.update_data.as_mut_slice(),
                template_offsets: &dsl.template_offsets,
                scratch: self.cmd_buf.scratch.as_mut(),
                inline_uniform_mask: dsl.inline_uniform_mask,
            });
        }

        let vk_set = self
            .device
            .allocate_descriptor_set(&mut self.cmd_buf.descriptor_pool, dsl);
        unsafe {
            if !dsl.is_empty() {
                self.device.core.update_descriptor_set_with_template(
                    vk_set,
                    dsl.update_template,
                    self.update_data.as_ptr() as *const _,
                );
            }
            self.device.core.cmd_bind_descriptor_sets(
                self.cmd_buf.raw,
                self.bind_point,
                self.layout.raw,
                group,
                &[vk_set],
                &[],
            );
        }
    }
}

#[hidden_trait::expose]
impl crate::traits::ComputePipelineEncoder for super::PipelineEncoder<'_, '_> {
    type BufferPiece = crate::BufferPiece;

    fn dispatch(&mut self, groups: [u32; 3]) {
        unsafe {
            self.device
                .core
                .cmd_dispatch(self.cmd_buf.raw, groups[0], groups[1], groups[2])
        };
    }

    fn dispatch_indirect(&mut self, indirect_buf: crate::BufferPiece) {
        unsafe {
            self.device.core.cmd_dispatch_indirect(
                self.cmd_buf.raw,
                indirect_buf.buffer.raw,
                indirect_buf.offset,
            )
        };
    }
}

#[hidden_trait::expose]
impl crate::traits::RenderEncoder for super::PipelineEncoder<'_, '_> {
    fn set_scissor_rect(&mut self, rect: &crate::ScissorRect) {
        let vk_scissor = rect.to_vk();
        unsafe {
            self.device
                .core
                .cmd_set_scissor(self.cmd_buf.raw, 0, &[vk_scissor])
        };
    }

    fn set_viewport(&mut self, viewport: &crate::Viewport) {
        let vk_viewport = viewport.to_vk();
        unsafe {
            self.device
                .core
                .cmd_set_viewport(self.cmd_buf.raw, 0, &[vk_viewport])
        };
    }

    fn set_stencil_reference(&mut self, reference: u32) {
        unsafe {
            self.device.core.cmd_set_stencil_reference(
                self.cmd_buf.raw,
                vk::StencilFaceFlags::FRONT_AND_BACK,
                reference,
            )
        };
    }
}

#[hidden_trait::expose]
impl crate::traits::RenderPipelineEncoder for super::PipelineEncoder<'_, '_> {
    type BufferPiece = crate::BufferPiece;

    fn bind_vertex(&mut self, index: u32, vertex_buf: crate::BufferPiece) {
        unsafe {
            self.device.core.cmd_bind_vertex_buffers(
                self.cmd_buf.raw,
                index,
                &[vertex_buf.buffer.raw],
                &[vertex_buf.offset],
            );
        }
    }

    fn draw(
        &mut self,
        start_vertex: u32,
        vertex_count: u32,
        start_instance: u32,
        instance_count: u32,
    ) {
        unsafe {
            self.device.core.cmd_draw(
                self.cmd_buf.raw,
                vertex_count,
                instance_count,
                start_vertex,
                start_instance,
            );
        }
    }

    fn draw_indexed(
        &mut self,
        index_buf: crate::BufferPiece,
        index_type: crate::IndexType,
        index_count: u32,
        base_vertex: i32,
        start_instance: u32,
        instance_count: u32,
    ) {
        let raw_index_type = super::map_index_type(index_type);
        unsafe {
            self.device.core.cmd_bind_index_buffer(
                self.cmd_buf.raw,
                index_buf.buffer.raw,
                index_buf.offset,
                raw_index_type,
            );
            self.device.core.cmd_draw_indexed(
                self.cmd_buf.raw,
                index_count,
                instance_count,
                0,
                base_vertex,
                start_instance,
            );
        }
    }

    fn draw_indirect(&mut self, indirect_buf: crate::BufferPiece) {
        unsafe {
            self.device.core.cmd_draw_indirect(
                self.cmd_buf.raw,
                indirect_buf.buffer.raw,
                indirect_buf.offset,
                1,
                0,
            );
        }
    }

    fn draw_indexed_indirect(
        &mut self,
        index_buf: crate::BufferPiece,
        index_type: crate::IndexType,
        indirect_buf: crate::BufferPiece,
    ) {
        let raw_index_type = super::map_index_type(index_type);
        unsafe {
            self.device.core.cmd_bind_index_buffer(
                self.cmd_buf.raw,
                index_buf.buffer.raw,
                index_buf.offset,
                raw_index_type,
            );
            self.device.core.cmd_draw_indexed_indirect(
                self.cmd_buf.raw,
                indirect_buf.buffer.raw,
                indirect_buf.offset,
                1,
                0,
            );
        }
    }
}