use ash::vk;
use std::{ops::Range, str, time::Duration};

impl super::CrashHandler {
    fn add_marker(&mut self, marker: &str) -> u32 {
        // Markers are appended into a ring buffer, separated by '|'.
        if self.next_offset < self.raw_string.len() {
            self.raw_string[self.next_offset] = b'|';
            self.next_offset += 1;
        }
        let len = marker.as_bytes().len().min(self.raw_string.len());
        if self.next_offset + len > self.raw_string.len() {
            // Not enough room left: wrap around to the start.
            self.next_offset = 0;
        }
        let start = self.next_offset;
        self.next_offset += len;
        let end = self.next_offset;
        self.raw_string[start..end].copy_from_slice(&marker.as_bytes()[..len]);
        // Pack the marker range into a single u32: `start` in the low
        // 16 bits, `end` in the high 16 bits.
        start as u32 | (end << 16) as u32
    }

    pub(super) fn extract(&self, id: u32) -> (&str, &str) {
        let start = id as usize & 0xFFFF;
        let end = (id >> 16) as usize;
        let history = str::from_utf8(&self.raw_string[..start]).unwrap_or_default();
        // A wrapped or truncated marker may split a multi-byte character;
        // don't panic inside the crash handler in that case.
        let marker = str::from_utf8(&self.raw_string[start..end]).unwrap_or("<non-utf8>");
        (history, marker)
    }
}
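
// A minimal sketch of the id packing used above: `start` lives in the low
// 16 bits and `end` in the high 16 bits, which is exactly how `extract`
// decodes it. The offsets here are illustrative, not from the renderer.
#[cfg(test)]
mod marker_id_tests {
    #[test]
    fn marker_id_round_trips() {
        let (start, end) = (5usize, 12usize);
        let id = start as u32 | (end << 16) as u32;
        assert_eq!(id as usize & 0xFFFF, start);
        assert_eq!((id >> 16) as usize, end);
    }
}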

impl super::PipelineContext<'_> {
    /// Write a single value into the descriptor update template data at the
    /// offset registered for binding `index`. Template offsets are assumed
    /// to be properly aligned for `T`.
    #[inline]
    fn write<T>(&mut self, index: u32, value: T) {
        let offset = self.template_offsets[index as usize];
        unsafe {
            std::ptr::write(
                self.update_data.as_mut_ptr().add(offset as usize) as *mut T,
                value,
            )
        };
    }

    /// Write a sequence of values contiguously, starting at the offset
    /// registered for binding `index`.
    #[inline]
    fn write_array<I: Iterator>(&mut self, index: u32, iter: I) {
        let base_offset = self.template_offsets[index as usize];
        let base_ptr =
            unsafe { self.update_data.as_mut_ptr().add(base_offset as usize) as *mut I::Item };
        for (i, value) in iter.enumerate() {
            unsafe { std::ptr::write(base_ptr.add(i), value) };
        }
    }
}
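
// The write pattern above, reduced to plain std: a value is written into a
// byte blob at a given offset and read back. The offset and value here are
// illustrative; real offsets come from the pipeline's descriptor update
// template. The unaligned variants are used because a `Vec<u8>` makes no
// alignment promises.
#[cfg(test)]
mod template_write_tests {
    #[test]
    fn write_at_offset_round_trips() {
        let mut data = vec![0u8; 64];
        let offset = 16usize;
        let value = 0xDEAD_BEEF_u64;
        unsafe { std::ptr::write_unaligned(data.as_mut_ptr().add(offset) as *mut u64, value) };
        let back = unsafe { std::ptr::read_unaligned(data.as_ptr().add(offset) as *const u64) };
        assert_eq!(back, value);
    }
}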

impl<T: bytemuck::Pod> crate::ShaderBindable for T {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write(index, *self);
    }
}
impl crate::ShaderBindable for super::TextureView {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write(
            index,
            vk::DescriptorImageInfo {
                sampler: vk::Sampler::null(),
                image_view: self.raw,
                image_layout: vk::ImageLayout::GENERAL,
            },
        );
    }
}
impl<'a, const N: crate::ResourceIndex> crate::ShaderBindable for &'a crate::TextureArray<N> {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write_array(
            index,
            self.data.iter().map(|view| vk::DescriptorImageInfo {
                sampler: vk::Sampler::null(),
                image_view: view.raw,
                image_layout: vk::ImageLayout::GENERAL,
            }),
        );
    }
}
impl crate::ShaderBindable for super::Sampler {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write(
            index,
            vk::DescriptorImageInfo {
                sampler: self.raw,
                image_view: vk::ImageView::null(),
                image_layout: vk::ImageLayout::UNDEFINED,
            },
        );
    }
}
impl crate::ShaderBindable for crate::BufferPiece {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write(
            index,
            vk::DescriptorBufferInfo {
                buffer: self.buffer.raw,
                offset: self.offset,
                range: vk::WHOLE_SIZE,
            },
        );
    }
}
impl<'a, const N: crate::ResourceIndex> crate::ShaderBindable for &'a crate::BufferArray<N> {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write_array(
            index,
            self.data.iter().map(|piece| vk::DescriptorBufferInfo {
                buffer: piece.buffer.raw,
                offset: piece.offset,
                range: vk::WHOLE_SIZE,
            }),
        );
    }
}
impl crate::ShaderBindable for super::AccelerationStructure {
    fn bind_to(&self, ctx: &mut super::PipelineContext, index: u32) {
        ctx.write(index, self.raw);
    }
}
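
// The impls above cover everything a shader-data struct may contain: plain
// data, texture views, samplers, buffer pieces, and acceleration structures
// each know how to serialize themselves into a descriptor update template.
// A hypothetical shader-data struct (the derive path and field names are
// illustrative) would look like:
//
//     #[derive(blade_macros::ShaderData)]
//     struct Params {
//         globals: Globals,          // plain data -> raw bytes
//         input: gpu::TextureView,   // -> vk::DescriptorImageInfo
//         output: gpu::BufferPiece,  // -> vk::DescriptorBufferInfo
//     }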

impl crate::TexturePiece {
    fn subresource_layers(&self) -> vk::ImageSubresourceLayers {
        vk::ImageSubresourceLayers {
            aspect_mask: super::map_aspects(self.texture.format.aspects()),
            mip_level: self.mip_level,
            base_array_layer: self.array_layer,
            layer_count: 1,
        }
    }
}

fn map_origin(origin: &[u32; 3]) -> vk::Offset3D {
    vk::Offset3D {
        x: origin[0] as i32,
        y: origin[1] as i32,
        z: origin[2] as i32,
    }
}

fn make_buffer_image_copy(
    buffer: &crate::BufferPiece,
    bytes_per_row: u32,
    texture: &crate::TexturePiece,
    size: &crate::Extent,
) -> vk::BufferImageCopy {
    let block_info = texture.texture.format.block_info();
    vk::BufferImageCopy {
        buffer_offset: buffer.offset,
        // Vulkan wants the row length in texels, not bytes: blocks per row
        // times the texel width of a block.
        buffer_row_length: block_info.dimensions.0 as u32
            * (bytes_per_row / block_info.size as u32),
        buffer_image_height: 0,
        image_subresource: texture.subresource_layers(),
        image_offset: map_origin(&texture.origin),
        image_extent: super::map_extent_3d(size),
    }
}
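
// The `buffer_row_length` conversion above, with hypothetical numbers for a
// 4x4, 8-byte block-compressed format: bytes per row divided by the block
// byte size gives blocks per row, and multiplying by the block width gives
// the row length in texels.
#[cfg(test)]
mod buffer_row_length_tests {
    #[test]
    fn row_length_is_in_texels() {
        let (block_width, block_size) = (4u32, 8u32);
        let bytes_per_row = 256u32;
        let row_length = block_width * (bytes_per_row / block_size);
        assert_eq!(row_length, 128); // 32 blocks of 4 texels each
    }
}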

fn map_render_target(rt: &crate::RenderTarget) -> vk::RenderingAttachmentInfo<'static> {
    let mut vk_info = vk::RenderingAttachmentInfo::default()
        .image_view(rt.view.raw)
        .image_layout(vk::ImageLayout::GENERAL)
        .load_op(vk::AttachmentLoadOp::LOAD);

    if let crate::InitOp::Clear(color) = rt.init_op {
        let cv = if rt.view.aspects.contains(crate::TexelAspects::COLOR) {
            vk::ClearValue {
                color: match color {
                    crate::TextureColor::TransparentBlack => vk::ClearColorValue::default(),
                    crate::TextureColor::OpaqueBlack => vk::ClearColorValue {
                        float32: [0.0, 0.0, 0.0, 1.0],
                    },
                    crate::TextureColor::White => vk::ClearColorValue { float32: [1.0; 4] },
                },
            }
        } else {
            vk::ClearValue {
                depth_stencil: match color {
                    crate::TextureColor::TransparentBlack => vk::ClearDepthStencilValue::default(),
                    crate::TextureColor::OpaqueBlack => vk::ClearDepthStencilValue {
                        depth: 1.0,
                        stencil: 0,
                    },
                    crate::TextureColor::White => vk::ClearDepthStencilValue {
                        depth: 1.0,
                        stencil: !0,
                    },
                },
            }
        };

        vk_info.load_op = vk::AttachmentLoadOp::CLEAR;
        vk_info.clear_value = cv;
    }

    if let crate::FinishOp::ResolveTo(resolve_view) = rt.finish_op {
        vk_info = vk_info
            .resolve_image_view(resolve_view.raw)
            .resolve_image_layout(vk::ImageLayout::GENERAL)
            .resolve_mode(vk::ResolveModeFlags::AVERAGE);
    }

    vk_info.store_op = match rt.finish_op {
        crate::FinishOp::Store => vk::AttachmentStoreOp::STORE,
        crate::FinishOp::Discard => vk::AttachmentStoreOp::DONT_CARE,
        crate::FinishOp::Ignore => vk::AttachmentStoreOp::DONT_CARE,
        // The multisampled attachment itself isn't needed after the resolve.
        crate::FinishOp::ResolveTo(..) => vk::AttachmentStoreOp::DONT_CARE,
    };

    vk_info
}

fn end_pass(device: &super::Device, cmd_buf: vk::CommandBuffer) {
    if device.command_scope.is_some() {
        unsafe {
            device.debug_utils.cmd_end_debug_utils_label(cmd_buf);
        }
    }
}

impl super::CommandEncoder {
    fn add_marker(&mut self, marker: &str) {
        if let Some(ref mut ch) = self.crash_handler {
            let id = ch.add_marker(marker);
            // The buffer-marker extension is expected to be present whenever
            // a crash handler exists, hence the unwrap.
            unsafe {
                self.device
                    .buffer_marker
                    .as_ref()
                    .unwrap()
                    .cmd_write_buffer_marker(
                        self.buffers[0].raw,
                        vk::PipelineStageFlags::ALL_COMMANDS,
                        ch.marker_buf.raw,
                        0,
                        id,
                    );
            }
        }
    }

    fn add_timestamp(&mut self, label: &str) {
        if self.device.timing.is_some() {
            let cmd_buf = self.buffers.first_mut().unwrap();
            if cmd_buf.timed_pass_names.len() == crate::limits::PASS_COUNT {
                log::warn!("Reached the maximum for `limits::PASS_COUNT`, skipping the timer");
                return;
            }
            let index = cmd_buf.timed_pass_names.len() as u32;
            unsafe {
                self.device.core.cmd_write_timestamp(
                    cmd_buf.raw,
                    vk::PipelineStageFlags::TOP_OF_PIPE,
                    cmd_buf.query_pool,
                    index,
                );
            }
            cmd_buf.timed_pass_names.push(label.to_string());
        }
    }

    fn begin_pass(&mut self, label: &str) {
        self.barrier();
        self.add_marker(label);
        self.add_timestamp(label);

        if self.device.command_scope.is_some() {
            // Vulkan debug labels need a NUL-terminated string.
            self.temp_label.clear();
            self.temp_label.extend_from_slice(label.as_bytes());
            self.temp_label.push(0);
            unsafe {
                self.device.debug_utils.cmd_begin_debug_utils_label(
                    self.buffers[0].raw,
                    &vk::DebugUtilsLabelEXT {
                        p_label_name: self.temp_label.as_ptr() as *const _,
                        ..Default::default()
                    },
                )
            }
        }
    }

    pub(super) fn finish(&mut self) -> vk::CommandBuffer {
        self.barrier();
        self.add_marker("finish");
        let cmd_buf = self.buffers.first_mut().unwrap();
        unsafe {
            if self.device.timing.is_some() {
                // Write the closing timestamp that bounds the last pass.
                let index = cmd_buf.timed_pass_names.len() as u32;
                self.device.core.cmd_write_timestamp(
                    cmd_buf.raw,
                    vk::PipelineStageFlags::TOP_OF_PIPE,
                    cmd_buf.query_pool,
                    index,
                );
            }
            self.device.core.end_command_buffer(cmd_buf.raw).unwrap();
        }
        cmd_buf.raw
    }

    fn barrier(&mut self) {
        // A coarse full-pipeline memory barrier between passes: simplicity
        // is chosen over fine-grained synchronization.
        let wa = &self.device.workarounds;
        let barrier = vk::MemoryBarrier {
            src_access_mask: vk::AccessFlags::MEMORY_WRITE | wa.extra_sync_src_access,
            dst_access_mask: vk::AccessFlags::MEMORY_READ
                | vk::AccessFlags::MEMORY_WRITE
                | wa.extra_sync_dst_access,
            ..Default::default()
        };
        unsafe {
            self.device.core.cmd_pipeline_barrier(
                self.buffers[0].raw,
                vk::PipelineStageFlags::ALL_COMMANDS,
                vk::PipelineStageFlags::ALL_COMMANDS,
                vk::DependencyFlags::empty(),
                &[barrier],
                &[],
                &[],
            );
        }
    }

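    /// Begin a transfer pass. The pass is closed when the returned encoder
    /// is dropped.
    ///
    /// A minimal usage sketch; the encoder and buffer names are illustrative:
    /// ```ignore
    /// let mut pass = command_encoder.transfer("upload");
    /// pass.copy_buffer_to_buffer(staging.at(0), storage.at(0), size);
    /// drop(pass); // ends the pass
    /// ```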
    pub fn transfer(&mut self, label: &str) -> super::TransferCommandEncoder {
        self.begin_pass(label);
        super::TransferCommandEncoder {
            raw: self.buffers[0].raw,
            device: &self.device,
        }
    }

    pub fn acceleration_structure(
        &mut self,
        label: &str,
    ) -> super::AccelerationStructureCommandEncoder {
        self.begin_pass(label);
        super::AccelerationStructureCommandEncoder {
            raw: self.buffers[0].raw,
            device: &self.device,
        }
    }

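    /// Begin a compute pass. The pass is closed when the returned encoder
    /// is dropped.
    ///
    /// A minimal usage sketch; the names are illustrative:
    /// ```ignore
    /// let mut pass = command_encoder.compute("simulate");
    /// let mut pc = pass.with(&compute_pipeline);
    /// pc.bind(0, &shader_data);
    /// pc.dispatch([group_count_x, 1, 1]);
    /// ```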
    pub fn compute(&mut self, label: &str) -> super::ComputeCommandEncoder {
        self.begin_pass(label);
        super::ComputeCommandEncoder {
            cmd_buf: self.buffers.first_mut().unwrap(),
            device: &self.device,
            update_data: &mut self.update_data,
        }
    }

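    /// Begin a render pass over `targets`, setting a full-target viewport
    /// and scissor. The pass is closed when the returned encoder is dropped.
    ///
    /// A minimal usage sketch; the names are illustrative:
    /// ```ignore
    /// let mut pass = command_encoder.render(
    ///     "main",
    ///     gpu::RenderTargetSet {
    ///         colors: &[gpu::RenderTarget {
    ///             view: target_view,
    ///             init_op: gpu::InitOp::Clear(gpu::TextureColor::OpaqueBlack),
    ///             finish_op: gpu::FinishOp::Store,
    ///         }],
    ///         depth_stencil: None,
    ///     },
    /// );
    /// ```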
    pub fn render(
        &mut self,
        label: &str,
        targets: crate::RenderTargetSet,
    ) -> super::RenderCommandEncoder {
        self.begin_pass(label);

        let mut target_size = [0u16; 2];
        let mut color_attachments = Vec::with_capacity(targets.colors.len());
        let depth_stencil_attachment;
        for rt in targets.colors {
            target_size = rt.view.target_size;
            color_attachments.push(map_render_target(rt));
        }

        let mut rendering_info = vk::RenderingInfoKHR::default()
            .layer_count(1)
            .color_attachments(&color_attachments);

        if let Some(rt) = targets.depth_stencil {
            target_size = rt.view.target_size;
            depth_stencil_attachment = map_render_target(&rt);
            if rt.view.aspects.contains(crate::TexelAspects::DEPTH) {
                rendering_info = rendering_info.depth_attachment(&depth_stencil_attachment);
            }
            if rt.view.aspects.contains(crate::TexelAspects::STENCIL) {
                rendering_info = rendering_info.stencil_attachment(&depth_stencil_attachment);
            }
        }

        let render_area = vk::Rect2D {
            offset: Default::default(),
            extent: vk::Extent2D {
                width: target_size[0] as u32,
                height: target_size[1] as u32,
            },
        };
        // Flip the viewport vertically (negative height, Y origin at the
        // bottom) so that +Y points up, matching the other backends.
        let viewport = vk::Viewport {
            x: 0.0,
            y: target_size[1] as f32,
            width: target_size[0] as f32,
            height: -(target_size[1] as f32),
            min_depth: 0.0,
            max_depth: 1.0,
        };
        rendering_info.render_area = render_area;

        let cmd_buf = self.buffers.first_mut().unwrap();
        unsafe {
            self.device
                .core
                .cmd_set_viewport(cmd_buf.raw, 0, &[viewport]);
            self.device
                .core
                .cmd_set_scissor(cmd_buf.raw, 0, &[render_area]);
            self.device
                .dynamic_rendering
                .cmd_begin_rendering(cmd_buf.raw, &rendering_info);
        };

        super::RenderCommandEncoder {
            cmd_buf,
            device: &self.device,
            update_data: &mut self.update_data,
        }
    }

    pub(super) fn check_gpu_crash<T>(&self, ret: Result<T, vk::Result>) -> Option<T> {
        match ret {
            Ok(value) => Some(value),
            Err(vk::Result::ERROR_DEVICE_LOST) => match self.crash_handler {
                Some(ref ch) => {
                    let last_id = unsafe { *(ch.marker_buf.data() as *const u32) };
                    if last_id != 0 {
                        let (history, last_marker) = ch.extract(last_id);
                        log::error!("Last GPU executed marker is '{last_marker}'");
                        log::info!("Marker history: {history}");
                    }
                    panic!("GPU has crashed in {}", ch.name);
                }
                None => {
                    panic!("GPU has crashed, and no debug information is available.");
                }
            },
            Err(vk::Result::ERROR_OUT_OF_DATE_KHR) => {
                log::warn!("GPU frame is out of date");
                None
            }
            Err(other) => panic!("GPU error {}", other),
        }
    }
}

#[hidden_trait::expose]
impl crate::traits::CommandEncoder for super::CommandEncoder {
    type Texture = super::Texture;
    type Frame = super::Frame;

    fn start(&mut self) {
        self.buffers.rotate_left(1);
        let cmd_buf = self.buffers.first_mut().unwrap();
        self.device
            .reset_descriptor_pool(&mut cmd_buf.descriptor_pool);

        let vk_info = vk::CommandBufferBeginInfo {
            flags: vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT,
            ..Default::default()
        };
        unsafe {
            self.device
                .core
                .begin_command_buffer(cmd_buf.raw, &vk_info)
                .unwrap();
        }

        if let Some(ref timing) = self.device.timing {
            self.timings.clear();
            if !cmd_buf.timed_pass_names.is_empty() {
                // Read back the timestamps written the last time this command
                // buffer was submitted: one per recorded pass, plus the final
                // one written in `finish()`.
                let mut timestamps = [0u64; super::QUERY_POOL_SIZE];
                unsafe {
                    self.device
                        .core
                        .get_query_pool_results(
                            cmd_buf.query_pool,
                            0,
                            &mut timestamps[..cmd_buf.timed_pass_names.len() + 1],
                            vk::QueryResultFlags::TYPE_64,
                        )
                        .unwrap();
                }
                let mut prev = timestamps[0];
                for (name, &ts) in cmd_buf
                    .timed_pass_names
                    .drain(..)
                    .zip(timestamps[1..].iter())
                {
                    let diff = (ts - prev) as f32 * timing.period;
                    prev = ts;
                    *self.timings.entry(name).or_default() += Duration::from_nanos(diff as _);
                }
            }
            unsafe {
                self.device.core.cmd_reset_query_pool(
                    cmd_buf.raw,
                    cmd_buf.query_pool,
                    0,
                    super::QUERY_POOL_SIZE as u32,
                );
            }
        }
    }

    fn init_texture(&mut self, texture: super::Texture) {
        // Transition all subresources from UNDEFINED into the GENERAL layout
        // that the rest of this backend assumes.
        let barrier = vk::ImageMemoryBarrier {
            old_layout: vk::ImageLayout::UNDEFINED,
            new_layout: vk::ImageLayout::GENERAL,
            image: texture.raw,
            subresource_range: vk::ImageSubresourceRange {
                aspect_mask: super::map_aspects(texture.format.aspects()),
                base_mip_level: 0,
                level_count: vk::REMAINING_MIP_LEVELS,
                base_array_layer: 0,
                layer_count: vk::REMAINING_ARRAY_LAYERS,
            },
            ..Default::default()
        };
        unsafe {
            self.device.core.cmd_pipeline_barrier(
                self.buffers[0].raw,
                vk::PipelineStageFlags::TOP_OF_PIPE,
                vk::PipelineStageFlags::ALL_COMMANDS,
                vk::DependencyFlags::empty(),
                &[],
                &[],
                &[barrier],
            );
        }
    }

    fn present(&mut self, frame: super::Frame) {
        if frame.internal.acquire_semaphore == vk::Semaphore::null() {
            return;
        }

        assert_eq!(self.present, None);
        let wa = &self.device.workarounds;
        self.present = Some(super::Presentation {
            acquire_semaphore: frame.internal.acquire_semaphore,
            swapchain: frame.swapchain.raw,
            image_index: frame.image_index,
        });

        // Transition the swapchain image into the layout required for presentation.
        let barrier = vk::ImageMemoryBarrier {
            old_layout: vk::ImageLayout::GENERAL,
            new_layout: vk::ImageLayout::PRESENT_SRC_KHR,
            image: frame.internal.image,
            subresource_range: vk::ImageSubresourceRange {
                aspect_mask: vk::ImageAspectFlags::COLOR,
                base_mip_level: 0,
                level_count: 1,
                base_array_layer: 0,
                layer_count: 1,
            },
            src_access_mask: vk::AccessFlags::MEMORY_WRITE | wa.extra_sync_src_access,
            ..Default::default()
        };
        unsafe {
            self.device.core.cmd_pipeline_barrier(
                self.buffers[0].raw,
                vk::PipelineStageFlags::ALL_COMMANDS,
                vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                vk::DependencyFlags::empty(),
                &[],
                &[],
                &[barrier],
            );
        }
    }

    fn timings(&self) -> &crate::Timings {
        &self.timings
    }
}

#[hidden_trait::expose]
impl crate::traits::TransferEncoder for super::TransferCommandEncoder<'_> {
    type BufferPiece = crate::BufferPiece;
    type TexturePiece = crate::TexturePiece;

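    /// Fill `size` bytes of `dst` with `value`.
    ///
    /// `vkCmdFillBuffer` writes a repeating 4-byte pattern, so the byte is
    /// replicated into every lane (`0xAB` becomes `0xABABABAB`).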
    fn fill_buffer(&mut self, dst: crate::BufferPiece, size: u64, value: u8) {
        let value_u32 = (value as u32) * 0x0101_0101;
        unsafe {
            self.device
                .core
                .cmd_fill_buffer(self.raw, dst.buffer.raw, dst.offset, size, value_u32)
        };
    }

    fn copy_buffer_to_buffer(
        &mut self,
        src: crate::BufferPiece,
        dst: crate::BufferPiece,
        size: u64,
    ) {
        let copy = vk::BufferCopy {
            src_offset: src.offset,
            dst_offset: dst.offset,
            size,
        };
        unsafe {
            self.device
                .core
                .cmd_copy_buffer(self.raw, src.buffer.raw, dst.buffer.raw, &[copy])
        };
    }

    fn copy_texture_to_texture(
        &mut self,
        src: crate::TexturePiece,
        dst: crate::TexturePiece,
        size: crate::Extent,
    ) {
        let copy = vk::ImageCopy {
            src_subresource: src.subresource_layers(),
            src_offset: map_origin(&src.origin),
            dst_subresource: dst.subresource_layers(),
            dst_offset: map_origin(&dst.origin),
            extent: super::map_extent_3d(&size),
        };
        unsafe {
            self.device.core.cmd_copy_image(
                self.raw,
                src.texture.raw,
                vk::ImageLayout::GENERAL,
                dst.texture.raw,
                vk::ImageLayout::GENERAL,
                &[copy],
            )
        };
    }

    fn copy_buffer_to_texture(
        &mut self,
        src: crate::BufferPiece,
        bytes_per_row: u32,
        dst: crate::TexturePiece,
        size: crate::Extent,
    ) {
        let copy = make_buffer_image_copy(&src, bytes_per_row, &dst, &size);
        unsafe {
            self.device.core.cmd_copy_buffer_to_image(
                self.raw,
                src.buffer.raw,
                dst.texture.raw,
                vk::ImageLayout::GENERAL,
                &[copy],
            )
        };
    }

    fn copy_texture_to_buffer(
        &mut self,
        src: crate::TexturePiece,
        dst: crate::BufferPiece,
        bytes_per_row: u32,
        size: crate::Extent,
    ) {
        // The same copy description works in both directions.
        let copy = make_buffer_image_copy(&dst, bytes_per_row, &src, &size);
        unsafe {
            self.device.core.cmd_copy_image_to_buffer(
                self.raw,
                src.texture.raw,
                vk::ImageLayout::GENERAL,
                dst.buffer.raw,
                &[copy],
            )
        };
    }
}

impl Drop for super::TransferCommandEncoder<'_> {
    fn drop(&mut self) {
        end_pass(self.device, self.raw);
    }
}

#[hidden_trait::expose]
impl crate::traits::AccelerationStructureEncoder
    for super::AccelerationStructureCommandEncoder<'_>
{
    type AccelerationStructure = crate::AccelerationStructure;
    type AccelerationStructureMesh = crate::AccelerationStructureMesh;
    type BufferPiece = crate::BufferPiece;

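    /// Build a bottom-level acceleration structure out of `meshes`, writing
    /// the result into `acceleration_structure`.
    ///
    /// `scratch_data` must point at 256-byte-aligned scratch memory large
    /// enough for the build (see the assert below).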
    fn build_bottom_level(
        &mut self,
        acceleration_structure: super::AccelerationStructure,
        meshes: &[crate::AccelerationStructureMesh],
        scratch_data: crate::BufferPiece,
    ) {
        let mut blas_input = self.device.map_acceleration_structure_meshes(meshes);
        blas_input.build_info.dst_acceleration_structure = acceleration_structure.raw;
        let scratch_address = self.device.get_device_address(&scratch_data);
        assert!(
            scratch_address & 0xFF == 0,
            "BLAS scratch address {scratch_address} is not 256-byte aligned"
        );
        blas_input.build_info.scratch_data = vk::DeviceOrHostAddressKHR {
            device_address: scratch_address,
        };

        let rt = self.device.ray_tracing.as_ref().unwrap();
        unsafe {
            rt.acceleration_structure.cmd_build_acceleration_structures(
                self.raw,
                &[blas_input.build_info],
                &[&blas_input.build_range_infos],
            );
        }
    }

    fn build_top_level(
        &mut self,
        acceleration_structure: super::AccelerationStructure,
        _bottom_level: &[super::AccelerationStructure],
        instance_count: u32,
        instance_data: crate::BufferPiece,
        scratch_data: crate::BufferPiece,
    ) {
        let build_range_info = vk::AccelerationStructureBuildRangeInfoKHR {
            primitive_count: instance_count,
            primitive_offset: 0,
            first_vertex: 0,
            transform_offset: 0,
        };
        // The TLAS consumes an array of instance descriptors from `instance_data`.
        let geometry = vk::AccelerationStructureGeometryKHR {
            geometry_type: vk::GeometryTypeKHR::INSTANCES,
            geometry: vk::AccelerationStructureGeometryDataKHR {
                instances: vk::AccelerationStructureGeometryInstancesDataKHR {
                    data: vk::DeviceOrHostAddressConstKHR {
                        device_address: self.device.get_device_address(&instance_data),
                    },
                    ..Default::default()
                },
            },
            ..Default::default()
        };
        let geometries = [geometry];
        let build_info = vk::AccelerationStructureBuildGeometryInfoKHR {
            ty: vk::AccelerationStructureTypeKHR::TOP_LEVEL,
            mode: vk::BuildAccelerationStructureModeKHR::BUILD,
            scratch_data: vk::DeviceOrHostAddressKHR {
                device_address: self.device.get_device_address(&scratch_data),
            },
            dst_acceleration_structure: acceleration_structure.raw,
            ..Default::default()
        }
        .geometries(&geometries);

        let rt = self.device.ray_tracing.as_ref().unwrap();
        unsafe {
            rt.acceleration_structure.cmd_build_acceleration_structures(
                self.raw,
                &[build_info],
                &[&[build_range_info]],
            );
        }
    }
}

impl Drop for super::AccelerationStructureCommandEncoder<'_> {
    fn drop(&mut self) {
        end_pass(self.device, self.raw);
    }
}

impl<'a> super::ComputeCommandEncoder<'a> {
    pub fn with<'b, 'p>(
        &'b mut self,
        pipeline: &'p super::ComputePipeline,
    ) -> super::PipelineEncoder<'b, 'p> {
        super::PipelineEncoder {
            cmd_buf: self.cmd_buf,
            layout: &pipeline.layout,
            bind_point: vk::PipelineBindPoint::COMPUTE,
            device: self.device,
            update_data: self.update_data,
        }
        .init(pipeline.raw)
    }
}

impl Drop for super::ComputeCommandEncoder<'_> {
    fn drop(&mut self) {
        end_pass(self.device, self.cmd_buf.raw);
    }
}

impl<'a> super::RenderCommandEncoder<'a> {
    pub fn set_scissor_rect(&mut self, rect: &crate::ScissorRect) {
        let vk_scissor = vk::Rect2D {
            offset: vk::Offset2D {
                x: rect.x,
                y: rect.y,
            },
            extent: vk::Extent2D {
                width: rect.w,
                height: rect.h,
            },
        };
        unsafe {
            self.device
                .core
                .cmd_set_scissor(self.cmd_buf.raw, 0, &[vk_scissor])
        };
    }

    pub fn set_viewport(&mut self, viewport: &crate::Viewport, depth_range: Range<f32>) {
        let vk_viewports = [vk::Viewport {
            x: viewport.x,
            y: viewport.y,
            width: viewport.w,
            // A negative height flips the viewport so that +Y points up,
            // matching the other backends.
            height: -viewport.h,
            min_depth: depth_range.start,
            max_depth: depth_range.end,
        }];
        unsafe {
            self.device
                .core
                .cmd_set_viewport(self.cmd_buf.raw, 0, &vk_viewports)
        };
    }

    pub fn with<'b, 'p>(
        &'b mut self,
        pipeline: &'p super::RenderPipeline,
    ) -> super::PipelineEncoder<'b, 'p> {
        super::PipelineEncoder {
            cmd_buf: self.cmd_buf,
            layout: &pipeline.layout,
            bind_point: vk::PipelineBindPoint::GRAPHICS,
            device: self.device,
            update_data: self.update_data,
        }
        .init(pipeline.raw)
    }
}

impl Drop for super::RenderCommandEncoder<'_> {
    fn drop(&mut self) {
        unsafe {
            self.device
                .dynamic_rendering
                .cmd_end_rendering(self.cmd_buf.raw)
        };
        end_pass(self.device, self.cmd_buf.raw);
    }
}

impl super::PipelineEncoder<'_, '_> {
    fn init(self, raw_pipeline: vk::Pipeline) -> Self {
        unsafe {
            self.device
                .core
                .cmd_bind_pipeline(self.cmd_buf.raw, self.bind_point, raw_pipeline)
        };
        self
    }
}

#[hidden_trait::expose]
impl crate::traits::PipelineEncoder for super::PipelineEncoder<'_, '_> {
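    /// Bind `data` as descriptor set `group` of the current pipeline.
    ///
    /// A minimal usage sketch; `encoder`, `pipeline`, and `shader_data` are
    /// illustrative names:
    /// ```ignore
    /// let mut pc = encoder.with(&pipeline);
    /// pc.bind(0, &shader_data);
    /// ```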
    fn bind<D: crate::ShaderData>(&mut self, group: u32, data: &D) {
        let dsl = &self.layout.descriptor_set_layouts[group as usize];
        if !dsl.is_empty() {
            // Let the shader data serialize itself into the update template blob.
            self.update_data.clear();
            self.update_data.resize(dsl.template_size as usize, 0);
            data.fill(super::PipelineContext {
                update_data: self.update_data.as_mut_slice(),
                template_offsets: &dsl.template_offsets,
            });
        }

        let vk_set = self
            .device
            .allocate_descriptor_set(&mut self.cmd_buf.descriptor_pool, dsl);
        unsafe {
            if !dsl.is_empty() {
                self.device.core.update_descriptor_set_with_template(
                    vk_set,
                    dsl.update_template,
                    self.update_data.as_ptr() as *const _,
                );
            }
            self.device.core.cmd_bind_descriptor_sets(
                self.cmd_buf.raw,
                self.bind_point,
                self.layout.raw,
                group,
                &[vk_set],
                &[],
            );
        }
    }
}

#[hidden_trait::expose]
impl crate::traits::ComputePipelineEncoder for super::PipelineEncoder<'_, '_> {
    fn dispatch(&mut self, groups: [u32; 3]) {
        unsafe {
            self.device
                .core
                .cmd_dispatch(self.cmd_buf.raw, groups[0], groups[1], groups[2])
        };
    }
}

#[hidden_trait::expose]
impl crate::traits::RenderPipelineEncoder for super::PipelineEncoder<'_, '_> {
    type BufferPiece = crate::BufferPiece;

    fn set_scissor_rect(&mut self, rect: &crate::ScissorRect) {
        let vk_scissor = vk::Rect2D {
            offset: vk::Offset2D {
                x: rect.x,
                y: rect.y,
            },
            extent: vk::Extent2D {
                width: rect.w,
                height: rect.h,
            },
        };
        unsafe {
            self.device
                .core
                .cmd_set_scissor(self.cmd_buf.raw, 0, &[vk_scissor])
        };
    }

    fn set_viewport(&mut self, viewport: &crate::Viewport, depth_range: Range<f32>) {
        let vk_viewports = [vk::Viewport {
            x: viewport.x,
            y: viewport.y,
            width: viewport.w,
            // Same Y-flip as in `RenderCommandEncoder::set_viewport`.
            height: -viewport.h,
            min_depth: depth_range.start,
            max_depth: depth_range.end,
        }];
        unsafe {
            self.device
                .core
                .cmd_set_viewport(self.cmd_buf.raw, 0, &vk_viewports)
        };
    }

    fn bind_vertex(&mut self, index: u32, vertex_buf: crate::BufferPiece) {
        unsafe {
            self.device.core.cmd_bind_vertex_buffers(
                self.cmd_buf.raw,
                index,
                &[vertex_buf.buffer.raw],
                &[vertex_buf.offset],
            );
        }
    }

    fn draw(
        &mut self,
        start_vertex: u32,
        vertex_count: u32,
        start_instance: u32,
        instance_count: u32,
    ) {
        unsafe {
            self.device.core.cmd_draw(
                self.cmd_buf.raw,
                vertex_count,
                instance_count,
                start_vertex,
                start_instance,
            );
        }
    }

    fn draw_indexed(
        &mut self,
        index_buf: crate::BufferPiece,
        index_type: crate::IndexType,
        index_count: u32,
        base_vertex: i32,
        start_instance: u32,
        instance_count: u32,
    ) {
        let raw_index_type = super::map_index_type(index_type);
        unsafe {
            self.device.core.cmd_bind_index_buffer(
                self.cmd_buf.raw,
                index_buf.buffer.raw,
                index_buf.offset,
                raw_index_type,
            );
            // `first_index` is 0 because `index_buf.offset` already points
            // at the first index to use.
            self.device.core.cmd_draw_indexed(
                self.cmd_buf.raw,
                index_count,
                instance_count,
                0,
                base_vertex,
                start_instance,
            );
        }
    }

    fn draw_indirect(&mut self, indirect_buf: crate::BufferPiece) {
        unsafe {
            self.device.core.cmd_draw_indirect(
                self.cmd_buf.raw,
                indirect_buf.buffer.raw,
                indirect_buf.offset,
                1,
                0,
            );
        }
    }

    fn draw_indexed_indirect(
        &mut self,
        index_buf: crate::BufferPiece,
        index_type: crate::IndexType,
        indirect_buf: crate::BufferPiece,
    ) {
        let raw_index_type = super::map_index_type(index_type);
        unsafe {
            self.device.core.cmd_bind_index_buffer(
                self.cmd_buf.raw,
                index_buf.buffer.raw,
                index_buf.offset,
                raw_index_type,
            );
            self.device.core.cmd_draw_indexed_indirect(
                self.cmd_buf.raw,
                indirect_buf.buffer.raw,
                indirect_buf.offset,
                1,
                0,
            );
        }
    }
}
1049}