1use super::{Device, DeviceOwned, QueueCreateFlags};
11use crate::{
12 buffer::BufferState,
13 command_buffer::{
14 CommandBufferResourcesUsage, CommandBufferState, CommandBufferUsage, SemaphoreSubmitInfo,
15 SubmitInfo,
16 },
17 image::ImageState,
18 instance::{debug::DebugUtilsLabel, InstanceOwnedDebugWrapper},
19 macros::vulkan_bitflags,
20 memory::{
21 BindSparseInfo, SparseBufferMemoryBind, SparseImageMemoryBind, SparseImageOpaqueMemoryBind,
22 },
23 swapchain::{PresentInfo, SwapchainPresentInfo},
24 sync::{
25 fence::{Fence, FenceState},
26 future::{AccessCheckError, GpuFuture},
27 semaphore::SemaphoreState,
28 },
29 Requires, RequiresAllOf, RequiresOneOf, Validated, ValidationError, Version, VulkanError,
30 VulkanObject,
31};
32use ahash::HashMap;
33use parking_lot::{Mutex, MutexGuard};
34use smallvec::{smallvec, SmallVec};
35use std::{
36 collections::VecDeque,
37 ffi::CString,
38 hash::{Hash, Hasher},
39 mem::{take, MaybeUninit},
40 ptr,
41 sync::{atomic::Ordering, Arc},
42};
43
/// A device queue onto which work (command buffers, sparse binds, presents)
/// can be submitted.
///
/// Obtained from a [`Device`] at creation time; the raw handle is retrieved
/// with `vkGetDeviceQueue`.
#[derive(Debug)]
pub struct Queue {
    // Raw Vulkan queue handle.
    handle: ash::vk::Queue,
    // The device this queue was retrieved from.
    device: InstanceOwnedDebugWrapper<Arc<Device>>,

    // Flags the queue was created with.
    flags: QueueCreateFlags,
    // Index of the queue family this queue belongs to.
    queue_family_index: u32,
    // Index of this queue within its family.
    id: u32,
    // Mutable bookkeeping (pending operations); accessed via `Queue::with`.
    state: Mutex<QueueState>,
}
57
58impl Queue {
59 pub(super) unsafe fn new(
60 device: Arc<Device>,
61 flags: QueueCreateFlags,
62 queue_family_index: u32,
63 id: u32,
64 ) -> Arc<Self> {
65 let handle = {
66 let fns = device.fns();
67 let mut output = MaybeUninit::uninit();
68 (fns.v1_0.get_device_queue)(
69 device.handle(),
70 queue_family_index,
71 id,
72 output.as_mut_ptr(),
73 );
74 output.assume_init()
75 };
76
77 Self::from_handle(device, handle, flags, queue_family_index, id)
78 }
79
80 #[inline]
82 pub(super) unsafe fn from_handle(
83 device: Arc<Device>,
84 handle: ash::vk::Queue,
85 flags: QueueCreateFlags,
86 queue_family_index: u32,
87 id: u32,
88 ) -> Arc<Self> {
89 Arc::new(Queue {
90 handle,
91 device: InstanceOwnedDebugWrapper(device),
92 flags,
93 queue_family_index,
94 id,
95 state: Mutex::new(Default::default()),
96 })
97 }
98
99 #[inline]
101 pub fn device(&self) -> &Arc<Device> {
102 &self.device
103 }
104
105 #[inline]
107 pub fn flags(&self) -> QueueCreateFlags {
108 self.flags
109 }
110
111 #[inline]
113 pub fn queue_family_index(&self) -> u32 {
114 self.queue_family_index
115 }
116
117 #[inline]
119 pub fn id_within_family(&self) -> u32 {
120 self.id
121 }
122
123 #[inline]
126 pub fn with<'a, R>(self: &'a Arc<Self>, func: impl FnOnce(QueueGuard<'a>) -> R) -> R {
127 func(QueueGuard {
128 queue: self,
129 state: self.state.lock(),
130 })
131 }
132}
133
134impl Drop for Queue {
135 #[inline]
136 fn drop(&mut self) {
137 let state = self.state.get_mut();
138 let _ = state.wait_idle(&self.device, self.handle);
139 }
140}
141
// Exposes the raw `VkQueue` handle.
unsafe impl VulkanObject for Queue {
    type Handle = ash::vk::Queue;

    #[inline]
    fn handle(&self) -> Self::Handle {
        self.handle
    }
}
150
// A queue is owned by the device it was retrieved from.
unsafe impl DeviceOwned for Queue {
    #[inline]
    fn device(&self) -> &Arc<Device> {
        &self.device
    }
}
157
158impl PartialEq for Queue {
159 #[inline]
160 fn eq(&self, other: &Self) -> bool {
161 self.id == other.id
162 && self.queue_family_index == other.queue_family_index
163 && self.device == other.device
164 }
165}
166
// Equality on `Queue` is reflexive, so the full `Eq` contract holds.
impl Eq for Queue {}
168
169impl Hash for Queue {
170 fn hash<H: Hasher>(&self, state: &mut H) {
171 self.id.hash(state);
172 self.queue_family_index.hash(state);
173 self.device.hash(state);
174 }
175}
176
/// A handle to an exclusively-locked [`Queue`]; created by [`Queue::with`].
/// Holds the queue's state mutex for its whole lifetime, so all submission
/// and present operations go through this guard.
pub struct QueueGuard<'a> {
    queue: &'a Arc<Queue>,
    state: MutexGuard<'a, QueueState>,
}
181
182impl<'a> QueueGuard<'a> {
    /// Notifies the queue's bookkeeping that `fence` has been observed as
    /// signaled, so completed operations can be cleaned up.
    pub(crate) unsafe fn fence_signaled(&mut self, fence: &Fence) {
        self.state.fence_signaled(fence)
    }
186
    /// Blocks until all work previously submitted to this queue has completed
    /// (wraps `vkQueueWaitIdle`), then releases tracked resources.
    #[inline]
    pub fn wait_idle(&mut self) -> Result<(), VulkanError> {
        self.state.wait_idle(&self.queue.device, self.queue.handle)
    }
199
200 #[cfg_attr(not(feature = "document_unchecked"), doc(hidden))]
201 pub(crate) unsafe fn bind_sparse_unchecked(
202 &mut self,
203 bind_infos: impl IntoIterator<Item = BindSparseInfo>,
204 fence: Option<Arc<Fence>>,
205 ) -> Result<(), VulkanError> {
206 let bind_infos: SmallVec<[_; 4]> = bind_infos.into_iter().collect();
207 let mut states = States::from_bind_infos(&bind_infos);
208
209 self.bind_sparse_unchecked_locked(
210 &bind_infos,
211 fence.as_ref().map(|fence| {
212 let state = fence.state();
213 (fence, state)
214 }),
215 &mut states,
216 )
217 }
218
219 unsafe fn bind_sparse_unchecked_locked(
220 &mut self,
221 bind_infos: &SmallVec<[BindSparseInfo; 4]>,
222 fence: Option<(&Arc<Fence>, MutexGuard<'_, FenceState>)>,
223 states: &mut States<'_>,
224 ) -> Result<(), VulkanError> {
225 struct PerBindSparseInfo {
226 wait_semaphores_vk: SmallVec<[ash::vk::Semaphore; 4]>,
227 buffer_bind_infos_vk: SmallVec<[ash::vk::SparseBufferMemoryBindInfo; 4]>,
228 buffer_binds_vk: SmallVec<[SmallVec<[ash::vk::SparseMemoryBind; 4]>; 4]>,
229 image_opaque_bind_infos_vk: SmallVec<[ash::vk::SparseImageOpaqueMemoryBindInfo; 4]>,
230 image_opaque_binds_vk: SmallVec<[SmallVec<[ash::vk::SparseMemoryBind; 4]>; 4]>,
231 image_bind_infos_vk: SmallVec<[ash::vk::SparseImageMemoryBindInfo; 4]>,
232 image_binds_vk: SmallVec<[SmallVec<[ash::vk::SparseImageMemoryBind; 4]>; 4]>,
233 signal_semaphores_vk: SmallVec<[ash::vk::Semaphore; 4]>,
234 }
235
236 let (mut bind_infos_vk, mut per_bind_vk): (SmallVec<[_; 4]>, SmallVec<[_; 4]>) = bind_infos
237 .iter()
238 .map(|bind_info| {
239 let &BindSparseInfo {
240 ref wait_semaphores,
241 ref buffer_binds,
242 ref image_opaque_binds,
243 ref image_binds,
244 ref signal_semaphores,
245 _ne: _,
246 } = bind_info;
247
248 let wait_semaphores_vk: SmallVec<[_; 4]> = wait_semaphores
249 .iter()
250 .map(|semaphore| semaphore.handle())
251 .collect();
252
253 let (buffer_bind_infos_vk, buffer_binds_vk): (SmallVec<[_; 4]>, SmallVec<[_; 4]>) =
254 buffer_binds
255 .iter()
256 .map(|(buffer, memory_binds)| {
257 (
258 ash::vk::SparseBufferMemoryBindInfo {
259 buffer: buffer.buffer().handle(),
260 bind_count: 0,
261 p_binds: ptr::null(),
262 },
263 memory_binds
264 .iter()
265 .map(|memory_bind| {
266 let &SparseBufferMemoryBind {
267 offset,
268 size,
269 ref memory,
270 } = memory_bind;
271
272 let (memory, memory_offset) = memory.as_ref().map_or_else(
273 Default::default,
274 |(memory, memory_offset)| {
275 (memory.handle(), *memory_offset)
276 },
277 );
278
279 ash::vk::SparseMemoryBind {
280 resource_offset: offset,
281 size,
282 memory,
283 memory_offset,
284 flags: ash::vk::SparseMemoryBindFlags::empty(),
285 }
286 })
287 .collect::<SmallVec<[_; 4]>>(),
288 )
289 })
290 .unzip();
291
292 let (image_opaque_bind_infos_vk, image_opaque_binds_vk): (
293 SmallVec<[_; 4]>,
294 SmallVec<[_; 4]>,
295 ) = image_opaque_binds
296 .iter()
297 .map(|(image, memory_binds)| {
298 (
299 ash::vk::SparseImageOpaqueMemoryBindInfo {
300 image: image.handle(),
301 bind_count: 0,
302 p_binds: ptr::null(),
303 },
304 memory_binds
305 .iter()
306 .map(|memory_bind| {
307 let &SparseImageOpaqueMemoryBind {
308 offset,
309 size,
310 ref memory,
311 metadata,
312 } = memory_bind;
313
314 let (memory, memory_offset) = memory.as_ref().map_or_else(
315 Default::default,
316 |(memory, memory_offset)| (memory.handle(), *memory_offset),
317 );
318
319 ash::vk::SparseMemoryBind {
320 resource_offset: offset,
321 size,
322 memory,
323 memory_offset,
324 flags: if metadata {
325 ash::vk::SparseMemoryBindFlags::METADATA
326 } else {
327 ash::vk::SparseMemoryBindFlags::empty()
328 },
329 }
330 })
331 .collect::<SmallVec<[_; 4]>>(),
332 )
333 })
334 .unzip();
335
336 let (image_bind_infos_vk, image_binds_vk): (SmallVec<[_; 4]>, SmallVec<[_; 4]>) =
337 image_binds
338 .iter()
339 .map(|(image, memory_binds)| {
340 (
341 ash::vk::SparseImageMemoryBindInfo {
342 image: image.handle(),
343 bind_count: 0,
344 p_binds: ptr::null(),
345 },
346 memory_binds
347 .iter()
348 .map(|memory_bind| {
349 let &SparseImageMemoryBind {
350 aspects,
351 mip_level,
352 array_layer,
353 offset,
354 extent,
355 ref memory,
356 } = memory_bind;
357
358 let (memory, memory_offset) = memory.as_ref().map_or_else(
359 Default::default,
360 |(memory, memory_offset)| {
361 (memory.handle(), *memory_offset)
362 },
363 );
364
365 ash::vk::SparseImageMemoryBind {
366 subresource: ash::vk::ImageSubresource {
367 aspect_mask: aspects.into(),
368 mip_level,
369 array_layer,
370 },
371 offset: ash::vk::Offset3D {
372 x: offset[0] as i32,
373 y: offset[1] as i32,
374 z: offset[2] as i32,
375 },
376 extent: ash::vk::Extent3D {
377 width: extent[0],
378 height: extent[1],
379 depth: extent[2],
380 },
381 memory,
382 memory_offset,
383 flags: ash::vk::SparseMemoryBindFlags::empty(),
384 }
385 })
386 .collect::<SmallVec<[_; 4]>>(),
387 )
388 })
389 .unzip();
390
391 let signal_semaphores_vk: SmallVec<[_; 4]> = signal_semaphores
392 .iter()
393 .map(|semaphore| semaphore.handle())
394 .collect();
395
396 (
397 ash::vk::BindSparseInfo::default(),
398 PerBindSparseInfo {
399 wait_semaphores_vk,
400 buffer_bind_infos_vk,
401 buffer_binds_vk,
402 image_opaque_bind_infos_vk,
403 image_opaque_binds_vk,
404 image_bind_infos_vk,
405 image_binds_vk,
406 signal_semaphores_vk,
407 },
408 )
409 })
410 .unzip();
411
412 for (
413 bind_info_vk,
414 PerBindSparseInfo {
415 wait_semaphores_vk,
416 buffer_bind_infos_vk,
417 buffer_binds_vk,
418 image_opaque_bind_infos_vk,
419 image_opaque_binds_vk,
420 image_bind_infos_vk,
421 image_binds_vk,
422 signal_semaphores_vk,
423 },
424 ) in (bind_infos_vk.iter_mut()).zip(per_bind_vk.iter_mut())
425 {
426 for (buffer_bind_infos_vk, buffer_binds_vk) in
427 (buffer_bind_infos_vk.iter_mut()).zip(buffer_binds_vk.iter())
428 {
429 *buffer_bind_infos_vk = ash::vk::SparseBufferMemoryBindInfo {
430 bind_count: buffer_binds_vk.len() as u32,
431 p_binds: buffer_binds_vk.as_ptr(),
432 ..*buffer_bind_infos_vk
433 };
434 }
435
436 for (image_opaque_bind_infos_vk, image_opaque_binds_vk) in
437 (image_opaque_bind_infos_vk.iter_mut()).zip(image_opaque_binds_vk.iter())
438 {
439 *image_opaque_bind_infos_vk = ash::vk::SparseImageOpaqueMemoryBindInfo {
440 bind_count: image_opaque_binds_vk.len() as u32,
441 p_binds: image_opaque_binds_vk.as_ptr(),
442 ..*image_opaque_bind_infos_vk
443 };
444 }
445
446 for (image_bind_infos_vk, image_binds_vk) in
447 (image_bind_infos_vk.iter_mut()).zip(image_binds_vk.iter())
448 {
449 *image_bind_infos_vk = ash::vk::SparseImageMemoryBindInfo {
450 bind_count: image_binds_vk.len() as u32,
451 p_binds: image_binds_vk.as_ptr(),
452 ..*image_bind_infos_vk
453 };
454 }
455
456 *bind_info_vk = ash::vk::BindSparseInfo {
457 wait_semaphore_count: wait_semaphores_vk.len() as u32,
458 p_wait_semaphores: wait_semaphores_vk.as_ptr(),
459 buffer_bind_count: buffer_bind_infos_vk.len() as u32,
460 p_buffer_binds: buffer_bind_infos_vk.as_ptr(),
461 image_opaque_bind_count: image_opaque_bind_infos_vk.len() as u32,
462 p_image_opaque_binds: image_opaque_bind_infos_vk.as_ptr(),
463 image_bind_count: image_bind_infos_vk.len() as u32,
464 p_image_binds: image_bind_infos_vk.as_ptr(),
465 signal_semaphore_count: signal_semaphores_vk.len() as u32,
466 p_signal_semaphores: signal_semaphores_vk.as_ptr(),
467 ..*bind_info_vk
468 }
469 }
470
471 let fns = self.queue.device.fns();
472 (fns.v1_0.queue_bind_sparse)(
473 self.queue.handle,
474 bind_infos_vk.len() as u32,
475 bind_infos_vk.as_ptr(),
476 fence
477 .as_ref()
478 .map_or_else(Default::default, |(fence, _)| fence.handle()),
479 )
480 .result()
481 .map_err(VulkanError::from)?;
482
483 for bind_info in bind_infos {
484 let BindSparseInfo {
485 wait_semaphores,
486 buffer_binds: _,
487 image_opaque_binds: _,
488 image_binds: _,
489 signal_semaphores,
490 _ne: _,
491 } = bind_info;
492
493 for semaphore in wait_semaphores {
494 let state = states.semaphores.get_mut(&semaphore.handle()).unwrap();
495 state.add_queue_wait(self.queue);
496 }
497
498 for semaphore in signal_semaphores {
499 let state = states.semaphores.get_mut(&semaphore.handle()).unwrap();
500 state.add_queue_wait(self.queue);
501 }
502 }
503
504 let fence = fence.map(|(fence, mut state)| {
505 state.add_queue_signal(self.queue);
506 fence.clone()
507 });
508
509 self.state
510 .operations
511 .push_back((bind_infos.clone().into(), fence));
512
513 Ok(())
514 }
515
    #[cfg_attr(not(feature = "document_unchecked"), doc(hidden))]
    /// Queues a swapchain present operation without validation.
    ///
    /// Locks the state of every resource referenced by `present_info`, then
    /// delegates to the locked implementation. Returns one result per
    /// swapchain; `Ok(true)` means the present was suboptimal.
    #[inline]
    pub unsafe fn present_unchecked(
        &mut self,
        present_info: PresentInfo,
    ) -> Result<impl ExactSizeIterator<Item = Result<bool, VulkanError>>, VulkanError> {
        let mut states = States::from_present_info(&present_info);
        self.present_unchecked_locked(&present_info, &mut states)
    }
525
    /// Performs the actual present while the queue lock and resource states
    /// are held.
    ///
    /// Builds a `VkPresentInfoKHR` (plus optional `pNext` structs for present
    /// ids, per-swapchain present modes and present regions), calls
    /// `vkQueuePresentKHR`, records the semaphore waits and the pending
    /// operation, and surfaces the per-swapchain results.
    unsafe fn present_unchecked_locked(
        &mut self,
        present_info: &PresentInfo,
        states: &mut States<'_>,
    ) -> Result<impl ExactSizeIterator<Item = Result<bool, VulkanError>>, VulkanError> {
        let PresentInfo {
            wait_semaphores,
            swapchain_infos,
            _ne: _,
        } = present_info;

        let wait_semaphores_vk: SmallVec<[_; 4]> = wait_semaphores
            .iter()
            .map(|semaphore| semaphore.handle())
            .collect();

        // One entry per swapchain in each of these vectors; the extension
        // structs below borrow them, so they must outlive the FFI call.
        let mut swapchains_vk: SmallVec<[_; 4]> = SmallVec::with_capacity(swapchain_infos.len());
        let mut image_indices_vk: SmallVec<[_; 4]> = SmallVec::with_capacity(swapchain_infos.len());
        let mut present_ids_vk: SmallVec<[_; 4]> = SmallVec::with_capacity(swapchain_infos.len());
        let mut present_modes_vk: SmallVec<[_; 4]> = SmallVec::with_capacity(swapchain_infos.len());
        let mut present_regions_vk: SmallVec<[_; 4]> =
            SmallVec::with_capacity(swapchain_infos.len());
        let mut rectangles_vk: SmallVec<[_; 4]> = SmallVec::with_capacity(swapchain_infos.len());

        // Each extension struct is only chained in if at least one swapchain
        // actually uses the corresponding feature.
        let mut has_present_ids = false;
        let mut has_present_modes = false;
        let mut has_present_regions = false;

        for swapchain_info in swapchain_infos {
            let &SwapchainPresentInfo {
                ref swapchain,
                image_index,
                present_id,
                present_mode,
                ref present_regions,
                _ne: _,
            } = swapchain_info;

            swapchains_vk.push(swapchain.handle());
            image_indices_vk.push(image_index);
            present_ids_vk.push(present_id.map_or(0, u64::from));
            present_modes_vk.push(present_mode.map_or_else(Default::default, Into::into));
            // Placeholder; count/pointer are patched below once the
            // rectangle arrays have stable addresses.
            present_regions_vk.push(ash::vk::PresentRegionKHR::default());
            rectangles_vk.push(
                present_regions
                    .iter()
                    .map(ash::vk::RectLayerKHR::from)
                    .collect::<SmallVec<[_; 4]>>(),
            );

            if present_id.is_some() {
                has_present_ids = true;
            }

            if present_mode.is_some() {
                has_present_modes = true;
            }

            if !present_regions.is_empty() {
                has_present_regions = true;
            }
        }

        // Per-swapchain results, written by the driver via `p_results`.
        let mut results = vec![ash::vk::Result::SUCCESS; swapchain_infos.len()];
        let mut info_vk = ash::vk::PresentInfoKHR {
            wait_semaphore_count: wait_semaphores_vk.len() as u32,
            p_wait_semaphores: wait_semaphores_vk.as_ptr(),
            swapchain_count: swapchains_vk.len() as u32,
            p_swapchains: swapchains_vk.as_ptr(),
            p_image_indices: image_indices_vk.as_ptr(),
            p_results: results.as_mut_ptr(),
            ..Default::default()
        };
        // These `Option`s own the extension structs so that the pointers
        // placed in the `pNext` chain stay valid until the call.
        let mut present_id_info_vk = None;
        let mut present_mode_info_vk = None;
        let mut present_region_info_vk = None;

        if has_present_ids {
            let next = present_id_info_vk.insert(ash::vk::PresentIdKHR {
                swapchain_count: present_ids_vk.len() as u32,
                p_present_ids: present_ids_vk.as_ptr(),
                ..Default::default()
            });

            // Prepend to the pNext chain.
            next.p_next = info_vk.p_next;
            info_vk.p_next = next as *const _ as *const _;
        }

        if has_present_modes {
            let next = present_mode_info_vk.insert(ash::vk::SwapchainPresentModeInfoEXT {
                swapchain_count: present_modes_vk.len() as u32,
                p_present_modes: present_modes_vk.as_ptr(),
                ..Default::default()
            });

            next.p_next = info_vk.p_next as _;
            info_vk.p_next = next as *const _ as *const _;
        }

        if has_present_regions {
            // Fill in the per-swapchain rectangle counts/pointers now that
            // `rectangles_vk` will no longer move.
            for (present_regions_vk, rectangles_vk) in
                (present_regions_vk.iter_mut()).zip(rectangles_vk.iter())
            {
                *present_regions_vk = ash::vk::PresentRegionKHR {
                    rectangle_count: rectangles_vk.len() as u32,
                    p_rectangles: rectangles_vk.as_ptr(),
                };
            }

            let next = present_region_info_vk.insert(ash::vk::PresentRegionsKHR {
                swapchain_count: present_regions_vk.len() as u32,
                p_regions: present_regions_vk.as_ptr(),
                ..Default::default()
            });

            next.p_next = info_vk.p_next;
            info_vk.p_next = next as *const _ as *const _;
        }

        let fns = self.queue.device().fns();
        let result = (fns.khr_swapchain.queue_present_khr)(self.queue.handle, &info_vk);

        // Any result other than these is a hard error for the whole call;
        // the listed ones still produce valid per-swapchain results.
        if !matches!(
            result,
            ash::vk::Result::SUCCESS
                | ash::vk::Result::SUBOPTIMAL_KHR
                | ash::vk::Result::ERROR_OUT_OF_DATE_KHR
                | ash::vk::Result::ERROR_SURFACE_LOST_KHR
                | ash::vk::Result::ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT,
        ) {
            return Err(VulkanError::from(result));
        }

        for semaphore in wait_semaphores {
            let state = states.semaphores.get_mut(&semaphore.handle()).unwrap();
            state.add_queue_wait(self.queue);
        }

        self.state
            .operations
            .push_back((present_info.clone().into(), None));

        for (&result, swapchain_info) in results.iter().zip(&present_info.swapchain_infos) {
            if result == ash::vk::Result::ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT {
                // Losing full-screen exclusive mode releases the hold flag.
                swapchain_info
                    .swapchain
                    .full_screen_exclusive_held()
                    .store(false, Ordering::SeqCst);
            }
        }

        Ok(results.into_iter().map(|result| match result {
            ash::vk::Result::SUCCESS => Ok(false),
            ash::vk::Result::SUBOPTIMAL_KHR => Ok(true),
            err => Err(VulkanError::from(err)),
        }))
    }
689
    /// Validates a submission against a [`GpuFuture`] and, if everything
    /// checks out, submits it.
    ///
    /// Checks that each command buffer's usage allows this submission, and
    /// that every buffer/image access it performs is either permitted by
    /// `future` or by the resource's own tracked state, before delegating to
    /// `submit_unchecked_locked`.
    pub(crate) unsafe fn submit_with_future(
        &mut self,
        submit_info: SubmitInfo,
        fence: Option<Arc<Fence>>,
        future: &dyn GpuFuture,
        queue: &Queue,
    ) -> Result<(), Validated<VulkanError>> {
        let submit_infos: SmallVec<[_; 4]> = smallvec![submit_info];
        let mut states = States::from_submit_infos(&submit_infos);

        for submit_info in &submit_infos {
            for command_buffer in &submit_info.command_buffers {
                let state = states
                    .command_buffers
                    .get(&command_buffer.handle())
                    .unwrap();

                // Enforce the command buffer's usage policy.
                match command_buffer.usage() {
                    CommandBufferUsage::OneTimeSubmit => {
                        // One-time command buffers may never be submitted twice.
                        if state.has_been_submitted() {
                            return Err(Box::new(ValidationError {
                                problem: "a command buffer, or one of the secondary \
                                    command buffers it executes, was created with the \
                                    `CommandBufferUsage::OneTimeSubmit` usage, but \
                                    it has already been submitted in the past"
                                    .into(),
                                vuids: &["VUID-vkQueueSubmit2-commandBuffer-03874"],
                                ..Default::default()
                            })
                            .into());
                        }
                    }
                    CommandBufferUsage::MultipleSubmit => {
                        // Re-submittable, but not while a prior submission is
                        // still in flight.
                        if state.is_submit_pending() {
                            return Err(Box::new(ValidationError {
                                problem: "a command buffer, or one of the secondary \
                                    command buffers it executes, was not created with the \
                                    `CommandBufferUsage::SimultaneousUse` usage, but \
                                    it is already in use by the device"
                                    .into(),
                                vuids: &["VUID-vkQueueSubmit2-commandBuffer-03875"],
                                ..Default::default()
                            })
                            .into());
                        }
                    }
                    CommandBufferUsage::SimultaneousUse => (),
                }

                let CommandBufferResourcesUsage {
                    buffers,
                    images,
                    buffer_indices: _,
                    image_indices: _,
                } = command_buffer.resources_usage();

                // Validate every buffer range access: first ask the future;
                // if it doesn't know the resource, fall back to the buffer's
                // own tracked state.
                for usage in buffers {
                    let state = states.buffers.get_mut(&usage.buffer.handle()).unwrap();

                    for (range, range_usage) in usage.ranges.iter() {
                        match future.check_buffer_access(
                            &usage.buffer,
                            range.clone(),
                            range_usage.mutable,
                            queue,
                        ) {
                            Err(AccessCheckError::Denied(error)) => {
                                return Err(Box::new(ValidationError {
                                    problem: format!(
                                        "access to a resource has been denied \
                                        (resource use: {:?}, error: {})",
                                        range_usage.first_use, error
                                    )
                                    .into(),
                                    ..Default::default()
                                })
                                .into());
                            }
                            Err(AccessCheckError::Unknown) => {
                                let result = if range_usage.mutable {
                                    state.check_gpu_write(range.clone())
                                } else {
                                    state.check_gpu_read(range.clone())
                                };

                                if let Err(error) = result {
                                    return Err(Box::new(ValidationError {
                                        problem: format!(
                                            "access to a resource has been denied \
                                            (resource use: {:?}, error: {})",
                                            range_usage.first_use, error
                                        )
                                        .into(),
                                        ..Default::default()
                                    })
                                    .into());
                                }
                            }
                            _ => (),
                        }
                    }
                }

                // Same scheme for image ranges, additionally checking the
                // expected image layout.
                for usage in images {
                    let state = states.images.get_mut(&usage.image.handle()).unwrap();

                    for (range, range_usage) in usage.ranges.iter() {
                        match future.check_image_access(
                            &usage.image,
                            range.clone(),
                            range_usage.mutable,
                            range_usage.expected_layout,
                            queue,
                        ) {
                            Err(AccessCheckError::Denied(error)) => {
                                return Err(Box::new(ValidationError {
                                    problem: format!(
                                        "access to a resource has been denied \
                                        (resource use: {:?}, error: {})",
                                        range_usage.first_use, error
                                    )
                                    .into(),
                                    ..Default::default()
                                })
                                .into());
                            }
                            Err(AccessCheckError::Unknown) => {
                                let result = if range_usage.mutable {
                                    state
                                        .check_gpu_write(range.clone(), range_usage.expected_layout)
                                } else {
                                    state.check_gpu_read(range.clone(), range_usage.expected_layout)
                                };

                                if let Err(error) = result {
                                    return Err(Box::new(ValidationError {
                                        problem: format!(
                                            "access to a resource has been denied \
                                            (resource use: {:?}, error: {})",
                                            range_usage.first_use, error
                                        )
                                        .into(),
                                        ..Default::default()
                                    })
                                    .into());
                                }
                            }
                            _ => (),
                        };
                    }
                }
            }
        }

        Ok(self.submit_unchecked_locked(
            &submit_infos,
            fence.as_ref().map(|fence| {
                let state = fence.state();
                (fence, state)
            }),
            &mut states,
        )?)
    }
854
855 #[cfg_attr(not(feature = "document_unchecked"), doc(hidden))]
856 pub unsafe fn submit_unchecked(
857 &mut self,
858 submit_infos: impl IntoIterator<Item = SubmitInfo>,
859 fence: Option<Arc<Fence>>,
860 ) -> Result<(), VulkanError> {
861 let submit_infos: SmallVec<[_; 4]> = submit_infos.into_iter().collect();
862 let mut states = States::from_submit_infos(&submit_infos);
863
864 self.submit_unchecked_locked(
865 &submit_infos,
866 fence.as_ref().map(|fence| {
867 let state = fence.state();
868 (fence, state)
869 }),
870 &mut states,
871 )
872 }
873
    /// Performs the actual submission while the queue lock and all relevant
    /// resource states are held.
    ///
    /// Uses `vkQueueSubmit2` (core 1.3 or `VK_KHR_synchronization2`) when the
    /// `synchronization2` feature is enabled, otherwise the legacy
    /// `vkQueueSubmit`. On success, records semaphore waits/signals, command
    /// buffer submissions, GPU read/write locks on every accessed resource
    /// range, the optional fence signal, and the pending operation.
    unsafe fn submit_unchecked_locked(
        &mut self,
        submit_infos: &SmallVec<[SubmitInfo; 4]>,
        fence: Option<(&Arc<Fence>, MutexGuard<'_, FenceState>)>,
        states: &mut States<'_>,
    ) -> Result<(), VulkanError> {
        if self.queue.device.enabled_features().synchronization2 {
            // Owns the arrays that each `VkSubmitInfo2` points into.
            struct PerSubmitInfo {
                wait_semaphore_infos_vk: SmallVec<[ash::vk::SemaphoreSubmitInfo; 4]>,
                command_buffer_infos_vk: SmallVec<[ash::vk::CommandBufferSubmitInfo; 4]>,
                signal_semaphore_infos_vk: SmallVec<[ash::vk::SemaphoreSubmitInfo; 4]>,
            }

            // First pass: build the arrays, leaving counts/pointers zeroed;
            // they are patched below once the arrays stop moving.
            let (mut submit_info_vk, per_submit_vk): (SmallVec<[_; 4]>, SmallVec<[_; 4]>) =
                submit_infos
                    .iter()
                    .map(|submit_info| {
                        let &SubmitInfo {
                            ref wait_semaphores,
                            ref command_buffers,
                            ref signal_semaphores,
                            _ne: _,
                        } = submit_info;

                        let wait_semaphore_infos_vk = wait_semaphores
                            .iter()
                            .map(|semaphore_submit_info| {
                                let &SemaphoreSubmitInfo {
                                    ref semaphore,
                                    stages,
                                    _ne: _,
                                } = semaphore_submit_info;

                                ash::vk::SemaphoreSubmitInfo {
                                    semaphore: semaphore.handle(),
                                    // Timeline semaphore values are not used here.
                                    value: 0,
                                    stage_mask: stages.into(),
                                    device_index: 0,
                                    ..Default::default()
                                }
                            })
                            .collect();

                        let command_buffer_infos_vk = command_buffers
                            .iter()
                            .map(|cb| ash::vk::CommandBufferSubmitInfo {
                                command_buffer: cb.handle(),
                                device_mask: 0,
                                ..Default::default()
                            })
                            .collect();

                        let signal_semaphore_infos_vk = signal_semaphores
                            .iter()
                            .map(|semaphore_submit_info| {
                                let &SemaphoreSubmitInfo {
                                    ref semaphore,
                                    stages,
                                    _ne: _,
                                } = semaphore_submit_info;

                                ash::vk::SemaphoreSubmitInfo {
                                    semaphore: semaphore.handle(),
                                    value: 0,
                                    stage_mask: stages.into(),
                                    device_index: 0,
                                    ..Default::default()
                                }
                            })
                            .collect();

                        (
                            ash::vk::SubmitInfo2 {
                                flags: ash::vk::SubmitFlags::empty(),
                                wait_semaphore_info_count: 0,
                                p_wait_semaphore_infos: ptr::null(),
                                command_buffer_info_count: 0,
                                p_command_buffer_infos: ptr::null(),
                                signal_semaphore_info_count: 0,
                                p_signal_semaphore_infos: ptr::null(),
                                ..Default::default()
                            },
                            PerSubmitInfo {
                                wait_semaphore_infos_vk,
                                command_buffer_infos_vk,
                                signal_semaphore_infos_vk,
                            },
                        )
                    })
                    .unzip();

            // Second pass: fill in counts and pointers.
            for (
                submit_info_vk,
                PerSubmitInfo {
                    wait_semaphore_infos_vk,
                    command_buffer_infos_vk,
                    signal_semaphore_infos_vk,
                },
            ) in (submit_info_vk.iter_mut()).zip(per_submit_vk.iter())
            {
                *submit_info_vk = ash::vk::SubmitInfo2 {
                    wait_semaphore_info_count: wait_semaphore_infos_vk.len() as u32,
                    p_wait_semaphore_infos: wait_semaphore_infos_vk.as_ptr(),
                    command_buffer_info_count: command_buffer_infos_vk.len() as u32,
                    p_command_buffer_infos: command_buffer_infos_vk.as_ptr(),
                    signal_semaphore_info_count: signal_semaphore_infos_vk.len() as u32,
                    p_signal_semaphore_infos: signal_semaphore_infos_vk.as_ptr(),
                    ..*submit_info_vk
                };
            }

            let fns = self.queue.device.fns();

            // Core function if available, otherwise the extension entry point.
            if self.queue.device.api_version() >= Version::V1_3 {
                (fns.v1_3.queue_submit2)(
                    self.queue.handle,
                    submit_info_vk.len() as u32,
                    submit_info_vk.as_ptr(),
                    fence
                        .as_ref()
                        .map_or_else(Default::default, |(fence, _)| fence.handle()),
                )
            } else {
                debug_assert!(self.queue.device.enabled_extensions().khr_synchronization2);
                (fns.khr_synchronization2.queue_submit2_khr)(
                    self.queue.handle,
                    submit_info_vk.len() as u32,
                    submit_info_vk.as_ptr(),
                    fence
                        .as_ref()
                        .map_or_else(Default::default, |(fence, _)| fence.handle()),
                )
            }
            .result()
            .map_err(VulkanError::from)?;
        } else {
            // Legacy `vkQueueSubmit` path.
            struct PerSubmitInfo {
                wait_semaphores_vk: SmallVec<[ash::vk::Semaphore; 4]>,
                wait_dst_stage_mask_vk: SmallVec<[ash::vk::PipelineStageFlags; 4]>,
                command_buffers_vk: SmallVec<[ash::vk::CommandBuffer; 4]>,
                signal_semaphores_vk: SmallVec<[ash::vk::Semaphore; 4]>,
            }

            let (mut submit_info_vk, per_submit_vk): (SmallVec<[_; 4]>, SmallVec<[_; 4]>) =
                submit_infos
                    .iter()
                    .map(|submit_info| {
                        let &SubmitInfo {
                            ref wait_semaphores,
                            ref command_buffers,
                            ref signal_semaphores,
                            _ne: _,
                        } = submit_info;

                        let (wait_semaphores_vk, wait_dst_stage_mask_vk) = wait_semaphores
                            .iter()
                            .map(|semaphore_submit_info| {
                                let &SemaphoreSubmitInfo {
                                    ref semaphore,
                                    stages,
                                    _ne: _,
                                } = semaphore_submit_info;

                                (semaphore.handle(), stages.into())
                            })
                            .unzip();

                        let command_buffers_vk =
                            command_buffers.iter().map(|cb| cb.handle()).collect();

                        let signal_semaphores_vk = signal_semaphores
                            .iter()
                            .map(|semaphore_submit_info| {
                                let &SemaphoreSubmitInfo {
                                    ref semaphore,
                                    // Legacy submit has no per-signal stages.
                                    stages: _,
                                    _ne: _,
                                } = semaphore_submit_info;

                                semaphore.handle()
                            })
                            .collect();

                        (
                            ash::vk::SubmitInfo {
                                wait_semaphore_count: 0,
                                p_wait_semaphores: ptr::null(),
                                p_wait_dst_stage_mask: ptr::null(),
                                command_buffer_count: 0,
                                p_command_buffers: ptr::null(),
                                signal_semaphore_count: 0,
                                p_signal_semaphores: ptr::null(),
                                ..Default::default()
                            },
                            PerSubmitInfo {
                                wait_semaphores_vk,
                                wait_dst_stage_mask_vk,
                                command_buffers_vk,
                                signal_semaphores_vk,
                            },
                        )
                    })
                    .unzip();

            for (
                submit_info_vk,
                PerSubmitInfo {
                    wait_semaphores_vk,
                    wait_dst_stage_mask_vk,
                    command_buffers_vk,
                    signal_semaphores_vk,
                },
            ) in (submit_info_vk.iter_mut()).zip(per_submit_vk.iter())
            {
                *submit_info_vk = ash::vk::SubmitInfo {
                    wait_semaphore_count: wait_semaphores_vk.len() as u32,
                    p_wait_semaphores: wait_semaphores_vk.as_ptr(),
                    p_wait_dst_stage_mask: wait_dst_stage_mask_vk.as_ptr(),
                    command_buffer_count: command_buffers_vk.len() as u32,
                    p_command_buffers: command_buffers_vk.as_ptr(),
                    signal_semaphore_count: signal_semaphores_vk.len() as u32,
                    p_signal_semaphores: signal_semaphores_vk.as_ptr(),
                    ..*submit_info_vk
                };
            }

            let fns = self.queue.device.fns();
            (fns.v1_0.queue_submit)(
                self.queue.handle,
                submit_info_vk.len() as u32,
                submit_info_vk.as_ptr(),
                fence
                    .as_ref()
                    .map_or_else(Default::default, |(fence, _)| fence.handle()),
            )
            .result()
            .map_err(VulkanError::from)?;
        }

        // The submission succeeded; record it in the tracked state.
        for submit_info in submit_infos {
            let SubmitInfo {
                wait_semaphores,
                command_buffers,
                signal_semaphores,
                _ne: _,
            } = submit_info;

            for semaphore_submit_info in wait_semaphores {
                let state = states
                    .semaphores
                    .get_mut(&semaphore_submit_info.semaphore.handle())
                    .unwrap();
                state.add_queue_wait(self.queue);
            }

            for command_buffer in command_buffers {
                let state = states
                    .command_buffers
                    .get_mut(&command_buffer.handle())
                    .unwrap();
                state.add_queue_submit();

                let CommandBufferResourcesUsage {
                    buffers,
                    images,
                    buffer_indices: _,
                    image_indices: _,
                } = command_buffer.resources_usage();

                // Take GPU-side locks on every accessed resource range.
                for usage in buffers {
                    let state = states.buffers.get_mut(&usage.buffer.handle()).unwrap();

                    for (range, range_usage) in usage.ranges.iter() {
                        if range_usage.mutable {
                            state.gpu_write_lock(range.clone());
                        } else {
                            state.gpu_read_lock(range.clone());
                        }
                    }
                }

                for usage in images {
                    let state = states.images.get_mut(&usage.image.handle()).unwrap();

                    for (range, range_usage) in usage.ranges.iter() {
                        if range_usage.mutable {
                            state.gpu_write_lock(range.clone(), range_usage.final_layout);
                        } else {
                            state.gpu_read_lock(range.clone());
                        }
                    }
                }
            }

            for semaphore_submit_info in signal_semaphores {
                let state = states
                    .semaphores
                    .get_mut(&semaphore_submit_info.semaphore.handle())
                    .unwrap();
                state.add_queue_signal(self.queue);
            }
        }

        let fence = fence.map(|(fence, mut state)| {
            state.add_queue_signal(self.queue);
            fence.clone()
        });

        self.state
            .operations
            .push_back((submit_infos.clone().into(), fence));

        Ok(())
    }
1188
1189 #[inline]
1195 pub fn begin_debug_utils_label(
1196 &mut self,
1197 label_info: DebugUtilsLabel,
1198 ) -> Result<(), Box<ValidationError>> {
1199 self.validate_begin_debug_utils_label(&label_info)?;
1200
1201 unsafe {
1202 self.begin_debug_utils_label_unchecked(label_info);
1203 Ok(())
1204 }
1205 }
1206
1207 fn validate_begin_debug_utils_label(
1208 &self,
1209 _label_info: &DebugUtilsLabel,
1210 ) -> Result<(), Box<ValidationError>> {
1211 if !self
1212 .queue
1213 .device
1214 .instance()
1215 .enabled_extensions()
1216 .ext_debug_utils
1217 {
1218 return Err(Box::new(ValidationError {
1219 requires_one_of: RequiresOneOf(&[RequiresAllOf(&[Requires::InstanceExtension(
1220 "ext_debug_utils",
1221 )])]),
1222 ..Default::default()
1223 }));
1224 }
1225
1226 Ok(())
1227 }
1228
    #[cfg_attr(not(feature = "document_unchecked"), doc(hidden))]
    /// Opens a queue debug label region without checking that the
    /// `ext_debug_utils` extension is enabled.
    #[inline]
    pub unsafe fn begin_debug_utils_label_unchecked(&mut self, label_info: DebugUtilsLabel) {
        let DebugUtilsLabel {
            label_name,
            color,
            _ne: _,
        } = label_info;

        // `label_name_vk` must stay alive until after the FFI call, since the
        // Vulkan struct only borrows the C string.
        let label_name_vk = CString::new(label_name.as_str()).unwrap();
        let label_info = ash::vk::DebugUtilsLabelEXT {
            p_label_name: label_name_vk.as_ptr(),
            color,
            ..Default::default()
        };

        let fns = self.queue.device.instance().fns();
        (fns.ext_debug_utils.queue_begin_debug_utils_label_ext)(self.queue.handle, &label_info);
    }
1248
1249 #[inline]
1259 pub unsafe fn end_debug_utils_label(&mut self) -> Result<(), Box<ValidationError>> {
1260 self.validate_end_debug_utils_label()?;
1261 self.end_debug_utils_label_unchecked();
1262
1263 Ok(())
1264 }
1265
1266 fn validate_end_debug_utils_label(&self) -> Result<(), Box<ValidationError>> {
1267 if !self
1268 .queue
1269 .device
1270 .instance()
1271 .enabled_extensions()
1272 .ext_debug_utils
1273 {
1274 return Err(Box::new(ValidationError {
1275 requires_one_of: RequiresOneOf(&[RequiresAllOf(&[Requires::InstanceExtension(
1276 "ext_debug_utils",
1277 )])]),
1278 ..Default::default()
1279 }));
1280 }
1281
1282 Ok(())
1286 }
1287
1288 #[cfg_attr(not(feature = "document_unchecked"), doc(hidden))]
1289 #[inline]
1290 pub unsafe fn end_debug_utils_label_unchecked(&mut self) {
1291 let fns = self.queue.device.instance().fns();
1292 (fns.ext_debug_utils.queue_end_debug_utils_label_ext)(self.queue.handle);
1293 }
1294
1295 #[inline]
1300 pub fn insert_debug_utils_label(
1301 &mut self,
1302 label_info: DebugUtilsLabel,
1303 ) -> Result<(), Box<ValidationError>> {
1304 self.validate_insert_debug_utils_label(&label_info)?;
1305
1306 unsafe {
1307 self.insert_debug_utils_label_unchecked(label_info);
1308 Ok(())
1309 }
1310 }
1311
1312 fn validate_insert_debug_utils_label(
1313 &self,
1314 _label_info: &DebugUtilsLabel,
1315 ) -> Result<(), Box<ValidationError>> {
1316 if !self
1317 .queue
1318 .device
1319 .instance()
1320 .enabled_extensions()
1321 .ext_debug_utils
1322 {
1323 return Err(Box::new(ValidationError {
1324 requires_one_of: RequiresOneOf(&[RequiresAllOf(&[Requires::InstanceExtension(
1325 "ext_debug_utils",
1326 )])]),
1327 ..Default::default()
1328 }));
1329 }
1330
1331 Ok(())
1332 }
1333
1334 #[cfg_attr(not(feature = "document_unchecked"), doc(hidden))]
1335 #[inline]
1336 pub unsafe fn insert_debug_utils_label_unchecked(&mut self, label_info: DebugUtilsLabel) {
1337 let DebugUtilsLabel {
1338 label_name,
1339 color,
1340 _ne: _,
1341 } = label_info;
1342
1343 let label_name_vk = CString::new(label_name.as_str()).unwrap();
1344 let label_info = ash::vk::DebugUtilsLabelEXT {
1345 p_label_name: label_name_vk.as_ptr(),
1346 color,
1347 ..Default::default()
1348 };
1349
1350 let fns = self.queue.device.instance().fns();
1351 (fns.ext_debug_utils.queue_insert_debug_utils_label_ext)(self.queue.handle, &label_info);
1352 }
1353}
1354
/// Mutable bookkeeping for a queue, kept behind the `state` mutex in [`Queue`].
#[derive(Debug, Default)]
struct QueueState {
    // Operations submitted to the queue that have not yet been observed to
    // finish, in submission order, each paired with the fence (if any) that
    // the operation signals on completion.
    operations: VecDeque<(QueueOperation, Option<Arc<Fence>>)>,
}
1359
1360impl QueueState {
1361 fn wait_idle(&mut self, device: &Device, handle: ash::vk::Queue) -> Result<(), VulkanError> {
1362 unsafe {
1363 let fns = device.fns();
1364 (fns.v1_0.queue_wait_idle)(handle)
1365 .result()
1366 .map_err(VulkanError::from)?;
1367
1368 for (operation, _) in take(&mut self.operations) {
1371 operation.set_finished();
1372 }
1373
1374 Ok(())
1375 }
1376 }
1377
1378 fn fence_signaled(&mut self, fence: &Fence) {
1380 let fence_index = self
1382 .operations
1383 .iter()
1384 .enumerate()
1385 .rev()
1386 .find_map(|(index, (_, f))| {
1387 f.as_ref().map_or(false, |f| **f == *fence).then_some(index)
1388 });
1389
1390 if let Some(index) = fence_index {
1391 for (operation, fence) in self.operations.drain(..index + 1) {
1393 unsafe {
1394 operation.set_finished();
1395
1396 if let Some(fence) = fence {
1397 fence.state().set_signal_finished();
1398 }
1399 }
1400 }
1401 }
1402 }
1403}
1404
/// A unit of work that was submitted to a queue, recorded so that the
/// resources it uses can be released once the work is known to have finished.
#[derive(Debug)]
enum QueueOperation {
    // A sparse-binding operation (`vkQueueBindSparse`).
    BindSparse(SmallVec<[BindSparseInfo; 4]>),
    // A swapchain present operation.
    Present(PresentInfo),
    // A command-buffer submission.
    Submit(SmallVec<[SubmitInfo; 4]>),
}
1411
1412impl QueueOperation {
1413 unsafe fn set_finished(self) {
1414 match self {
1415 QueueOperation::BindSparse(bind_infos) => {
1416 for bind_info in bind_infos {
1417 for semaphore in bind_info.wait_semaphores {
1418 semaphore.state().set_wait_finished();
1419 }
1420
1421 for semaphore in bind_info.signal_semaphores {
1422 semaphore.state().set_signal_finished();
1423 }
1424 }
1425
1426 }
1428 QueueOperation::Present(present_info) => {
1429 for semaphore in present_info.wait_semaphores {
1430 semaphore.state().set_wait_finished();
1431 }
1432 }
1433 QueueOperation::Submit(submit_infos) => {
1434 for submit_info in submit_infos {
1435 for semaphore_submit_info in submit_info.wait_semaphores {
1436 semaphore_submit_info.semaphore.state().set_wait_finished();
1437 }
1438
1439 for semaphore_submit_info in submit_info.signal_semaphores {
1440 semaphore_submit_info
1441 .semaphore
1442 .state()
1443 .set_signal_finished();
1444 }
1445
1446 for command_buffer in submit_info.command_buffers {
1447 let resource_usage = command_buffer.resources_usage();
1448
1449 for usage in &resource_usage.buffers {
1450 let mut state = usage.buffer.state();
1451
1452 for (range, range_usage) in usage.ranges.iter() {
1453 if range_usage.mutable {
1454 state.gpu_write_unlock(range.clone());
1455 } else {
1456 state.gpu_read_unlock(range.clone());
1457 }
1458 }
1459 }
1460
1461 for usage in &resource_usage.images {
1462 let mut state = usage.image.state();
1463
1464 for (range, range_usage) in usage.ranges.iter() {
1465 if range_usage.mutable {
1466 state.gpu_write_unlock(range.clone());
1467 } else {
1468 state.gpu_read_unlock(range.clone());
1469 }
1470 }
1471 }
1472
1473 command_buffer.state().set_submit_finished();
1474 }
1475 }
1476 }
1477 }
1478 }
1479}
1480
1481impl From<SmallVec<[BindSparseInfo; 4]>> for QueueOperation {
1482 #[inline]
1483 fn from(val: SmallVec<[BindSparseInfo; 4]>) -> Self {
1484 Self::BindSparse(val)
1485 }
1486}
1487
1488impl From<PresentInfo> for QueueOperation {
1489 #[inline]
1490 fn from(val: PresentInfo) -> Self {
1491 Self::Present(val)
1492 }
1493}
1494
1495impl From<SmallVec<[SubmitInfo; 4]>> for QueueOperation {
1496 #[inline]
1497 fn from(val: SmallVec<[SubmitInfo; 4]>) -> Self {
1498 Self::Submit(val)
1499 }
1500}
1501
/// Holds the locked state of every object involved in a queue operation,
/// keyed by Vulkan handle so that each object's state mutex is acquired at
/// most once while the operation is processed.
#[derive(Debug)]
struct States<'a> {
    buffers: HashMap<ash::vk::Buffer, MutexGuard<'a, BufferState>>,
    command_buffers: HashMap<ash::vk::CommandBuffer, MutexGuard<'a, CommandBufferState>>,
    images: HashMap<ash::vk::Image, MutexGuard<'a, ImageState>>,
    semaphores: HashMap<ash::vk::Semaphore, MutexGuard<'a, SemaphoreState>>,
}
1511
impl<'a> States<'a> {
    /// Locks the state of every semaphore, buffer and image referenced by
    /// `bind_infos`.
    ///
    /// Throughout this impl, `entry(..).or_insert_with(..)` ensures that each
    /// object's state mutex is locked only if its handle is not already in
    /// the map; locking the same mutex twice would deadlock.
    fn from_bind_infos(bind_infos: &'a [BindSparseInfo]) -> Self {
        let mut buffers = HashMap::default();
        let mut images = HashMap::default();
        let mut semaphores = HashMap::default();

        for bind_info in bind_infos {
            let BindSparseInfo {
                wait_semaphores,
                buffer_binds,
                image_opaque_binds,
                image_binds,
                signal_semaphores,
                _ne: _,
            } = bind_info;

            for semaphore in wait_semaphores {
                semaphores
                    .entry(semaphore.handle())
                    .or_insert_with(|| semaphore.state());
            }

            for (buffer, _) in buffer_binds {
                let buffer = buffer.buffer();
                buffers
                    .entry(buffer.handle())
                    .or_insert_with(|| buffer.state());
            }

            for (image, _) in image_opaque_binds {
                images
                    .entry(image.handle())
                    .or_insert_with(|| image.state());
            }

            for (image, _) in image_binds {
                images
                    .entry(image.handle())
                    .or_insert_with(|| image.state());
            }

            for semaphore in signal_semaphores {
                semaphores
                    .entry(semaphore.handle())
                    .or_insert_with(|| semaphore.state());
            }
        }

        // Sparse binding involves no command buffers.
        Self {
            buffers,
            command_buffers: HashMap::default(),
            images,
            semaphores,
        }
    }

    /// Locks the state of every wait semaphore referenced by `present_info`.
    /// A present operation touches no buffers, images or command buffers
    /// tracked here.
    fn from_present_info(present_info: &'a PresentInfo) -> Self {
        let mut semaphores = HashMap::default();

        let PresentInfo {
            wait_semaphores,
            swapchain_infos: _,
            _ne: _,
        } = present_info;

        for semaphore in wait_semaphores {
            semaphores
                .entry(semaphore.handle())
                .or_insert_with(|| semaphore.state());
        }

        Self {
            buffers: HashMap::default(),
            command_buffers: HashMap::default(),
            images: HashMap::default(),
            semaphores,
        }
    }

    /// Locks the state of every semaphore, command buffer, and every buffer
    /// and image used by those command buffers, referenced by `submit_infos`.
    fn from_submit_infos(submit_infos: &'a [SubmitInfo]) -> Self {
        let mut buffers = HashMap::default();
        let mut command_buffers = HashMap::default();
        let mut images = HashMap::default();
        let mut semaphores = HashMap::default();

        for submit_info in submit_infos {
            let SubmitInfo {
                wait_semaphores,
                command_buffers: info_command_buffers,
                signal_semaphores,
                _ne: _,
            } = submit_info;

            for semaphore_submit_info in wait_semaphores {
                let semaphore = &semaphore_submit_info.semaphore;
                semaphores
                    .entry(semaphore.handle())
                    .or_insert_with(|| semaphore.state());
            }

            for command_buffer in info_command_buffers {
                command_buffers
                    .entry(command_buffer.handle())
                    .or_insert_with(|| command_buffer.state());

                // Also lock the buffers and images the command buffer uses.
                let CommandBufferResourcesUsage {
                    buffers: buffers_usage,
                    images: images_usage,
                    buffer_indices: _,
                    image_indices: _,
                } = command_buffer.resources_usage();

                for usage in buffers_usage {
                    let buffer = &usage.buffer;
                    buffers
                        .entry(buffer.handle())
                        .or_insert_with(|| buffer.state());
                }

                for usage in images_usage {
                    let image = &usage.image;
                    images
                        .entry(image.handle())
                        .or_insert_with(|| image.state());
                }
            }

            for semaphore_submit_info in signal_semaphores {
                let semaphore = &semaphore_submit_info.semaphore;
                semaphores
                    .entry(semaphore.handle())
                    .or_insert_with(|| semaphore.state());
            }
        }

        Self {
            buffers,
            command_buffers,
            images,
            semaphores,
        }
    }
}
1655
/// Properties of a queue family in a physical device.
#[derive(Clone, Debug)]
#[non_exhaustive]
pub struct QueueFamilyProperties {
    /// Attributes of the queues in this family.
    pub queue_flags: QueueFlags,

    /// The number of queues available in this family.
    pub queue_count: u32,

    /// If timestamps are supported, the number of valid bits in timestamp
    /// query results. `None` if timestamps are not supported (the `From`
    /// conversion below maps Vulkan's sentinel value `0` to `None`).
    pub timestamp_valid_bits: Option<u32>,

    /// The minimum granularity, as `[width, height, depth]`, supported for
    /// image transfer operations on the queues of this family.
    pub min_image_transfer_granularity: [u32; 3],
}
1677
1678impl From<ash::vk::QueueFamilyProperties> for QueueFamilyProperties {
1679 #[inline]
1680 fn from(val: ash::vk::QueueFamilyProperties) -> Self {
1681 Self {
1682 queue_flags: val.queue_flags.into(),
1683 queue_count: val.queue_count,
1684 timestamp_valid_bits: (val.timestamp_valid_bits != 0)
1685 .then_some(val.timestamp_valid_bits),
1686 min_image_transfer_granularity: [
1687 val.min_image_transfer_granularity.width,
1688 val.min_image_transfer_granularity.height,
1689 val.min_image_transfer_granularity.depth,
1690 ],
1691 }
1692 }
1693}
1694
vulkan_bitflags! {
    #[non_exhaustive]

    /// Attributes of a queue or queue family.
    QueueFlags = QueueFlags(u32);

    /// Queues of this family can execute graphics operations.
    GRAPHICS = GRAPHICS,

    /// Queues of this family can execute compute operations.
    COMPUTE = COMPUTE,

    /// Queues of this family can execute transfer operations.
    TRANSFER = TRANSFER,

    /// Queues of this family can execute sparse memory management operations.
    SPARSE_BINDING = SPARSE_BINDING,

    /// Queues of this family can be created using the `protected` flag.
    PROTECTED = PROTECTED
    RequiresOneOf([
        RequiresAllOf([APIVersion(V1_1)]),
    ]),

    /// Queues of this family can execute video decode operations.
    VIDEO_DECODE = VIDEO_DECODE_KHR
    RequiresOneOf([
        RequiresAllOf([DeviceExtension(khr_video_decode_queue)]),
    ]),

    /// Queues of this family can execute video encode operations.
    VIDEO_ENCODE = VIDEO_ENCODE_KHR
    RequiresOneOf([
        RequiresAllOf([DeviceExtension(khr_video_encode_queue)]),
    ]),

    /// Queues of this family can execute optical flow operations.
    OPTICAL_FLOW = OPTICAL_FLOW_NV
    RequiresOneOf([
        RequiresAllOf([DeviceExtension(nv_optical_flow)]),
    ]),
}
1737
#[cfg(test)]
mod tests {
    use crate::sync::fence::Fence;
    use std::{sync::Arc, time::Duration};

    /// An empty submission must be accepted by the queue.
    #[test]
    fn empty_submit() {
        let (_device, queue) = gfx_dev_and_queue!();

        let result =
            queue.with(|mut guard| unsafe { guard.submit_unchecked([Default::default()], None) });
        result.unwrap();
    }

    /// Submitting with a fence must signal that fence once the (empty) work
    /// has executed.
    #[test]
    fn signal_fence() {
        unsafe {
            let (device, queue) = gfx_dev_and_queue!();

            let fence = Arc::new(Fence::new(device, Default::default()).unwrap());
            assert!(!fence.is_signaled().unwrap());

            queue
                .with(|mut guard| guard.submit_unchecked([Default::default()], Some(fence.clone())))
                .unwrap();

            fence.wait(Some(Duration::from_secs(5))).unwrap();
            assert!(fence.is_signaled().unwrap());
        }
    }
}