1use crate::{Id, InvalidSlotError, Object, Ref};
4use ash::vk;
5use concurrent_slotmap::{epoch, SlotMap};
6use parking_lot::{Mutex, RwLock};
7use smallvec::SmallVec;
8use std::{
9 any::Any,
10 hash::Hash,
11 num::{NonZeroU32, NonZeroU64},
12 ops::{BitOr, BitOrAssign},
13 sync::{
14 atomic::{AtomicU32, AtomicU64, Ordering},
15 Arc,
16 },
17 time::Duration,
18};
19use thread_local::ThreadLocal;
20use vulkano::{
21 buffer::{AllocateBufferError, Buffer, BufferCreateInfo},
22 command_buffer::allocator::StandardCommandBufferAllocator,
23 device::{Device, DeviceOwned},
24 image::{AllocateImageError, Image, ImageCreateInfo, ImageLayout, ImageMemory},
25 memory::allocator::{AllocationCreateInfo, DeviceLayout, StandardMemoryAllocator},
26 swapchain::{Surface, Swapchain, SwapchainCreateInfo},
27 sync::{
28 fence::{Fence, FenceCreateFlags, FenceCreateInfo},
29 semaphore::Semaphore,
30 AccessFlags, PipelineStages,
31 },
32 Validated, VulkanError,
33};
34
// Addresses of `Device`s that currently have a `Resources` collection associated with them.
// Used by `Resources::new`/`Drop` to enforce at most one collection per device.
static REGISTERED_DEVICES: Mutex<Vec<usize>> = Mutex::new(Vec::new());
36
#[derive(Debug)]
/// A collection of resources (buffers, images, swapchains and flights) tied to one device.
///
/// Resources are stored in concurrent slot maps and addressed by [`Id`]s; readers are
/// protected by the epoch-based reclamation scheme of `concurrent_slotmap`.
pub struct Resources {
    device: Arc<Device>,
    memory_allocator: Arc<StandardMemoryAllocator>,
    command_buffer_allocator: Arc<StandardCommandBufferAllocator>,

    // Epoch-reclamation state: one global handle plus a lazily-registered per-thread local
    // handle (see `Resources::pin`).
    global: epoch::GlobalHandle,
    locals: ThreadLocal<epoch::UniqueLocalHandle>,
    buffers: SlotMap<BufferState>,
    images: SlotMap<ImageState>,
    swapchains: SlotMap<SwapchainState>,
    flights: SlotMap<Flight>,
}
56
#[derive(Debug)]
/// The state of a registered buffer: the buffer itself plus the last access that was made
/// to it (used for synchronization).
pub struct BufferState {
    buffer: Arc<Buffer>,
    last_access: Mutex<BufferAccess>,
}
62
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
/// Describes one access to a buffer: the pipeline stages and access kinds involved, and the
/// queue family that performed it.
pub struct BufferAccess {
    stage_mask: PipelineStages,
    access_mask: AccessFlags,
    queue_family_index: u32,
}
69
#[derive(Debug)]
/// The state of a registered image: the image itself plus the last access that was made to
/// it (used for synchronization).
pub struct ImageState {
    image: Arc<Image>,
    last_access: Mutex<ImageAccess>,
}
75
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
/// Describes one access to an image: the pipeline stages, access kinds, image layout, and
/// the queue family that performed it.
pub struct ImageAccess {
    stage_mask: PipelineStages,
    access_mask: AccessFlags,
    image_layout: ImageLayout,
    queue_family_index: u32,
}
83
#[derive(Debug)]
/// The state of a registered swapchain: the swapchain, its images, per-frame semaphores,
/// the flight it belongs to, and the index of the currently acquired image.
pub struct SwapchainState {
    swapchain: Arc<Swapchain>,
    images: SmallVec<[Arc<Image>; 3]>,
    pub(crate) semaphores: SmallVec<[SwapchainSemaphoreState; 3]>,
    flight_id: Id<Flight>,
    // `u32::MAX` is used as a sentinel meaning "no image currently acquired"
    // (see `SwapchainState::current_image_index`).
    pub(crate) current_image_index: AtomicU32,
    last_access: Mutex<ImageAccess>,
}
94
#[derive(Clone, Debug)]
/// Per-frame semaphores used to order swapchain acquisition, task execution and presentation.
pub(crate) struct SwapchainSemaphoreState {
    pub(crate) image_available_semaphore: Arc<Semaphore>,
    pub(crate) pre_present_complete_semaphore: Arc<Semaphore>,
    pub(crate) tasks_complete_semaphore: Arc<Semaphore>,
}
101
#[derive(Debug)]
/// A flight of frames: a ring of `frame_count` frames that can be rendered to concurrently,
/// each guarded by its own fence.
pub struct Flight {
    frame_count: NonZeroU32,
    // Monotonically increasing frame counter; the frame index is this modulo `frame_count`.
    current_frame: AtomicU64,
    fences: SmallVec<[RwLock<Fence>; 3]>,
    pub(crate) state: Mutex<FlightState>,
}
110
#[derive(Debug)]
/// Mutable per-flight state: one death row per frame, holding objects to be dropped once
/// that frame's execution has finished.
pub(crate) struct FlightState {
    pub(crate) death_rows: SmallVec<[DeathRow; 3]>,
}
115
// Objects whose destruction is deferred until the owning frame is no longer in use.
pub(crate) type DeathRow = Vec<Arc<dyn Any + Send + Sync>>;
117
impl Resources {
    /// Creates a new `Resources` collection for the given device, with capacities taken from
    /// `create_info`.
    ///
    /// # Panics
    ///
    /// - Panics if `device` already has a `Resources` collection associated with it.
    #[must_use]
    pub fn new(device: &Arc<Device>, create_info: &ResourcesCreateInfo<'_>) -> Arc<Self> {
        let mut registered_devices = REGISTERED_DEVICES.lock();
        // The `Device`'s pointer address is used as its identity.
        let device_addr = Arc::as_ptr(device) as usize;

        assert!(
            !registered_devices.contains(&device_addr),
            "the device already has a `Resources` collection associated with it",
        );

        registered_devices.push(device_addr);

        let memory_allocator = Arc::new(StandardMemoryAllocator::new_default(device.clone()));
        let command_buffer_allocator = Arc::new(StandardCommandBufferAllocator::new(
            device.clone(),
            Default::default(),
        ));

        // All slot maps share one epoch-reclamation global.
        let global = epoch::GlobalHandle::new();

        Arc::new(Resources {
            device: device.clone(),
            memory_allocator,
            command_buffer_allocator,
            locals: ThreadLocal::new(),
            buffers: SlotMap::with_global(create_info.max_buffers, global.clone()),
            images: SlotMap::with_global(create_info.max_images, global.clone()),
            swapchains: SlotMap::with_global(create_info.max_swapchains, global.clone()),
            flights: SlotMap::with_global(create_info.max_flights, global.clone()),
            global,
        })
    }

    /// Returns the memory allocator used for `create_buffer`/`create_image`.
    #[inline]
    #[must_use]
    pub fn memory_allocator(&self) -> &Arc<StandardMemoryAllocator> {
        &self.memory_allocator
    }

    /// Creates a new buffer using this collection's memory allocator and registers it,
    /// returning its [`Id`].
    pub fn create_buffer(
        &self,
        create_info: BufferCreateInfo,
        allocation_info: AllocationCreateInfo,
        layout: DeviceLayout,
    ) -> Result<Id<Buffer>, Validated<AllocateBufferError>> {
        let buffer = Buffer::new(
            self.memory_allocator.clone(),
            create_info,
            allocation_info,
            layout,
        )?;

        // SAFETY: the buffer was just created, so it belongs to this device and has no
        // other references.
        Ok(unsafe { self.add_buffer_unchecked(buffer) })
    }

    /// Creates a new image using this collection's memory allocator and registers it,
    /// returning its [`Id`].
    pub fn create_image(
        &self,
        create_info: ImageCreateInfo,
        allocation_info: AllocationCreateInfo,
    ) -> Result<Id<Image>, Validated<AllocateImageError>> {
        let image = Image::new(self.memory_allocator.clone(), create_info, allocation_info)?;

        // SAFETY: the image was just created, so it belongs to this device and has no
        // other references.
        Ok(unsafe { self.add_image_unchecked(image) })
    }

    /// Creates a new swapchain for `surface` tied to the given flight and registers it.
    ///
    /// # Panics
    ///
    /// - Panics if `flight_id` is invalid.
    /// - Panics if `create_info.min_image_count` is less than the flight's frame count.
    pub fn create_swapchain(
        &self,
        flight_id: Id<Flight>,
        surface: Arc<Surface>,
        create_info: SwapchainCreateInfo,
    ) -> Result<Id<Swapchain>, Validated<VulkanError>> {
        let frames_in_flight = self.flight(flight_id).unwrap().frame_count();

        assert!(create_info.min_image_count >= frames_in_flight);

        let (swapchain, images) = Swapchain::new(self.device().clone(), surface, create_info)?;

        // SAFETY: the swapchain and its images were just created by us.
        Ok(unsafe { self.add_swapchain_unchecked(flight_id, swapchain, images) }?)
    }

    /// Creates a new flight with `frame_count` frames and registers it.
    ///
    /// # Panics
    ///
    /// - Panics if `frame_count` is zero.
    pub fn create_flight(&self, frame_count: u32) -> Result<Id<Flight>, VulkanError> {
        let frame_count =
            NonZeroU32::new(frame_count).expect("a flight with zero frames is not valid");

        // Fences start out signaled so that the first wait on each frame succeeds
        // immediately.
        let fences = (0..frame_count.get())
            .map(|_| {
                unsafe {
                    Fence::new_unchecked(
                        self.device().clone(),
                        FenceCreateInfo {
                            flags: FenceCreateFlags::SIGNALED,
                            ..Default::default()
                        },
                    )
                }
                .map(RwLock::new)
            })
            .collect::<Result<_, VulkanError>>()?;

        let flight = Flight {
            frame_count,
            current_frame: AtomicU64::new(0),
            fences,
            state: Mutex::new(FlightState {
                // One death row per frame in the flight.
                death_rows: (0..frame_count.get()).map(|_| Vec::new()).collect(),
            }),
        };

        let slot = self
            .flights
            .insert_with_tag(flight, Flight::TAG, self.pin());

        Ok(unsafe { Id::new(slot) })
    }

    /// Registers an existing buffer, returning its [`Id`].
    ///
    /// # Panics
    ///
    /// - Panics if `buffer` has any other strong references.
    /// - Panics if `buffer` doesn't belong to this collection's device.
    #[must_use]
    pub fn add_buffer(&self, mut buffer: Arc<Buffer>) -> Id<Buffer> {
        // `Arc::get_mut` succeeding proves we hold the only strong reference.
        assert!(Arc::get_mut(&mut buffer).is_some());
        assert_eq!(buffer.device(), self.device());

        unsafe { self.add_buffer_unchecked(buffer) }
    }

    unsafe fn add_buffer_unchecked(&self, buffer: Arc<Buffer>) -> Id<Buffer> {
        let state = BufferState {
            buffer,
            last_access: Mutex::new(BufferAccess::NONE),
        };

        let slot = self.buffers.insert_with_tag(state, Buffer::TAG, self.pin());

        unsafe { Id::new(slot) }
    }

    /// Registers an existing (non-swapchain) image, returning its [`Id`].
    ///
    /// # Panics
    ///
    /// - Panics if `image` has any other strong references.
    /// - Panics if `image` doesn't belong to this collection's device.
    /// - Panics if `image` is a swapchain image; use `add_swapchain` for those.
    #[must_use]
    pub fn add_image(&self, mut image: Arc<Image>) -> Id<Image> {
        // `Arc::get_mut` succeeding proves we hold the only strong reference.
        assert!(Arc::get_mut(&mut image).is_some());
        assert_eq!(image.device(), self.device());

        assert!(
            !matches!(image.memory(), ImageMemory::Swapchain { .. }),
            "swapchain images cannot be added like regular images; please use \
            `Resources::add_swapchain` instead",
        );

        unsafe { self.add_image_unchecked(image) }
    }

    unsafe fn add_image_unchecked(&self, image: Arc<Image>) -> Id<Image> {
        let state = ImageState {
            image,
            last_access: Mutex::new(ImageAccess::NONE),
        };

        let slot = self.images.insert_with_tag(state, Image::TAG, self.pin());

        unsafe { Id::new(slot) }
    }

    /// Registers an existing swapchain together with its images, tied to the given flight.
    ///
    /// # Panics
    ///
    /// - Panics if `swapchain` doesn't belong to this collection's device.
    /// - Panics if `images` doesn't match the swapchain's images exactly and in order.
    /// - Panics if `flight_id` is invalid.
    /// - Panics if the swapchain's image count is less than the flight's frame count.
    /// - Panics if there are any references to the swapchain or its images other than the
    ///   ones passed in here.
    pub fn add_swapchain(
        &self,
        flight_id: Id<Flight>,
        swapchain: Arc<Swapchain>,
        mut images: Vec<Arc<Image>>,
    ) -> Result<Id<Swapchain>, VulkanError> {
        assert_eq!(swapchain.device(), self.device());
        assert_eq!(images.len(), swapchain.image_count() as usize);

        let frames_in_flight = self.flight(flight_id).unwrap().frame_count();

        assert!(swapchain.image_count() >= frames_in_flight);

        for (index, image) in images.iter_mut().enumerate() {
            match image.memory() {
                ImageMemory::Swapchain {
                    swapchain: image_swapchain,
                    image_index,
                } => {
                    assert_eq!(image_swapchain, &swapchain);
                    assert_eq!(*image_index as usize, index);
                    assert!(Arc::get_mut(image).is_some());
                }
                _ => panic!("not a swapchain image"),
            }
        }

        let ptr = Arc::into_raw(swapchain);
        let mut swapchain = unsafe { Arc::from_raw(ptr) };

        {
            // Each swapchain image appears to keep its own strong reference to the
            // swapchain (via `ImageMemory::Swapchain`). Temporarily drop one count per
            // image so that `Arc::get_mut` succeeds exactly when `swapchain` plus `images`
            // hold the only references — NOTE(review): relies on that one-count-per-image
            // invariant; confirm against vulkano's `Image` internals.
            for _ in 0..images.len() {
                unsafe { Arc::decrement_strong_count(ptr) };
            }

            let we_own_the_only_references = Arc::get_mut(&mut swapchain).is_some();

            // Restore the counts before asserting, so the `Arc`s stay consistent even if
            // the assertion below panics.
            for _ in 0..images.len() {
                unsafe { Arc::increment_strong_count(ptr) };
            }

            assert!(we_own_the_only_references);
        }

        unsafe { self.add_swapchain_unchecked(flight_id, swapchain, images) }
    }

    unsafe fn add_swapchain_unchecked(
        &self,
        flight_id: Id<Flight>,
        swapchain: Arc<Swapchain>,
        images: Vec<Arc<Image>>,
    ) -> Result<Id<Swapchain>, VulkanError> {
        let guard = &self.pin();

        let frames_in_flight = unsafe { self.flight_unprotected(flight_id) }
            .unwrap()
            .frame_count();

        // One set of semaphores per frame in flight.
        let semaphores = (0..frames_in_flight)
            .map(|_| {
                Ok(SwapchainSemaphoreState {
                    image_available_semaphore: Arc::new(unsafe {
                        Semaphore::new_unchecked(self.device().clone(), Default::default())
                    }?),
                    pre_present_complete_semaphore: Arc::new(unsafe {
                        Semaphore::new_unchecked(self.device().clone(), Default::default())
                    }?),
                    tasks_complete_semaphore: Arc::new(unsafe {
                        Semaphore::new_unchecked(self.device().clone(), Default::default())
                    }?),
                })
            })
            .collect::<Result<_, VulkanError>>()?;

        let state = SwapchainState {
            swapchain,
            images: images.into(),
            semaphores,
            flight_id,
            // `u32::MAX` = no image acquired yet.
            current_image_index: AtomicU32::new(u32::MAX),
            last_access: Mutex::new(ImageAccess::NONE),
        };

        let slot = self
            .swapchains
            .insert_with_tag(state, Swapchain::TAG, guard);

        Ok(unsafe { Id::new(slot) })
    }

    /// Recreates the swapchain identified by `id` using `f` to modify its create info,
    /// returning the id of the new swapchain. The old swapchain is removed and queued on
    /// the flight's death row so it is kept alive until its frame finishes.
    ///
    /// # Panics
    ///
    /// - Panics if `id` is invalid.
    /// - Panics if the flight's state is currently locked.
    /// - Panics if the new swapchain's image count is less than the flight's frame count.
    pub fn recreate_swapchain(
        &self,
        id: Id<Swapchain>,
        f: impl FnOnce(SwapchainCreateInfo) -> SwapchainCreateInfo,
    ) -> Result<Id<Swapchain>, Validated<VulkanError>> {
        let guard = self.pin();

        let state = unsafe { self.swapchain_unprotected(id) }.unwrap();
        let swapchain = state.swapchain();
        let flight_id = state.flight_id;
        let flight = unsafe { self.flight_unprotected_unchecked(flight_id) };
        let mut flight_state = flight.state.try_lock().unwrap();

        let (new_swapchain, new_images) = swapchain.recreate(f(swapchain.create_info()))?;

        let frames_in_flight = flight.frame_count();

        assert!(new_swapchain.image_count() >= frames_in_flight);

        // Defer destruction of the old swapchain until the previous frame is done with it.
        let death_row = &mut flight_state.death_rows[flight.previous_frame_index() as usize];
        death_row.push(swapchain.clone());

        let new_state = SwapchainState {
            swapchain: new_swapchain,
            images: new_images.into(),
            // The semaphores are carried over from the old swapchain.
            semaphores: state.semaphores.clone(),
            flight_id,
            current_image_index: AtomicU32::new(u32::MAX),
            last_access: Mutex::new(ImageAccess::NONE),
        };

        let slot = self
            .swapchains
            .insert_with_tag(new_state, Swapchain::TAG, guard);

        let _ = unsafe { self.remove_swapchain(id) };

        Ok(unsafe { Id::new(slot) })
    }

    /// Removes the buffer identified by `id`, returning its state.
    ///
    /// # Safety
    ///
    /// NOTE(review): the safety contract is not visible here — presumably the caller must
    /// ensure the buffer is no longer in use by the device; confirm against crate docs.
    pub unsafe fn remove_buffer(&self, id: Id<Buffer>) -> Result<Ref<'_, BufferState>> {
        self.buffers
            .remove(id.slot, self.pin())
            .map(Ref)
            .ok_or(InvalidSlotError::new(id))
    }

    /// Removes the image identified by `id`, returning its state.
    ///
    /// # Safety
    ///
    /// NOTE(review): see `remove_buffer` — same presumed contract.
    pub unsafe fn remove_image(&self, id: Id<Image>) -> Result<Ref<'_, ImageState>> {
        self.images
            .remove(id.slot, self.pin())
            .map(Ref)
            .ok_or(InvalidSlotError::new(id))
    }

    /// Removes the swapchain identified by `id`, returning its state.
    ///
    /// # Safety
    ///
    /// NOTE(review): see `remove_buffer` — same presumed contract.
    pub unsafe fn remove_swapchain(&self, id: Id<Swapchain>) -> Result<Ref<'_, SwapchainState>> {
        self.swapchains
            .remove(id.slot, self.pin())
            .map(Ref)
            .ok_or(InvalidSlotError::new(id))
    }

    /// Returns the buffer state for `id`, or an error if the slot is invalid.
    #[inline]
    pub fn buffer(&self, id: Id<Buffer>) -> Result<Ref<'_, BufferState>> {
        self.buffers
            .get(id.slot, self.pin())
            .map(Ref)
            .ok_or(InvalidSlotError::new(id))
    }

    // Like `buffer`, but without pinning an epoch guard; the caller must already hold one.
    #[inline]
    pub(crate) unsafe fn buffer_unprotected(&self, id: Id<Buffer>) -> Result<&BufferState> {
        unsafe { self.buffers.get_unprotected(id.slot) }.ok_or(InvalidSlotError::new(id))
    }

    // Like `buffer_unprotected`, but skips slot validation in release builds; aborts in
    // debug builds if the slot is invalid (a panic would be unsound to recover from here).
    #[inline]
    pub(crate) unsafe fn buffer_unchecked_unprotected(&self, id: Id<Buffer>) -> &BufferState {
        #[cfg(debug_assertions)]
        if unsafe { self.buffers.get_unprotected(id.slot) }.is_none() {
            std::process::abort();
        }

        unsafe { self.buffers.index_unchecked_unprotected(id.index()) }
    }

    /// Returns the image state for `id`, or an error if the slot is invalid.
    #[inline]
    pub fn image(&self, id: Id<Image>) -> Result<Ref<'_, ImageState>> {
        self.images
            .get(id.slot, self.pin())
            .map(Ref)
            .ok_or(InvalidSlotError::new(id))
    }

    // See `buffer_unprotected`.
    #[inline]
    pub(crate) unsafe fn image_unprotected(&self, id: Id<Image>) -> Result<&ImageState> {
        unsafe { self.images.get_unprotected(id.slot) }.ok_or(InvalidSlotError::new(id))
    }

    // See `buffer_unchecked_unprotected`.
    #[inline]
    pub(crate) unsafe fn image_unchecked_unprotected(&self, id: Id<Image>) -> &ImageState {
        #[cfg(debug_assertions)]
        if unsafe { self.images.get_unprotected(id.slot) }.is_none() {
            std::process::abort();
        }

        unsafe { self.images.index_unchecked_unprotected(id.index()) }
    }

    /// Returns the swapchain state for `id`, or an error if the slot is invalid.
    #[inline]
    pub fn swapchain(&self, id: Id<Swapchain>) -> Result<Ref<'_, SwapchainState>> {
        self.swapchains
            .get(id.slot, self.pin())
            .map(Ref)
            .ok_or(InvalidSlotError::new(id))
    }

    // See `buffer_unprotected`.
    #[inline]
    pub(crate) unsafe fn swapchain_unprotected(
        &self,
        id: Id<Swapchain>,
    ) -> Result<&SwapchainState> {
        unsafe { self.swapchains.get_unprotected(id.slot) }.ok_or(InvalidSlotError::new(id))
    }

    // See `buffer_unchecked_unprotected`.
    #[inline]
    pub(crate) unsafe fn swapchain_unchecked_unprotected(
        &self,
        id: Id<Swapchain>,
    ) -> &SwapchainState {
        #[cfg(debug_assertions)]
        if unsafe { self.swapchains.get_unprotected(id.slot) }.is_none() {
            std::process::abort();
        }

        unsafe { self.swapchains.index_unchecked_unprotected(id.index()) }
    }

    /// Returns the flight for `id`, or an error if the slot is invalid.
    #[inline]
    pub fn flight(&self, id: Id<Flight>) -> Result<Ref<'_, Flight>> {
        self.flights
            .get(id.slot, self.pin())
            .map(Ref)
            .ok_or(InvalidSlotError::new(id))
    }

    // See `buffer_unprotected`.
    #[inline]
    pub(crate) unsafe fn flight_unprotected(&self, id: Id<Flight>) -> Result<&Flight> {
        unsafe { self.flights.get_unprotected(id.slot) }.ok_or(InvalidSlotError::new(id))
    }

    // Unchecked flight access; unlike the other `*_unchecked_unprotected` accessors this
    // one has no debug-build validity abort.
    #[inline]
    pub(crate) unsafe fn flight_unprotected_unchecked(&self, id: Id<Flight>) -> &Flight {
        unsafe { self.flights.index_unchecked_unprotected(id.slot.index()) }
    }

    // Pins the current epoch for this thread, registering a thread-local epoch handle on
    // first use. The returned guard protects slot-map reads against reclamation.
    #[inline]
    pub(crate) fn pin(&self) -> epoch::Guard<'_> {
        self.locals.get_or(|| self.global.register_local()).pin()
    }

    pub(crate) fn command_buffer_allocator(&self) -> &Arc<StandardCommandBufferAllocator> {
        &self.command_buffer_allocator
    }

    // Tries to advance the global epoch and, if successful, garbage-collects removed slots
    // in all four maps.
    pub(crate) fn try_advance_global_and_collect(&self, guard: &epoch::Guard<'_>) {
        if guard.try_advance_global() {
            self.buffers.try_collect(guard);
            self.images.try_collect(guard);
            self.swapchains.try_collect(guard);
            self.flights.try_collect(guard);
        }
    }
}
680
681impl Drop for Resources {
682 fn drop(&mut self) {
683 for (flight_id, flight) in &mut self.flights {
684 let prev_frame_index = flight.previous_frame_index();
685 let fence = flight.fences[prev_frame_index as usize].get_mut();
686
687 if let Err(err) = fence.wait(None) {
688 if err == VulkanError::DeviceLost {
689 break;
690 }
691
692 eprintln!(
693 "failed to wait for flight {flight_id:?} to finish rendering graceful shutdown \
694 impossible: {err}; aborting",
695 );
696 std::process::abort();
697 }
698 }
699
700 let _ = unsafe { self.device().wait_idle() };
702
703 let mut registered_devices = REGISTERED_DEVICES.lock();
704
705 let index = registered_devices
708 .iter()
709 .position(|&addr| addr == Arc::as_ptr(self.device()) as usize)
710 .unwrap();
711
712 registered_devices.remove(index);
713 }
714}
715
unsafe impl DeviceOwned for Resources {
    /// Returns the device this collection was created for.
    #[inline]
    fn device(&self) -> &Arc<Device> {
        &self.device
    }
}
722
impl BufferState {
    /// Returns the buffer.
    #[inline]
    #[must_use]
    pub fn buffer(&self) -> &Arc<Buffer> {
        &self.buffer
    }

    /// Returns the last access that was made to the buffer.
    #[inline]
    pub fn access(&self) -> BufferAccess {
        *self.last_access.lock()
    }

    /// Sets the last access that was made to the buffer.
    ///
    /// # Safety
    ///
    /// NOTE(review): contract not visible here — presumably `access` must accurately
    /// describe the last access actually performed; confirm against crate docs.
    #[inline]
    pub unsafe fn set_access(&self, access: BufferAccess) {
        *self.last_access.lock() = access;
    }
}
747
748impl BufferAccess {
749 pub const NONE: Self = BufferAccess {
752 stage_mask: PipelineStages::empty(),
753 access_mask: AccessFlags::empty(),
754 queue_family_index: vk::QUEUE_FAMILY_IGNORED,
755 };
756
757 #[inline]
763 #[must_use]
764 pub const fn new(access_types: AccessTypes, queue_family_index: u32) -> Self {
765 assert!(access_types.are_valid_buffer_access_types());
766
767 BufferAccess {
768 stage_mask: access_types.stage_mask(),
769 access_mask: access_types.access_mask(),
770 queue_family_index,
771 }
772 }
773
774 pub(crate) const fn from_masks(
775 stage_mask: PipelineStages,
776 access_mask: AccessFlags,
777 queue_family_index: u32,
778 ) -> Self {
779 BufferAccess {
780 stage_mask,
781 access_mask,
782 queue_family_index,
783 }
784 }
785
786 #[inline]
788 #[must_use]
789 pub const fn stage_mask(&self) -> PipelineStages {
790 self.stage_mask
791 }
792
793 #[inline]
795 #[must_use]
796 pub const fn access_mask(&self) -> AccessFlags {
797 self.access_mask
798 }
799
800 #[inline]
802 #[must_use]
803 pub const fn queue_family_index(&self) -> u32 {
804 self.queue_family_index
805 }
806}
807
impl ImageState {
    /// Returns the image.
    #[inline]
    #[must_use]
    pub fn image(&self) -> &Arc<Image> {
        &self.image
    }

    /// Returns the last access that was made to the image.
    #[inline]
    pub fn access(&self) -> ImageAccess {
        *self.last_access.lock()
    }

    /// Sets the last access that was made to the image.
    ///
    /// # Safety
    ///
    /// NOTE(review): contract not visible here — presumably `access` must accurately
    /// describe the last access actually performed; confirm against crate docs.
    #[inline]
    pub unsafe fn set_access(&self, access: ImageAccess) {
        *self.last_access.lock() = access;
    }
}
832
833impl ImageAccess {
834 pub const NONE: Self = ImageAccess {
837 stage_mask: PipelineStages::empty(),
838 access_mask: AccessFlags::empty(),
839 image_layout: ImageLayout::Undefined,
840 queue_family_index: vk::QUEUE_FAMILY_IGNORED,
841 };
842
843 #[inline]
849 #[must_use]
850 pub const fn new(
851 access_types: AccessTypes,
852 layout_type: ImageLayoutType,
853 queue_family_index: u32,
854 ) -> Self {
855 assert!(access_types.are_valid_image_access_types());
856
857 ImageAccess {
858 stage_mask: access_types.stage_mask(),
859 access_mask: access_types.access_mask(),
860 image_layout: access_types.image_layout(layout_type),
861 queue_family_index,
862 }
863 }
864
865 pub(crate) const fn from_masks(
866 stage_mask: PipelineStages,
867 access_mask: AccessFlags,
868 image_layout: ImageLayout,
869 queue_family_index: u32,
870 ) -> Self {
871 ImageAccess {
872 stage_mask,
873 access_mask,
874 image_layout,
875 queue_family_index,
876 }
877 }
878
879 #[inline]
881 #[must_use]
882 pub const fn stage_mask(&self) -> PipelineStages {
883 self.stage_mask
884 }
885
886 #[inline]
888 #[must_use]
889 pub const fn access_mask(&self) -> AccessFlags {
890 self.access_mask
891 }
892
893 #[inline]
895 #[must_use]
896 pub const fn image_layout(&self) -> ImageLayout {
897 self.image_layout
898 }
899
900 #[inline]
902 #[must_use]
903 pub const fn queue_family_index(&self) -> u32 {
904 self.queue_family_index
905 }
906}
907
impl SwapchainState {
    /// Returns the swapchain.
    #[inline]
    #[must_use]
    pub fn swapchain(&self) -> &Arc<Swapchain> {
        &self.swapchain
    }

    /// Returns the swapchain's images.
    #[inline]
    #[must_use]
    pub fn images(&self) -> &[Arc<Image>] {
        &self.images
    }

    /// Returns the id of the flight this swapchain belongs to.
    #[inline]
    #[must_use]
    pub fn flight_id(&self) -> Id<Flight> {
        self.flight_id
    }

    /// Returns the index of the currently acquired swapchain image, or `None` if no image
    /// is currently acquired (`u32::MAX` is the internal sentinel for that).
    #[inline]
    #[must_use]
    pub fn current_image_index(&self) -> Option<u32> {
        let index = self.current_image_index.load(Ordering::Relaxed);

        if index == u32::MAX {
            None
        } else {
            Some(index)
        }
    }

    // Returns the currently acquired image. Indexing with the `u32::MAX` sentinel would
    // panic, so callers must only use this while an image is acquired.
    pub(crate) fn current_image(&self) -> &Arc<Image> {
        &self.images[self.current_image_index.load(Ordering::Relaxed) as usize]
    }

    // Returns the last access that was made to the swapchain's currently acquired image.
    pub(crate) fn access(&self) -> ImageAccess {
        *self.last_access.lock()
    }

    // Sets the last access; same presumed contract as `ImageState::set_access`.
    pub(crate) unsafe fn set_access(&self, access: ImageAccess) {
        *self.last_access.lock() = access;
    }
}
956
impl Flight {
    /// Returns the number of frames in this flight.
    #[inline]
    #[must_use]
    pub fn frame_count(&self) -> u32 {
        self.frame_count.get()
    }

    /// Returns the current frame counter value (monotonically increasing).
    #[inline]
    #[must_use]
    pub fn current_frame(&self) -> u64 {
        self.current_frame.load(Ordering::Relaxed)
    }

    /// Returns the index of the current frame within the flight's ring of frames.
    #[inline]
    #[must_use]
    pub fn current_frame_index(&self) -> u32 {
        (self.current_frame() % NonZeroU64::from(self.frame_count)) as u32
    }

    // Index of the frame before the current one; `wrapping_sub` handles frame 0.
    fn previous_frame_index(&self) -> u32 {
        (self.current_frame().wrapping_sub(1) % NonZeroU64::from(self.frame_count)) as u32
    }

    pub(crate) fn current_fence(&self) -> &RwLock<Fence> {
        &self.fences[self.current_frame_index() as usize]
    }

    /// Waits for the current frame's fence to be signaled.
    #[inline]
    pub fn wait(&self, timeout: Option<Duration>) -> Result<(), VulkanError> {
        self.fences[self.current_frame_index() as usize]
            .read()
            .wait(timeout)
    }

    /// Waits for the fence of the given past frame to be signaled.
    ///
    /// # Panics
    ///
    /// - Panics if `frame` is greater than the current frame.
    #[inline]
    pub fn wait_for_frame(&self, frame: u64, timeout: Option<Duration>) -> Result<(), VulkanError> {
        let current_frame = self.current_frame();

        assert!(frame <= current_frame);

        // Frames more than `frame_count` behind share a (since reused) fence slot with a
        // newer frame — presumably they are known to have finished, so there is nothing to
        // wait for.
        if current_frame - frame > u64::from(self.frame_count()) {
            return Ok(());
        }

        self.fences[(frame % NonZeroU64::from(self.frame_count)) as usize]
            .read()
            .wait(timeout)
    }

    /// Queues `object` for destruction once the previous frame has finished executing.
    ///
    /// # Panics
    ///
    /// - Panics if the flight's state is currently locked.
    #[inline]
    pub fn destroy_object(&self, object: Arc<impl Any + Send + Sync>) {
        let mut state = self.state.try_lock().unwrap();
        state.death_rows[self.previous_frame_index() as usize].push(object);
    }

    /// Queues all given objects for destruction once the previous frame has finished
    /// executing.
    ///
    /// # Panics
    ///
    /// - Panics if the flight's state is currently locked.
    #[inline]
    pub fn destroy_objects(&self, objects: impl IntoIterator<Item = Arc<impl Any + Send + Sync>>) {
        let mut state = self.state.try_lock().unwrap();
        state.death_rows[self.previous_frame_index() as usize]
            .extend(objects.into_iter().map(|object| object as _));
    }

    // Advances the frame counter; unsafe contract not visible here.
    pub(crate) unsafe fn next_frame(&self) {
        self.current_frame.fetch_add(1, Ordering::Relaxed);
    }
}
1050
#[derive(Debug)]
/// Parameters for creating a [`Resources`] collection: the maximum number of each kind of
/// resource that can be registered at once.
pub struct ResourcesCreateInfo<'a> {
    /// The maximum number of buffers.
    pub max_buffers: u32,

    /// The maximum number of images.
    pub max_images: u32,

    /// The maximum number of swapchains.
    pub max_swapchains: u32,

    /// The maximum number of flights.
    pub max_flights: u32,

    pub _ne: crate::NonExhaustive<'a>,
}
1068
1069impl Default for ResourcesCreateInfo<'_> {
1070 #[inline]
1071 fn default() -> Self {
1072 ResourcesCreateInfo {
1073 max_buffers: 1 << 24,
1074 max_images: 1 << 24,
1075 max_swapchains: 1 << 8,
1076 max_flights: 1 << 8,
1077 _ne: crate::NE,
1078 }
1079 }
1080}
1081
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
/// A bundle of pipeline stages, access flags and the image layout they imply, used to
/// describe one kind of resource access. Named constants are generated by `access_types!`.
pub struct AccessTypes {
    stage_mask: PipelineStages,
    access_mask: AccessFlags,
    image_layout: ImageLayout,
}
1089
// Generates associated constants on `AccessTypes`, one per listed access type. Each
// constant unions the given stage and access flags and records the implied image layout
// (`Undefined` for buffer-only accesses).
macro_rules! access_types {
    (
        $(
            $(#[$meta:meta])*
            $name:ident {
                stage_mask: $($stage_flag:ident)|+,
                access_mask: $($access_flag:ident)|+,
                image_layout: $image_layout:ident,
            }
        )*
    ) => {
        impl AccessTypes {
            $(
                $(#[$meta])*
                pub const $name: Self = AccessTypes {
                    stage_mask: PipelineStages::empty()$(.union(PipelineStages::$stage_flag))+,
                    access_mask: AccessFlags::empty()$(.union(AccessFlags::$access_flag))+,
                    image_layout: ImageLayout::$image_layout,
                };
            )*
        }
    };
}
1113
1114access_types! {
1115 INDIRECT_COMMAND_READ {
1116 stage_mask: DRAW_INDIRECT,
1117 access_mask: INDIRECT_COMMAND_READ,
1118 image_layout: Undefined,
1119 }
1120
1121 INDEX_READ {
1122 stage_mask: INDEX_INPUT,
1123 access_mask: INDEX_READ,
1124 image_layout: Undefined,
1125 }
1126
1127 VERTEX_ATTRIBUTE_READ {
1128 stage_mask: VERTEX_ATTRIBUTE_INPUT,
1129 access_mask: VERTEX_ATTRIBUTE_READ,
1130 image_layout: Undefined,
1131 }
1132
1133 VERTEX_SHADER_UNIFORM_READ {
1134 stage_mask: VERTEX_SHADER,
1135 access_mask: UNIFORM_READ,
1136 image_layout: Undefined,
1137 }
1138
1139 VERTEX_SHADER_SAMPLED_READ {
1140 stage_mask: VERTEX_SHADER,
1141 access_mask: SHADER_SAMPLED_READ,
1142 image_layout: ShaderReadOnlyOptimal,
1143 }
1144
1145 VERTEX_SHADER_STORAGE_READ {
1146 stage_mask: VERTEX_SHADER,
1147 access_mask: SHADER_STORAGE_READ,
1148 image_layout: General,
1149 }
1150
1151 VERTEX_SHADER_STORAGE_WRITE {
1152 stage_mask: VERTEX_SHADER,
1153 access_mask: SHADER_STORAGE_WRITE,
1154 image_layout: General,
1155 }
1156
1157 VERTEX_SHADER_ACCELERATION_STRUCTURE_READ {
1158 stage_mask: VERTEX_SHADER,
1159 access_mask: ACCELERATION_STRUCTURE_READ,
1160 image_layout: Undefined,
1161 }
1162
1163 TESSELLATION_CONTROL_SHADER_UNIFORM_READ {
1164 stage_mask: TESSELLATION_CONTROL_SHADER,
1165 access_mask: UNIFORM_READ,
1166 image_layout: Undefined,
1167 }
1168
1169 TESSELLATION_CONTROL_SHADER_SAMPLED_READ {
1170 stage_mask: TESSELLATION_CONTROL_SHADER,
1171 access_mask: SHADER_SAMPLED_READ,
1172 image_layout: ShaderReadOnlyOptimal,
1173 }
1174
1175 TESSELLATION_CONTROL_SHADER_STORAGE_READ {
1176 stage_mask: TESSELLATION_CONTROL_SHADER,
1177 access_mask: SHADER_STORAGE_READ,
1178 image_layout: General,
1179 }
1180
1181 TESSELLATION_CONTROL_SHADER_STORAGE_WRITE {
1182 stage_mask: TESSELLATION_CONTROL_SHADER,
1183 access_mask: SHADER_STORAGE_WRITE,
1184 image_layout: General,
1185 }
1186
1187 TESSELLATION_CONTROL_SHADER_ACCELERATION_STRUCTURE_READ {
1188 stage_mask: TESSELLATION_CONTROL_SHADER,
1189 access_mask: ACCELERATION_STRUCTURE_READ,
1190 image_layout: Undefined,
1191 }
1192
1193 TESSELLATION_EVALUATION_SHADER_UNIFORM_READ {
1194 stage_mask: TESSELLATION_EVALUATION_SHADER,
1195 access_mask: UNIFORM_READ,
1196 image_layout: Undefined,
1197 }
1198
1199 TESSELLATION_EVALUATION_SHADER_SAMPLED_READ {
1200 stage_mask: TESSELLATION_EVALUATION_SHADER,
1201 access_mask: SHADER_SAMPLED_READ,
1202 image_layout: ShaderReadOnlyOptimal,
1203 }
1204
1205 TESSELLATION_EVALUATION_SHADER_STORAGE_READ {
1206 stage_mask: TESSELLATION_EVALUATION_SHADER,
1207 access_mask: SHADER_STORAGE_READ,
1208 image_layout: General,
1209 }
1210
1211 TESSELLATION_EVALUATION_SHADER_STORAGE_WRITE {
1212 stage_mask: TESSELLATION_EVALUATION_SHADER,
1213 access_mask: SHADER_STORAGE_WRITE,
1214 image_layout: General,
1215 }
1216
1217 TESSELLATION_EVALUATION_SHADER_ACCELERATION_STRUCTURE_READ {
1218 stage_mask: TESSELLATION_EVALUATION_SHADER,
1219 access_mask: ACCELERATION_STRUCTURE_READ,
1220 image_layout: Undefined,
1221 }
1222
1223 GEOMETRY_SHADER_UNIFORM_READ {
1224 stage_mask: GEOMETRY_SHADER,
1225 access_mask: UNIFORM_READ,
1226 image_layout: Undefined,
1227 }
1228
1229 GEOMETRY_SHADER_SAMPLED_READ {
1230 stage_mask: GEOMETRY_SHADER,
1231 access_mask: SHADER_SAMPLED_READ,
1232 image_layout: ShaderReadOnlyOptimal,
1233 }
1234
1235 GEOMETRY_SHADER_STORAGE_READ {
1236 stage_mask: GEOMETRY_SHADER,
1237 access_mask: SHADER_STORAGE_READ,
1238 image_layout: General,
1239 }
1240
1241 GEOMETRY_SHADER_STORAGE_WRITE {
1242 stage_mask: GEOMETRY_SHADER,
1243 access_mask: SHADER_STORAGE_WRITE,
1244 image_layout: General,
1245 }
1246
1247 GEOMETRY_SHADER_ACCELERATION_STRUCTURE_READ {
1248 stage_mask: GEOMETRY_SHADER,
1249 access_mask: ACCELERATION_STRUCTURE_READ,
1250 image_layout: Undefined,
1251 }
1252
1253 FRAGMENT_SHADER_UNIFORM_READ {
1254 stage_mask: FRAGMENT_SHADER,
1255 access_mask: UNIFORM_READ,
1256 image_layout: Undefined,
1257 }
1258
1259 FRAGMENT_SHADER_COLOR_INPUT_ATTACHMENT_READ {
1260 stage_mask: FRAGMENT_SHADER,
1261 access_mask: INPUT_ATTACHMENT_READ,
1262 image_layout: ShaderReadOnlyOptimal,
1263 }
1264
1265 FRAGMENT_SHADER_DEPTH_STENCIL_INPUT_ATTACHMENT_READ {
1266 stage_mask: FRAGMENT_SHADER,
1267 access_mask: INPUT_ATTACHMENT_READ,
1268 image_layout: DepthStencilReadOnlyOptimal,
1269 }
1270
1271 FRAGMENT_SHADER_SAMPLED_READ {
1272 stage_mask: FRAGMENT_SHADER,
1273 access_mask: SHADER_SAMPLED_READ,
1274 image_layout: ShaderReadOnlyOptimal,
1275 }
1276
1277 FRAGMENT_SHADER_STORAGE_READ {
1278 stage_mask: FRAGMENT_SHADER,
1279 access_mask: SHADER_STORAGE_READ,
1280 image_layout: General,
1281 }
1282
1283 FRAGMENT_SHADER_STORAGE_WRITE {
1284 stage_mask: FRAGMENT_SHADER,
1285 access_mask: SHADER_STORAGE_WRITE,
1286 image_layout: General,
1287 }
1288
1289 FRAGMENT_SHADER_ACCELERATION_STRUCTURE_READ {
1290 stage_mask: FRAGMENT_SHADER,
1291 access_mask: ACCELERATION_STRUCTURE_READ,
1292 image_layout: Undefined,
1293 }
1294
1295 DEPTH_STENCIL_ATTACHMENT_READ {
1296 stage_mask: EARLY_FRAGMENT_TESTS | LATE_FRAGMENT_TESTS,
1297 access_mask: DEPTH_STENCIL_ATTACHMENT_READ,
1298 image_layout: DepthStencilReadOnlyOptimal,
1299 }
1300
1301 DEPTH_STENCIL_ATTACHMENT_WRITE {
1302 stage_mask: EARLY_FRAGMENT_TESTS | LATE_FRAGMENT_TESTS,
1303 access_mask: DEPTH_STENCIL_ATTACHMENT_WRITE,
1304 image_layout: DepthStencilAttachmentOptimal,
1305 }
1306
1307 DEPTH_ATTACHMENT_WRITE_STENCIL_READ_ONLY {
1308 stage_mask: EARLY_FRAGMENT_TESTS | LATE_FRAGMENT_TESTS,
1309 access_mask: DEPTH_STENCIL_ATTACHMENT_READ | DEPTH_STENCIL_ATTACHMENT_WRITE,
1310 image_layout: DepthAttachmentStencilReadOnlyOptimal,
1311 }
1312
1313 DEPTH_READ_ONLY_STENCIL_ATTACHMENT_WRITE {
1314 stage_mask: EARLY_FRAGMENT_TESTS | LATE_FRAGMENT_TESTS,
1315 access_mask: DEPTH_STENCIL_ATTACHMENT_READ | DEPTH_STENCIL_ATTACHMENT_WRITE,
1316 image_layout: DepthReadOnlyStencilAttachmentOptimal,
1317 }
1318
1319 COLOR_ATTACHMENT_READ {
1320 stage_mask: COLOR_ATTACHMENT_OUTPUT,
1321 access_mask: COLOR_ATTACHMENT_READ,
1322 image_layout: ColorAttachmentOptimal,
1323 }
1324
1325 COLOR_ATTACHMENT_WRITE {
1326 stage_mask: COLOR_ATTACHMENT_OUTPUT,
1327 access_mask: COLOR_ATTACHMENT_WRITE,
1328 image_layout: ColorAttachmentOptimal,
1329 }
1330
1331 COMPUTE_SHADER_UNIFORM_READ {
1332 stage_mask: COMPUTE_SHADER,
1333 access_mask: UNIFORM_READ,
1334 image_layout: Undefined,
1335 }
1336
1337 COMPUTE_SHADER_SAMPLED_READ {
1338 stage_mask: COMPUTE_SHADER,
1339 access_mask: SHADER_SAMPLED_READ,
1340 image_layout: ShaderReadOnlyOptimal,
1341 }
1342
1343 COMPUTE_SHADER_STORAGE_READ {
1344 stage_mask: COMPUTE_SHADER,
1345 access_mask: SHADER_STORAGE_READ,
1346 image_layout: General,
1347 }
1348
1349 COMPUTE_SHADER_STORAGE_WRITE {
1350 stage_mask: COMPUTE_SHADER,
1351 access_mask: SHADER_STORAGE_WRITE,
1352 image_layout: General,
1353 }
1354
1355 COMPUTE_SHADER_ACCELERATION_STRUCTURE_READ {
1356 stage_mask: COMPUTE_SHADER,
1357 access_mask: ACCELERATION_STRUCTURE_READ,
1358 image_layout: Undefined,
1359 }
1360
1361 COPY_TRANSFER_READ {
1362 stage_mask: COPY,
1363 access_mask: TRANSFER_READ,
1364 image_layout: TransferSrcOptimal,
1365 }
1366
1367 COPY_TRANSFER_WRITE {
1368 stage_mask: COPY,
1369 access_mask: TRANSFER_WRITE,
1370 image_layout: TransferDstOptimal,
1371 }
1372
1373 BLIT_TRANSFER_READ {
1374 stage_mask: BLIT,
1375 access_mask: TRANSFER_READ,
1376 image_layout: TransferSrcOptimal,
1377 }
1378
1379 BLIT_TRANSFER_WRITE {
1380 stage_mask: BLIT,
1381 access_mask: TRANSFER_WRITE,
1382 image_layout: TransferDstOptimal,
1383 }
1384
1385 RESOLVE_TRANSFER_READ {
1386 stage_mask: RESOLVE,
1387 access_mask: TRANSFER_READ,
1388 image_layout: TransferSrcOptimal,
1389 }
1390
1391 RESOLVE_TRANSFER_WRITE {
1392 stage_mask: RESOLVE,
1393 access_mask: TRANSFER_WRITE,
1394 image_layout: TransferDstOptimal,
1395 }
1396
1397 CLEAR_TRANSFER_WRITE {
1398 stage_mask: CLEAR,
1399 access_mask: TRANSFER_WRITE,
1400 image_layout: TransferDstOptimal,
1401 }
1402
1403 ACCELERATION_STRUCTURE_COPY_TRANSFER_READ {
1404 stage_mask: ACCELERATION_STRUCTURE_COPY,
1405 access_mask: TRANSFER_READ,
1406 image_layout: Undefined,
1407 }
1408
1409 ACCELERATION_STRUCTURE_COPY_TRANSFER_WRITE {
1410 stage_mask: ACCELERATION_STRUCTURE_COPY,
1411 access_mask: TRANSFER_WRITE,
1412 image_layout: Undefined,
1413 }
1414
1415 RAY_TRACING_SHADER_UNIFORM_READ {
1472 stage_mask: RAY_TRACING_SHADER,
1473 access_mask: UNIFORM_READ,
1474 image_layout: Undefined,
1475 }
1476
1477 RAY_TRACING_SHADER_COLOR_INPUT_ATTACHMENT_READ {
1478 stage_mask: RAY_TRACING_SHADER,
1479 access_mask: INPUT_ATTACHMENT_READ,
1480 image_layout: ShaderReadOnlyOptimal,
1481 }
1482
1483 RAY_TRACING_SHADER_DEPTH_STENCIL_INPUT_ATTACHMENT_READ {
1484 stage_mask: RAY_TRACING_SHADER,
1485 access_mask: INPUT_ATTACHMENT_READ,
1486 image_layout: DepthStencilReadOnlyOptimal,
1487 }
1488
1489 RAY_TRACING_SHADER_SAMPLED_READ {
1490 stage_mask: RAY_TRACING_SHADER,
1491 access_mask: SHADER_SAMPLED_READ,
1492 image_layout: ShaderReadOnlyOptimal,
1493 }
1494
1495 RAY_TRACING_SHADER_STORAGE_READ {
1496 stage_mask: RAY_TRACING_SHADER,
1497 access_mask: SHADER_STORAGE_READ,
1498 image_layout: General,
1499 }
1500
1501 RAY_TRACING_SHADER_STORAGE_WRITE {
1502 stage_mask: RAY_TRACING_SHADER,
1503 access_mask: SHADER_STORAGE_WRITE,
1504 image_layout: General,
1505 }
1506
1507 RAY_TRACING_SHADER_BINDING_TABLE_READ {
1508 stage_mask: RAY_TRACING_SHADER,
1509 access_mask: SHADER_BINDING_TABLE_READ,
1510 image_layout: Undefined,
1511 }
1512
1513 RAY_TRACING_SHADER_ACCELERATION_STRUCTURE_READ {
1514 stage_mask: RAY_TRACING_SHADER,
1515 access_mask: ACCELERATION_STRUCTURE_READ,
1516 image_layout: Undefined,
1517 }
1518
1519 TASK_SHADER_UNIFORM_READ {
1520 stage_mask: TASK_SHADER,
1521 access_mask: UNIFORM_READ,
1522 image_layout: Undefined,
1523 }
1524
1525 TASK_SHADER_SAMPLED_READ {
1526 stage_mask: TASK_SHADER,
1527 access_mask: SHADER_SAMPLED_READ,
1528 image_layout: ShaderReadOnlyOptimal,
1529 }
1530
1531 TASK_SHADER_STORAGE_READ {
1532 stage_mask: TASK_SHADER,
1533 access_mask: SHADER_STORAGE_READ,
1534 image_layout: General,
1535 }
1536
1537 TASK_SHADER_STORAGE_WRITE {
1538 stage_mask: TASK_SHADER,
1539 access_mask: SHADER_STORAGE_WRITE,
1540 image_layout: General,
1541 }
1542
1543 TASK_SHADER_ACCELERATION_STRUCTURE_READ {
1544 stage_mask: TASK_SHADER,
1545 access_mask: ACCELERATION_STRUCTURE_READ,
1546 image_layout: Undefined,
1547 }
1548
1549 MESH_SHADER_UNIFORM_READ {
1550 stage_mask: MESH_SHADER,
1551 access_mask: UNIFORM_READ,
1552 image_layout: Undefined,
1553 }
1554
1555 MESH_SHADER_SAMPLED_READ {
1556 stage_mask: MESH_SHADER,
1557 access_mask: SHADER_SAMPLED_READ,
1558 image_layout: ShaderReadOnlyOptimal,
1559 }
1560
1561 MESH_SHADER_STORAGE_READ {
1562 stage_mask: MESH_SHADER,
1563 access_mask: SHADER_STORAGE_READ,
1564 image_layout: General,
1565 }
1566
1567 MESH_SHADER_STORAGE_WRITE {
1568 stage_mask: MESH_SHADER,
1569 access_mask: SHADER_STORAGE_WRITE,
1570 image_layout: General,
1571 }
1572
1573 MESH_SHADER_ACCELERATION_STRUCTURE_READ {
1574 stage_mask: MESH_SHADER,
1575 access_mask: ACCELERATION_STRUCTURE_READ,
1576 image_layout: Undefined,
1577 }
1578
1579 ACCELERATION_STRUCTURE_BUILD_INDIRECT_COMMAND_READ {
1580 stage_mask: ACCELERATION_STRUCTURE_BUILD,
1581 access_mask: INDIRECT_COMMAND_READ,
1582 image_layout: Undefined,
1583 }
1584
1585 ACCELERATION_STRUCTURE_BUILD_SHADER_READ {
1586 stage_mask: ACCELERATION_STRUCTURE_BUILD,
1587 access_mask: SHADER_READ,
1588 image_layout: Undefined,
1589 }
1590
1591 ACCELERATION_STRUCTURE_BUILD_ACCELERATION_STRUCTURE_READ {
1592 stage_mask: ACCELERATION_STRUCTURE_BUILD,
1593 access_mask: ACCELERATION_STRUCTURE_READ,
1594 image_layout: Undefined,
1595 }
1596
1597 ACCELERATION_STRUCTURE_BUILD_ACCELERATION_STRUCTURE_WRITE {
1598 stage_mask: ACCELERATION_STRUCTURE_BUILD,
1599 access_mask: ACCELERATION_STRUCTURE_WRITE,
1600 image_layout: Undefined,
1601 }
1602
1603 ACCELERATION_STRUCTURE_COPY_ACCELERATION_STRUCTURE_READ {
1604 stage_mask: ACCELERATION_STRUCTURE_COPY,
1605 access_mask: ACCELERATION_STRUCTURE_READ,
1606 image_layout: Undefined,
1607 }
1608
1609 ACCELERATION_STRUCTURE_COPY_ACCELERATION_STRUCTURE_WRITE {
1610 stage_mask: ACCELERATION_STRUCTURE_COPY,
1611 access_mask: ACCELERATION_STRUCTURE_WRITE,
1612 image_layout: Undefined,
1613 }
1614
1615 GENERAL {
1617 stage_mask: ALL_COMMANDS,
1618 access_mask: MEMORY_READ | MEMORY_WRITE,
1619 image_layout: General,
1620 }
1621}
1622
1623impl AccessTypes {
1624 #[inline]
1626 #[must_use]
1627 pub const fn stage_mask(&self) -> PipelineStages {
1628 self.stage_mask
1629 }
1630
1631 #[inline]
1633 #[must_use]
1634 pub const fn access_mask(&self) -> AccessFlags {
1635 self.access_mask
1636 }
1637
1638 #[inline]
1640 #[must_use]
1641 pub const fn image_layout(&self, layout_type: ImageLayoutType) -> ImageLayout {
1642 if layout_type.is_optimal() {
1643 self.image_layout
1644 } else {
1645 ImageLayout::General
1646 }
1647 }
1648
1649 #[inline]
1653 #[must_use]
1654 pub const fn union(self, other: Self) -> Self {
1655 AccessTypes {
1656 stage_mask: self.stage_mask.union(other.stage_mask),
1657 access_mask: self.access_mask.union(other.access_mask),
1658 image_layout: if self.image_layout as i32 == other.image_layout as i32 {
1659 self.image_layout
1660 } else {
1661 ImageLayout::General
1662 },
1663 }
1664 }
1665
1666 pub(crate) const fn are_valid_buffer_access_types(self) -> bool {
1667 const VALID_STAGE_FLAGS: PipelineStages = PipelineStages::DRAW_INDIRECT
1668 .union(PipelineStages::VERTEX_SHADER)
1669 .union(PipelineStages::TESSELLATION_CONTROL_SHADER)
1670 .union(PipelineStages::TESSELLATION_EVALUATION_SHADER)
1671 .union(PipelineStages::GEOMETRY_SHADER)
1672 .union(PipelineStages::FRAGMENT_SHADER)
1673 .union(PipelineStages::COMPUTE_SHADER)
1674 .union(PipelineStages::ALL_COMMANDS)
1675 .union(PipelineStages::COPY)
1676 .union(PipelineStages::INDEX_INPUT)
1677 .union(PipelineStages::VERTEX_ATTRIBUTE_INPUT)
1678 .union(PipelineStages::VIDEO_DECODE)
1679 .union(PipelineStages::VIDEO_ENCODE)
1680 .union(PipelineStages::ACCELERATION_STRUCTURE_BUILD)
1681 .union(PipelineStages::RAY_TRACING_SHADER)
1682 .union(PipelineStages::TASK_SHADER)
1683 .union(PipelineStages::MESH_SHADER)
1684 .union(PipelineStages::ACCELERATION_STRUCTURE_COPY);
1685 const VALID_ACCESS_FLAGS: AccessFlags = AccessFlags::INDIRECT_COMMAND_READ
1686 .union(AccessFlags::INDEX_READ)
1687 .union(AccessFlags::VERTEX_ATTRIBUTE_READ)
1688 .union(AccessFlags::UNIFORM_READ)
1689 .union(AccessFlags::TRANSFER_READ)
1690 .union(AccessFlags::TRANSFER_WRITE)
1691 .union(AccessFlags::MEMORY_READ)
1692 .union(AccessFlags::MEMORY_WRITE)
1693 .union(AccessFlags::SHADER_SAMPLED_READ)
1694 .union(AccessFlags::SHADER_STORAGE_READ)
1695 .union(AccessFlags::SHADER_STORAGE_WRITE)
1696 .union(AccessFlags::VIDEO_DECODE_READ)
1697 .union(AccessFlags::VIDEO_ENCODE_WRITE)
1698 .union(AccessFlags::ACCELERATION_STRUCTURE_READ)
1699 .union(AccessFlags::ACCELERATION_STRUCTURE_WRITE)
1700 .union(AccessFlags::SHADER_BINDING_TABLE_READ);
1701
1702 VALID_STAGE_FLAGS.contains(self.stage_mask)
1703 && VALID_ACCESS_FLAGS.contains(self.access_mask)
1704 && matches!(
1705 self.image_layout,
1706 ImageLayout::Undefined
1707 | ImageLayout::General
1708 | ImageLayout::ShaderReadOnlyOptimal
1709 | ImageLayout::TransferSrcOptimal
1710 | ImageLayout::TransferDstOptimal,
1711 )
1712 }
1713
1714 pub(crate) const fn are_valid_image_access_types(self) -> bool {
1715 !matches!(self.image_layout, ImageLayout::Undefined)
1716 }
1717}
1718
1719impl BitOr for AccessTypes {
1720 type Output = Self;
1721
1722 #[inline]
1723 fn bitor(self, rhs: Self) -> Self::Output {
1724 self.union(rhs)
1725 }
1726}
1727
1728impl BitOrAssign for AccessTypes {
1729 #[inline]
1730 fn bitor_assign(&mut self, rhs: Self) {
1731 *self = self.union(rhs);
1732 }
1733}
1734
/// Specifies which image layout strategy an image access should use.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub enum ImageLayoutType {
    // Use the layout that is optimal for the access type in question
    // (see `AccessTypes::image_layout`).
    Optimal,

    // Always use the `General` image layout, regardless of the access type.
    General,
}
1760
1761impl ImageLayoutType {
1762 #[inline]
1764 #[must_use]
1765 pub const fn is_optimal(self) -> bool {
1766 matches!(self, ImageLayoutType::Optimal)
1767 }
1768
1769 #[inline]
1771 #[must_use]
1772 pub const fn is_general(self) -> bool {
1773 matches!(self, ImageLayoutType::General)
1774 }
1775}
1776
/// Specifies the type of access to a resource performed from the host.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum HostAccessType {
    // The host reads the resource.
    Read,

    // The host writes the resource.
    Write,
}
1786
// Module-local `Result` alias: the success type defaults to `()` and the error type to
// `InvalidSlotError`, since most fallible operations here fail on an invalid slot-map id.
type Result<T = (), E = InvalidSlotError> = ::std::result::Result<T, E>;