// vulkanite/lib.rs

1#![doc = include_str!(concat!("../", std::env!("CARGO_PKG_README")))]
2
3#[cfg(feature = "loaded")]
4mod loaded;
5pub mod vk;
6#[cfg(feature = "raw-window-handle")]
7pub mod window;
8
9use std::cell::Cell;
10use std::ffi::c_char;
11use std::marker::PhantomData;
12use std::mem::{self, MaybeUninit};
13use std::ptr::{self};
14
15#[cfg(feature = "smallvec")]
16use smallvec::SmallVec;
17
18#[cfg(feature = "arrayvec")]
19use arrayvec::ArrayVec;
20
/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/PFN_vkGetInstanceProcAddr.html>
/// Entry point of the vulkan library, this is used to retrieve Vulkan functions
/// This function can be retrieved by loading the library using [Dispatcher::new_loaded], using your own library
/// loading code, or from external libraries that provide it, like SDL with `SDL_Vulkan_GetVkGetInstanceProcAddr`
///
/// Takes an optional instance handle and a null-terminated command name, and returns either
/// a pointer to the command implementation or null when the command is unavailable.
pub type GetInstanceProcAddrSignature = unsafe extern "system" fn(
    Option<BorrowedHandle<'_, vk::raw::Instance>>,
    *const c_char,
) -> *const ();
29
/// Dispatcher type used to hold the [vk::CommandsDispatcher] object
/// The dispatcher type loads once all the vulkan commands that can be used then is used
/// every time a vulkan command is called.
/// It is initialized by calling [Dispatcher::new] with the [GetInstanceProcAddrSignature] entry point
/// Or when the `loaded` feature is enabled by calling [Dispatcher::new_loaded] which will load the library
/// There are two Dispatcher implementations provided:
/// - [DynamicDispatcher]: Stores the commands in static memory, this allows for a cost-free command retrieval
/// but requires only at most one instance and one device to exist at any time. If this is not the case use the following implementation
/// - [MultiDispatcher]: Allocates the commands storage on the heap and reference counts it. This allows for any number of vulkan
/// devices and instances to co-exist with their own dispatch table but incurs a small overhead cost (smart handles need to store an additional pointer
/// and arc cloning needs to be done each time a new smart handle is created)
pub trait Dispatcher: Clone {
    /// Return the associated [vk::CommandsDispatcher] with this Dispatcher
    /// You can then use the command table to call any command that has been loaded or load new commands
    fn get_command_dispatcher(&self) -> &vk::CommandsDispatcher;

    /// Create a new dispatcher given the get_instance_proc_addr entry point
    /// this will load basic (non-instance and non-device dependent) commands
    /// # Safety
    /// `get_instance_proc_addr` must behave as expected (for any input it should either return [ptr::null()]
    /// or a pointer to a function with the expected parameters and return value)
    unsafe fn new(get_instance_proc_addr: GetInstanceProcAddrSignature) -> Self;

    /// Internal function used to load a library and return the dispatcher along with a library object which ensures the library
    /// is kept loaded while the object is alive
    /// # Safety
    /// - The [libloading::Library] object must be dropped only after Vulkan is done being used (all objects have been uninitialized and no
    /// vulkan command is called after)
    #[cfg(feature = "loaded")]
    unsafe fn new_loaded_and_lib(
    ) -> core::result::Result<(Self, libloading::Library), loaded::LoadingError> {
        let (proc_addr, lib) = loaded::load_proc_addr_and_lib()?;

        Ok((Self::new(proc_addr), lib))
    }

    /// Return a dispatcher that additionally has the instance-level commands of `instance` loaded
    fn clone_with_instance(&self, instance: &vk::raw::Instance) -> Self;
    /// Return a dispatcher that additionally has the device-level commands of `device` loaded
    fn clone_with_device(&self, device: &vk::raw::Device) -> Self;

    /// Loads the Vulkan library, retrieves the entry point from it and initializes the dispatcher using it
    /// This will return an error if the vulkan library or its entry point cannot be found
    /// Library unloading depends on the implementation, for [MultiDispatcher] it happens as soon as all dispatchers are dropped.
    /// While for [DynamicDispatcher] one should call [DynamicDispatcher::unload()]
    /// SAFETY:
    /// - The Vulkan library being loaded follows the Vulkan specification
    /// - When using a [DynamicDispatcher], there should be a call to [DynamicDispatcher::unload()] between two calls to this function
    #[cfg(feature = "loaded")]
    unsafe fn new_loaded() -> core::result::Result<Self, loaded::LoadingError>;
}
79
/// When using a dynamic dispatcher for a single instance/device, we can put it in static memory
/// This way, when compiled with optimizations, each command call will be optimized to a simple jump
/// to a given indirect address
static DYNAMIC_DISPATCHER: vk::CommandsDispatcher = vk::CommandsDispatcher::new();
84
/// Dynamic dispatcher
/// Dispatcher implementation loading commands in static memory. This is a cost-free abstraction
/// assuming you follow the safety rule below.
/// Cloning this object is free (the object has size 0 and the clone function is empty) and it never
/// makes any heap allocation. Use this dispatcher if you can.
///
/// # Safety
/// Using a dynamic dispatcher means that at any point, only at most one vulkan instance
/// and at most one vulkan device exists. This is the case for most Vulkan programs but if you cannot
/// guarantee it, use [MultiDispatcher] instead
#[derive(Clone, Copy)]
pub struct DynamicDispatcher(pub(crate) ());
97
98impl Dispatcher for DynamicDispatcher {
99    #[inline(always)]
100    fn get_command_dispatcher(&self) -> &vk::CommandsDispatcher {
101        &DYNAMIC_DISPATCHER
102    }
103
104    unsafe fn new(get_instance_proc_addr: GetInstanceProcAddrSignature) -> Self {
105        DYNAMIC_DISPATCHER.load_proc_addr(get_instance_proc_addr);
106        Self(())
107    }
108
109    fn clone_with_instance(&self, instance: &vk::raw::Instance) -> Self {
110        unsafe { DYNAMIC_DISPATCHER.load_instance(instance) };
111        Self(())
112    }
113
114    fn clone_with_device(&self, device: &vk::raw::Device) -> Self {
115        unsafe { DYNAMIC_DISPATCHER.load_device(device) };
116        Self(())
117    }
118
119    #[cfg(feature = "loaded")]
120    unsafe fn new_loaded() -> core::result::Result<Self, loaded::LoadingError> {
121        let (result, lib) = Self::new_loaded_and_lib()?;
122
123        loaded::DYNAMIC_VULKAN_LIB.0.set(Some(lib));
124        Ok(result)
125    }
126}
127
#[cfg(feature = "loaded")]
impl DynamicDispatcher {
    /// Unloads the loaded library by dropping the stored [libloading::Library] handle
    /// # Safety:
    /// - Only call this function if the dispatcher was loaded with [Dispatcher::new_loaded].
    /// - Only call this function after all vulkan handles have been freed/destroyed
    /// - You cannot call any vulkan command before creating a new dispatcher.
    /// - There should be a call to [Dispatcher::new_loaded] between two consecutive calls to [Self::unload]
    pub unsafe fn unload() {
        loaded::DYNAMIC_VULKAN_LIB.0.set(None);
    }
}
140
/// Heap storage used by [MultiDispatcher]: the command table plus, when the
/// `loaded` feature is enabled, the handle keeping the Vulkan library loaded.
struct DispatcherWithLib {
    // Command table owned by this dispatcher
    dispatcher: vk::CommandsDispatcher,
    // Keeps the dynamically loaded Vulkan library alive while any dispatcher
    // references it; None when the entry point was supplied externally
    #[cfg(feature = "loaded")]
    library: Option<std::sync::Arc<libloading::Library>>,
}
146
/// MultiDispatcher
/// Dispatcher implementation which stores vulkan commands on the heap using smart pointers
/// This adds a small overhead:
/// - Smart Vulkan handles must store an additional pointer
/// - Creating/dropping smart Vulkan handles has the additional cost of cloning/dropping a [std::sync::Arc]
/// If you only use at most one vulkan instance and one device at any given time, you should use [DynamicDispatcher] instead
/// When the `loaded` feature is enabled and the dispatcher is loaded with [MultiDispatcher::new_loaded],
/// the vulkan library will be unloaded as soon as all dispatchers are dropped
#[derive(Clone)]
pub struct MultiDispatcher(std::sync::Arc<DispatcherWithLib>);
157
158impl Dispatcher for MultiDispatcher {
159    #[inline(always)]
160    fn get_command_dispatcher(&self) -> &vk::CommandsDispatcher {
161        &self.0.dispatcher
162    }
163
164    unsafe fn new(get_instance_proc_addr: GetInstanceProcAddrSignature) -> Self {
165        let dispatcher = std::sync::Arc::new(DispatcherWithLib {
166            dispatcher: vk::CommandsDispatcher::new(),
167            #[cfg(feature = "loaded")]
168            library: None,
169        });
170        dispatcher.dispatcher.load_proc_addr(get_instance_proc_addr);
171
172        Self(dispatcher)
173    }
174
175    fn clone_with_instance(&self, instance: &vk::raw::Instance) -> Self {
176        let dispatcher = std::sync::Arc::new(DispatcherWithLib {
177            dispatcher: self.0.dispatcher.clone(),
178            #[cfg(feature = "loaded")]
179            library: self.0.library.clone(),
180        });
181
182        unsafe { dispatcher.dispatcher.load_instance(instance) };
183        Self(dispatcher)
184    }
185
186    fn clone_with_device(&self, device: &vk::raw::Device) -> Self {
187        let dispatcher = std::sync::Arc::new(DispatcherWithLib {
188            dispatcher: self.0.dispatcher.clone(),
189            #[cfg(feature = "loaded")]
190            library: self.0.library.clone(),
191        });
192
193        unsafe { dispatcher.dispatcher.load_device(device) };
194        Self(dispatcher)
195    }
196
197    #[cfg(feature = "loaded")]
198    unsafe fn new_loaded() -> core::result::Result<Self, loaded::LoadingError> {
199        let (mut result, lib) = Self::new_loaded_and_lib()?;
200
201        // result holds the only reference to the inner dispatcher
202        // so unwrap will never fail
203        let library = std::sync::Arc::new(lib);
204        std::sync::Arc::get_mut(&mut result.0).unwrap().library = Some(library);
205        Ok(result)
206    }
207}
208
209/// See <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#memory-allocation>
210/// Cost-free allocator implementation for Vulkan
211/// Vulkan allows a custom memory allocator to be specified for host allocations
212/// Note that the vulkan implementation is not required to use this allocator (for example it might have to allocate
213/// memory with execute permissions), but you will at least receive the [Allocator::on_internal_alloc] and [Allocator::on_internal_free]
214/// notifications
215/// # Safety
216/// The implementations of alloc/realloc/free must satisfy an allocator behavior and the requirements of the specification
217/// If for some reason you choose the re-implement the pfn_* functions, they also need to follow the specification
218pub unsafe trait Allocator: Sized + Clone {
219    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/PFN_vkAllocationFunction.html>
220    fn alloc(
221        &self,
222        size: usize,
223        alignment: usize,
224        allocation_scope: vk::SystemAllocationScope,
225    ) -> *mut ();
226    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/PFN_vkReallocationFunction.html>
227    fn realloc(
228        &self,
229        original: *mut (),
230        size: usize,
231        alignment: usize,
232        allocation_scope: vk::SystemAllocationScope,
233    ) -> *mut ();
234    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/PFN_vkFreeFunction.html>
235    fn free(&self, memory: *mut ());
236    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/PFN_vkInternalAllocationNotification.html>
237    fn on_internal_alloc(
238        &self,
239        size: usize,
240        allocation_type: vk::InternalAllocationType,
241        allocation_scope: vk::SystemAllocationScope,
242    );
243    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/PFN_vkInternalFreeNotification.html>
244    fn on_internal_free(
245        &self,
246        size: usize,
247        allocation_type: vk::InternalAllocationType,
248        allocation_scope: vk::SystemAllocationScope,
249    );
250
251    extern "system" fn pfn_allocation(
252        user_data: *mut (),
253        size: usize,
254        alignment: usize,
255        allocation_scope: vk::SystemAllocationScope,
256    ) -> *mut () {
257        let allocator: &Self = unsafe { &*user_data.cast() };
258        allocator.alloc(size, alignment, allocation_scope)
259    }
260
261    extern "system" fn pfn_reallocation(
262        user_data: *mut (),
263        original: *mut (),
264        size: usize,
265        alignment: usize,
266        allocation_scope: vk::SystemAllocationScope,
267    ) -> *mut () {
268        let allocator: &Self = unsafe { &*user_data.cast() };
269        allocator.realloc(original, size, alignment, allocation_scope)
270    }
271
272    extern "system" fn pfn_free(user_data: *mut (), memory: *mut ()) {
273        let allocator: &Self = unsafe { &*user_data.cast() };
274        allocator.free(memory)
275    }
276
277    extern "system" fn pfn_internal_allocation(
278        user_data: *mut (),
279        size: usize,
280        allocation_type: vk::InternalAllocationType,
281        allocation_scope: vk::SystemAllocationScope,
282    ) {
283        let allocator: &Self = unsafe { &*user_data.cast() };
284        allocator.on_internal_alloc(size, allocation_type, allocation_scope)
285    }
286
287    extern "system" fn pfn_internal_free(
288        user_data: *mut (),
289        size: usize,
290        allocation_type: vk::InternalAllocationType,
291        allocation_scope: vk::SystemAllocationScope,
292    ) {
293        let allocator: &Self = unsafe { &*user_data.cast() };
294        allocator.on_internal_free(size, allocation_type, allocation_scope)
295    }
296
297    /// SAFETY:
298    /// When re-implementing this function and using the provided pfn_* functions, you must ensure that the user_data value is a reference
299    /// to self that lives as long as the allocation callback
300    /// Moreover, as stated in <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VkAllocationCallbacks.html>
301    /// pfn_internal_allocation and pfn_internal_free can only either be both None or be both Some
302    fn get_allocation_callbacks(&self) -> Option<vk::AllocationCallbacks> {
303        Some(vk::AllocationCallbacks {
304            p_user_data: (self as *const Self).cast(),
305            pfn_allocation: Self::pfn_allocation as *const (),
306            pfn_reallocation: Self::pfn_reallocation as *const (),
307            pfn_free: Self::pfn_free as *const (),
308            pfn_internal_allocation: Self::pfn_internal_allocation as *const (),
309            pfn_internal_free: Self::pfn_free as *const (),
310        })
311    }
312}
313
/// The default vulkan allocator, Using this allocator will let Vulkan use the default allocator
/// It is the same as specifying NULL (in C) or None (in Ash) every time the parameter pAllocator is required
#[derive(Clone, Copy)]
pub struct DefaultAllocator;
318
319unsafe impl Allocator for DefaultAllocator {
320    fn alloc(&self, _: usize, _: usize, _: vk::SystemAllocationScope) -> *mut () {
321        ptr::null_mut()
322    }
323
324    fn realloc(&self, _: *mut (), _: usize, _: usize, _: vk::SystemAllocationScope) -> *mut () {
325        ptr::null_mut()
326    }
327
328    fn free(&self, _: *mut ()) {}
329
330    fn on_internal_alloc(
331        &self,
332        _: usize,
333        _: vk::InternalAllocationType,
334        _: vk::SystemAllocationScope,
335    ) {
336    }
337
338    fn on_internal_free(
339        &self,
340        _: usize,
341        _: vk::InternalAllocationType,
342        _: vk::SystemAllocationScope,
343    ) {
344    }
345
346    #[inline(always)]
347    /// By returning None, we ask Vulkan to use its default allocator
348    fn get_allocation_callbacks(&self) -> Option<vk::AllocationCallbacks> {
349        None
350    }
351}
352
/// Quality-Of-Life macro to create bitflags with multiple flags.
/// Expands `Ty::{A | B | C}` into `Ty::A | Ty::B | Ty::C` (also accepts a
/// leading `vk::` path segment).
/// # Example
/// ```
/// let debug_info = vk::DebugUtilsMessengerCreateInfoEXT::default()
///     .message_severity(
///         flagbits!(vk::DebugUtilsMessageSeverityFlagsEXT::{Info | Warning | Error}),
///     );
/// ```
#[macro_export]
macro_rules! flagbits {
    ( $enum:ident ::{ $($variant:ident)|+ } ) => {
        $($enum::$variant)|+
    };
    ( vk :: $enum:ident ::{ $($variant:ident)|+ } ) => {
        $(vk::$enum::$variant)|+
    }
}
370
mod private {
    /// For safety, prevent types outside this crate from implementing Vulkan-specific traits
    /// (sealed-trait pattern)
    pub trait Sealed {}
}
375
/// If A implements [`Alias<B>`], this means A and B have exactly the same memory representation
/// Thus transmuting from A to B is safe
///
/// # Safety
/// Implementors must guarantee that the two types really share the same memory layout
pub unsafe trait Alias<T>: Sized {}

/// T always has the same memory representation as itself
unsafe impl<T> Alias<T> for T {}
382
/// A dispatchable or non-dispatchable Vulkan Handle
pub trait Handle: private::Sealed + Sized {
    // The raw representation of the handle (usize or u64 depending on the handle,
    // see [Handle::try_from_raw])
    type InnerType: Copy;
    // The [vk::ObjectType] value corresponding to this handle type
    const TYPE: vk::ObjectType;

    /// Retrieve the inner content of the vulkan handle, to be used by other Vulkan libraries not using this crate
    fn as_raw(&self) -> Self::InnerType;

    /// Convert a raw value to a handle
    /// # Safety
    /// When calling this code, the user must ensure the following:
    /// - The value given is a valid Vulkan handle for the appropriate type
    /// - The handle must live at least as long as the object being created
    unsafe fn from_raw(x: Self::InnerType) -> Self;

    /// Same as [Handle::from_raw] but allows for types that can be zero (usize or u64 depending on the handle)
    /// Will fail (return None) if x is null/zero or does not fit in the inner type
    /// # Safety
    /// Same requirements as [Handle::from_raw]
    unsafe fn try_from_raw<T>(x: T) -> Option<Self>
    where
        Self::InnerType: TryFrom<T>,
    {
        Self::InnerType::try_from(x).ok().map(|t| Self::from_raw(t))
    }

    /// Return a representation of &self
    /// The advantage is that BorrowedHandle<'a, Self> has internally the exact same memory
    /// representation as the raw handle it represents and therefore should be used when a deref is not enough
    /// like for vulkan commands that require arrays of handles
    #[inline(always)]
    fn borrow<'a>(&'a self) -> BorrowedHandle<'a, Self> {
        BorrowedHandle {
            value: self.as_raw(),
            phantom: PhantomData,
        }
    }

    /// See [Handle::borrow], mutable variant
    #[inline(always)]
    fn borrow_mut<'a>(&'a mut self) -> BorrowedMutHandle<'a, Self> {
        BorrowedMutHandle {
            value: self.as_raw(),
            phantom: PhantomData,
        }
    }

    /// Clone the current object
    /// # Safety
    /// The caller must ensure that only one of the two handles is destroyed; moreover,
    /// the second handle must not be used after the first has been destroyed
    unsafe fn clone(&self) -> Self;
}
431
/// This represents a reference to a handle
/// Its internal representation is the same as the handle
#[repr(transparent)]
#[derive(Debug, Clone, Copy)]
pub struct BorrowedHandle<'a, T: Handle> {
    // Raw handle value copied out of the borrowed handle
    value: T::InnerType,
    // Ties this borrow to the lifetime of the original handle (zero-sized)
    phantom: PhantomData<&'a T>,
}

/// BorrowedHandle<'a, T> is repr(transparent) of T
unsafe impl<'a, T: Handle> Alias<T> for BorrowedHandle<'a, T> {}
443
impl<'a, T: Handle> AsRef<T> for BorrowedHandle<'a, T> {
    #[inline(always)]
    fn as_ref(&self) -> &T {
        // SAFETY: BorrowedHandle<T> and T have the same internal representation
        // Moreover, the reference will only live as long as the borrowed handle
        // (it cannot live as long as the original one as we are not tracking its location)
        unsafe { mem::transmute(self) }
    }
}
453
/// This represents a reference to a mutable handle
/// Its internal representation is the same as the handle
#[repr(transparent)]
#[derive(Debug, Clone, Copy)]
pub struct BorrowedMutHandle<'a, T: Handle> {
    // Raw handle value copied out of the borrowed handle
    value: T::InnerType,
    // Ties this borrow to the lifetime of the original mutable handle (zero-sized)
    phantom: PhantomData<&'a mut T>,
}

/// BorrowedMutHandle<'a, T> is repr(transparent) of T
unsafe impl<'a, T: Handle> Alias<T> for BorrowedMutHandle<'a, T> {}
465
impl<'a, T: Handle> AsMut<T> for BorrowedMutHandle<'a, T> {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut T {
        // SAFETY: Same as [BorrowedHandle::as_ref]: identical representation,
        // and the resulting reference lives no longer than this borrow
        unsafe { mem::transmute(self) }
    }
}
473
474/// A trait implemented by Vulkan C structs whose first 2 fields are:
475///     VkStructureType        sType;
476///     const void*            pNext;
477/// sType must always be set to STRUCTURE_TYPE
478/// This trait contains the minimum to be object safe, [ExtendableStructure] extends on it
479pub unsafe trait ExtendableStructureBase {
480    fn header(&self) -> *const Header {
481        ptr::from_ref(self).cast()
482    }
483
484    fn header_mut(&mut self) -> *mut Header {
485        ptr::from_mut(self).cast()
486    }
487}
488
pub unsafe trait ExtendableStructure: ExtendableStructureBase + Default {
    // The sType value identifying this structure
    const STRUCTURE_TYPE: vk::StructureType;

    /// Access the p_next cell of this structure through its [Header] prefix
    /// # Safety
    /// Same as [ExtendableStructureBase::header]
    unsafe fn retrieve_next(&self) -> &Cell<*const Header> {
        &unsafe { &*self.header() }.p_next
    }

    /// Assuming the current structure chain is the following:
    /// Self -> Ext1 -> Ext2 -> Ext3
    /// calling this function with Ext4 will result in:
    /// Self -> Ext4 -> Ext1 -> Ext2 -> Ext3
    /// This function will never cause cycles in the structure chain
    /// # Safety
    /// This function is unsafe because it discards the lifetime (ExtendableStructure does not have a lifetime parameter)
    /// Also it does not check that T is a valid extension to be added to Self and only requires references (and not mutable references)
    unsafe fn push_next_unchecked<T: ExtendableStructure>(&self, ext: &T) {
        // Splice ext between self and self's current chain tail
        let my_next = self.retrieve_next();
        let other_next = ext.retrieve_next();
        other_next.set(my_next.get());
        my_next.set(ptr::from_ref(ext).cast());
    }

    /// Return an uninitialized structure except the structure type being correctly set
    /// and the p_next pointer being set to null
    fn new_uninit() -> MaybeUninit<Self> {
        let mut result: MaybeUninit<Self> = MaybeUninit::uninit();
        let header = Header {
            s_type: Self::STRUCTURE_TYPE,
            p_next: Cell::new(ptr::null()),
        };
        // SAFETY: result is a C struct which starts with the fields from Header
        unsafe { result.as_mut_ptr().cast::<Header>().write(header) };
        result
    }
}
524
/// If an extendable structure A implements ExtendingStructure< B >
/// This means A can be used to extend B
/// For example, VkPhysicalDeviceFeatures2 can be used to extend VkDeviceCreateInfo
/// So vk::PhysicalDeviceFeatures2 has the trait ExtendingStructure<vk::DeviceCreateInfo>
/// This is used for additional security, making it impossible to extend a structure
/// with an extension that wasn't planned for this structure
pub unsafe trait ExtendingStructure<T: ExtendableStructure>: ExtendableStructure {}

/// For simplicity, say that every structure can extend itself
unsafe impl<T: ExtendableStructure> ExtendingStructure<T> for T {}
535
/// Common prefix of every extendable Vulkan structure: the structure type tag
/// followed by the extension (pNext) pointer
#[repr(C)]
pub struct Header {
    // VkStructureType sType
    s_type: vk::StructureType,
    // const void* pNext; Cell allows rewriting the chain through shared references
    p_next: Cell<*const Header>,
}
541
/// Represent an object that can be used as the return value of a vulkan function that outputs a structure chain
/// It must therefore internally represent what vulkan recognizes as a structure chain
pub unsafe trait StructureChainOut<H>: Sized
where
    H: ExtendableStructure,
{
    /// Setup an uninitialized structure chain
    /// After this call, for the structure chain to be initialized, each structure field (with the exception of the structure type
    /// and the p_next pointer) must be initialized (usually by calling the appropriate vulkan command)
    /// The structure type and p_next pointer of each struct are set so that a vulkan command sees a pointer to the head
    /// as a valid chain containing all structures
    /// Calling setup_uninit should be enough to then call a vulkan command filling this structure chain, moreover after
    /// the call to this vulkan command, the whole structure chain should be considered initialized
    fn setup_uninit(chain: &mut MaybeUninit<Self>);

    /// Return a mutable pointer to the head structure, which can then be passed to vulkan commands
    fn get_uninit_head_ptr(chain: *mut Self) -> *mut H;

    /// Function to call after a vulkan function initialized this structure to make sure there is no dangling pointer
    /// or anything which could cause undefined behavior
    fn setup_cleanup(chain: *mut Self) {
        // Clearing the dangling pointer from the head should be enough
        // A user should not be able to use the p_next pointer from the chain structure without unsafe code
        let head = Self::get_uninit_head_ptr(chain).cast::<Header>();
        unsafe { ptr::addr_of_mut!((*head).p_next).write(Cell::new(ptr::null())) };
    }
}
569
/// Structure chain trait: a head structure plus a set of extension structures
/// that can be individually linked/unlinked from the chain
pub unsafe trait StructureChain<H>: AsRef<H> + AsMut<H> + Sized
where
    H: ExtendableStructure,
{
    /// Return a mutable reference to the given structure
    /// Will panic if this structure is not part of the structure chain
    fn get_mut<T: ExtendingStructure<H>>(&mut self) -> &mut T;

    /// Return a reference to the given structure
    /// Will panic if this structure is not part of the structure chain
    fn get<T: ExtendingStructure<H>>(&self) -> &T;

    /// Unlink the given structure from the chain
    /// Will panic if this structure is not part of the structure chain
    fn unlink<T: ExtendingStructure<H>>(&mut self);

    /// Link the given structure to the chain
    /// Do not call this on a structure that has not been unlinked previously
    /// Calling link on an already linked structure is safe but has the side effect of unlinking
    /// all the other structures linked before the two link calls (which you probably do not want)
    /// Will panic if this structure is not part of the structure chain
    fn link<T: ExtendingStructure<H>>(&mut self);
}
594
// A single extendable structure is itself a (one-element) structure chain
unsafe impl<H: ExtendableStructure> StructureChainOut<H> for H {
    fn setup_uninit(chain: &mut MaybeUninit<Self>) {
        // SAFETY: H is a C struct which starts with the fields from Header
        unsafe {
            chain.as_mut_ptr().cast::<Header>().write(Header {
                s_type: Self::STRUCTURE_TYPE,
                p_next: Cell::new(ptr::null()),
            })
        }
    }

    fn get_uninit_head_ptr(chain: *mut Self) -> *mut H {
        // A lone structure is its own head
        chain
    }

    fn setup_cleanup(_: *mut Self) {
        // p_next was set to null in setup_uninit and this chain has no
        // extension structures, so there is no dangling pointer to clear
    }
}
614
615macro_rules! make_structure_chain_type {
616    ($name: ident, $($ext_ty:ident => ($ext_nb:tt, $ext_name:ident)),*) => {
617
618#[doc(hidden)]
619pub struct $name<H, $($ext_ty),*>
620where
621    H: ExtendableStructure,
622    $($ext_ty: ExtendingStructure<H>),*
623{
624    head: H,
625    $($ext_name: ($ext_ty, bool),)*
626    has_changed: Cell<bool>,
627}
628
629impl<H, $($ext_ty),*>  $name<H, $($ext_ty),*>
630where
631    H: ExtendableStructure,
632    $($ext_ty: ExtendingStructure<H>),* {
633
634        pub fn new(head: H, $($ext_name: $ext_ty),*) -> Self {
635            Self {
636                head,
637                $($ext_name: ($ext_name, true),)*
638                has_changed: Cell::new(true),
639            }
640        }
641
642        fn perform_linking(&self) {
643            self.has_changed.set(false);
644            let mut _prev_ptr = ptr::null();
645            $(
646                if self.$ext_name.1 {
647                    unsafe { self.$ext_name.0.retrieve_next().set(_prev_ptr) };
648                    _prev_ptr = ptr::from_ref(&self.$ext_name.0).cast();
649                }
650            )*
651            unsafe { self.head.retrieve_next().set(_prev_ptr) };
652        }
653    }
654
655impl<H, $($ext_ty),*> AsRef<H> for $name<H, $($ext_ty),*>
656where
657    H: ExtendableStructure,
658    $($ext_ty: ExtendingStructure<H>),* {
659        fn as_ref(&self) -> &H {
660            if self.has_changed.get(){
661                self.perform_linking();
662            }
663            &self.head
664        }
665    }
666
667impl<H, $($ext_ty),*> AsMut<H> for $name<H, $($ext_ty),*>
668    where
669        H: ExtendableStructure,
670        $($ext_ty: ExtendingStructure<H>),* {
671            fn as_mut(&mut self) -> &mut H {
672                if self.has_changed.get(){
673                    self.perform_linking();
674                }
675                &mut self.head
676            }
677    }
678
679impl<H, $($ext_ty),*> Default for $name<H, $($ext_ty),*>
680where
681    H: ExtendableStructure,
682    $($ext_ty: ExtendingStructure<H>),*
683{
684    fn default() -> Self {
685        Self {
686            head: Default::default(),
687            $($ext_name: (Default::default(), true),)*
688            has_changed: Cell::new(true),
689        }
690    }
691}
692
693unsafe impl<H, $($ext_ty),*> StructureChain<H> for $name<H, $($ext_ty),*>
694where
695    H: ExtendableStructure,
696    $($ext_ty: ExtendingStructure<H>),*
697{
698    fn get_mut<T: ExtendingStructure<H>>(&mut self) -> &mut T {
699        if H::STRUCTURE_TYPE == T::STRUCTURE_TYPE {
700            self.perform_linking();
701            unsafe {
702                mem::transmute(self)
703            }
704        } $(else if $ext_ty::STRUCTURE_TYPE == T::STRUCTURE_TYPE {
705            unsafe {
706                mem::transmute(self)
707            }
708        })* else {
709            panic!(
710                "Unexpected type for structure chain {}",
711                std::any::type_name::<H>()
712            )
713        }
714    }
715
716    fn get<T: ExtendingStructure<H>>(&self) -> &T {
717        if H::STRUCTURE_TYPE == T::STRUCTURE_TYPE {
718            self.perform_linking();
719            unsafe {
720                mem::transmute(self)
721            }
722        } $(else if $ext_ty::STRUCTURE_TYPE == T::STRUCTURE_TYPE {
723            unsafe {
724                mem::transmute(self)
725            }
726        })* else {
727            panic!(
728                "Unexpected type for structure chain {}",
729                std::any::type_name::<H>()
730            )
731        }
732    }
733
734    fn unlink<T: ExtendingStructure<H>>(&mut self) {
735        if H::STRUCTURE_TYPE == T::STRUCTURE_TYPE {
736            panic!("Cannot unlink head structure!");
737        }
738        self.has_changed.set(true);
739
740        if false {
741        } $(else if $ext_ty::STRUCTURE_TYPE == T::STRUCTURE_TYPE {
742            self.$ext_name.1 = false;
743        })* else {
744            panic!(
745                "Unexpected type for structure chain {}",
746                std::any::type_name::<H>()
747            )
748        }
749    }
750
751    fn link<T: ExtendingStructure<H>>(&mut self) {
752        if H::STRUCTURE_TYPE == T::STRUCTURE_TYPE {
753            panic!("Head structure is always linked!");
754        }
755        self.has_changed.set(true);
756
757        if false {
758        } $(else if $ext_ty::STRUCTURE_TYPE == T::STRUCTURE_TYPE {
759            self.$ext_name.1 = true;
760        })* else {
761            panic!(
762                "Unexpected type for structure chain {}",
763                std::any::type_name::<H>()
764            )
765        }
766    }
767}
768
769unsafe impl<H, $($ext_ty),*> StructureChainOut<H> for $name<H, $($ext_ty),*>
770where
771    H: ExtendableStructure,
772    $($ext_ty: ExtendingStructure<H>),*
773{
774    fn setup_uninit(chain: &mut MaybeUninit<Self>) {
775        let chain_ptr = chain.as_mut_ptr();
776
777        // SAFETY: Each structure in this chain is a C struct which start with
778        // the fields from Header
779        unsafe {
780            ptr::addr_of_mut!((*chain_ptr).has_changed).write(Cell::new(false));
781
782            let mut _prev_header = Header {
783                s_type: H::STRUCTURE_TYPE,
784                p_next: Cell::new(ptr::null()),
785            };
786            let prev_ptr: *mut Header = ptr::addr_of_mut!((*chain_ptr).head).cast();
787
788            $(
789                let ptr = ptr::addr_of_mut!((*chain_ptr).$ext_name.0).cast();
790                _prev_header.p_next = Cell::new(ptr);
791                prev_ptr.write(_prev_header);
792
793                let prev_ptr = ptr;
794                let mut _prev_header = Header {
795                    s_type: $ext_ty::STRUCTURE_TYPE,
796                    p_next: Cell::new(ptr::null()),
797                };
798
799                ptr::addr_of_mut!((*chain_ptr).$ext_name.1).write(true);
800            )*
801
802            prev_ptr.write(_prev_header);
803        }
804    }
805
806    fn get_uninit_head_ptr(chain: *mut Self) -> *mut H {
807        unsafe { ptr::addr_of_mut!((*chain).head).cast() }
808    }
809}
810
811unsafe impl<H, $($ext_ty),*> StructureChainOut<H> for (H, $($ext_ty,)*)
812where
813    H: ExtendableStructure,
814    $($ext_ty: ExtendingStructure<H>),*
815{
816    fn setup_uninit(chain: &mut MaybeUninit<Self>) {
817        let chain_ptr = chain.as_mut_ptr();
818
819        // SAFETY: Each structure in this chain is a C struct which start with
820        // the fields from Header
821        unsafe {
822            let mut _prev_header = Header {
823                s_type: H::STRUCTURE_TYPE,
824                p_next: Cell::new(ptr::null()),
825            };
826            let prev_ptr: *mut Header = ptr::addr_of_mut!((*chain_ptr).0).cast();
827
828            $(
829                let ptr = ptr::addr_of_mut!((*chain_ptr).$ext_nb).cast();
830                _prev_header.p_next = Cell::new(ptr);
831                prev_ptr.write(_prev_header);
832
833                let prev_ptr = ptr;
834                let mut _prev_header = Header {
835                    s_type: $ext_ty::STRUCTURE_TYPE,
836                    p_next: Cell::new(ptr::null()),
837                };
838            )*
839
840            prev_ptr.write(_prev_header);
841        }
842    }
843
844    fn get_uninit_head_ptr(chain: *mut Self) -> *mut H {
845        unsafe { ptr::addr_of_mut!((*chain).0).cast() }
846    }
847}
848};
849}
850
// Generate the fixed-size structure chain types holding 0 to 6 extension
// structures. Each `Vn => (n, extn)` argument maps a generic extension
// parameter to its tuple index and field name in the generated struct.
make_structure_chain_type! {StructureChain0,}
make_structure_chain_type! {StructureChain1, V1 => (1,ext1)}
make_structure_chain_type! {StructureChain2, V1 => (1,ext1), V2 => (2,ext2)}
make_structure_chain_type! {StructureChain3, V1 => (1,ext1), V2 => (2,ext2), V3 => (3,ext3)}
make_structure_chain_type! {StructureChain4, V1 => (1,ext1), V2 => (2,ext2), V3 => (3,ext3), V4 => (4,ext4)}
make_structure_chain_type! {StructureChain5, V1 => (1,ext1), V2 => (2,ext2), V3 => (3,ext3), V4 => (4,ext4), V5 => (5,ext5)}
make_structure_chain_type! {StructureChain6, V1 => (1,ext1), V2 => (2,ext2), V3 => (3,ext3), V4 => (4,ext4), V5 => (5,ext5), V6 => (6,ext6) }
858
859/// Structure Chain that can take an arbitrary number of structures extending it
860/// This is done by putting the structures on the heap
861pub struct StructureChainVec<H: ExtendableStructure> {
862    head: H,
863    content: Vec<(Box<dyn ExtendableStructureBase>, Cell<bool>)>,
864    has_changed: Cell<bool>,
865}
866
867impl<H> StructureChainVec<H>
868where
869    H: ExtendableStructure,
870{
871    pub fn new(head: H) -> Self {
872        Self::new_with_capacity(head, 0)
873    }
874
875    pub fn new_with_capacity(head: H, capacity: usize) -> Self {
876        Self {
877            head,
878            content: Vec::with_capacity(capacity),
879            has_changed: Cell::new(true),
880        }
881    }
882
883    /// Add a new structure to the structure chain
884    /// Note: No check is done that the structure is not already part of this structure chain
885    /// When pushing a structure, it is pushed in a linked state
886    pub fn push<T: ExtendingStructure<H> + 'static>(&mut self, structure: T) {
887        self.has_changed.set(true);
888        self.content.push((Box::new(structure), Cell::new(true)));
889    }
890
891    fn perform_linking(&self) {
892        self.has_changed.set(false);
893        let mut prev_ptr = ptr::null();
894        for (structure, is_linked) in &self.content {
895            if is_linked.get() {
896                let next_header = structure.header();
897                unsafe { &*next_header }.p_next.set(prev_ptr);
898                prev_ptr = next_header;
899            }
900        }
901        unsafe { self.head.retrieve_next().set(prev_ptr) };
902    }
903}
904
905impl<H> AsRef<H> for StructureChainVec<H>
906where
907    H: ExtendableStructure,
908{
909    fn as_ref(&self) -> &H {
910        if self.has_changed.get() {
911            self.perform_linking();
912        }
913        &self.head
914    }
915}
916
917impl<H> AsMut<H> for StructureChainVec<H>
918where
919    H: ExtendableStructure,
920{
921    fn as_mut(&mut self) -> &mut H {
922        if self.has_changed.get() {
923            self.perform_linking();
924        }
925        &mut self.head
926    }
927}
928
929unsafe impl<H> StructureChain<H> for StructureChainVec<H>
930where
931    H: ExtendableStructure,
932{
933    fn get_mut<T: ExtendingStructure<H>>(&mut self) -> &mut T {
934        if H::STRUCTURE_TYPE == T::STRUCTURE_TYPE {
935            self.perform_linking();
936            return unsafe { mem::transmute(self) };
937        }
938
939        for (structure, _) in &mut self.content {
940            let header = structure.header_mut();
941            if unsafe { (*header).s_type } == T::STRUCTURE_TYPE {
942                return unsafe { mem::transmute(header) };
943            }
944        }
945
946        panic!(
947            "Type {} is not part of the structure chain",
948            std::any::type_name::<H>()
949        )
950    }
951
952    fn get<T: ExtendingStructure<H>>(&self) -> &T {
953        if H::STRUCTURE_TYPE == T::STRUCTURE_TYPE {
954            self.perform_linking();
955            return unsafe { mem::transmute(self) };
956        }
957
958        for (structure, _) in &self.content {
959            let header = structure.header();
960            if unsafe { (*header).s_type } == T::STRUCTURE_TYPE {
961                return unsafe { mem::transmute(header) };
962            }
963        }
964
965        panic!(
966            "Type {} is not part of the structure chain",
967            std::any::type_name::<H>()
968        )
969    }
970
971    fn unlink<T: ExtendingStructure<H>>(&mut self) {
972        if H::STRUCTURE_TYPE == T::STRUCTURE_TYPE {
973            panic!("Cannot unlink head structure!");
974        }
975        self.has_changed.set(true);
976
977        for (structure, is_linked) in &self.content {
978            let header = structure.header();
979            if unsafe { (*header).s_type } == T::STRUCTURE_TYPE {
980                is_linked.set(false);
981                return;
982            }
983        }
984
985        panic!(
986            "Type {} is not part of the structure chain",
987            std::any::type_name::<H>()
988        )
989    }
990
991    fn link<T: ExtendingStructure<H>>(&mut self) {
992        if H::STRUCTURE_TYPE == T::STRUCTURE_TYPE {
993            panic!("Head structure is always linked!");
994        }
995        self.has_changed.set(true);
996
997        for (structure, is_linked) in &self.content {
998            let header = structure.header();
999            if unsafe { (*header).s_type } == T::STRUCTURE_TYPE {
1000                is_linked.set(true);
1001                return;
1002            }
1003        }
1004
1005        panic!(
1006            "Type {} is not part of the structure chain",
1007            std::any::type_name::<H>()
1008        )
1009    }
1010}
1011
/// Create a default-initialized structure chain from a head type and up to 6
/// extension structure types.
/// Expands to `default()` on the matching [StructureChain0]..[StructureChain6]
/// type; supplying more than 6 extension types is a compile error.
#[macro_export]
macro_rules! create_structure_chain {
    ($head:ty $(,)?) => {
        $crate::StructureChain0::<$head>::default()
    };
    ($head:ty, $ext1:ty $(,)?) => {
        $crate::StructureChain1::<$head, $ext1>::default()
    };
    ($head:ty, $ext1:ty, $ext2:ty $(,)?) => {
        $crate::StructureChain2::<$head, $ext1, $ext2>::default()
    };
    ($head:ty, $ext1:ty, $ext2:ty, $ext3:ty $(,)?) => {
        $crate::StructureChain3::<$head, $ext1, $ext2, $ext3>::default()
    };
    ($head:ty, $ext1:ty, $ext2:ty, $ext3:ty, $ext4:ty $(,)?) => {
        $crate::StructureChain4::<$head, $ext1, $ext2, $ext3, $ext4>::default()
    };
    ($head:ty, $ext1:ty, $ext2:ty, $ext3:ty, $ext4:ty, $ext5:ty $(,)?) => {
        $crate::StructureChain5::<$head, $ext1, $ext2, $ext3, $ext4, $ext5>::default()
    };
    ($head:ty, $ext1:ty, $ext2:ty, $ext3:ty, $ext4:ty, $ext5:ty, $ext6:ty $(,)?) => {
        $crate::StructureChain6::<$head, $ext1, $ext2, $ext3, $ext4, $ext5, $ext6>::default()
    };
}
1036
/// Create a structure chain from a head expression and any number of
/// extension structure expressions.
/// Up to 6 extensions expand to the stack-based
/// [StructureChain0]..[StructureChain6] types; more than 6 fall back to the
/// heap-based [StructureChainVec].
#[macro_export]
macro_rules! structure_chain {
    // `$(,)?` accepts an optional trailing comma. Without it on this arm,
    // `structure_chain!(head,)` would fall through to the StructureChainVec
    // arm below and silently produce a heap-based chain instead of
    // StructureChain0 (macro arms are tried in order).
    ($head:expr $(,)?) => {
        $crate::StructureChain0::new($head)
    };
    ($head:expr, $ext1:expr $(,)?) => {
        $crate::StructureChain1::new($head, $ext1)
    };
    ($head:expr, $ext1:expr, $ext2:expr $(,)?) => {
        $crate::StructureChain2::new($head, $ext1, $ext2)
    };
    ($head:expr, $ext1:expr, $ext2:expr, $ext3:expr $(,)?) => {
        $crate::StructureChain3::new($head, $ext1, $ext2, $ext3)
    };
    ($head:expr, $ext1:expr, $ext2:expr, $ext3:expr, $ext4:expr $(,)?) => {
        $crate::StructureChain4::new($head, $ext1, $ext2, $ext3, $ext4)
    };
    ($head:expr, $ext1:expr, $ext2:expr, $ext3:expr, $ext4:expr, $ext5:expr $(,)?) => {
        $crate::StructureChain5::new($head, $ext1, $ext2, $ext3, $ext4, $ext5)
    };
    ($head:expr, $ext1:expr, $ext2:expr, $ext3:expr, $ext4:expr, $ext5:expr, $ext6:expr $(,)?) => {
        $crate::StructureChain6::new($head, $ext1, $ext2, $ext3, $ext4, $ext5, $ext6)
    };
    ($head:expr, $($ext:expr),*  $(,)?) => {{
        // TODO: this can be optimized using new_with_capacity
        let mut chain = $crate::StructureChainVec::new($head);
        $(
            chain.push($ext);
        )*
        chain
    }}
}
1069
/// Includes a file as a reference to a u32 array.
/// This macro is really similar to rust macro [include_bytes], the main difference is that data is provided as a u32 array instead of a u8 array
/// As a consequence the data is 4-byte aligned. Moreover, if the included file does not have a size which is a multiple of 4 bytes, it will cause a compile-time error
/// The main purpose of this macro in this library is to embed spirv code in a program, as include_bytes! requires at least an additional copy and can easily be misused for this case
///
/// The file is located relative to the current file (similarly to how modules are found). The provided path is interpreted in a platform-specific way at compile time. So, for instance, an invocation with a Windows path containing backslashes \ would not compile correctly on Unix.
///
/// This macro will yield an expression of type &'static \[u32\] which is the contents of the file.
/// This macro is inspired by <https://users.rust-lang.org/t/can-i-conveniently-compile-bytes-into-a-rust-program-with-a-specific-alignment/24049>
/// # Example
/// ```
/// let vertex_shader = include_spirv!("vert.spirv");
/// let vertex_module = device.create_shader_module(
///     &vk::ShaderModuleCreateInfo::default().code(vertex_shader),
/// )?;
/// ```
#[macro_export]
macro_rules! include_spirv {
    ($path:literal) => {{
        // Wrapper forcing 4-byte alignment on the embedded bytes so they can
        // later be reinterpreted as u32 words.
        #[repr(align(4))]
        struct AlignedStruct<Bytes: ?Sized> {
            bytes: Bytes,
        }

        // Evaluated at compile time: the assert fails the build for files
        // whose byte length is not a multiple of 4.
        static ALIGNED: &'static AlignedStruct<[u8]> = {
            let bytes = include_bytes!($path);
            assert!(
                bytes.len() % 4 == 0,
                concat!(
                    "The file ",
                    $path,
                    " must have a size which is a multiple of 4 bytes"
                )
            );
            &AlignedStruct { bytes: *bytes }
        };

        // SAFETY: ALIGNED is 'static, 4-byte aligned thanks to repr(align(4)),
        // and its length is a multiple of 4, so viewing the bytes as len/4
        // u32 words is valid.
        unsafe {
            std::slice::from_raw_parts(
                ALIGNED.bytes.as_ptr() as *const u32,
                ALIGNED.bytes.len() / 4,
            )
        }
    }};
}
1115
/// A trait implemented by types which can allocate memory for an array of given size in a contiguous memory
/// This is used for vulkan commands returning arrays
/// [`Vec<T>`] implements this trait as well as [SmallVec] if the smallvec feature is enabled and [ArrayVec] if the arrayvec feature is enabled
/// This trait is unsafe because not allocating a memory area of the proper size when calling
/// create_with_capacity can cause undefined behavior when using this library
pub unsafe trait DynamicArray<T>: IntoIterator<Item = T> {
    /// Returns an array with at least the given capacity available
    /// Calling get_content_mut_ptr on an object created with create_with_capacity(capacity) should return
    /// a contiguous properly aligned allocated region of memory which can hold capacity elements of T
    #[doc(hidden)]
    fn create_with_capacity(capacity: usize) -> Self;

    /// Called after creation (in the case where a Vulkan command returns VK_INCOMPLETE)
    /// The new capacity should be strictly greater than the current one
    /// You can assume the length of the vector is 0 when calling this function
    #[doc(hidden)]
    fn update_with_capacity(&mut self, new_capacity: usize);

    /// Returns a pointer to the array memory
    #[doc(hidden)]
    fn get_content_mut_ptr(&mut self) -> *mut T;

    /// Set the array length to size len
    ///
    /// # Safety
    /// The array must have been created with create_with_capacity(capacity)
    /// with capacity >= len and the first len elements of the array
    /// must be well defined (initialized)
    #[doc(hidden)]
    unsafe fn resize_with_len(&mut self, len: usize);
}
1145
/// When using advanced commands, we must be able to provide a dynamic array for both the type and the underlying type
/// Given a container that is a [`DynamicArray<T>`], this trait names a sibling
/// container type able to hold elements of another type `S`
/// (e.g. `Vec<T>` maps to `Vec<S>`)
pub trait AdvancedDynamicArray<T, S>: DynamicArray<T> + FromIterator<T> {
    // Companion container type used for elements of type S.
    type InnerArrayType: DynamicArray<S>;
}
1151
1152unsafe impl<T> DynamicArray<T> for Vec<T> {
1153    fn create_with_capacity(capacity: usize) -> Self {
1154        Self::with_capacity(capacity)
1155    }
1156
1157    fn update_with_capacity(&mut self, new_capacity: usize) {
1158        // we assume the length is 0, otherwise the appropriate value would be
1159        // (with underflow checking) new_capacity - self.len()
1160        self.reserve(new_capacity)
1161    }
1162
1163    fn get_content_mut_ptr(&mut self) -> *mut T {
1164        self.as_mut_ptr()
1165    }
1166
1167    unsafe fn resize_with_len(&mut self, len: usize) {
1168        self.set_len(len)
1169    }
1170}
1171
impl<T, S> AdvancedDynamicArray<T, S> for Vec<T> {
    // A Vec of T pairs with a Vec of S.
    type InnerArrayType = Vec<S>;
}
1175
// SmallVec behaves like Vec for this trait; its inline-storage optimization
// is transparent to the DynamicArray contract.
#[cfg(feature = "smallvec")]
unsafe impl<T, A> DynamicArray<T> for SmallVec<A>
where
    A: smallvec::Array<Item = T>,
{
    fn create_with_capacity(capacity: usize) -> Self {
        SmallVec::with_capacity(capacity)
    }

    fn update_with_capacity(&mut self, new_capacity: usize) {
        // Length is assumed to be 0 when this is called.
        self.reserve(new_capacity);
    }

    fn get_content_mut_ptr(&mut self) -> *mut T {
        self.as_mut_ptr()
    }

    unsafe fn resize_with_len(&mut self, len: usize) {
        self.set_len(len);
    }
}
1197
#[cfg(feature = "smallvec")]
impl<T, S, const N: usize> AdvancedDynamicArray<T, S> for SmallVec<[T; N]> {
    // Keep the same inline capacity N for the companion element type.
    type InnerArrayType = SmallVec<[S; N]>;
}
1202
// ArrayVec has a fixed, compile-time capacity: a request exceeding N cannot
// be satisfied and is reported by panicking.
#[cfg(feature = "arrayvec")]
unsafe impl<T, const N: usize> DynamicArray<T> for ArrayVec<T, N> {
    fn create_with_capacity(capacity: usize) -> Self {
        assert!(
            capacity <= N,
            "Trying to use an ArrayVec of size {N} with capacity {capacity}"
        );
        Self::new()
    }

    fn update_with_capacity(&mut self, new_capacity: usize) {
        // ArrayVecs always have a fixed capacity; only validate the request.
        assert!(
            new_capacity <= N,
            "Trying to use an ArrayVec of size {N} with capacity {new_capacity}"
        );
    }

    fn get_content_mut_ptr(&mut self) -> *mut T {
        self.as_mut_ptr()
    }

    unsafe fn resize_with_len(&mut self, len: usize) {
        self.set_len(len);
    }
}
1227
#[cfg(feature = "arrayvec")]
impl<T, S, const N: usize> AdvancedDynamicArray<T, S> for ArrayVec<T, N> {
    // Keep the same fixed capacity N for the companion element type.
    type InnerArrayType = ArrayVec<S, N>;
}
1232
/// Custom type which represents types that can be seen as slices
/// This is especially useful for this crate as there are multiple commands/structs
/// which accept slices but for which one would usually only supply one element
/// using [std::slice::from_ref].
///
/// With this trait, all of these `from_ref` calls
/// are avoided. There is also an implementation for [`Option<&T>`]
pub trait AsSlice<'a, T>: Copy {
    /// View `self` as a borrowed slice living for `'a`
    #[doc(hidden)]
    fn as_slice(self) -> &'a [T];
}
1244
impl<'a, T> AsSlice<'a, T> for &'a [T] {
    // A slice is already a slice: identity.
    fn as_slice(self) -> &'a [T] {
        self
    }
}
1250
impl<'a, T, const N: usize> AsSlice<'a, T> for &'a [T; N] {
    // Array references coerce to slices (unsized coercion).
    fn as_slice(self) -> &'a [T] {
        self
    }
}
1256
impl<'a, T> AsSlice<'a, T> for &'a T {
    // A single reference becomes a one-element slice.
    fn as_slice(self) -> &'a [T] {
        std::slice::from_ref(self)
    }
}
1262
1263impl<'a, T> AsSlice<'a, T> for &'a Option<T> {
1264    fn as_slice(self) -> &'a [T] {
1265        self.as_slice()
1266    }
1267}
1268
1269impl<'a, T> AsSlice<'a, T> for Option<&'a T> {
1270    fn as_slice(self) -> &'a [T] {
1271        self.map_or(&[], std::slice::from_ref)
1272    }
1273}
1274
1275impl<'a, T> AsSlice<'a, T> for &'a Vec<T> {
1276    fn as_slice(self) -> &'a [T] {
1277        self.as_slice()
1278    }
1279}
1280
impl<'a, T> AsSlice<'a, T> for &'a Box<T> {
    // `&Box<T>` deref-coerces to `&T` at the from_ref call, producing a
    // one-element slice of the boxed value.
    fn as_slice(self) -> &'a [T] {
        std::slice::from_ref(self)
    }
}
1286
/// Implement the AsSlice trait for `()`, some vulkan commands/structs take as parameter `Option<impl AsSlice<...>>`
/// With this type, if you want to give as parameter None (the compiler cannot infer the type, although this is not useful)
/// you can use [`None::<()>`] (instead of `None::<&vk::AttachmentReference>` for example):
///
/// # Example
///
/// ```
/// let subpass = vk::SubpassDescription::default()
///     .pipeline_bind_point(vk::PipelineBindPoint::Graphics)
///     .color_attachment(&color_ref, None::<()>);
/// ```
impl<'a, T> AsSlice<'a, T> for () {
    // Unit always maps to the empty slice.
    fn as_slice(self) -> &'a [T] {
        &[]
    }
}
1303
#[cfg(feature = "smallvec")]
impl<'a, T, const N: usize> AsSlice<'a, T> for &'a SmallVec<[T; N]> {
    // Delegates to SmallVec's inherent as_slice (inherent methods take
    // precedence over this trait method, so this is not a recursive call).
    fn as_slice(self) -> &'a [T] {
        self.as_slice()
    }
}
1310
#[cfg(feature = "arrayvec")]
impl<'a, T, const N: usize> AsSlice<'a, T> for &'a ArrayVec<T, N> {
    // Delegates to ArrayVec's inherent as_slice (inherent methods take
    // precedence over this trait method, so this is not a recursive call).
    fn as_slice(self) -> &'a [T] {
        self.as_slice()
    }
}