screen_13/driver/buffer.rs

//! Buffer resource types

use {
    super::{DriverError, device::Device, vk_sync::AccessType},
    ash::vk,
    derive_builder::{Builder, UninitializedFieldError},
    gpu_allocator::{
        MemoryLocation,
        vulkan::{Allocation, AllocationCreateDesc, AllocationScheme},
    },
    log::trace,
    log::warn,
    std::{
        fmt::{Debug, Formatter},
        mem::ManuallyDrop,
        ops::{Deref, DerefMut, Range},
        sync::Arc,
        thread::panicking,
    },
};

#[cfg(feature = "parking_lot")]
use parking_lot::Mutex;

#[cfg(not(feature = "parking_lot"))]
use std::sync::Mutex;

/// Smart pointer handle to a [buffer] object.
///
/// Also contains information about the object.
///
/// ## `Deref` behavior
///
/// `Buffer` automatically dereferences to [`vk::Buffer`] (via the [`Deref`] trait), so you
/// can call `vk::Buffer`'s methods on a value of type `Buffer`. To avoid name clashes with
/// `vk::Buffer`'s methods, the methods of `Buffer` itself are associated functions, called using
/// [fully qualified syntax]:
///
/// ```no_run
/// # use std::sync::Arc;
/// # use ash::vk;
/// # use screen_13::driver::{AccessType, DriverError};
/// # use screen_13::driver::device::{Device, DeviceInfo};
/// # use screen_13::driver::buffer::{Buffer, BufferInfo};
/// # fn main() -> Result<(), DriverError> {
/// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
/// # let info = BufferInfo::device_mem(8, vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS);
/// # let my_buf = Buffer::create(&device, info)?;
/// let addr = Buffer::device_address(&my_buf);
/// # Ok(()) }
/// ```
///
/// [buffer]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VkBuffer.html
/// [deref]: core::ops::Deref
/// [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name
pub struct Buffer {
    accesses: Mutex<BufferAccess>,
    allocation: ManuallyDrop<Allocation>,
    buffer: vk::Buffer,
    device: Arc<Device>,

    /// Information used to create this object.
    pub info: BufferInfo,

    /// A name for debugging purposes.
    pub name: Option<String>,
}

impl Buffer {
    /// Creates a new buffer on the given device.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// # use std::sync::Arc;
    /// # use ash::vk;
    /// # use screen_13::driver::DriverError;
    /// # use screen_13::driver::device::{Device, DeviceInfo};
    /// # use screen_13::driver::buffer::{Buffer, BufferInfo};
    /// # fn main() -> Result<(), DriverError> {
    /// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
    /// const SIZE: vk::DeviceSize = 1024;
    /// let info = BufferInfo::host_mem(SIZE, vk::BufferUsageFlags::UNIFORM_BUFFER);
    /// let buf = Buffer::create(&device, info)?;
    ///
    /// assert_ne!(*buf, vk::Buffer::null());
    /// assert_eq!(buf.info.size, SIZE);
    /// # Ok(()) }
    /// ```
    #[profiling::function]
    pub fn create(device: &Arc<Device>, info: impl Into<BufferInfo>) -> Result<Self, DriverError> {
        let info = info.into();

        trace!("create: {:?}", info);

        debug_assert_ne!(info.size, 0, "Size must be non-zero");

        let device = Arc::clone(device);
        let buffer_info = vk::BufferCreateInfo::default()
            .size(info.size)
            .usage(info.usage)
            .sharing_mode(vk::SharingMode::CONCURRENT)
            .queue_family_indices(&device.physical_device.queue_family_indices);
        let buffer = unsafe {
            device.create_buffer(&buffer_info, None).map_err(|err| {
                warn!("{err}");

                DriverError::Unsupported
            })?
        };
        let mut requirements = unsafe { device.get_buffer_memory_requirements(buffer) };
        requirements.alignment = requirements.alignment.max(info.alignment);

        let memory_location = if info.mappable {
            MemoryLocation::CpuToGpu
        } else {
            MemoryLocation::GpuOnly
        };
        let allocation = {
            profiling::scope!("allocate");

            #[cfg_attr(not(feature = "parking_lot"), allow(unused_mut))]
            let mut allocator = device.allocator.lock();

            #[cfg(not(feature = "parking_lot"))]
            let mut allocator = allocator.unwrap();

            allocator
                .allocate(&AllocationCreateDesc {
                    name: "buffer",
                    requirements,
                    location: memory_location,
                    linear: true, // Buffers are always linear
                    allocation_scheme: AllocationScheme::GpuAllocatorManaged,
                })
                .map_err(|err| {
                    warn!("{err}");

                    DriverError::Unsupported
                })
        }?;

        // Bind memory to the buffer
        unsafe {
            device
                .bind_buffer_memory(buffer, allocation.memory(), allocation.offset())
                .map_err(|err| {
                    warn!("{err}");

                    DriverError::Unsupported
                })?
        };

        Ok(Self {
            accesses: Mutex::new(BufferAccess::new(info.size)),
            allocation: ManuallyDrop::new(allocation),
            buffer,
            device,
            info,
            name: None,
        })
    }

    /// Creates a new mappable buffer on the given device and fills it with the data in `slice`.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// # use std::sync::Arc;
    /// # use ash::vk;
    /// # use screen_13::driver::DriverError;
    /// # use screen_13::driver::device::{Device, DeviceInfo};
    /// # use screen_13::driver::buffer::{Buffer, BufferInfo};
    /// # fn main() -> Result<(), DriverError> {
    /// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
    /// const DATA: [u8; 4] = [0xfe, 0xed, 0xbe, 0xef];
    /// let buf = Buffer::create_from_slice(&device, vk::BufferUsageFlags::UNIFORM_BUFFER, &DATA)?;
    ///
    /// assert_ne!(*buf, vk::Buffer::null());
    /// assert_eq!(buf.info.size, 4);
    /// assert_eq!(Buffer::mapped_slice(&buf), &DATA);
    /// # Ok(()) }
    /// ```
    #[profiling::function]
    pub fn create_from_slice(
        device: &Arc<Device>,
        usage: vk::BufferUsageFlags,
        slice: impl AsRef<[u8]>,
    ) -> Result<Self, DriverError> {
        let slice = slice.as_ref();
        let info = BufferInfo::host_mem(slice.len() as _, usage);
        let mut buffer = Self::create(device, info)?;

        Self::copy_from_slice(&mut buffer, 0, slice);

        Ok(buffer)
    }

    /// Keeps track of some `next_access` which affects this object.
    ///
    /// Returns the previous access for which a pipeline barrier should be used to prevent data
    /// corruption.
    ///
    /// # Note
    ///
    /// Used to maintain object state when passing a _Screen 13_-created `vk::Buffer` handle to
    /// external code such as [_Ash_] or [_Erupt_] bindings.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// # use std::sync::Arc;
    /// # use ash::vk;
    /// # use screen_13::driver::{AccessType, DriverError};
    /// # use screen_13::driver::device::{Device, DeviceInfo};
    /// # use screen_13::driver::buffer::{Buffer, BufferInfo, BufferSubresourceRange};
    /// # fn main() -> Result<(), DriverError> {
    /// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
    /// # const SIZE: vk::DeviceSize = 1024;
    /// # let info = BufferInfo::device_mem(SIZE, vk::BufferUsageFlags::STORAGE_BUFFER);
    /// # let my_buf = Buffer::create(&device, info)?;
    /// // Initially we want to "write"
    /// let access = AccessType::ComputeShaderWrite;
    /// let access_range = BufferSubresourceRange { start: 0, end: SIZE };
    /// let mut accesses = Buffer::access(&my_buf, access, access_range);
    ///
    /// assert_eq!(accesses.next(), Some((AccessType::Nothing, access_range)));
    /// assert!(accesses.next().is_none());
    ///
    /// // External code may now "write"; no barrier required in this case
    ///
    /// // Subsequently we want to "read"
    /// let access = AccessType::ComputeShaderReadOther;
    /// let mut accesses = Buffer::access(&my_buf, access, access_range);
    ///
    /// assert_eq!(accesses.next(), Some((AccessType::ComputeShaderWrite, access_range)));
    /// assert!(accesses.next().is_none());
    ///
    /// // A barrier on "write" before "read" is required! A render graph will do this
    /// // automatically when resolved, but manual access like this requires manual barriers
    /// # Ok(()) }
    /// ```
    ///
    /// [_Ash_]: https://crates.io/crates/ash
    /// [_Erupt_]: https://crates.io/crates/erupt
    #[profiling::function]
    pub fn access(
        this: &Self,
        access: AccessType,
        access_range: impl Into<BufferSubresourceRange>,
    ) -> impl Iterator<Item = (AccessType, BufferSubresourceRange)> + '_ {
        let mut access_range: BufferSubresourceRange = access_range.into();

        if access_range.end == vk::WHOLE_SIZE {
            access_range.end = this.info.size;
        }

        let accesses = this.accesses.lock();

        #[cfg(not(feature = "parking_lot"))]
        let accesses = accesses.unwrap();

        BufferAccessIter::new(accesses, access, access_range)
    }

    /// Updates a mappable buffer starting at `offset` with the data in `slice`.
    ///
    /// # Panics
    ///
    /// Panics if the buffer was not created with the `mappable` flag set to `true`.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// # use std::sync::Arc;
    /// # use ash::vk;
    /// # use screen_13::driver::DriverError;
    /// # use screen_13::driver::device::{Device, DeviceInfo};
    /// # use screen_13::driver::buffer::{Buffer, BufferInfo};
    /// # fn main() -> Result<(), DriverError> {
    /// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
    /// # let info = BufferInfo::host_mem(4, vk::BufferUsageFlags::empty());
    /// # let mut my_buf = Buffer::create(&device, info)?;
    /// const DATA: [u8; 4] = [0xde, 0xad, 0xc0, 0xde];
    /// Buffer::copy_from_slice(&mut my_buf, 0, &DATA);
    ///
    /// assert_eq!(Buffer::mapped_slice(&my_buf), &DATA);
    /// # Ok(()) }
    /// ```
    #[profiling::function]
    pub fn copy_from_slice(this: &mut Self, offset: vk::DeviceSize, slice: impl AsRef<[u8]>) {
        let slice = slice.as_ref();
        Self::mapped_slice_mut(this)[offset as _..offset as usize + slice.len()]
            .copy_from_slice(slice);
    }

    /// Returns the device address of this object.
    ///
    /// # Panics
    ///
    /// Panics if the buffer was not created with the `SHADER_DEVICE_ADDRESS` usage flag.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// # use std::sync::Arc;
    /// # use ash::vk;
    /// # use screen_13::driver::DriverError;
    /// # use screen_13::driver::device::{Device, DeviceInfo};
    /// # use screen_13::driver::buffer::{Buffer, BufferInfo};
    /// # fn main() -> Result<(), DriverError> {
    /// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
    /// # let info = BufferInfo::host_mem(4, vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS);
    /// # let my_buf = Buffer::create(&device, info)?;
    /// let addr = Buffer::device_address(&my_buf);
    ///
    /// assert_ne!(addr, 0);
    /// # Ok(()) }
    /// ```
    #[profiling::function]
    pub fn device_address(this: &Self) -> vk::DeviceAddress {
        debug_assert!(
            this.info
                .usage
                .contains(vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS)
        );

        unsafe {
            this.device.get_buffer_device_address(
                &vk::BufferDeviceAddressInfo::default().buffer(this.buffer),
            )
        }
    }

    /// Returns a mapped slice.
    ///
    /// # Panics
    ///
    /// Panics if the buffer was not created with the `mappable` flag set to `true`.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// # use std::sync::Arc;
    /// # use ash::vk;
    /// # use screen_13::driver::DriverError;
    /// # use screen_13::driver::device::{Device, DeviceInfo};
    /// # use screen_13::driver::buffer::{Buffer, BufferInfo};
    /// # fn main() -> Result<(), DriverError> {
    /// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
    /// # const DATA: [u8; 4] = [0; 4];
    /// # let my_buf = Buffer::create_from_slice(&device, vk::BufferUsageFlags::empty(), &DATA)?;
    /// // my_buf is mappable and filled with four zeroes
    /// let data = Buffer::mapped_slice(&my_buf);
    ///
    /// assert_eq!(data.len(), 4);
    /// assert_eq!(data[0], 0x00);
    /// # Ok(()) }
    /// ```
    #[profiling::function]
    pub fn mapped_slice(this: &Self) -> &[u8] {
        debug_assert!(
            this.info.mappable,
            "Buffer is not mappable - create using mappable flag"
        );

        &this.allocation.mapped_slice().unwrap()[0..this.info.size as usize]
    }

    /// Returns a mapped mutable slice.
    ///
    /// # Panics
    ///
    /// Panics if the buffer was not created with the `mappable` flag set to `true`.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// # use std::sync::Arc;
    /// # use ash::vk;
    /// # use glam::Mat4;
    /// # use screen_13::driver::DriverError;
    /// # use screen_13::driver::device::{Device, DeviceInfo};
    /// # use screen_13::driver::buffer::{Buffer, BufferInfo};
    /// # fn main() -> Result<(), DriverError> {
    /// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
    /// # const DATA: [u8; 4] = [0; 4];
    /// # let mut my_buf = Buffer::create_from_slice(&device, vk::BufferUsageFlags::empty(), &DATA)?;
    /// let mut data = Buffer::mapped_slice_mut(&mut my_buf);
    /// data.copy_from_slice(&42f32.to_be_bytes());
    ///
    /// assert_eq!(data.len(), 4);
    /// assert_eq!(data[0], 0x42);
    /// # Ok(()) }
    /// ```
    #[profiling::function]
    pub fn mapped_slice_mut(this: &mut Self) -> &mut [u8] {
        debug_assert!(
            this.info.mappable,
            "Buffer is not mappable - create using mappable flag"
        );

        &mut this.allocation.mapped_slice_mut().unwrap()[0..this.info.size as usize]
    }
}

impl Debug for Buffer {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        if let Some(name) = &self.name {
            write!(f, "{} ({:?})", name, self.buffer)
        } else {
            write!(f, "{:?}", self.buffer)
        }
    }
}

impl Deref for Buffer {
    type Target = vk::Buffer;

    fn deref(&self) -> &Self::Target {
        &self.buffer
    }
}

impl Drop for Buffer {
    #[profiling::function]
    fn drop(&mut self) {
        if panicking() {
            return;
        }

        {
            profiling::scope!("deallocate");

            #[cfg_attr(not(feature = "parking_lot"), allow(unused_mut))]
            let mut allocator = self.device.allocator.lock();

            #[cfg(not(feature = "parking_lot"))]
            let mut allocator = allocator.unwrap();

            allocator.free(unsafe { ManuallyDrop::take(&mut self.allocation) })
        }
        .unwrap_or_else(|_| warn!("Unable to free buffer allocation"));

        unsafe {
            self.device.destroy_buffer(self.buffer, None);
        }
    }
}

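/// Tracks the most recent access recorded for each region of a buffer.
///
/// As asserted in `BufferAccessIter::new`, `accesses` is kept sorted by start offset, begins at
/// offset `0`, and never stores the same access type in two adjacent entries; each entry covers
/// the bytes from its start up to the next entry's start (or `size` for the last entry).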
#[derive(Debug)]
struct BufferAccess {
    accesses: Vec<(AccessType, vk::DeviceSize)>,
    size: vk::DeviceSize,
}

impl BufferAccess {
    fn new(size: vk::DeviceSize) -> Self {
        Self {
            accesses: vec![(AccessType::Nothing, 0)],
            size,
        }
    }
}

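/// Iterator returned by `Buffer::access`.
///
/// Yields the previously recorded access and the sub-range it covered for each region the
/// requested range overlaps, while updating the underlying `BufferAccess` state to the new
/// access.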
struct BufferAccessIter<T> {
    access: AccessType,
    access_range: BufferSubresourceRange,
    buffer: T,
    idx: usize,
}

impl<T> BufferAccessIter<T>
where
    T: DerefMut<Target = BufferAccess>,
{
    fn new(buffer: T, access: AccessType, access_range: BufferSubresourceRange) -> Self {
        debug_assert!(access_range.start < access_range.end);
        debug_assert!(access_range.end <= buffer.size);

        #[cfg(debug_assertions)]
        {
            let access_start = |(_, access_start): &(AccessType, vk::DeviceSize)| *access_start;

            assert_eq!(buffer.accesses.first().map(access_start), Some(0));
            assert!(buffer.accesses.last().map(access_start).unwrap() < buffer.size);

            // Manual sorted-order check that also ensures access starts are strictly increasing
            // (hence unique) and adjacent access types differ
            let (mut prev_access, mut prev_start) = buffer.accesses.first().copied().unwrap();
            for (next_access, next_start) in buffer.accesses.iter().skip(1).copied() {
                debug_assert_ne!(prev_access, next_access);
                debug_assert!(prev_start < next_start);

                prev_access = next_access;
                prev_start = next_start;
            }
        };

        // The needle is always odd and every probe is even, so the search always returns `Err`
        let needle = (access_range.start << 1) | 1;
        let idx = buffer
            .accesses
            .binary_search_by(|(_, probe)| (probe << 1).cmp(&needle));

        debug_assert!(idx.is_err());

        let mut idx = unsafe { idx.unwrap_err_unchecked() };

        // The first access will always be at start == 0, which is even, so idx cannot be 0
        debug_assert_ne!(idx, 0);

        idx -= 1;

        Self {
            access,
            access_range,
            buffer,
            idx,
        }
    }
}

impl<T> Iterator for BufferAccessIter<T>
where
    T: DerefMut<Target = BufferAccess>,
{
    type Item = (AccessType, BufferSubresourceRange);

    fn next(&mut self) -> Option<Self::Item> {
        debug_assert!(self.access_range.start <= self.access_range.end);
        debug_assert!(self.access_range.end <= self.buffer.size);

        if self.access_range.start == self.access_range.end {
            return None;
        }

        debug_assert!(self.buffer.accesses.get(self.idx).is_some());

        let (access, access_start) = unsafe { *self.buffer.accesses.get_unchecked(self.idx) };
        let access_end = self
            .buffer
            .accesses
            .get(self.idx + 1)
            .map(|(_, access_start)| *access_start)
            .unwrap_or(self.buffer.size);
        let mut access_range = self.access_range;

        access_range.end = access_range.end.min(access_end);
        self.access_range.start = access_range.end;

        if access == self.access {
            self.idx += 1;
        } else if access_start < access_range.start {
            if let Some((_, access_start)) = self
                .buffer
                .accesses
                .get_mut(self.idx + 1)
                .filter(|(access, _)| *access == self.access && access_end == access_range.end)
            {
                *access_start = access_range.start;
                self.idx += 1;
            } else {
                self.idx += 1;
                self.buffer
                    .accesses
                    .insert(self.idx, (self.access, access_range.start));

                if access_end > access_range.end {
                    self.buffer
                        .accesses
                        .insert(self.idx + 1, (access, access_range.end));
                }

                self.idx += 1;
            }
        } else if self.idx > 0 {
            if self
                .buffer
                .accesses
                .get(self.idx - 1)
                .filter(|(access, _)| *access == self.access)
                .is_some()
            {
                if access_end == access_range.end {
                    self.buffer.accesses.remove(self.idx);

                    if self
                        .buffer
                        .accesses
                        .get(self.idx)
                        .filter(|(access, _)| *access == self.access)
                        .is_some()
                    {
                        self.buffer.accesses.remove(self.idx);
                        self.idx -= 1;
                    }
                } else {
                    debug_assert!(self.buffer.accesses.get(self.idx).is_some());

                    let (_, access_start) =
                        unsafe { self.buffer.accesses.get_unchecked_mut(self.idx) };
                    *access_start = access_range.end;
                }
            } else if access_end == access_range.end {
                debug_assert!(self.buffer.accesses.get(self.idx).is_some());

                let (access, _) = unsafe { self.buffer.accesses.get_unchecked_mut(self.idx) };
                *access = self.access;

                if self
                    .buffer
                    .accesses
                    .get(self.idx + 1)
                    .filter(|(access, _)| *access == self.access)
                    .is_some()
                {
                    self.buffer.accesses.remove(self.idx + 1);
                } else {
                    self.idx += 1;
                }
            } else {
                if let Some((_, access_start)) = self.buffer.accesses.get_mut(self.idx) {
                    *access_start = access_range.end;
                }

                self.buffer
                    .accesses
                    .insert(self.idx, (self.access, access_range.start));
                self.idx += 2;
            }
        } else if let Some((_, access_start)) = self
            .buffer
            .accesses
            .get_mut(1)
            .filter(|(access, _)| *access == self.access && access_end == access_range.end)
        {
            *access_start = 0;
            self.buffer.accesses.remove(0);
        } else if access_end > access_range.end {
            self.buffer.accesses.insert(0, (self.access, 0));

            debug_assert!(self.buffer.accesses.get(1).is_some());

            let (_, access_start) = unsafe { self.buffer.accesses.get_unchecked_mut(1) };
            *access_start = access_range.end;
        } else {
            debug_assert!(!self.buffer.accesses.is_empty());

            let (access, _) = unsafe { self.buffer.accesses.get_unchecked_mut(0) };
            *access = self.access;

            if self
                .buffer
                .accesses
                .get(1)
                .filter(|(access, _)| *access == self.access)
                .is_some()
            {
                self.buffer.accesses.remove(1);
            } else {
                self.idx += 1;
            }
        }

        Some((access, access_range))
    }
}

/// Information used to create a [`Buffer`] instance.
#[derive(Builder, Clone, Copy, Debug, Eq, Hash, PartialEq)]
#[builder(
    build_fn(private, name = "fallible_build", error = "BufferInfoBuilderError"),
    derive(Clone, Copy, Debug),
    pattern = "owned"
)]
#[non_exhaustive]
pub struct BufferInfo {
    /// Byte alignment of the base device address of the buffer.
    ///
    /// Must be a power of two.
    #[builder(default = "1")]
    pub alignment: vk::DeviceSize,

    /// Specifies a buffer whose memory is host visible and may be mapped.
    #[builder(default)]
    pub mappable: bool,

    /// Size in bytes of the buffer to be created.
    pub size: vk::DeviceSize,

    /// A bitmask specifying allowed usages of the buffer.
    #[builder(default)]
    pub usage: vk::BufferUsageFlags,
}

impl BufferInfo {
    /// Specifies a non-mappable buffer with the given `size` and `usage` values.
    ///
    /// Device-local memory (located on the GPU) is used.
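    ///
    /// # Examples
    ///
    /// A minimal sketch; the size and storage-buffer usage below are illustrative only:
    ///
    /// ```no_run
    /// # use ash::vk;
    /// # use screen_13::driver::buffer::BufferInfo;
    /// let info = BufferInfo::device_mem(1024, vk::BufferUsageFlags::STORAGE_BUFFER);
    ///
    /// assert_eq!(info.size, 1024);
    /// assert!(!info.mappable);
    /// ```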
    #[inline(always)]
    pub const fn device_mem(size: vk::DeviceSize, usage: vk::BufferUsageFlags) -> BufferInfo {
        BufferInfo {
            alignment: 1,
            mappable: false,
            size,
            usage,
        }
    }

    /// Specifies a mappable buffer with the given `size` and `usage` values.
    ///
    /// Host-local memory (located in CPU-accessible RAM) is used.
    ///
    /// # Note
    ///
    /// For convenience the given usage value will be bitwise OR'd with
    /// `TRANSFER_DST | TRANSFER_SRC`.
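    ///
    /// # Examples
    ///
    /// A minimal sketch; the size and uniform-buffer usage below are illustrative only:
    ///
    /// ```no_run
    /// # use ash::vk;
    /// # use screen_13::driver::buffer::BufferInfo;
    /// let info = BufferInfo::host_mem(64, vk::BufferUsageFlags::UNIFORM_BUFFER);
    ///
    /// assert!(info.mappable);
    /// assert!(info.usage.contains(
    ///     vk::BufferUsageFlags::TRANSFER_DST | vk::BufferUsageFlags::TRANSFER_SRC
    /// ));
    /// ```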
    #[inline(always)]
    pub const fn host_mem(size: vk::DeviceSize, usage: vk::BufferUsageFlags) -> BufferInfo {
        let usage = vk::BufferUsageFlags::from_raw(
            usage.as_raw()
                | vk::BufferUsageFlags::TRANSFER_DST.as_raw()
                | vk::BufferUsageFlags::TRANSFER_SRC.as_raw(),
        );

        BufferInfo {
            alignment: 1,
            mappable: true,
            size,
            usage,
        }
    }

    /// Specifies a non-mappable buffer with the given `size` and `usage` values.
    #[allow(clippy::new_ret_no_self)]
    #[deprecated = "Use BufferInfo::device_mem()"]
    #[doc(hidden)]
    pub fn new(size: vk::DeviceSize, usage: vk::BufferUsageFlags) -> BufferInfoBuilder {
        Self::device_mem(size, usage).to_builder()
    }

    /// Specifies a mappable buffer with the given `size` and `usage` values.
    ///
    /// # Note
    ///
    /// For convenience the given usage value will be bitwise OR'd with
    /// `TRANSFER_DST | TRANSFER_SRC`.
    #[deprecated = "Use BufferInfo::host_mem()"]
    #[doc(hidden)]
    pub fn new_mappable(size: vk::DeviceSize, usage: vk::BufferUsageFlags) -> BufferInfoBuilder {
        Self::host_mem(size, usage).to_builder()
    }

    /// Converts a `BufferInfo` into a `BufferInfoBuilder`.
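    ///
    /// # Examples
    ///
    /// A round-trip sketch; the alignment value below is illustrative only:
    ///
    /// ```no_run
    /// # use ash::vk;
    /// # use screen_13::driver::buffer::BufferInfo;
    /// let info = BufferInfo::device_mem(8, vk::BufferUsageFlags::empty());
    /// let rebuilt = info.to_builder().alignment(4).build();
    ///
    /// assert_eq!(rebuilt.size, info.size);
    /// assert_eq!(rebuilt.alignment, 4);
    /// ```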
    #[inline(always)]
    pub fn to_builder(self) -> BufferInfoBuilder {
        BufferInfoBuilder {
            alignment: Some(self.alignment),
            mappable: Some(self.mappable),
            size: Some(self.size),
            usage: Some(self.usage),
        }
    }
}

impl BufferInfoBuilder {
    /// Builds a new `BufferInfo`.
    ///
    /// # Panics
    ///
    /// If any of the following values have not been set this function will panic:
    ///
    /// * `size`
    ///
    /// If `alignment` is not a power of two this function will panic.
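    ///
    /// # Examples
    ///
    /// A minimal sketch mirroring the unit tests in this module; the values are illustrative:
    ///
    /// ```no_run
    /// # use ash::vk;
    /// # use screen_13::driver::buffer::BufferInfoBuilder;
    /// let info = BufferInfoBuilder::default()
    ///     .size(1024)
    ///     .usage(vk::BufferUsageFlags::STORAGE_BUFFER)
    ///     .build();
    ///
    /// assert_eq!(info.size, 1024);
    /// ```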
    #[inline(always)]
    pub fn build(self) -> BufferInfo {
        let res = match self.fallible_build() {
            Err(BufferInfoBuilderError(err)) => panic!("{err}"),
            Ok(info) => info,
        };

        assert_eq!(
            res.alignment.count_ones(),
            1,
            "Alignment must be a power of two"
        );

        res
    }
}

impl From<BufferInfoBuilder> for BufferInfo {
    fn from(info: BufferInfoBuilder) -> Self {
        info.build()
    }
}

#[derive(Debug)]
struct BufferInfoBuilderError(UninitializedFieldError);

impl From<UninitializedFieldError> for BufferInfoBuilderError {
    fn from(err: UninitializedFieldError) -> Self {
        Self(err)
    }
}

/// Specifies a range of buffer data.
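///
/// # Examples
///
/// A conversion sketch using the `From` impls defined below; the byte range is illustrative:
///
/// ```no_run
/// # use screen_13::driver::buffer::BufferSubresourceRange;
/// let range: BufferSubresourceRange = (0..1024).into();
///
/// assert_eq!(range.start, 0);
/// assert_eq!(range.end, 1024);
/// ```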
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct BufferSubresourceRange {
    /// The start of the range.
    pub start: vk::DeviceSize,

    /// The non-inclusive end of the range.
    pub end: vk::DeviceSize,
}

impl BufferSubresourceRange {
    #[cfg(test)]
    pub(crate) fn intersects(self, other: Self) -> bool {
        self.start < other.end && self.end > other.start
    }
}

impl From<BufferInfo> for BufferSubresourceRange {
    fn from(info: BufferInfo) -> Self {
        Self {
            start: 0,
            end: info.size,
        }
    }
}

impl From<Range<vk::DeviceSize>> for BufferSubresourceRange {
    fn from(range: Range<vk::DeviceSize>) -> Self {
        Self {
            start: range.start,
            end: range.end,
        }
    }
}

impl From<Option<Range<vk::DeviceSize>>> for BufferSubresourceRange {
    fn from(range: Option<Range<vk::DeviceSize>>) -> Self {
        range.unwrap_or(0..vk::WHOLE_SIZE).into()
    }
}

impl From<BufferSubresourceRange> for Range<vk::DeviceSize> {
    fn from(subresource: BufferSubresourceRange) -> Self {
        subresource.start..subresource.end
    }
}

#[cfg(test)]
mod tests {
    use {
        super::*,
        rand::{Rng, SeedableRng, rngs::SmallRng},
    };

    type Info = BufferInfo;
    type Builder = BufferInfoBuilder;

    const FUZZ_COUNT: usize = 100_000;

    #[test]
    pub fn buffer_access() {
        let mut buffer = BufferAccess::new(100);

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::TransferWrite,
                buffer_subresource_range(0..10),
            );

            assert_eq!(accesses.buffer.accesses, vec![(AccessType::Nothing, 0)]);
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::Nothing, buffer_subresource_range(0..10))
            );
            assert_eq!(
                accesses.buffer.accesses,
                vec![(AccessType::TransferWrite, 0), (AccessType::Nothing, 10)]
            );
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::TransferRead,
                buffer_subresource_range(5..15),
            );

            assert_eq!(
                accesses.buffer.accesses,
                vec![(AccessType::TransferWrite, 0), (AccessType::Nothing, 10)]
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::TransferWrite, buffer_subresource_range(5..10))
            );
            assert_eq!(
                accesses.buffer.accesses,
                vec![
                    (AccessType::TransferWrite, 0),
                    (AccessType::TransferRead, 5),
                    (AccessType::Nothing, 10)
                ]
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::Nothing, buffer_subresource_range(10..15))
            );
            assert_eq!(
                accesses.buffer.accesses,
                vec![
                    (AccessType::TransferWrite, 0),
                    (AccessType::TransferRead, 5),
                    (AccessType::Nothing, 15)
                ]
            );
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::HostRead,
                buffer_subresource_range(0..100),
            );

            assert_eq!(
                accesses.buffer.accesses,
                vec![
                    (AccessType::TransferWrite, 0),
                    (AccessType::TransferRead, 5),
                    (AccessType::Nothing, 15)
                ]
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::TransferWrite, buffer_subresource_range(0..5))
            );
            assert_eq!(
                accesses.buffer.accesses,
                vec![
                    (AccessType::HostRead, 0),
                    (AccessType::TransferRead, 5),
                    (AccessType::Nothing, 15)
                ]
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::TransferRead, buffer_subresource_range(5..15))
            );
            assert_eq!(
                accesses.buffer.accesses,
                vec![(AccessType::HostRead, 0), (AccessType::Nothing, 15)]
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::Nothing, buffer_subresource_range(15..100))
            );
            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostRead, 0),]);
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::HostWrite,
                buffer_subresource_range(0..100),
            );

            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostRead, 0)]);
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostRead, buffer_subresource_range(0..100))
            );
            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostWrite, 0)]);
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::HostWrite,
                buffer_subresource_range(0..100),
            );

            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostWrite, 0)]);
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostWrite, buffer_subresource_range(0..100))
            );
            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostWrite, 0)]);
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::HostWrite,
                buffer_subresource_range(1..99),
            );

            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostWrite, 0)]);
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostWrite, buffer_subresource_range(1..99))
            );
            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostWrite, 0)]);
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::HostRead,
                buffer_subresource_range(1..99),
            );

            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostWrite, 0)]);
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostWrite, buffer_subresource_range(1..99))
            );
            assert_eq!(
                accesses.buffer.accesses,
                vec![
                    (AccessType::HostWrite, 0),
                    (AccessType::HostRead, 1),
                    (AccessType::HostWrite, 99)
                ]
            );
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::Nothing,
                buffer_subresource_range(0..100),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostWrite, buffer_subresource_range(0..1))
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostRead, buffer_subresource_range(1..99))
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostWrite, buffer_subresource_range(99..100))
            );
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::AnyShaderWrite,
                buffer_subresource_range(0..100),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::Nothing, buffer_subresource_range(0..100))
            );
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::AnyShaderReadOther,
                buffer_subresource_range(1..2),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(1..2))
            );
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::AnyShaderReadOther,
                buffer_subresource_range(3..4),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(3..4))
            );
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::Nothing,
                buffer_subresource_range(0..5),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(0..1))
            );
            assert_eq!(
                accesses.next().unwrap(),
                (
                    AccessType::AnyShaderReadOther,
                    buffer_subresource_range(1..2)
                )
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(2..3))
            );
            assert_eq!(
                accesses.next().unwrap(),
                (
                    AccessType::AnyShaderReadOther,
                    buffer_subresource_range(3..4)
                )
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(4..5))
            );
            assert!(accesses.next().is_none());
        }
    }

    #[test]
    pub fn buffer_access_basic() {
        let mut buffer = BufferAccess::new(5);

        buffer.accesses = vec![
            (AccessType::ColorAttachmentRead, 0),
            (AccessType::AnyShaderWrite, 4),
        ];

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::AnyShaderWrite,
                buffer_subresource_range(0..2),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (
                    AccessType::ColorAttachmentRead,
                    buffer_subresource_range(0..2)
                )
            );
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::HostWrite,
                buffer_subresource_range(0..5),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(0..2))
            );
            assert_eq!(
                accesses.next().unwrap(),
                (
                    AccessType::ColorAttachmentRead,
                    buffer_subresource_range(2..4)
                )
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(4..5))
            );

            assert!(accesses.next().is_none());
        }
    }

    fn buffer_access_fuzz(buffer_size: vk::DeviceSize) {
        static ACCESS_TYPES: &[AccessType] = &[
            AccessType::AnyShaderReadOther,
            AccessType::AnyShaderWrite,
            AccessType::ColorAttachmentRead,
            AccessType::ColorAttachmentWrite,
            AccessType::HostRead,
            AccessType::HostWrite,
            AccessType::Nothing,
        ];

        let mut rng = SmallRng::seed_from_u64(42);
        let mut buffer = BufferAccess::new(buffer_size);
        let mut data = vec![AccessType::Nothing; buffer_size as usize];

        for _ in 0..FUZZ_COUNT {
            let access = ACCESS_TYPES[rng.random_range(..ACCESS_TYPES.len())];
            let access_start = rng.random_range(..buffer_size);
            let access_end = rng.random_range(access_start + 1..=buffer_size);

            // println!("{access:?} {access_start}..{access_end}");

            let accesses = BufferAccessIter::new(
                &mut buffer,
                access,
                buffer_subresource_range(access_start..access_end),
            );

            for (access, access_range) in accesses {
                // println!("\t{access:?} {}..{}", access_range.start, access_range.end);
                assert!(
                    data[access_range.start as usize..access_range.end as usize]
                        .iter()
                        .all(|data| *data == access),
                    "{:?}",
                    &data[access_range.start as usize..access_range.end as usize]
                );
            }

            for data in &mut data[access_start as usize..access_end as usize] {
                *data = access;
            }
        }
    }

    #[test]
    pub fn buffer_access_fuzz_small() {
        buffer_access_fuzz(5);
    }

    #[test]
    pub fn buffer_access_fuzz_medium() {
        buffer_access_fuzz(101);
    }

    #[test]
    pub fn buffer_access_fuzz_large() {
        buffer_access_fuzz(10_000);
    }

    #[test]
    pub fn buffer_info() {
        let info = Info::device_mem(0, vk::BufferUsageFlags::empty());
        let builder = info.to_builder().build();

        assert_eq!(info, builder);
    }

    #[test]
    pub fn buffer_info_alignment() {
        let info = Info::device_mem(0, vk::BufferUsageFlags::empty());

        assert_eq!(info.alignment, 1);
    }

    #[test]
    pub fn buffer_info_builder() {
        let info = Info::device_mem(0, vk::BufferUsageFlags::empty());
        let builder = Builder::default().size(0).build();

        assert_eq!(info, builder);
    }

    #[test]
    #[should_panic(expected = "Alignment must be a power of two")]
    pub fn buffer_info_builder_alignment_0() {
        Builder::default().size(0).alignment(0).build();
    }

    #[test]
    #[should_panic(expected = "Alignment must be a power of two")]
    pub fn buffer_info_builder_alignment_42() {
        Builder::default().size(0).alignment(42).build();
    }

    #[test]
    pub fn buffer_info_builder_alignment_256() {
        let mut info = Info::device_mem(42, vk::BufferUsageFlags::empty());
        info.alignment = 256;

        let builder = Builder::default().size(42).alignment(256).build();

        assert_eq!(info, builder);
    }

    #[test]
    #[should_panic(expected = "Field not initialized: size")]
    pub fn buffer_info_builder_uninit_size() {
        Builder::default().build();
    }

    fn buffer_subresource_range(
        Range { start, end }: Range<vk::DeviceSize>,
    ) -> BufferSubresourceRange {
        BufferSubresourceRange { start, end }
    }

    #[test]
    pub fn buffer_subresource_range_intersects() {
        use BufferSubresourceRange as B;

        assert!(!B { start: 10, end: 20 }.intersects(B { start: 0, end: 5 }));
        assert!(!B { start: 10, end: 20 }.intersects(B { start: 5, end: 10 }));
        assert!(B { start: 10, end: 20 }.intersects(B { start: 10, end: 15 }));
        assert!(B { start: 10, end: 20 }.intersects(B { start: 15, end: 20 }));
        assert!(!B { start: 10, end: 20 }.intersects(B { start: 20, end: 25 }));
        assert!(!B { start: 10, end: 20 }.intersects(B { start: 25, end: 30 }));

        assert!(!B { start: 5, end: 10 }.intersects(B { start: 10, end: 20 }));
        assert!(B { start: 5, end: 25 }.intersects(B { start: 10, end: 20 }));
        assert!(B { start: 5, end: 15 }.intersects(B { start: 10, end: 20 }));
        assert!(B { start: 10, end: 20 }.intersects(B { start: 10, end: 20 }));
        assert!(B { start: 11, end: 19 }.intersects(B { start: 10, end: 20 }));
        assert!(B { start: 15, end: 25 }.intersects(B { start: 10, end: 20 }));
        assert!(!B { start: 20, end: 25 }.intersects(B { start: 10, end: 20 }));
    }
}