screen_13/driver/
buffer.rs

1//! Buffer resource types
2
3use {
4    super::{DriverError, device::Device},
5    ash::vk,
6    derive_builder::{Builder, UninitializedFieldError},
7    gpu_allocator::{
8        MemoryLocation,
9        vulkan::{Allocation, AllocationCreateDesc, AllocationScheme},
10    },
11    log::trace,
12    log::warn,
13    std::{
14        fmt::{Debug, Formatter},
15        mem::ManuallyDrop,
16        ops::{Deref, DerefMut, Range},
17        sync::Arc,
18        thread::panicking,
19    },
20    vk_sync::AccessType,
21};
22
23#[cfg(feature = "parking_lot")]
24use parking_lot::Mutex;
25
26#[cfg(not(feature = "parking_lot"))]
27use std::sync::Mutex;
28
/// Smart pointer handle to a [buffer] object.
///
/// Also contains information about the object.
///
/// ## `Deref` behavior
///
/// `Buffer` automatically dereferences to [`vk::Buffer`] (via the [`Deref`] trait), so you
/// can call `vk::Buffer`'s methods on a value of type `Buffer`. To avoid name clashes with
/// `vk::Buffer`'s methods, the methods of `Buffer` itself are associated functions, called using
/// [fully qualified syntax]:
///
/// ```no_run
/// # use std::sync::Arc;
/// # use ash::vk;
/// # use screen_13::driver::{AccessType, DriverError};
/// # use screen_13::driver::device::{Device, DeviceInfo};
/// # use screen_13::driver::buffer::{Buffer, BufferInfo};
/// # fn main() -> Result<(), DriverError> {
/// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
/// # let info = BufferInfo::device_mem(8, vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS);
/// # let my_buf = Buffer::create(&device, info)?;
/// let addr = Buffer::device_address(&my_buf);
/// # Ok(()) }
/// ```
///
/// [buffer]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VkBuffer.html
/// [deref]: core::ops::Deref
/// [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name
pub struct Buffer {
    // Per-sub-range record of the most recent accesses declared via `Buffer::access`; used to
    // report the previous access a pipeline barrier must wait on.
    accesses: Mutex<BufferAccess>,
    // Backing memory; `ManuallyDrop` lets `Drop` take the allocation by value and return it to
    // the allocator before the `vk::Buffer` handle is destroyed.
    allocation: ManuallyDrop<Allocation>,
    // Raw Vulkan handle (exposed to callers through `Deref`).
    buffer: vk::Buffer,
    // Keeps the owning device alive for as long as this buffer exists.
    device: Arc<Device>,

    /// Information used to create this object.
    pub info: BufferInfo,

    /// A name for debugging purposes.
    pub name: Option<String>,
}
69
70impl Buffer {
    /// Creates a new buffer on the given device.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// # use std::sync::Arc;
    /// # use ash::vk;
    /// # use screen_13::driver::DriverError;
    /// # use screen_13::driver::device::{Device, DeviceInfo};
    /// # use screen_13::driver::buffer::{Buffer, BufferInfo};
    /// # fn main() -> Result<(), DriverError> {
    /// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
    /// const SIZE: vk::DeviceSize = 1024;
    /// let info = BufferInfo::host_mem(SIZE, vk::BufferUsageFlags::UNIFORM_BUFFER);
    /// let buf = Buffer::create(&device, info)?;
    ///
    /// assert_ne!(*buf, vk::Buffer::null());
    /// assert_eq!(buf.info.size, SIZE);
    /// # Ok(()) }
    /// ```
    #[profiling::function]
    pub fn create(device: &Arc<Device>, info: impl Into<BufferInfo>) -> Result<Self, DriverError> {
        let info = info.into();

        trace!("create: {:?}", info);

        debug_assert_ne!(info.size, 0, "Size must be non-zero");

        let device = Arc::clone(device);
        // CONCURRENT sharing across all of the device's queue families avoids explicit
        // queue-family ownership transfers when the buffer is used on multiple queues
        let buffer_info = vk::BufferCreateInfo::default()
            .size(info.size)
            .usage(info.usage)
            .sharing_mode(vk::SharingMode::CONCURRENT)
            .queue_family_indices(&device.physical_device.queue_family_indices);
        let buffer = unsafe {
            device.create_buffer(&buffer_info, None).map_err(|err| {
                warn!("unable to create buffer: {err}");

                DriverError::Unsupported
            })?
        };
        let mut requirements = unsafe { device.get_buffer_memory_requirements(buffer) };
        // Honor the stricter of the driver-reported alignment and the caller-requested one
        requirements.alignment = requirements.alignment.max(info.alignment);

        // Mappable buffers must live in host-visible memory; otherwise prefer device-local
        let memory_location = if info.mappable {
            MemoryLocation::CpuToGpu
        } else {
            MemoryLocation::GpuOnly
        };
        let allocation = {
            profiling::scope!("allocate");

            #[cfg_attr(not(feature = "parking_lot"), allow(unused_mut))]
            let mut allocator = device.allocator.lock();

            // std::sync::Mutex::lock returns a Result; parking_lot's lock does not
            #[cfg(not(feature = "parking_lot"))]
            let mut allocator = allocator.unwrap();

            allocator
                .allocate(&AllocationCreateDesc {
                    name: "buffer",
                    requirements,
                    location: memory_location,
                    linear: true, // Buffers are always linear
                    allocation_scheme: AllocationScheme::GpuAllocatorManaged,
                })
                .map_err(|err| {
                    warn!("unable to allocate buffer memory: {err}");

                    // Allocation failed after vkCreateBuffer: destroy the handle so it
                    // does not leak
                    unsafe {
                        device.destroy_buffer(buffer, None);
                    }

                    DriverError::from_alloc_err(err)
                })
                .and_then(|allocation| {
                    if let Err(err) = unsafe {
                        device.bind_buffer_memory(buffer, allocation.memory(), allocation.offset())
                    } {
                        warn!("unable to bind buffer memory: {err}");

                        // Roll back both the allocation and the buffer handle on bind failure
                        if let Err(err) = allocator.free(allocation) {
                            warn!("unable to free buffer allocation: {err}")
                        }

                        unsafe {
                            device.destroy_buffer(buffer, None);
                        }

                        Err(DriverError::OutOfMemory)
                    } else {
                        Ok(allocation)
                    }
                })
        }?;

        debug_assert_ne!(buffer, vk::Buffer::null());

        Ok(Self {
            accesses: Mutex::new(BufferAccess::new(info.size)),
            allocation: ManuallyDrop::new(allocation),
            buffer,
            device,
            info,
            name: None,
        })
    }
180
181    /// Creates a new mappable buffer on the given device and fills it with the data in `slice`.
182    ///
183    /// # Examples
184    ///
185    /// Basic usage:
186    ///
187    /// ```no_run
188    /// # use std::sync::Arc;
189    /// # use ash::vk;
190    /// # use screen_13::driver::DriverError;
191    /// # use screen_13::driver::device::{Device, DeviceInfo};
192    /// # use screen_13::driver::buffer::{Buffer, BufferInfo};
193    /// # fn main() -> Result<(), DriverError> {
194    /// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
195    /// const DATA: [u8; 4] = [0xfe, 0xed, 0xbe, 0xef];
196    /// let buf = Buffer::create_from_slice(&device, vk::BufferUsageFlags::UNIFORM_BUFFER, &DATA)?;
197    ///
198    /// assert_ne!(*buf, vk::Buffer::null());
199    /// assert_eq!(buf.info.size, 4);
200    /// assert_eq!(Buffer::mapped_slice(&buf), &DATA);
201    /// # Ok(()) }
202    /// ```
203    #[profiling::function]
204    pub fn create_from_slice(
205        device: &Arc<Device>,
206        usage: vk::BufferUsageFlags,
207        slice: impl AsRef<[u8]>,
208    ) -> Result<Self, DriverError> {
209        let slice = slice.as_ref();
210        let info = BufferInfo::host_mem(slice.len() as _, usage);
211        let mut buffer = Self::create(device, info)?;
212
213        Self::copy_from_slice(&mut buffer, 0, slice);
214
215        Ok(buffer)
216    }
217
    /// Keeps track of some `next_access` which affects this object.
    ///
    /// Returns the previous access for which a pipeline barrier should be used to prevent data
    /// corruption.
    ///
    /// # Note
    ///
    /// Used to maintain object state when passing a _Screen 13_-created `vk::Buffer` handle to
    /// external code such as [_Ash_] or [_Erupt_] bindings.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// # use std::sync::Arc;
    /// # use ash::vk;
    /// # use screen_13::driver::{AccessType, DriverError};
    /// # use screen_13::driver::device::{Device, DeviceInfo};
    /// # use screen_13::driver::buffer::{Buffer, BufferInfo, BufferSubresourceRange};
    /// # fn main() -> Result<(), DriverError> {
    /// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
    /// # const SIZE: vk::DeviceSize = 1024;
    /// # let info = BufferInfo::device_mem(SIZE, vk::BufferUsageFlags::STORAGE_BUFFER);
    /// # let my_buf = Buffer::create(&device, info)?;
    /// // Initially we want to "write"
    /// let access = AccessType::ComputeShaderWrite;
    /// let access_range = BufferSubresourceRange { start: 0, end: SIZE };
    /// let mut accesses = Buffer::access(&my_buf, access, access_range);
    ///
    /// assert_eq!(accesses.next(), Some((AccessType::Nothing, access_range)));
    /// assert!(accesses.next().is_none());
    ///
    /// // External code may now "write"; no barrier required in this case
    ///
    /// // Subsequently we want to "read"
    /// let access = AccessType::ComputeShaderReadOther;
    /// let mut accesses = Buffer::access(&my_buf, access, access_range);
    ///
    /// assert_eq!(accesses.next(), Some((AccessType::ComputeShaderWrite, access_range)));
    /// assert!(accesses.next().is_none());
    ///
    /// // A barrier on "write" before "read" is required! A render graph will do this
    /// // automatically when resolved, but manual access like this requires manual barriers
    /// # Ok(()) }
    /// ```
    ///
    /// [_Ash_]: https://crates.io/crates/ash
    /// [_Erupt_]: https://crates.io/crates/erupt
    #[profiling::function]
    pub fn access(
        this: &Self,
        access: AccessType,
        access_range: impl Into<BufferSubresourceRange>,
    ) -> impl Iterator<Item = (AccessType, BufferSubresourceRange)> + '_ {
        let mut access_range: BufferSubresourceRange = access_range.into();

        // `vk::WHOLE_SIZE` is a sentinel meaning "to the end of the buffer"
        if access_range.end == vk::WHOLE_SIZE {
            access_range.end = this.info.size;
        }

        let accesses = this.accesses.lock();

        // std::sync::Mutex::lock returns a Result; parking_lot's lock does not
        #[cfg(not(feature = "parking_lot"))]
        let accesses = accesses.unwrap();

        // The returned iterator owns the guard, so the lock is held until it is dropped
        BufferAccessIter::new(accesses, access, access_range)
    }
286
287    /// Updates a mappable buffer starting at `offset` with the data in `slice`.
288    ///
289    /// # Panics
290    ///
291    /// Panics if the buffer was not created with the `mappable` flag set to `true`.
292    ///
293    /// # Examples
294    ///
295    /// Basic usage:
296    ///
297    /// ```no_run
298    /// # use std::sync::Arc;
299    /// # use ash::vk;
300    /// # use screen_13::driver::DriverError;
301    /// # use screen_13::driver::device::{Device, DeviceInfo};
302    /// # use screen_13::driver::buffer::{Buffer, BufferInfo};
303    /// # fn main() -> Result<(), DriverError> {
304    /// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
305    /// # let info = BufferInfo::host_mem(4, vk::BufferUsageFlags::empty());
306    /// # let mut my_buf = Buffer::create(&device, info)?;
307    /// const DATA: [u8; 4] = [0xde, 0xad, 0xc0, 0xde];
308    /// Buffer::copy_from_slice(&mut my_buf, 0, &DATA);
309    ///
310    /// assert_eq!(Buffer::mapped_slice(&my_buf), &DATA);
311    /// # Ok(()) }
312    /// ```
313    #[profiling::function]
314    pub fn copy_from_slice(this: &mut Self, offset: vk::DeviceSize, slice: impl AsRef<[u8]>) {
315        let slice = slice.as_ref();
316        Self::mapped_slice_mut(this)[offset as _..offset as usize + slice.len()]
317            .copy_from_slice(slice);
318    }
319
    /// Returns the device address of this object.
    ///
    /// # Panics
    ///
    /// Panics if the buffer was not created with the `SHADER_DEVICE_ADDRESS` usage flag.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// # use std::sync::Arc;
    /// # use ash::vk;
    /// # use screen_13::driver::DriverError;
    /// # use screen_13::driver::device::{Device, DeviceInfo};
    /// # use screen_13::driver::buffer::{Buffer, BufferInfo};
    /// # fn main() -> Result<(), DriverError> {
    /// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
    /// # let info = BufferInfo::host_mem(4, vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS);
    /// # let my_buf = Buffer::create(&device, info)?;
    /// let addr = Buffer::device_address(&my_buf);
    ///
    /// assert_ne!(addr, 0);
    /// # Ok(()) }
    /// ```
    #[profiling::function]
    pub fn device_address(this: &Self) -> vk::DeviceAddress {
        // Checked in debug builds only; release builds rely on the caller having set the flag
        debug_assert!(
            this.info
                .usage
                .contains(vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS)
        );

        unsafe {
            this.device.get_buffer_device_address(
                &vk::BufferDeviceAddressInfo::default().buffer(this.buffer),
            )
        }
    }
359
360    /// Returns a mapped slice.
361    ///
362    /// # Panics
363    ///
364    /// Panics if the buffer was not created with the `mappable` flag set to `true`.
365    ///
366    /// # Examples
367    ///
368    /// Basic usage:
369    ///
370    /// ```no_run
371    /// # use std::sync::Arc;
372    /// # use ash::vk;
373    /// # use screen_13::driver::DriverError;
374    /// # use screen_13::driver::device::{Device, DeviceInfo};
375    /// # use screen_13::driver::buffer::{Buffer, BufferInfo};
376    /// # fn main() -> Result<(), DriverError> {
377    /// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
378    /// # const DATA: [u8; 4] = [0; 4];
379    /// # let my_buf = Buffer::create_from_slice(&device, vk::BufferUsageFlags::empty(), &DATA)?;
380    /// // my_buf is mappable and filled with four zeroes
381    /// let data = Buffer::mapped_slice(&my_buf);
382    ///
383    /// assert_eq!(data.len(), 4);
384    /// assert_eq!(data[0], 0x00);
385    /// # Ok(()) }
386    /// ```
387    #[profiling::function]
388    pub fn mapped_slice(this: &Self) -> &[u8] {
389        debug_assert!(
390            this.info.mappable,
391            "Buffer is not mappable - create using mappable flag"
392        );
393
394        &this.allocation.mapped_slice().unwrap()[0..this.info.size as usize]
395    }
396
397    /// Returns a mapped mutable slice.
398    ///
399    /// # Panics
400    ///
401    /// Panics if the buffer was not created with the `mappable` flag set to `true`.
402    ///
403    /// # Examples
404    ///
405    /// Basic usage:
406    ///
407    /// ```no_run
408    /// # use std::sync::Arc;
409    /// # use ash::vk;
410    /// # use glam::Mat4;
411    /// # use screen_13::driver::DriverError;
412    /// # use screen_13::driver::device::{Device, DeviceInfo};
413    /// # use screen_13::driver::buffer::{Buffer, BufferInfo};
414    /// # fn main() -> Result<(), DriverError> {
415    /// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
416    /// # const DATA: [u8; 4] = [0; 4];
417    /// # let mut my_buf = Buffer::create_from_slice(&device, vk::BufferUsageFlags::empty(), &DATA)?;
418    /// let mut data = Buffer::mapped_slice_mut(&mut my_buf);
419    /// data.copy_from_slice(&42f32.to_be_bytes());
420    ///
421    /// assert_eq!(data.len(), 4);
422    /// assert_eq!(data[0], 0x42);
423    /// # Ok(()) }
424    /// ```
425    #[profiling::function]
426    pub fn mapped_slice_mut(this: &mut Self) -> &mut [u8] {
427        debug_assert!(
428            this.info.mappable,
429            "Buffer is not mappable - create using mappable flag"
430        );
431
432        &mut this.allocation.mapped_slice_mut().unwrap()[0..this.info.size as usize]
433    }
434}
435
436impl Debug for Buffer {
437    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
438        if let Some(name) = &self.name {
439            write!(f, "{} ({:?})", name, self.buffer)
440        } else {
441            write!(f, "{:?}", self.buffer)
442        }
443    }
444}
445
impl Deref for Buffer {
    type Target = vk::Buffer;

    // Lets a `Buffer` be used anywhere a raw `vk::Buffer` handle is expected (e.g. `*buf`)
    fn deref(&self) -> &Self::Target {
        &self.buffer
    }
}
453
impl Drop for Buffer {
    #[profiling::function]
    fn drop(&mut self) {
        // Skip cleanup while panicking; the allocator/device may be in an inconsistent state
        if panicking() {
            return;
        }

        {
            profiling::scope!("deallocate");

            #[cfg_attr(not(feature = "parking_lot"), allow(unused_mut))]
            let mut allocator = self.device.allocator.lock();

            // std::sync::Mutex::lock returns a Result; parking_lot's lock does not
            #[cfg(not(feature = "parking_lot"))]
            let mut allocator = allocator.unwrap();

            // SAFETY: `drop` runs at most once and `allocation` is never used afterwards, so
            // taking it out of the `ManuallyDrop` here is sound
            allocator.free(unsafe { ManuallyDrop::take(&mut self.allocation) })
        }
        .unwrap_or_else(|err| warn!("unable to free buffer allocation: {err}"));

        // Free the memory before destroying the handle it was bound to
        unsafe {
            self.device.destroy_buffer(self.buffer, None);
        }
    }
}
479
// Run-length encoding of a buffer's current access state.
#[derive(Debug)]
struct BufferAccess {
    // Each entry is an access type and the byte offset where its run begins; a run extends to the
    // next entry's offset (or `size` for the last entry). Invariants (asserted in debug builds by
    // `BufferAccessIter::new`): starts are strictly increasing, the first start is 0, and no two
    // adjacent runs share an access type.
    accesses: Vec<(AccessType, vk::DeviceSize)>,
    // Total buffer size in bytes; the exclusive end of the final run.
    size: vk::DeviceSize,
}
485
486impl BufferAccess {
487    fn new(size: vk::DeviceSize) -> Self {
488        Self {
489            accesses: vec![(AccessType::Nothing, 0)],
490            size,
491        }
492    }
493}
494
// Iterator which records a new access over a range while yielding the accesses it replaces.
struct BufferAccessIter<T> {
    // The access type being recorded over `access_range`.
    access: AccessType,
    // Remainder of the requested range; `start` advances as items are yielded.
    access_range: BufferSubresourceRange,
    // Owned or borrowed access state (e.g. a mutex guard or `&mut BufferAccess`).
    buffer: T,
    // Index into `buffer.accesses` of the run containing `access_range.start`.
    idx: usize,
}
501
impl<T> BufferAccessIter<T>
where
    T: DerefMut<Target = BufferAccess>,
{
    // Creates an iterator recording `access` over the non-empty, in-bounds `access_range`.
    fn new(buffer: T, access: AccessType, access_range: BufferSubresourceRange) -> Self {
        debug_assert!(access_range.start < access_range.end);
        debug_assert!(access_range.end <= buffer.size);

        // Validate the run-length-encoding invariants (debug builds only)
        #[cfg(debug_assertions)]
        {
            let access_start = |(_, access_start): &(AccessType, vk::DeviceSize)| *access_start;

            assert_eq!(buffer.accesses.first().map(access_start), Some(0));
            assert!(buffer.accesses.last().map(access_start).unwrap() < buffer.size);

            // Custom is-sorted-by key to additionally check that all access starts are unique
            let (mut prev_access, mut prev_start) = buffer.accesses.first().copied().unwrap();
            for (next_access, next_start) in buffer.accesses.iter().skip(1).copied() {
                debug_assert_ne!(prev_access, next_access);
                debug_assert!(prev_start < next_start);

                prev_access = next_access;
                prev_start = next_start;
            }
        };

        // Binary search for the run containing `access_range.start`:
        // The needle will always be odd, and the probe always even, the result will always be err
        let needle = (access_range.start << 1) | 1;
        let idx = buffer
            .accesses
            .binary_search_by(|(_, probe)| (probe << 1).cmp(&needle));

        debug_assert!(idx.is_err());

        // SAFETY: the comparator can never return `Equal` (odd needle vs. even probes), so the
        // search always yields `Err` with an insertion point
        let mut idx = unsafe { idx.unwrap_err_unchecked() };

        // The first access will always be at start == 0, which is even, so idx cannot be 0
        debug_assert_ne!(idx, 0);

        // Step back from the insertion point to the run that covers `access_range.start`
        idx -= 1;

        Self {
            access,
            access_range,
            buffer,
            idx,
        }
    }
}
551
impl<T> Iterator for BufferAccessIter<T>
where
    T: DerefMut<Target = BufferAccess>,
{
    type Item = (AccessType, BufferSubresourceRange);

    // Yields the previous access of the next run overlapping the requested range while rewriting
    // the run-length encoding so the whole range ends up recorded as `self.access`. Runs are
    // split, retyped and coalesced in place to maintain the invariants (sorted unique starts, no
    // equal adjacent access types).
    fn next(&mut self) -> Option<Self::Item> {
        debug_assert!(self.access_range.start <= self.access_range.end);
        debug_assert!(self.access_range.end <= self.buffer.size);

        // The requested range has been fully consumed
        if self.access_range.start == self.access_range.end {
            return None;
        }

        debug_assert!(self.buffer.accesses.get(self.idx).is_some());

        // Current run and its half-open extent [access_start, access_end)
        let (access, access_start) = unsafe { *self.buffer.accesses.get_unchecked(self.idx) };
        let access_end = self
            .buffer
            .accesses
            .get(self.idx + 1)
            .map(|(_, access_start)| *access_start)
            .unwrap_or(self.buffer.size);
        let mut access_range = self.access_range;

        // Clip this item to the current run and advance the cursor past it
        access_range.end = access_range.end.min(access_end);
        self.access_range.start = access_range.end;

        if access == self.access {
            // The run already records the new access type; nothing to rewrite
            self.idx += 1;
        } else if access_start < access_range.start {
            // The run begins before the range: keep its head, rewrite only the overlap
            if let Some((_, access_start)) = self
                .buffer
                .accesses
                .get_mut(self.idx + 1)
                .filter(|(access, _)| *access == self.access && access_end == access_range.end)
            {
                // The following run already has the new access type; grow it leftwards
                *access_start = access_range.start;
                self.idx += 1;
            } else {
                // Insert a run for the overlap, re-inserting the old run's tail if one remains
                self.idx += 1;
                self.buffer
                    .accesses
                    .insert(self.idx, (self.access, access_range.start));

                if access_end > access_range.end {
                    self.buffer
                        .accesses
                        .insert(self.idx + 1, (access, access_range.end));
                }

                self.idx += 1;
            }
        } else if self.idx > 0 {
            // The range covers the run from its start and a previous run exists
            if self
                .buffer
                .accesses
                .get(self.idx - 1)
                .filter(|(access, _)| *access == self.access)
                .is_some()
            {
                // The previous run already has the new access type: merge into it
                if access_end == access_range.end {
                    self.buffer.accesses.remove(self.idx);

                    // Also coalesce with the following run if it matches too
                    if self
                        .buffer
                        .accesses
                        .get(self.idx)
                        .filter(|(access, _)| *access == self.access)
                        .is_some()
                    {
                        self.buffer.accesses.remove(self.idx);
                        self.idx -= 1;
                    }
                } else {
                    debug_assert!(self.buffer.accesses.get(self.idx).is_some());

                    // Partially covered: shrink the current run from the left
                    let (_, access_start) =
                        unsafe { self.buffer.accesses.get_unchecked_mut(self.idx) };
                    *access_start = access_range.end;
                }
            } else if access_end == access_range.end {
                // Fully covered: retype the run in place
                debug_assert!(self.buffer.accesses.get(self.idx).is_some());

                let (access, _) = unsafe { self.buffer.accesses.get_unchecked_mut(self.idx) };
                *access = self.access;

                // Coalesce with the following run if it now matches
                if self
                    .buffer
                    .accesses
                    .get(self.idx + 1)
                    .filter(|(access, _)| *access == self.access)
                    .is_some()
                {
                    self.buffer.accesses.remove(self.idx + 1);
                } else {
                    self.idx += 1;
                }
            } else {
                // Partially covered: shrink the run and insert the new access before it
                if let Some((_, access_start)) = self.buffer.accesses.get_mut(self.idx) {
                    *access_start = access_range.end;
                }

                self.buffer
                    .accesses
                    .insert(self.idx, (self.access, access_range.start));
                self.idx += 2;
            }
        } else if let Some((_, access_start)) = self
            .buffer
            .accesses
            .get_mut(1)
            .filter(|(access, _)| *access == self.access && access_end == access_range.end)
        {
            // First run fully covered and the second run matches: extend it back to offset 0
            *access_start = 0;
            self.buffer.accesses.remove(0);
        } else if access_end > access_range.end {
            // First run partially covered: prepend the new access and shrink the old run
            self.buffer.accesses.insert(0, (self.access, 0));

            debug_assert!(self.buffer.accesses.get(1).is_some());

            let (_, access_start) = unsafe { self.buffer.accesses.get_unchecked_mut(1) };
            *access_start = access_range.end;
        } else {
            // First run fully covered: retype it in place
            debug_assert!(!self.buffer.accesses.is_empty());

            let (access, _) = unsafe { self.buffer.accesses.get_unchecked_mut(0) };
            *access = self.access;

            // Coalesce with the following run if it now matches
            if self
                .buffer
                .accesses
                .get(1)
                .filter(|(access, _)| *access == self.access)
                .is_some()
            {
                self.buffer.accesses.remove(1);
            } else {
                self.idx += 1;
            }
        }

        Some((access, access_range))
    }
}
697
/// Information used to create a [`Buffer`] instance.
#[derive(Builder, Clone, Copy, Debug, Eq, Hash, PartialEq)]
#[builder(
    build_fn(private, name = "fallible_build", error = "BufferInfoBuilderError"),
    derive(Clone, Copy, Debug),
    pattern = "owned"
)]
#[non_exhaustive]
pub struct BufferInfo {
    /// Byte alignment of the base device address of the buffer.
    ///
    /// Must be a power of two.
    #[builder(default = "1")]
    pub alignment: vk::DeviceSize,

    /// Specifies a buffer whose memory is host visible and may be mapped.
    #[builder(default)]
    pub mappable: bool,

    /// Size in bytes of the buffer to be created.
    pub size: vk::DeviceSize,

    /// A bitmask specifying allowed usages of the buffer.
    #[builder(default)]
    pub usage: vk::BufferUsageFlags,
}
724
725impl BufferInfo {
    /// Specifies a non-mappable buffer with the given `size` and `usage` values.
    ///
    /// Device-local memory (located on the GPU) is used.
    #[inline(always)]
    pub const fn device_mem(size: vk::DeviceSize, usage: vk::BufferUsageFlags) -> BufferInfo {
        BufferInfo {
            // No extra alignment requirement beyond what the driver reports
            alignment: 1,
            mappable: false,
            size,
            usage,
        }
    }
738
    /// Specifies a mappable buffer with the given `size` and `usage` values.
    ///
    /// Host-local memory (located in CPU-accessible RAM) is used.
    ///
    /// # Note
    ///
    /// For convenience the given usage value will be bitwise OR'd with
    /// `TRANSFER_DST | TRANSFER_SRC`.
    #[inline(always)]
    pub const fn host_mem(size: vk::DeviceSize, usage: vk::BufferUsageFlags) -> BufferInfo {
        // Raw bit-ops because trait operators (`BitOr`) cannot be called in a `const fn`
        let usage = vk::BufferUsageFlags::from_raw(
            usage.as_raw()
                | vk::BufferUsageFlags::TRANSFER_DST.as_raw()
                | vk::BufferUsageFlags::TRANSFER_SRC.as_raw(),
        );

        BufferInfo {
            // No extra alignment requirement beyond what the driver reports
            alignment: 1,
            mappable: true,
            size,
            usage,
        }
    }
762
    /// Specifies a non-mappable buffer with the given `size` and `usage` values.
    #[allow(clippy::new_ret_no_self)]
    #[deprecated = "Use BufferInfo::device_mem()"]
    #[doc(hidden)]
    pub fn new(size: vk::DeviceSize, usage: vk::BufferUsageFlags) -> BufferInfoBuilder {
        // Kept for backward compatibility; delegates to the replacement constructor
        Self::device_mem(size, usage).to_builder()
    }
770
    /// Specifies a mappable buffer with the given `size` and `usage` values.
    ///
    /// # Note
    ///
    /// For convenience the given usage value will be bitwise OR'd with
    /// `TRANSFER_DST | TRANSFER_SRC`.
    #[deprecated = "Use BufferInfo::host_mem()"]
    #[doc(hidden)]
    pub fn new_mappable(size: vk::DeviceSize, usage: vk::BufferUsageFlags) -> BufferInfoBuilder {
        // Kept for backward compatibility; delegates to the replacement constructor
        Self::host_mem(size, usage).to_builder()
    }
782
783    /// Converts a `BufferInfo` into a `BufferInfoBuilder`.
784    #[inline(always)]
785    pub fn to_builder(self) -> BufferInfoBuilder {
786        BufferInfoBuilder {
787            alignment: Some(self.alignment),
788            mappable: Some(self.mappable),
789            size: Some(self.size),
790            usage: Some(self.usage),
791        }
792    }
793}
794
795impl BufferInfoBuilder {
796    /// Builds a new `BufferInfo`.
797    ///    
798    /// # Panics
799    ///
800    /// If any of the following values have not been set this function will panic:
801    ///
802    /// * `size`
803    ///
804    /// If `alignment` is not a power to two this function will panic.
805    #[inline(always)]
806    pub fn build(self) -> BufferInfo {
807        let res = match self.fallible_build() {
808            Err(BufferInfoBuilderError(err)) => panic!("{err}"),
809            Ok(info) => info,
810        };
811
812        assert_eq!(
813            res.alignment.count_ones(),
814            1,
815            "Alignment must be a power of two"
816        );
817
818        res
819    }
820}
821
impl From<BufferInfoBuilder> for BufferInfo {
    // Lets a builder be passed directly to APIs taking `impl Into<BufferInfo>` (e.g.
    // `Buffer::create`); panics under the same conditions as `BufferInfoBuilder::build`
    fn from(info: BufferInfoBuilder) -> Self {
        info.build()
    }
}
827
// Newtype over `derive_builder`'s error type so `build` can destructure it and panic with the
// underlying message.
#[derive(Debug)]
struct BufferInfoBuilderError(UninitializedFieldError);
830
impl From<UninitializedFieldError> for BufferInfoBuilderError {
    // Required by `derive_builder` so `fallible_build` can convert uninitialized-field errors
    fn from(err: UninitializedFieldError) -> Self {
        Self(err)
    }
}
836
837/// Specifies a range of buffer data.
838#[derive(Clone, Copy, Debug, PartialEq)]
839pub struct BufferSubresourceRange {
840    /// The start of range.
841    pub start: vk::DeviceSize,
842
843    /// The non-inclusive end of the range.
844    pub end: vk::DeviceSize,
845}
846
847impl BufferSubresourceRange {
848    #[cfg(test)]
849    pub(crate) fn intersects(self, other: Self) -> bool {
850        self.start < other.end && self.end > other.start
851    }
852}
853
854impl From<BufferInfo> for BufferSubresourceRange {
855    fn from(info: BufferInfo) -> Self {
856        Self {
857            start: 0,
858            end: info.size,
859        }
860    }
861}
862
863impl From<Range<vk::DeviceSize>> for BufferSubresourceRange {
864    fn from(range: Range<vk::DeviceSize>) -> Self {
865        Self {
866            start: range.start,
867            end: range.end,
868        }
869    }
870}
871
872impl From<Option<Range<vk::DeviceSize>>> for BufferSubresourceRange {
873    fn from(range: Option<Range<vk::DeviceSize>>) -> Self {
874        range.unwrap_or(0..vk::WHOLE_SIZE).into()
875    }
876}
877
878impl From<BufferSubresourceRange> for Range<vk::DeviceSize> {
879    fn from(subresource: BufferSubresourceRange) -> Self {
880        subresource.start..subresource.end
881    }
882}
883
#[cfg(test)]
mod tests {
    use {
        super::*,
        rand::{Rng, SeedableRng, rngs::SmallRng},
    };

    // Short aliases so the `BufferInfo`/builder round-trip tests read concisely.
    type Info = BufferInfo;
    type Builder = BufferInfoBuilder;

    // Number of randomized access operations each fuzz test performs.
    const FUZZ_COUNT: usize = 100_000;

    // Exercises `BufferAccessIter` through a sequence of overlapping accesses.
    //
    // Judging by the assertions below, `buffer.accesses` is a run-length
    // encoding of per-byte access state: each `(AccessType, offset)` entry
    // starts a run that extends to the next entry's offset (or the buffer
    // end). Iterating yields `(previous_access, sub_range)` pairs for the
    // requested range while recording the new access type in its place.
    #[test]
    pub fn buffer_access() {
        let mut buffer = BufferAccess::new(100);

        // First write over a pristine buffer: the prior state is `Nothing`.
        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::TransferWrite,
                buffer_subresource_range(0..10),
            );

            assert_eq!(accesses.buffer.accesses, vec![(AccessType::Nothing, 0)]);
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::Nothing, buffer_subresource_range(0..10))
            );
            assert_eq!(
                accesses.buffer.accesses,
                vec![(AccessType::TransferWrite, 0), (AccessType::Nothing, 10)]
            );
            assert!(accesses.next().is_none());
        }

        // A read straddling the previous write (5..15) is split at the run
        // boundary: first the written part (5..10), then the untouched part
        // (10..15).
        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::TransferRead,
                buffer_subresource_range(5..15),
            );

            assert_eq!(
                accesses.buffer.accesses,
                vec![(AccessType::TransferWrite, 0), (AccessType::Nothing, 10)]
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::TransferWrite, buffer_subresource_range(5..10))
            );
            assert_eq!(
                accesses.buffer.accesses,
                vec![
                    (AccessType::TransferWrite, 0),
                    (AccessType::TransferRead, 5),
                    (AccessType::Nothing, 10)
                ]
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::Nothing, buffer_subresource_range(10..15))
            );
            assert_eq!(
                accesses.buffer.accesses,
                vec![
                    (AccessType::TransferWrite, 0),
                    (AccessType::TransferRead, 5),
                    (AccessType::Nothing, 15)
                ]
            );
            assert!(accesses.next().is_none());
        }

        // Reading the whole buffer yields one item per existing run, and the
        // recorded runs coalesce as adjacent ranges adopt `HostRead`.
        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::HostRead,
                buffer_subresource_range(0..100),
            );

            assert_eq!(
                accesses.buffer.accesses,
                vec![
                    (AccessType::TransferWrite, 0),
                    (AccessType::TransferRead, 5),
                    (AccessType::Nothing, 15)
                ]
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::TransferWrite, buffer_subresource_range(0..5))
            );
            assert_eq!(
                accesses.buffer.accesses,
                vec![
                    (AccessType::HostRead, 0),
                    (AccessType::TransferRead, 5),
                    (AccessType::Nothing, 15)
                ]
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::TransferRead, buffer_subresource_range(5..15))
            );
            assert_eq!(
                accesses.buffer.accesses,
                vec![(AccessType::HostRead, 0), (AccessType::Nothing, 15)]
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::Nothing, buffer_subresource_range(15..100))
            );
            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostRead, 0),]);
            assert!(accesses.next().is_none());
        }

        // A whole-buffer write over a single uniform run yields exactly one
        // item and leaves a single run behind.
        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::HostWrite,
                buffer_subresource_range(0..100),
            );

            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostRead, 0)]);
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostRead, buffer_subresource_range(0..100))
            );
            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostWrite, 0)]);
            assert!(accesses.next().is_none());
        }

        // Repeating the identical access is a no-op on the recorded runs.
        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::HostWrite,
                buffer_subresource_range(0..100),
            );

            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostWrite, 0)]);
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostWrite, buffer_subresource_range(0..100))
            );
            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostWrite, 0)]);
            assert!(accesses.next().is_none());
        }

        // An interior sub-range with the same access type also leaves the
        // run-length encoding untouched (no spurious splits).
        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::HostWrite,
                buffer_subresource_range(1..99),
            );

            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostWrite, 0)]);
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostWrite, buffer_subresource_range(1..99))
            );
            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostWrite, 0)]);
            assert!(accesses.next().is_none());
        }

        // An interior sub-range with a *different* access type splits the
        // single run into three: write / read / write.
        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::HostRead,
                buffer_subresource_range(1..99),
            );

            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostWrite, 0)]);
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostWrite, buffer_subresource_range(1..99))
            );
            assert_eq!(
                accesses.buffer.accesses,
                vec![
                    (AccessType::HostWrite, 0),
                    (AccessType::HostRead, 1),
                    (AccessType::HostWrite, 99)
                ]
            );
            assert!(accesses.next().is_none());
        }

        // `Nothing` over the whole buffer reports each of the three runs in
        // order (and records `Nothing`, which the next scenario observes).
        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::Nothing,
                buffer_subresource_range(0..100),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostWrite, buffer_subresource_range(0..1))
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostRead, buffer_subresource_range(1..99))
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostWrite, buffer_subresource_range(99..100))
            );
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::AnyShaderWrite,
                buffer_subresource_range(0..100),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::Nothing, buffer_subresource_range(0..100))
            );
            assert!(accesses.next().is_none());
        }

        // Two disjoint one-byte reads at 1..2 and 3..4 ...
        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::AnyShaderReadOther,
                buffer_subresource_range(1..2),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(1..2))
            );
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::AnyShaderReadOther,
                buffer_subresource_range(3..4),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(3..4))
            );
            assert!(accesses.next().is_none());
        }

        // ... produce the expected alternating write/read/write/read/write
        // pattern over 0..5.
        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::Nothing,
                buffer_subresource_range(0..5),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(0..1))
            );
            assert_eq!(
                accesses.next().unwrap(),
                (
                    AccessType::AnyShaderReadOther,
                    buffer_subresource_range(1..2)
                )
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(2..3))
            );
            assert_eq!(
                accesses.next().unwrap(),
                (
                    AccessType::AnyShaderReadOther,
                    buffer_subresource_range(3..4)
                )
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(4..5))
            );
            assert!(accesses.next().is_none());
        }
    }

    // Same iterator behavior, but starting from a hand-seeded run list
    // instead of a pristine buffer.
    #[test]
    pub fn buffer_access_basic() {
        let mut buffer = BufferAccess::new(5);

        // Seed two runs directly: bytes 0..4 read, byte 4 written.
        buffer.accesses = vec![
            (AccessType::ColorAttachmentRead, 0),
            (AccessType::AnyShaderWrite, 4),
        ];

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::AnyShaderWrite,
                buffer_subresource_range(0..2),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (
                    AccessType::ColorAttachmentRead,
                    buffer_subresource_range(0..2)
                )
            );
            assert!(accesses.next().is_none());
        }

        // After the write above the runs are write/read/write; a whole-buffer
        // access must report all three in order.
        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::HostWrite,
                buffer_subresource_range(0..5),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(0..2))
            );
            assert_eq!(
                accesses.next().unwrap(),
                (
                    AccessType::ColorAttachmentRead,
                    buffer_subresource_range(2..4)
                )
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(4..5))
            );

            assert!(accesses.next().is_none());
        }
    }

    // Fuzzes `BufferAccessIter` against a trivially-correct per-byte mirror:
    // `data[i]` holds the last access recorded for byte `i`, and every range
    // the iterator yields must be uniform and equal to that mirror.
    fn buffer_access_fuzz(buffer_size: vk::DeviceSize) {
        static ACCESS_TYPES: &[AccessType] = &[
            AccessType::AnyShaderReadOther,
            AccessType::AnyShaderWrite,
            AccessType::ColorAttachmentRead,
            AccessType::ColorAttachmentWrite,
            AccessType::HostRead,
            AccessType::HostWrite,
            AccessType::Nothing,
        ];

        // Fixed seed keeps the fuzz deterministic and reproducible.
        let mut rng = SmallRng::seed_from_u64(42);
        let mut buffer = BufferAccess::new(buffer_size);
        let mut data = vec![AccessType::Nothing; buffer_size as usize];

        for _ in 0..FUZZ_COUNT {
            // Pick a random access type and a random non-empty sub-range.
            let access = ACCESS_TYPES[rng.random_range(..ACCESS_TYPES.len())];
            let access_start = rng.random_range(..buffer_size);
            let access_end = rng.random_range(access_start + 1..=buffer_size);

            // println!("{access:?} {access_start}..{access_end}");

            let accesses = BufferAccessIter::new(
                &mut buffer,
                access,
                buffer_subresource_range(access_start..access_end),
            );

            // Each yielded range must be uniform and agree with the mirror.
            for (access, access_range) in accesses {
                // println!("\t{access:?} {}..{}", access_range.start, access_range.end);
                assert!(
                    data[access_range.start as usize..access_range.end as usize]
                        .iter()
                        .all(|data| *data == access),
                    "{:?}",
                    &data[access_range.start as usize..access_range.end as usize]
                );
            }

            // Record the new access in the mirror for the next iteration.
            for data in &mut data[access_start as usize..access_end as usize] {
                *data = access;
            }
        }
    }

    #[test]
    pub fn buffer_access_fuzz_small() {
        buffer_access_fuzz(5);
    }

    #[test]
    pub fn buffer_access_fuzz_medium() {
        buffer_access_fuzz(101);
    }

    #[test]
    pub fn buffer_access_fuzz_large() {
        buffer_access_fuzz(10_000);
    }

    // `BufferInfo::to_builder` must round-trip through the builder.
    #[test]
    pub fn buffer_info() {
        let info = Info::device_mem(0, vk::BufferUsageFlags::empty());
        let builder = info.to_builder().build();

        assert_eq!(info, builder);
    }

    // Default alignment is 1, i.e. no extra alignment requirement.
    #[test]
    pub fn buffer_info_alignment() {
        let info = Info::device_mem(0, vk::BufferUsageFlags::empty());

        assert_eq!(info.alignment, 1);
    }

    // A builder given only `size` must match the `device_mem` defaults.
    #[test]
    pub fn buffer_info_builder() {
        let info = Info::device_mem(0, vk::BufferUsageFlags::empty());
        let builder = Builder::default().size(0).build();

        assert_eq!(info, builder);
    }

    // Zero alignment is rejected (zero is not a power of two).
    #[test]
    #[should_panic(expected = "Alignment must be a power of two")]
    pub fn buffer_info_builder_alignment_0() {
        Builder::default().size(0).alignment(0).build();
    }

    // Non-power-of-two alignment is rejected.
    #[test]
    #[should_panic(expected = "Alignment must be a power of two")]
    pub fn buffer_info_builder_alignment_42() {
        Builder::default().size(0).alignment(42).build();
    }

    // A valid power-of-two alignment is accepted and preserved.
    #[test]
    pub fn buffer_info_builder_alignment_256() {
        let mut info = Info::device_mem(42, vk::BufferUsageFlags::empty());
        info.alignment = 256;

        let builder = Builder::default().size(42).alignment(256).build();

        assert_eq!(info, builder);
    }

    // `size` is mandatory; building without it must panic.
    #[test]
    #[should_panic(expected = "Field not initialized: size")]
    pub fn buffer_info_builder_uninit_size() {
        Builder::default().build();
    }

    // Test-local shorthand for building a `BufferSubresourceRange` from a
    // standard `Range`.
    fn buffer_subresource_range(
        Range { start, end }: Range<vk::DeviceSize>,
    ) -> BufferSubresourceRange {
        BufferSubresourceRange { start, end }
    }

    // Boundary coverage for `intersects`: touching ranges (shared endpoint)
    // do not intersect; any genuine overlap does.
    #[test]
    pub fn buffer_subresource_range_intersects() {
        use BufferSubresourceRange as B;

        assert!(!B { start: 10, end: 20 }.intersects(B { start: 0, end: 5 }));
        assert!(!B { start: 10, end: 20 }.intersects(B { start: 5, end: 10 }));
        assert!(B { start: 10, end: 20 }.intersects(B { start: 10, end: 15 }));
        assert!(B { start: 10, end: 20 }.intersects(B { start: 15, end: 20 }));
        assert!(!B { start: 10, end: 20 }.intersects(B { start: 20, end: 25 }));
        assert!(!B { start: 10, end: 20 }.intersects(B { start: 25, end: 30 }));

        assert!(!B { start: 5, end: 10 }.intersects(B { start: 10, end: 20 }));
        assert!(B { start: 5, end: 25 }.intersects(B { start: 10, end: 20 }));
        assert!(B { start: 5, end: 15 }.intersects(B { start: 10, end: 20 }));
        assert!(B { start: 10, end: 20 }.intersects(B { start: 10, end: 20 }));
        assert!(B { start: 11, end: 19 }.intersects(B { start: 10, end: 20 }));
        assert!(B { start: 15, end: 25 }.intersects(B { start: 10, end: 20 }));
        assert!(!B { start: 20, end: 25 }.intersects(B { start: 10, end: 20 }));
    }
}
1361}