use {
    super::{DriverError, device::Device},
    ash::vk,
    derive_builder::{Builder, UninitializedFieldError},
    gpu_allocator::{
        MemoryLocation,
        vulkan::{Allocation, AllocationCreateDesc, AllocationScheme},
    },
    log::{trace, warn},
    std::{
        fmt::{Debug, Formatter},
        mem::ManuallyDrop,
        ops::{Deref, DerefMut, Range},
        sync::Arc,
        thread::panicking,
    },
    vk_sync::AccessType,
};

#[cfg(feature = "parking_lot")]
use parking_lot::Mutex;

#[cfg(not(feature = "parking_lot"))]
use std::sync::Mutex;

/// Smart pointer handle to a Vulkan buffer object which destroys the buffer and frees its
/// allocation on drop.
pub struct Buffer {
    accesses: Mutex<BufferAccess>,
    allocation: ManuallyDrop<Allocation>,
    buffer: vk::Buffer,
    device: Arc<Device>,

    /// Information used to create this object.
    pub info: BufferInfo,

    /// A debug name displayed by the `Debug` implementation.
    pub name: Option<String>,
}
impl Buffer {
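    /// Creates a new buffer on the given device.
    ///
    /// A minimal usage sketch. The `screen_13` module paths and the
    /// `Device::create_headless(DeviceInfo::default())` setup shown here are assumptions and
    /// may differ from the actual public API:
    ///
    /// ```no_run
    /// # use std::sync::Arc;
    /// # use ash::vk;
    /// # use screen_13::driver::{DriverError, device::{Device, DeviceInfo}};
    /// # use screen_13::driver::buffer::{Buffer, BufferInfo};
    /// # fn main() -> Result<(), DriverError> {
    /// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
    /// // 1 KiB of host-visible memory for uniform data
    /// let info = BufferInfo::host_mem(1024, vk::BufferUsageFlags::UNIFORM_BUFFER);
    /// let buffer = Buffer::create(&device, info)?;
    /// # Ok(()) }
    /// ```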
    #[profiling::function]
    pub fn create(device: &Arc<Device>, info: impl Into<BufferInfo>) -> Result<Self, DriverError> {
        let info = info.into();

        trace!("create: {:?}", info);

        debug_assert_ne!(info.size, 0, "Size must be non-zero");

        let device = Arc::clone(device);
        let buffer_info = vk::BufferCreateInfo::default()
            .size(info.size)
            .usage(info.usage)
            .sharing_mode(vk::SharingMode::CONCURRENT)
            .queue_family_indices(&device.physical_device.queue_family_indices);
        let buffer = unsafe {
            device.create_buffer(&buffer_info, None).map_err(|err| {
                warn!("{err}");

                DriverError::Unsupported
            })?
        };
        let mut requirements = unsafe { device.get_buffer_memory_requirements(buffer) };
        requirements.alignment = requirements.alignment.max(info.alignment);

        let memory_location = if info.mappable {
            MemoryLocation::CpuToGpu
        } else {
            MemoryLocation::GpuOnly
        };
        let allocation = {
            profiling::scope!("allocate");

            #[cfg_attr(not(feature = "parking_lot"), allow(unused_mut))]
            let mut allocator = device.allocator.lock();

            #[cfg(not(feature = "parking_lot"))]
            let mut allocator = allocator.unwrap();

            allocator
                .allocate(&AllocationCreateDesc {
                    name: "buffer",
                    requirements,
                    location: memory_location,
                    linear: true,
                    allocation_scheme: AllocationScheme::GpuAllocatorManaged,
                })
                .map_err(|err| {
                    warn!("{err}");

                    DriverError::Unsupported
                })
        }?;

        unsafe {
            device
                .bind_buffer_memory(buffer, allocation.memory(), allocation.offset())
                .map_err(|err| {
                    warn!("{err}");

                    DriverError::Unsupported
                })?
        };

        Ok(Self {
            accesses: Mutex::new(BufferAccess::new(info.size)),
            allocation: ManuallyDrop::new(allocation),
            buffer,
            device,
            info,
            name: None,
        })
    }

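    /// Creates a new mappable buffer on the given device and copies `slice` into it.
    ///
    /// `TRANSFER_SRC` and `TRANSFER_DST` are added to `usage` via [`BufferInfo::host_mem`].
    /// A sketch using the same assumed setup as the example on [`Buffer::create`]:
    ///
    /// ```no_run
    /// # use std::sync::Arc;
    /// # use ash::vk;
    /// # use screen_13::driver::{DriverError, device::{Device, DeviceInfo}};
    /// # use screen_13::driver::buffer::Buffer;
    /// # fn main() -> Result<(), DriverError> {
    /// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
    /// let buffer = Buffer::create_from_slice(
    ///     &device,
    ///     vk::BufferUsageFlags::VERTEX_BUFFER,
    ///     [0u8; 12],
    /// )?;
    /// # Ok(()) }
    /// ```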
    #[profiling::function]
    pub fn create_from_slice(
        device: &Arc<Device>,
        usage: vk::BufferUsageFlags,
        slice: impl AsRef<[u8]>,
    ) -> Result<Self, DriverError> {
        let slice = slice.as_ref();
        let info = BufferInfo::host_mem(slice.len() as _, usage);
        let mut buffer = Self::create(device, info)?;

        Self::copy_from_slice(&mut buffer, 0, slice);

        Ok(buffer)
    }

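    /// Records the given `access` for `access_range` and returns an iterator over the
    /// previously recorded accesses that overlap it, as `(AccessType, range)` pairs.
    ///
    /// An end of `vk::WHOLE_SIZE` is clamped to the buffer size. Intended for
    /// synchronization code that needs to know what each overlapping sub-range was last
    /// used for, for example when deciding which pipeline barriers to record.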
    #[profiling::function]
    pub fn access(
        this: &Self,
        access: AccessType,
        access_range: impl Into<BufferSubresourceRange>,
    ) -> impl Iterator<Item = (AccessType, BufferSubresourceRange)> + '_ {
        let mut access_range: BufferSubresourceRange = access_range.into();

        if access_range.end == vk::WHOLE_SIZE {
            access_range.end = this.info.size;
        }

        let accesses = this.accesses.lock();

        #[cfg(not(feature = "parking_lot"))]
        let accesses = accesses.unwrap();

        BufferAccessIter::new(accesses, access, access_range)
    }

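    /// Copies `slice` into this buffer's mapped memory, starting at byte `offset`.
    ///
    /// The buffer must have been created with `mappable` set. A sketch using the same
    /// assumed setup as the example on [`Buffer::create`]:
    ///
    /// ```no_run
    /// # use std::sync::Arc;
    /// # use ash::vk;
    /// # use screen_13::driver::{DriverError, device::{Device, DeviceInfo}};
    /// # use screen_13::driver::buffer::{Buffer, BufferInfo};
    /// # fn main() -> Result<(), DriverError> {
    /// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
    /// let info = BufferInfo::host_mem(4, vk::BufferUsageFlags::empty());
    /// let mut buffer = Buffer::create(&device, info)?;
    ///
    /// Buffer::copy_from_slice(&mut buffer, 0, [0xde_u8, 0xad, 0xc0, 0xde]);
    /// # Ok(()) }
    /// ```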
    #[profiling::function]
    pub fn copy_from_slice(this: &mut Self, offset: vk::DeviceSize, slice: impl AsRef<[u8]>) {
        let slice = slice.as_ref();
        Self::mapped_slice_mut(this)[offset as usize..offset as usize + slice.len()]
            .copy_from_slice(slice);
    }

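    /// Returns the device address of this buffer.
    ///
    /// The buffer must have been created with the `SHADER_DEVICE_ADDRESS` usage flag, and
    /// the device must support the Vulkan `bufferDeviceAddress` feature. A sketch using the
    /// same assumed setup as the example on [`Buffer::create`]:
    ///
    /// ```no_run
    /// # use std::sync::Arc;
    /// # use ash::vk;
    /// # use screen_13::driver::{DriverError, device::{Device, DeviceInfo}};
    /// # use screen_13::driver::buffer::{Buffer, BufferInfo};
    /// # fn main() -> Result<(), DriverError> {
    /// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
    /// let info = BufferInfo::device_mem(
    ///     1024,
    ///     vk::BufferUsageFlags::STORAGE_BUFFER | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS,
    /// );
    /// let buffer = Buffer::create(&device, info)?;
    /// let addr = Buffer::device_address(&buffer);
    /// # Ok(()) }
    /// ```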
    #[profiling::function]
    pub fn device_address(this: &Self) -> vk::DeviceAddress {
        debug_assert!(
            this.info
                .usage
                .contains(vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS)
        );

        unsafe {
            this.device.get_buffer_device_address(
                &vk::BufferDeviceAddressInfo::default().buffer(this.buffer),
            )
        }
    }

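    /// Returns the buffer's mapped memory as a byte slice.
    ///
    /// The buffer must have been created with `mappable` set. A sketch using the same
    /// assumed setup as the example on [`Buffer::create`]:
    ///
    /// ```no_run
    /// # use std::sync::Arc;
    /// # use ash::vk;
    /// # use screen_13::driver::{DriverError, device::{Device, DeviceInfo}};
    /// # use screen_13::driver::buffer::{Buffer, BufferInfo};
    /// # fn main() -> Result<(), DriverError> {
    /// # let device = Arc::new(Device::create_headless(DeviceInfo::default())?);
    /// let info = BufferInfo::host_mem(1024, vk::BufferUsageFlags::UNIFORM_BUFFER);
    /// let buffer = Buffer::create(&device, info)?;
    /// let data = Buffer::mapped_slice(&buffer);
    ///
    /// assert_eq!(data.len(), 1024);
    /// # Ok(()) }
    /// ```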
    #[profiling::function]
    pub fn mapped_slice(this: &Self) -> &[u8] {
        debug_assert!(
            this.info.mappable,
            "Buffer is not mappable - create using mappable flag"
        );

        &this.allocation.mapped_slice().unwrap()[0..this.info.size as usize]
    }

    /// Returns the buffer's mapped memory as a mutable byte slice.
    ///
    /// The buffer must have been created with `mappable` set.
    #[profiling::function]
    pub fn mapped_slice_mut(this: &mut Self) -> &mut [u8] {
        debug_assert!(
            this.info.mappable,
            "Buffer is not mappable - create using mappable flag"
        );

        &mut this.allocation.mapped_slice_mut().unwrap()[0..this.info.size as usize]
    }
}

impl Debug for Buffer {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        if let Some(name) = &self.name {
            write!(f, "{} ({:?})", name, self.buffer)
        } else {
            write!(f, "{:?}", self.buffer)
        }
    }
}

impl Deref for Buffer {
    type Target = vk::Buffer;

    fn deref(&self) -> &Self::Target {
        &self.buffer
    }
}

impl Drop for Buffer {
    #[profiling::function]
    fn drop(&mut self) {
        if panicking() {
            return;
        }

        {
            profiling::scope!("deallocate");

            #[cfg_attr(not(feature = "parking_lot"), allow(unused_mut))]
            let mut allocator = self.device.allocator.lock();

            #[cfg(not(feature = "parking_lot"))]
            let mut allocator = allocator.unwrap();

            // Take the allocation out of `ManuallyDrop` exactly once, here in drop
            allocator.free(unsafe { ManuallyDrop::take(&mut self.allocation) })
        }
        .unwrap_or_else(|_| warn!("Unable to free buffer allocation"));

        unsafe {
            self.device.destroy_buffer(self.buffer, None);
        }
    }
}

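/// Tracks the most recent access type of each byte range of a buffer.
///
/// Ranges are stored as a run-length encoding: each `(access, start)` entry applies from
/// `start` up to the start of the next entry, or to `size` for the last entry. For example,
/// with `size == 100`, the list `[(TransferWrite, 0), (Nothing, 10)]` means bytes `0..10`
/// were last written by a transfer and bytes `10..100` have not been accessed yet.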
#[derive(Debug)]
struct BufferAccess {
    accesses: Vec<(AccessType, vk::DeviceSize)>,
    size: vk::DeviceSize,
}

impl BufferAccess {
    fn new(size: vk::DeviceSize) -> Self {
        Self {
            accesses: vec![(AccessType::Nothing, 0)],
            size,
        }
    }
}

struct BufferAccessIter<T> {
    access: AccessType,
    access_range: BufferSubresourceRange,
    buffer: T,
    idx: usize,
}

impl<T> BufferAccessIter<T>
where
    T: DerefMut<Target = BufferAccess>,
{
    fn new(buffer: T, access: AccessType, access_range: BufferSubresourceRange) -> Self {
        debug_assert!(access_range.start < access_range.end);
        debug_assert!(access_range.end <= buffer.size);

        // In debug builds, check the run-length invariants: entries start at zero, stay
        // within the buffer, are strictly ordered, and adjacent entries never share an
        // access type
        #[cfg(debug_assertions)]
        {
            let access_start = |(_, access_start): &(AccessType, vk::DeviceSize)| *access_start;

            assert_eq!(buffer.accesses.first().map(access_start), Some(0));
            assert!(buffer.accesses.last().map(access_start).unwrap() < buffer.size);

            let (mut prev_access, mut prev_start) = buffer.accesses.first().copied().unwrap();
            for (next_access, next_start) in buffer.accesses.iter().skip(1).copied() {
                debug_assert_ne!(prev_access, next_access);
                debug_assert!(prev_start < next_start);

                prev_access = next_access;
                prev_start = next_start;
            }
        };

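        // Binary-search for the entry covering `access_range.start`. Keys are doubled and
        // the needle is doubled-plus-one, so every probe compares an even key against an
        // odd needle: the search can never match exactly and always yields the insertion
        // point, whose predecessor is the covering entry.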
        let needle = (access_range.start << 1) | 1;
        let idx = buffer
            .accesses
            .binary_search_by(|(_, probe)| (probe << 1).cmp(&needle));

        debug_assert!(idx.is_err());

        let mut idx = unsafe { idx.unwrap_err_unchecked() };

        debug_assert_ne!(idx, 0);

        idx -= 1;

        Self {
            access,
            access_range,
            buffer,
            idx,
        }
    }
}

impl<T> Iterator for BufferAccessIter<T>
where
    T: DerefMut<Target = BufferAccess>,
{
    type Item = (AccessType, BufferSubresourceRange);

    fn next(&mut self) -> Option<Self::Item> {
        debug_assert!(self.access_range.start <= self.access_range.end);
        debug_assert!(self.access_range.end <= self.buffer.size);

        // The requested range is consumed front to back; empty means iteration is done
        if self.access_range.start == self.access_range.end {
            return None;
        }

        debug_assert!(self.buffer.accesses.get(self.idx).is_some());

        // The current run and its end (the next entry's start, or the buffer size)
        let (access, access_start) = unsafe { *self.buffer.accesses.get_unchecked(self.idx) };
        let access_end = self
            .buffer
            .accesses
            .get(self.idx + 1)
            .map(|(_, access_start)| *access_start)
            .unwrap_or(self.buffer.size);
        let mut access_range = self.access_range;

        // Clip the yielded range to the current run and advance past it
        access_range.end = access_range.end.min(access_end);
        self.access_range.start = access_range.end;

        if access == self.access {
            // The run already has the requested access type; just move on
            self.idx += 1;
        } else if access_start < access_range.start {
            // The run begins before the requested range; its head stays intact
            if let Some((_, access_start)) = self
                .buffer
                .accesses
                .get_mut(self.idx + 1)
                .filter(|(access, _)| *access == self.access && access_end == access_range.end)
            {
                // The following run matches the new access type; extend it backwards
                *access_start = access_range.start;
                self.idx += 1;
            } else {
                self.idx += 1;
                self.buffer
                    .accesses
                    .insert(self.idx, (self.access, access_range.start));

                // Re-insert the tail of the original run if it extends past our range
                if access_end > access_range.end {
                    self.buffer
                        .accesses
                        .insert(self.idx + 1, (access, access_range.end));
                }

                self.idx += 1;
            }
        } else if self.idx > 0 {
            // The run begins exactly at the requested start and has a predecessor
            if self
                .buffer
                .accesses
                .get(self.idx - 1)
                .filter(|(access, _)| *access == self.access)
                .is_some()
            {
                // The predecessor already has the new access type; merge into it
                if access_end == access_range.end {
                    self.buffer.accesses.remove(self.idx);

                    // If the successor also matches, merge that too
                    if self
                        .buffer
                        .accesses
                        .get(self.idx)
                        .filter(|(access, _)| *access == self.access)
                        .is_some()
                    {
                        self.buffer.accesses.remove(self.idx);
                        self.idx -= 1;
                    }
                } else {
                    debug_assert!(self.buffer.accesses.get(self.idx).is_some());

                    let (_, access_start) =
                        unsafe { self.buffer.accesses.get_unchecked_mut(self.idx) };
                    *access_start = access_range.end;
                }
            } else if access_end == access_range.end {
                // The whole run is consumed; retag it with the new access type
                debug_assert!(self.buffer.accesses.get(self.idx).is_some());

                let (access, _) = unsafe { self.buffer.accesses.get_unchecked_mut(self.idx) };
                *access = self.access;

                if self
                    .buffer
                    .accesses
                    .get(self.idx + 1)
                    .filter(|(access, _)| *access == self.access)
                    .is_some()
                {
                    self.buffer.accesses.remove(self.idx + 1);
                } else {
                    self.idx += 1;
                }
            } else {
                // Only the head of the run is consumed; split it
                if let Some((_, access_start)) = self.buffer.accesses.get_mut(self.idx) {
                    *access_start = access_range.end;
                }

                self.buffer
                    .accesses
                    .insert(self.idx, (self.access, access_range.start));
                self.idx += 2;
            }
        } else if let Some((_, access_start)) = self
            .buffer
            .accesses
            .get_mut(1)
            .filter(|(access, _)| *access == self.access && access_end == access_range.end)
        {
            // First run, fully consumed, and the second run matches: merge by extending
            // the second run back to zero
            *access_start = 0;
            self.buffer.accesses.remove(0);
        } else if access_end > access_range.end {
            // First run, partially consumed: insert the new run and move the old start up
            self.buffer.accesses.insert(0, (self.access, 0));

            debug_assert!(self.buffer.accesses.get(1).is_some());

            let (_, access_start) = unsafe { self.buffer.accesses.get_unchecked_mut(1) };
            *access_start = access_range.end;
        } else {
            // First run, fully consumed: retag it, merging with the second run if it matches
            debug_assert!(!self.buffer.accesses.is_empty());

            let (access, _) = unsafe { self.buffer.accesses.get_unchecked_mut(0) };
            *access = self.access;

            if self
                .buffer
                .accesses
                .get(1)
                .filter(|(access, _)| *access == self.access)
                .is_some()
            {
                self.buffer.accesses.remove(1);
            } else {
                self.idx += 1;
            }
        }

        Some((access, access_range))
    }
}

/// Information used to create a [`Buffer`] instance.
#[derive(Builder, Clone, Copy, Debug, Eq, Hash, PartialEq)]
#[builder(
    build_fn(private, name = "fallible_build", error = "BufferInfoBuilderError"),
    derive(Clone, Copy, Debug),
    pattern = "owned"
)]
#[non_exhaustive]
pub struct BufferInfo {
    /// Byte alignment of the buffer allocation. Must be a power of two.
    #[builder(default = "1")]
    pub alignment: vk::DeviceSize,

    /// Specifies a buffer whose memory is host-visible and can be mapped.
    #[builder(default)]
    pub mappable: bool,

    /// Size in bytes of the buffer allocation.
    pub size: vk::DeviceSize,

    /// Buffer usage flags.
    #[builder(default)]
    pub usage: vk::BufferUsageFlags,
}

impl BufferInfo {
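    /// Specifies a non-mappable buffer located in device-local (GPU-only) memory.
    ///
    /// A usage sketch; the `screen_13` module path is an assumption, as in the examples
    /// above:
    ///
    /// ```no_run
    /// # use ash::vk;
    /// # use screen_13::driver::buffer::BufferInfo;
    /// // 64 KiB of GPU-only storage
    /// let info = BufferInfo::device_mem(65_536, vk::BufferUsageFlags::STORAGE_BUFFER);
    ///
    /// assert!(!info.mappable);
    /// ```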
    #[inline(always)]
    pub const fn device_mem(size: vk::DeviceSize, usage: vk::BufferUsageFlags) -> BufferInfo {
        BufferInfo {
            alignment: 1,
            mappable: false,
            size,
            usage,
        }
    }

    /// Specifies a mappable buffer located in host-visible memory.
    ///
    /// `TRANSFER_SRC` and `TRANSFER_DST` are added to the given usage flags so the buffer
    /// can be used to copy data to and from device-local resources.
    #[inline(always)]
    pub const fn host_mem(size: vk::DeviceSize, usage: vk::BufferUsageFlags) -> BufferInfo {
        let usage = vk::BufferUsageFlags::from_raw(
            usage.as_raw()
                | vk::BufferUsageFlags::TRANSFER_DST.as_raw()
                | vk::BufferUsageFlags::TRANSFER_SRC.as_raw(),
        );

        BufferInfo {
            alignment: 1,
            mappable: true,
            size,
            usage,
        }
    }

    #[allow(clippy::new_ret_no_self)]
    #[deprecated = "Use BufferInfo::device_mem()"]
    #[doc(hidden)]
    pub fn new(size: vk::DeviceSize, usage: vk::BufferUsageFlags) -> BufferInfoBuilder {
        Self::device_mem(size, usage).to_builder()
    }

    #[deprecated = "Use BufferInfo::host_mem()"]
    #[doc(hidden)]
    pub fn new_mappable(size: vk::DeviceSize, usage: vk::BufferUsageFlags) -> BufferInfoBuilder {
        Self::host_mem(size, usage).to_builder()
    }

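    /// Converts this info into a builder so individual fields can be changed.
    ///
    /// A sketch of overriding one field; the module path is an assumption, as above:
    ///
    /// ```no_run
    /// # use ash::vk;
    /// # use screen_13::driver::buffer::BufferInfo;
    /// let info = BufferInfo::device_mem(1024, vk::BufferUsageFlags::STORAGE_BUFFER)
    ///     .to_builder()
    ///     .alignment(64)
    ///     .build();
    ///
    /// assert_eq!(info.alignment, 64);
    /// ```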
    #[inline(always)]
    pub fn to_builder(self) -> BufferInfoBuilder {
        BufferInfoBuilder {
            alignment: Some(self.alignment),
            mappable: Some(self.mappable),
            size: Some(self.size),
            usage: Some(self.usage),
        }
    }
}

impl BufferInfoBuilder {
    /// Builds a new `BufferInfo`.
    ///
    /// # Panics
    ///
    /// Panics if `size` has not been set or if `alignment` is not a power of two.
    #[inline(always)]
    pub fn build(self) -> BufferInfo {
        let res = match self.fallible_build() {
            Err(BufferInfoBuilderError(err)) => panic!("{err}"),
            Ok(info) => info,
        };

        assert_eq!(
            res.alignment.count_ones(),
            1,
            "Alignment must be a power of two"
        );

        res
    }
}

impl From<BufferInfoBuilder> for BufferInfo {
    fn from(info: BufferInfoBuilder) -> Self {
        info.build()
    }
}

#[derive(Debug)]
struct BufferInfoBuilderError(UninitializedFieldError);

impl From<UninitializedFieldError> for BufferInfoBuilderError {
    fn from(err: UninitializedFieldError) -> Self {
        Self(err)
    }
}

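/// Describes a subset of a buffer as a `start..end` byte range.
///
/// Converting from `None` yields `0..vk::WHOLE_SIZE`, which [`Buffer::access`] clamps to the
/// actual buffer size.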
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct BufferSubresourceRange {
    /// The start of the range, in bytes.
    pub start: vk::DeviceSize,

    /// The end of the range (exclusive), in bytes.
    pub end: vk::DeviceSize,
}

impl BufferSubresourceRange {
    #[cfg(test)]
    pub(crate) fn intersects(self, other: Self) -> bool {
        self.start < other.end && self.end > other.start
    }
}

impl From<BufferInfo> for BufferSubresourceRange {
    fn from(info: BufferInfo) -> Self {
        Self {
            start: 0,
            end: info.size,
        }
    }
}

impl From<Range<vk::DeviceSize>> for BufferSubresourceRange {
    fn from(range: Range<vk::DeviceSize>) -> Self {
        Self {
            start: range.start,
            end: range.end,
        }
    }
}

impl From<Option<Range<vk::DeviceSize>>> for BufferSubresourceRange {
    fn from(range: Option<Range<vk::DeviceSize>>) -> Self {
        range.unwrap_or(0..vk::WHOLE_SIZE).into()
    }
}

impl From<BufferSubresourceRange> for Range<vk::DeviceSize> {
    fn from(subresource: BufferSubresourceRange) -> Self {
        subresource.start..subresource.end
    }
}

#[cfg(test)]
mod tests {
    use {
        super::*,
        rand::{Rng, SeedableRng, rngs::SmallRng},
    };

    type Info = BufferInfo;
    type Builder = BufferInfoBuilder;

    const FUZZ_COUNT: usize = 100_000;

    #[test]
    pub fn buffer_access() {
        let mut buffer = BufferAccess::new(100);

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::TransferWrite,
                buffer_subresource_range(0..10),
            );

            assert_eq!(accesses.buffer.accesses, vec![(AccessType::Nothing, 0)]);
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::Nothing, buffer_subresource_range(0..10))
            );
            assert_eq!(
                accesses.buffer.accesses,
                vec![(AccessType::TransferWrite, 0), (AccessType::Nothing, 10)]
            );
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::TransferRead,
                buffer_subresource_range(5..15),
            );

            assert_eq!(
                accesses.buffer.accesses,
                vec![(AccessType::TransferWrite, 0), (AccessType::Nothing, 10)]
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::TransferWrite, buffer_subresource_range(5..10))
            );
            assert_eq!(
                accesses.buffer.accesses,
                vec![
                    (AccessType::TransferWrite, 0),
                    (AccessType::TransferRead, 5),
                    (AccessType::Nothing, 10)
                ]
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::Nothing, buffer_subresource_range(10..15))
            );
            assert_eq!(
                accesses.buffer.accesses,
                vec![
                    (AccessType::TransferWrite, 0),
                    (AccessType::TransferRead, 5),
                    (AccessType::Nothing, 15)
                ]
            );
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::HostRead,
                buffer_subresource_range(0..100),
            );

            assert_eq!(
                accesses.buffer.accesses,
                vec![
                    (AccessType::TransferWrite, 0),
                    (AccessType::TransferRead, 5),
                    (AccessType::Nothing, 15)
                ]
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::TransferWrite, buffer_subresource_range(0..5))
            );
            assert_eq!(
                accesses.buffer.accesses,
                vec![
                    (AccessType::HostRead, 0),
                    (AccessType::TransferRead, 5),
                    (AccessType::Nothing, 15)
                ]
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::TransferRead, buffer_subresource_range(5..15))
            );
            assert_eq!(
                accesses.buffer.accesses,
                vec![(AccessType::HostRead, 0), (AccessType::Nothing, 15)]
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::Nothing, buffer_subresource_range(15..100))
            );
            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostRead, 0)]);
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::HostWrite,
                buffer_subresource_range(0..100),
            );

            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostRead, 0)]);
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostRead, buffer_subresource_range(0..100))
            );
            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostWrite, 0)]);
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::HostWrite,
                buffer_subresource_range(0..100),
            );

            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostWrite, 0)]);
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostWrite, buffer_subresource_range(0..100))
            );
            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostWrite, 0)]);
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::HostWrite,
                buffer_subresource_range(1..99),
            );

            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostWrite, 0)]);
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostWrite, buffer_subresource_range(1..99))
            );
            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostWrite, 0)]);
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::HostRead,
                buffer_subresource_range(1..99),
            );

            assert_eq!(accesses.buffer.accesses, vec![(AccessType::HostWrite, 0)]);
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostWrite, buffer_subresource_range(1..99))
            );
            assert_eq!(
                accesses.buffer.accesses,
                vec![
                    (AccessType::HostWrite, 0),
                    (AccessType::HostRead, 1),
                    (AccessType::HostWrite, 99)
                ]
            );
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::Nothing,
                buffer_subresource_range(0..100),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostWrite, buffer_subresource_range(0..1))
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostRead, buffer_subresource_range(1..99))
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::HostWrite, buffer_subresource_range(99..100))
            );
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::AnyShaderWrite,
                buffer_subresource_range(0..100),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::Nothing, buffer_subresource_range(0..100))
            );
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::AnyShaderReadOther,
                buffer_subresource_range(1..2),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(1..2))
            );
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::AnyShaderReadOther,
                buffer_subresource_range(3..4),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(3..4))
            );
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::Nothing,
                buffer_subresource_range(0..5),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(0..1))
            );
            assert_eq!(
                accesses.next().unwrap(),
                (
                    AccessType::AnyShaderReadOther,
                    buffer_subresource_range(1..2)
                )
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(2..3))
            );
            assert_eq!(
                accesses.next().unwrap(),
                (
                    AccessType::AnyShaderReadOther,
                    buffer_subresource_range(3..4)
                )
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(4..5))
            );
            assert!(accesses.next().is_none());
        }
    }

    #[test]
    pub fn buffer_access_basic() {
        let mut buffer = BufferAccess::new(5);

        buffer.accesses = vec![
            (AccessType::ColorAttachmentRead, 0),
            (AccessType::AnyShaderWrite, 4),
        ];

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::AnyShaderWrite,
                buffer_subresource_range(0..2),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (
                    AccessType::ColorAttachmentRead,
                    buffer_subresource_range(0..2)
                )
            );
            assert!(accesses.next().is_none());
        }

        {
            let mut accesses = BufferAccessIter::new(
                &mut buffer,
                AccessType::HostWrite,
                buffer_subresource_range(0..5),
            );

            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(0..2))
            );
            assert_eq!(
                accesses.next().unwrap(),
                (
                    AccessType::ColorAttachmentRead,
                    buffer_subresource_range(2..4)
                )
            );
            assert_eq!(
                accesses.next().unwrap(),
                (AccessType::AnyShaderWrite, buffer_subresource_range(4..5))
            );

            assert!(accesses.next().is_none());
        }
    }

    /// Replays random accesses against both `BufferAccess` and a flat per-byte array, and
    /// checks that every range the iterator yields agrees with the flat bookkeeping.
    fn buffer_access_fuzz(buffer_size: vk::DeviceSize) {
        static ACCESS_TYPES: &[AccessType] = &[
            AccessType::AnyShaderReadOther,
            AccessType::AnyShaderWrite,
            AccessType::ColorAttachmentRead,
            AccessType::ColorAttachmentWrite,
            AccessType::HostRead,
            AccessType::HostWrite,
            AccessType::Nothing,
        ];

        let mut rng = SmallRng::seed_from_u64(42);
        let mut buffer = BufferAccess::new(buffer_size);
        let mut data = vec![AccessType::Nothing; buffer_size as usize];

        for _ in 0..FUZZ_COUNT {
            let access = ACCESS_TYPES[rng.random_range(..ACCESS_TYPES.len())];
            let access_start = rng.random_range(..buffer_size);
            let access_end = rng.random_range(access_start + 1..=buffer_size);

            let accesses = BufferAccessIter::new(
                &mut buffer,
                access,
                buffer_subresource_range(access_start..access_end),
            );

            for (access, access_range) in accesses {
                assert!(
                    data[access_range.start as usize..access_range.end as usize]
                        .iter()
                        .all(|data| *data == access),
                    "{:?}",
                    &data[access_range.start as usize..access_range.end as usize]
                );
            }

            for data in &mut data[access_start as usize..access_end as usize] {
                *data = access;
            }
        }
    }

    #[test]
    pub fn buffer_access_fuzz_small() {
        buffer_access_fuzz(5);
    }

    #[test]
    pub fn buffer_access_fuzz_medium() {
        buffer_access_fuzz(101);
    }

    #[test]
    pub fn buffer_access_fuzz_large() {
        buffer_access_fuzz(10_000);
    }

    #[test]
    pub fn buffer_info() {
        let info = Info::device_mem(0, vk::BufferUsageFlags::empty());
        let builder = info.to_builder().build();

        assert_eq!(info, builder);
    }

    #[test]
    pub fn buffer_info_alignment() {
        let info = Info::device_mem(0, vk::BufferUsageFlags::empty());

        assert_eq!(info.alignment, 1);
    }

    #[test]
    pub fn buffer_info_builder() {
        let info = Info::device_mem(0, vk::BufferUsageFlags::empty());
        let builder = Builder::default().size(0).build();

        assert_eq!(info, builder);
    }

    #[test]
    #[should_panic(expected = "Alignment must be a power of two")]
    pub fn buffer_info_builder_alignment_0() {
        Builder::default().size(0).alignment(0).build();
    }

    #[test]
    #[should_panic(expected = "Alignment must be a power of two")]
    pub fn buffer_info_builder_alignment_42() {
        Builder::default().size(0).alignment(42).build();
    }

    #[test]
    pub fn buffer_info_builder_alignment_256() {
        let mut info = Info::device_mem(42, vk::BufferUsageFlags::empty());
        info.alignment = 256;

        let builder = Builder::default().size(42).alignment(256).build();

        assert_eq!(info, builder);
    }

    #[test]
    #[should_panic(expected = "Field not initialized: size")]
    pub fn buffer_info_builder_uninit_size() {
        Builder::default().build();
    }

    fn buffer_subresource_range(
        Range { start, end }: Range<vk::DeviceSize>,
    ) -> BufferSubresourceRange {
        BufferSubresourceRange { start, end }
    }

    #[test]
    pub fn buffer_subresource_range_intersects() {
        use BufferSubresourceRange as B;

        assert!(!B { start: 10, end: 20 }.intersects(B { start: 0, end: 5 }));
        assert!(!B { start: 10, end: 20 }.intersects(B { start: 5, end: 10 }));
        assert!(B { start: 10, end: 20 }.intersects(B { start: 10, end: 15 }));
        assert!(B { start: 10, end: 20 }.intersects(B { start: 15, end: 20 }));
        assert!(!B { start: 10, end: 20 }.intersects(B { start: 20, end: 25 }));
        assert!(!B { start: 10, end: 20 }.intersects(B { start: 25, end: 30 }));

        assert!(!B { start: 5, end: 10 }.intersects(B { start: 10, end: 20 }));
        assert!(B { start: 5, end: 25 }.intersects(B { start: 10, end: 20 }));
        assert!(B { start: 5, end: 15 }.intersects(B { start: 10, end: 20 }));
        assert!(B { start: 10, end: 20 }.intersects(B { start: 10, end: 20 }));
        assert!(B { start: 11, end: 19 }.intersects(B { start: 10, end: 20 }));
        assert!(B { start: 15, end: 25 }.intersects(B { start: 10, end: 20 }));
        assert!(!B { start: 20, end: 25 }.intersects(B { start: 10, end: 20 }));
    }
}