1pub use self::{subbuffer::*, sys::*, usage::*};
73use crate::{
74 device::{physical::PhysicalDevice, Device, DeviceOwned},
75 macros::{vulkan_bitflags, vulkan_enum},
76 memory::{
77 allocator::{
78 AllocationCreateInfo, AllocationType, DeviceLayout, MemoryAllocator,
79 MemoryAllocatorError,
80 },
81 DedicatedAllocation, ExternalMemoryHandleType, ExternalMemoryHandleTypes,
82 ExternalMemoryProperties, MemoryRequirements, ResourceMemory,
83 },
84 range_map::RangeMap,
85 sync::{future::AccessError, AccessConflict, CurrentAccess, Sharing},
86 DeviceSize, NonNullDeviceAddress, Requires, RequiresAllOf, RequiresOneOf, Validated,
87 ValidationError, Version, VulkanError, VulkanObject,
88};
89use parking_lot::{Mutex, MutexGuard};
90use smallvec::SmallVec;
91use std::{
92 error::Error,
93 fmt::{Display, Formatter},
94 hash::{Hash, Hasher},
95 marker::PhantomData,
96 ops::Range,
97 sync::Arc,
98};
99
100pub mod allocator;
101pub mod subbuffer;
102pub mod sys;
103mod usage;
104pub mod view;
105
/// A storage for raw bytes.
///
/// Wraps a [`RawBuffer`] together with the memory backing it and a mutex-protected map that
/// tracks which byte ranges of the buffer are currently being read or written by the host or
/// the device.
#[derive(Debug)]
pub struct Buffer {
    /// The underlying raw Vulkan buffer.
    inner: RawBuffer,
    /// How (and whether) this buffer is backed by memory.
    memory: BufferMemory,
    /// Per-range CPU/GPU access bookkeeping; locked via [`Buffer::state`].
    state: Mutex<BufferState>,
}
206
/// The type of backing memory that a buffer can have.
#[derive(Debug)]
#[non_exhaustive]
pub enum BufferMemory {
    /// The buffer is backed by normal memory, bound as a single [`ResourceMemory`] allocation.
    Normal(ResourceMemory),

    /// The buffer is backed by sparse memory (no single bound allocation is tracked here).
    Sparse,

    /// The buffer's memory is managed externally — NOTE(review): presumably bound outside of
    /// vulkano's control; confirm against the constructor that produces this variant.
    External,
}
224
impl Buffer {
    /// Creates a new `Buffer` and writes `data` in it, returning a [`Subbuffer`] that spans the
    /// whole buffer.
    ///
    /// This is a shorthand for [`Buffer::new_sized`] followed by writing `data` through the
    /// returned subbuffer.
    ///
    /// # Panics
    ///
    /// - Panics if the new buffer cannot be write-locked or written (the `write()` result is
    ///   unwrapped below) — presumably this requires a host-visible allocation; confirm against
    ///   `Subbuffer::write`.
    pub fn from_data<T>(
        allocator: Arc<dyn MemoryAllocator>,
        create_info: BufferCreateInfo,
        allocation_info: AllocationCreateInfo,
        data: T,
    ) -> Result<Subbuffer<T>, Validated<AllocateBufferError>>
    where
        T: BufferContents,
    {
        let buffer = Buffer::new_sized(allocator, create_info, allocation_info)?;

        {
            // Scope the guard so the write lock is released before the subbuffer is returned.
            let mut write_guard = buffer.write().unwrap();
            *write_guard = data;
        }

        Ok(buffer)
    }

    /// Creates a new `Buffer` and writes all elements of `iter` in it, returning a
    /// [`Subbuffer`] that spans the whole buffer.
    ///
    /// The buffer is sized from `iter`'s exact length.
    ///
    /// # Panics
    ///
    /// - Panics if the iterator's length cannot be converted to a `DeviceSize`, or if the new
    ///   buffer cannot be write-locked or written (both `unwrap`ed below).
    pub fn from_iter<T, I>(
        allocator: Arc<dyn MemoryAllocator>,
        create_info: BufferCreateInfo,
        allocation_info: AllocationCreateInfo,
        iter: I,
    ) -> Result<Subbuffer<[T]>, Validated<AllocateBufferError>>
    where
        T: BufferContents,
        I: IntoIterator<Item = T>,
        I::IntoIter: ExactSizeIterator,
    {
        let iter = iter.into_iter();
        let buffer = Buffer::new_slice(
            allocator,
            create_info,
            allocation_info,
            // usize -> DeviceSize (u64); this cannot fail on common 64-bit targets.
            iter.len().try_into().unwrap(),
        )?;

        {
            // Fill every element; `zip` stops at the shorter side, and both sides have the
            // same length because the buffer was sized from `iter.len()`.
            let mut write_guard = buffer.write().unwrap();

            for (o, i) in write_guard.iter_mut().zip(iter) {
                *o = i;
            }
        }

        Ok(buffer)
    }

    /// Creates a new uninitialized `Buffer` with space for a single `T`, returning a
    /// [`Subbuffer`] that spans the whole buffer.
    pub fn new_sized<T>(
        allocator: Arc<dyn MemoryAllocator>,
        create_info: BufferCreateInfo,
        allocation_info: AllocationCreateInfo,
    ) -> Result<Subbuffer<T>, Validated<AllocateBufferError>>
    where
        T: BufferContents,
    {
        // `T` is sized, so its layout is statically known.
        let layout = T::LAYOUT.unwrap_sized();
        let buffer = Subbuffer::new(Buffer::new(
            allocator,
            create_info,
            allocation_info,
            layout,
        )?);

        // SAFETY: the buffer was created with exactly `T`'s layout.
        Ok(unsafe { buffer.reinterpret_unchecked() })
    }

    /// Creates a new uninitialized `Buffer` for a slice with `len` elements, returning a
    /// [`Subbuffer`] that spans the whole buffer.
    pub fn new_slice<T>(
        allocator: Arc<dyn MemoryAllocator>,
        create_info: BufferCreateInfo,
        allocation_info: AllocationCreateInfo,
        len: DeviceSize,
    ) -> Result<Subbuffer<[T]>, Validated<AllocateBufferError>>
    where
        T: BufferContents,
    {
        Buffer::new_unsized(allocator, create_info, allocation_info, len)
    }

    /// Creates a new uninitialized `Buffer` for unsized data, sized for `len` elements,
    /// returning a [`Subbuffer`] that spans the whole buffer.
    ///
    /// # Panics
    ///
    /// - Panics if no layout can be computed for `len` elements (unwrapped below) —
    ///   NOTE(review): this presumably includes `len == 0`; confirm `layout_for_len`'s
    ///   semantics.
    pub fn new_unsized<T>(
        allocator: Arc<dyn MemoryAllocator>,
        create_info: BufferCreateInfo,
        allocation_info: AllocationCreateInfo,
        len: DeviceSize,
    ) -> Result<Subbuffer<T>, Validated<AllocateBufferError>>
    where
        T: BufferContents + ?Sized,
    {
        let layout = T::LAYOUT.layout_for_len(len).unwrap();
        let buffer = Subbuffer::new(Buffer::new(
            allocator,
            create_info,
            allocation_info,
            layout,
        )?);

        // SAFETY: the buffer was created with the layout computed for `T` with `len` elements.
        Ok(unsafe { buffer.reinterpret_unchecked() })
    }

    /// Creates a new uninitialized `Buffer` with the given `layout`, allocating and binding
    /// memory for it from `allocator`.
    ///
    /// `create_info.size` must be left at 0; it is filled in from `layout` here.
    ///
    /// # Panics
    ///
    /// - Panics if `create_info.flags` contains `BufferCreateFlags::SPARSE_BINDING`.
    /// - Panics if `create_info.size` is not 0.
    pub fn new(
        allocator: Arc<dyn MemoryAllocator>,
        mut create_info: BufferCreateInfo,
        allocation_info: AllocationCreateInfo,
        layout: DeviceLayout,
    ) -> Result<Arc<Self>, Validated<AllocateBufferError>> {
        // Sparse buffers are not bound to a single allocation, so they can't go through this
        // allocate-and-bind path.
        assert!(!create_info
            .flags
            .contains(BufferCreateFlags::SPARSE_BINDING));

        assert_eq!(
            create_info.size, 0,
            "`Buffer::new*` functions set the `create_info.size` field themselves, you should not \
            set it yourself"
        );

        // The requested layout determines the buffer's size.
        create_info.size = layout.size();

        let raw_buffer =
            RawBuffer::new(allocator.device().clone(), create_info).map_err(|err| match err {
                Validated::Error(err) => Validated::Error(AllocateBufferError::CreateBuffer(err)),
                Validated::ValidationError(err) => err.into(),
            })?;
        // Tighten the allocation's alignment so it also satisfies the caller's layout, not
        // just the implementation's buffer requirements.
        let mut requirements = *raw_buffer.memory_requirements();
        requirements.layout = requirements.layout.align_to(layout.alignment()).unwrap();

        let allocation = allocator
            .allocate(
                requirements,
                AllocationType::Linear,
                allocation_info,
                // Hint that this allocation may be dedicated to this buffer.
                Some(DedicatedAllocation::Buffer(&raw_buffer)),
            )
            .map_err(AllocateBufferError::AllocateMemory)?;
        // SAFETY: the allocation was just made by `allocator` and is not in use elsewhere.
        let allocation = unsafe { ResourceMemory::from_allocation(allocator, allocation) };

        let buffer = raw_buffer.bind_memory(allocation).map_err(|(err, _, _)| {
            err.map(AllocateBufferError::BindMemory)
                .map_validation(|err| err.add_context("RawBuffer::bind_memory"))
        })?;

        Ok(Arc::new(buffer))
    }

    /// Wraps a raw buffer and its backing memory into a `Buffer`, with the whole buffer range
    /// initially unlocked.
    fn from_raw(inner: RawBuffer, memory: BufferMemory) -> Self {
        let state = Mutex::new(BufferState::new(inner.size()));

        Buffer {
            inner,
            memory,
            state,
        }
    }

    /// Returns the type of memory that is backing this buffer.
    #[inline]
    pub fn memory(&self) -> &BufferMemory {
        &self.memory
    }

    /// Returns the memory requirements for this buffer.
    #[inline]
    pub fn memory_requirements(&self) -> &MemoryRequirements {
        self.inner.memory_requirements()
    }

    /// Returns the flags the buffer was created with.
    #[inline]
    pub fn flags(&self) -> BufferCreateFlags {
        self.inner.flags()
    }

    /// Returns the size of the buffer in bytes.
    #[inline]
    pub fn size(&self) -> DeviceSize {
        self.inner.size()
    }

    /// Returns the usage the buffer was created with.
    #[inline]
    pub fn usage(&self) -> BufferUsage {
        self.inner.usage()
    }

    /// Returns the sharing mode the buffer was created with.
    #[inline]
    pub fn sharing(&self) -> &Sharing<SmallVec<[u32; 4]>> {
        self.inner.sharing()
    }

    /// Returns the external memory handle types the buffer was created with.
    #[inline]
    pub fn external_memory_handle_types(&self) -> ExternalMemoryHandleTypes {
        self.inner.external_memory_handle_types()
    }

    /// Returns the device address for this buffer.
    ///
    /// # Errors
    ///
    /// - Returns a validation error if the `buffer_device_address` feature is not enabled, or
    ///   the buffer was not created with `BufferUsage::SHADER_DEVICE_ADDRESS`.
    pub fn device_address(&self) -> Result<NonNullDeviceAddress, Box<ValidationError>> {
        self.validate_device_address()?;

        Ok(unsafe { self.device_address_unchecked() })
    }

    /// Checks the preconditions of [`device_address`](Self::device_address) against the device
    /// features and the buffer's usage, citing the relevant Vulkan VUIDs.
    fn validate_device_address(&self) -> Result<(), Box<ValidationError>> {
        let device = self.device();

        if !device.enabled_features().buffer_device_address {
            return Err(Box::new(ValidationError {
                requires_one_of: RequiresOneOf(&[RequiresAllOf(&[Requires::DeviceFeature(
                    "buffer_device_address",
                )])]),
                vuids: &["VUID-vkGetBufferDeviceAddress-bufferDeviceAddress-03324"],
                ..Default::default()
            }));
        }

        if !self.usage().intersects(BufferUsage::SHADER_DEVICE_ADDRESS) {
            return Err(Box::new(ValidationError {
                context: "self.usage()".into(),
                problem: "does not contain `BufferUsage::SHADER_DEVICE_ADDRESS`".into(),
                vuids: &["VUID-VkBufferDeviceAddressInfo-buffer-02601"],
                ..Default::default()
            }));
        }

        Ok(())
    }

    /// Queries the device address without validating the preconditions checked by
    /// [`device_address`](Self::device_address).
    #[cfg_attr(not(feature = "document_unchecked"), doc(hidden))]
    pub unsafe fn device_address_unchecked(&self) -> NonNullDeviceAddress {
        let device = self.device();

        let info_vk = ash::vk::BufferDeviceAddressInfo::default().buffer(self.handle());

        let ptr = {
            // Pick the function pointer by API version first, then by extension: the core
            // Vulkan 1.2 entry point, the KHR extension, or the older EXT extension.
            let fns = device.fns();
            let func = if device.api_version() >= Version::V1_2 {
                fns.v1_2.get_buffer_device_address
            } else if device.enabled_extensions().khr_buffer_device_address {
                fns.khr_buffer_device_address.get_buffer_device_address_khr
            } else {
                fns.ext_buffer_device_address.get_buffer_device_address_ext
            };
            unsafe { func(device.handle(), &info_vk) }
        };

        // The address is non-zero for a valid buffer; a zero return would be a driver bug here.
        NonNullDeviceAddress::new(ptr).unwrap()
    }

    /// Locks and returns the buffer's CPU/GPU access-tracking state.
    pub(crate) fn state(&self) -> MutexGuard<'_, BufferState> {
        self.state.lock()
    }
}
528
529unsafe impl VulkanObject for Buffer {
530 type Handle = ash::vk::Buffer;
531
532 #[inline]
533 fn handle(&self) -> Self::Handle {
534 self.inner.handle()
535 }
536}
537
538unsafe impl DeviceOwned for Buffer {
539 #[inline]
540 fn device(&self) -> &Arc<Device> {
541 self.inner.device()
542 }
543}
544
545impl PartialEq for Buffer {
546 #[inline]
547 fn eq(&self, other: &Self) -> bool {
548 self.inner == other.inner
549 }
550}
551
// Marker impl: `Buffer` equality delegates to the inner `RawBuffer` comparison.
impl Eq for Buffer {}
553
554impl Hash for Buffer {
555 fn hash<H: Hasher>(&self, state: &mut H) {
556 self.inner.hash(state);
557 }
558}
559
/// Error that can happen when allocating a new buffer.
#[derive(Clone, Debug)]
pub enum AllocateBufferError {
    /// Creating the raw Vulkan buffer failed.
    CreateBuffer(VulkanError),
    /// Allocating memory for the buffer failed.
    AllocateMemory(MemoryAllocatorError),
    /// Binding the allocated memory to the buffer failed.
    BindMemory(VulkanError),
}
567
568impl Error for AllocateBufferError {
569 fn source(&self) -> Option<&(dyn Error + 'static)> {
570 match self {
571 Self::CreateBuffer(err) => Some(err),
572 Self::AllocateMemory(err) => Some(err),
573 Self::BindMemory(err) => Some(err),
574 }
575 }
576}
577
578impl Display for AllocateBufferError {
579 fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
580 match self {
581 Self::CreateBuffer(_) => write!(f, "creating the buffer failed"),
582 Self::AllocateMemory(_) => write!(f, "allocating memory for the buffer failed"),
583 Self::BindMemory(_) => write!(f, "binding memory to the buffer failed"),
584 }
585 }
586}
587
588impl From<AllocateBufferError> for Validated<AllocateBufferError> {
589 fn from(err: AllocateBufferError) -> Self {
590 Self::Error(err)
591 }
592}
593
/// The current state of a buffer: which byte ranges are locked for reading or writing by the
/// host (CPU) or the device (GPU).
#[derive(Debug)]
pub(crate) struct BufferState {
    /// Maps each byte range of the buffer to its current access state.
    ranges: RangeMap<DeviceSize, BufferRangeState>,
}
599
impl BufferState {
    /// Creates the state for a buffer of `size` bytes, with the whole range initially shared
    /// and unlocked (zero CPU and GPU readers).
    fn new(size: DeviceSize) -> Self {
        BufferState {
            ranges: [(
                0..size,
                BufferRangeState {
                    current_access: CurrentAccess::Shared {
                        cpu_reads: 0,
                        gpu_reads: 0,
                    },
                },
            )]
            .into_iter()
            .collect(),
        }
    }

    /// Checks whether `range` can be read by the CPU: it must not overlap any part that is
    /// currently locked for writing by the CPU or the GPU.
    pub(crate) fn check_cpu_read(&self, range: Range<DeviceSize>) -> Result<(), AccessConflict> {
        for (_range, state) in self.ranges.range(&range) {
            match &state.current_access {
                CurrentAccess::CpuExclusive { .. } => return Err(AccessConflict::HostWrite),
                CurrentAccess::GpuExclusive { .. } => return Err(AccessConflict::DeviceWrite),
                CurrentAccess::Shared { .. } => (),
            }
        }

        Ok(())
    }

    /// Increments the CPU reader count over `range`.
    ///
    /// # Safety
    ///
    /// The caller must ensure a matching [`check_cpu_read`](Self::check_cpu_read) on this range
    /// succeeded and still holds; otherwise the `unreachable!` below is hit.
    pub(crate) unsafe fn cpu_read_lock(&mut self, range: Range<DeviceSize>) {
        // Split the range map at the bounds so the update applies to exactly `range`.
        self.ranges.split_at(&range.start);
        self.ranges.split_at(&range.end);

        for (_range, state) in self.ranges.range_mut(&range) {
            match &mut state.current_access {
                CurrentAccess::Shared { cpu_reads, .. } => {
                    *cpu_reads += 1;
                }
                _ => unreachable!("Buffer is being written by the CPU or GPU"),
            }
        }
    }

    /// Decrements the CPU reader count over `range`.
    ///
    /// # Safety
    ///
    /// The caller must ensure the range was previously locked with
    /// [`cpu_read_lock`](Self::cpu_read_lock).
    pub(crate) unsafe fn cpu_read_unlock(&mut self, range: Range<DeviceSize>) {
        self.ranges.split_at(&range.start);
        self.ranges.split_at(&range.end);

        for (_range, state) in self.ranges.range_mut(&range) {
            match &mut state.current_access {
                CurrentAccess::Shared { cpu_reads, .. } => *cpu_reads -= 1,
                _ => unreachable!("Buffer was not locked for CPU read"),
            }
        }
    }

    /// Checks whether `range` can be written by the CPU: every overlapping part must be fully
    /// idle (shared with zero CPU and GPU readers).
    pub(crate) fn check_cpu_write(&self, range: Range<DeviceSize>) -> Result<(), AccessConflict> {
        for (_range, state) in self.ranges.range(&range) {
            match &state.current_access {
                CurrentAccess::CpuExclusive => return Err(AccessConflict::HostWrite),
                CurrentAccess::GpuExclusive { .. } => return Err(AccessConflict::DeviceWrite),
                CurrentAccess::Shared {
                    cpu_reads: 0,
                    gpu_reads: 0,
                } => (),
                // Prefer reporting a host-read conflict when both kinds of readers exist.
                CurrentAccess::Shared { cpu_reads, .. } if *cpu_reads > 0 => {
                    return Err(AccessConflict::HostRead);
                }
                CurrentAccess::Shared { .. } => return Err(AccessConflict::DeviceRead),
            }
        }

        Ok(())
    }

    /// Marks `range` as exclusively written by the CPU.
    ///
    /// # Safety
    ///
    /// The caller must ensure a matching [`check_cpu_write`](Self::check_cpu_write) on this
    /// range succeeded and still holds.
    pub(crate) unsafe fn cpu_write_lock(&mut self, range: Range<DeviceSize>) {
        self.ranges.split_at(&range.start);
        self.ranges.split_at(&range.end);

        for (_range, state) in self.ranges.range_mut(&range) {
            state.current_access = CurrentAccess::CpuExclusive;
        }
    }

    /// Releases a CPU write lock over `range`, returning the range to the idle shared state.
    ///
    /// # Safety
    ///
    /// The caller must ensure the range was previously locked with
    /// [`cpu_write_lock`](Self::cpu_write_lock).
    pub(crate) unsafe fn cpu_write_unlock(&mut self, range: Range<DeviceSize>) {
        self.ranges.split_at(&range.start);
        self.ranges.split_at(&range.end);

        for (_range, state) in self.ranges.range_mut(&range) {
            match &mut state.current_access {
                CurrentAccess::CpuExclusive => {
                    state.current_access = CurrentAccess::Shared {
                        cpu_reads: 0,
                        gpu_reads: 0,
                    }
                }
                _ => unreachable!("Buffer was not locked for CPU write"),
            }
        }
    }

    /// Checks whether `range` can be read by the GPU: it must not overlap any part that is
    /// exclusively accessed (written) by the CPU or GPU.
    pub(crate) fn check_gpu_read(&self, range: Range<DeviceSize>) -> Result<(), AccessError> {
        for (_range, state) in self.ranges.range(&range) {
            match &state.current_access {
                CurrentAccess::Shared { .. } => (),
                _ => return Err(AccessError::AlreadyInUse),
            }
        }

        Ok(())
    }

    /// Increments the GPU reader count over `range`.
    ///
    /// Note that GPU reads are also counted while the range is `GpuExclusive`, so this accepts
    /// both shared and GPU-exclusive states.
    ///
    /// # Safety
    ///
    /// The caller must ensure the range is not being written by the CPU.
    pub(crate) unsafe fn gpu_read_lock(&mut self, range: Range<DeviceSize>) {
        self.ranges.split_at(&range.start);
        self.ranges.split_at(&range.end);

        for (_range, state) in self.ranges.range_mut(&range) {
            match &mut state.current_access {
                CurrentAccess::GpuExclusive { gpu_reads, .. }
                | CurrentAccess::Shared { gpu_reads, .. } => *gpu_reads += 1,
                _ => unreachable!("Buffer is being written by the CPU"),
            }
        }
    }

    /// Decrements the GPU reader count over `range`.
    ///
    /// # Safety
    ///
    /// The caller must ensure the range was previously locked with
    /// [`gpu_read_lock`](Self::gpu_read_lock).
    pub(crate) unsafe fn gpu_read_unlock(&mut self, range: Range<DeviceSize>) {
        self.ranges.split_at(&range.start);
        self.ranges.split_at(&range.end);

        for (_range, state) in self.ranges.range_mut(&range) {
            match &mut state.current_access {
                CurrentAccess::GpuExclusive { gpu_reads, .. } => *gpu_reads -= 1,
                CurrentAccess::Shared { gpu_reads, .. } => *gpu_reads -= 1,
                _ => unreachable!("Buffer was not locked for GPU read"),
            }
        }
    }

    /// Checks whether `range` can be written by the GPU: every overlapping part must be fully
    /// idle (shared with zero CPU and GPU readers).
    pub(crate) fn check_gpu_write(&self, range: Range<DeviceSize>) -> Result<(), AccessError> {
        for (_range, state) in self.ranges.range(&range) {
            match &state.current_access {
                CurrentAccess::Shared {
                    cpu_reads: 0,
                    gpu_reads: 0,
                } => (),
                _ => return Err(AccessError::AlreadyInUse),
            }
        }

        Ok(())
    }

    /// Increments the GPU writer count over `range`, transitioning shared ranges (with no CPU
    /// readers) to `GpuExclusive` while preserving their GPU reader count.
    ///
    /// # Safety
    ///
    /// The caller must ensure the range is not being accessed by the CPU.
    pub(crate) unsafe fn gpu_write_lock(&mut self, range: Range<DeviceSize>) {
        self.ranges.split_at(&range.start);
        self.ranges.split_at(&range.end);

        for (_range, state) in self.ranges.range_mut(&range) {
            match &mut state.current_access {
                // Already GPU-exclusive: just add another writer.
                CurrentAccess::GpuExclusive { gpu_writes, .. } => *gpu_writes += 1,
                &mut CurrentAccess::Shared {
                    cpu_reads: 0,
                    gpu_reads,
                } => {
                    state.current_access = CurrentAccess::GpuExclusive {
                        gpu_reads,
                        gpu_writes: 1,
                    }
                }
                _ => unreachable!("Buffer is being accessed by the CPU"),
            }
        }
    }

    /// Decrements the GPU writer count over `range`; when the last writer is released, the
    /// range transitions back to shared while preserving its GPU reader count.
    ///
    /// # Safety
    ///
    /// The caller must ensure the range was previously locked with
    /// [`gpu_write_lock`](Self::gpu_write_lock).
    pub(crate) unsafe fn gpu_write_unlock(&mut self, range: Range<DeviceSize>) {
        self.ranges.split_at(&range.start);
        self.ranges.split_at(&range.end);

        for (_range, state) in self.ranges.range_mut(&range) {
            match &mut state.current_access {
                // Last writer: fall back to the shared state.
                &mut CurrentAccess::GpuExclusive {
                    gpu_reads,
                    gpu_writes: 1,
                } => {
                    state.current_access = CurrentAccess::Shared {
                        cpu_reads: 0,
                        gpu_reads,
                    }
                }
                CurrentAccess::GpuExclusive { gpu_writes, .. } => *gpu_writes -= 1,
                _ => unreachable!("Buffer was not locked for GPU write"),
            }
        }
    }
}
793
/// The access state of a single byte range of a buffer.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct BufferRangeState {
    /// Who currently holds read/write access to this range.
    current_access: CurrentAccess,
}
799
vulkan_bitflags! {
    #[non_exhaustive]

    // Flags specifying additional properties of a buffer.
    BufferCreateFlags = BufferCreateFlags(u32);

    // The buffer will be backed by sparse memory binding.
    SPARSE_BINDING = SPARSE_BINDING,

    // The buffer can be used without being fully resident in memory at the time of use.
    // NOTE(review): presumably requires `SPARSE_BINDING` per the Vulkan spec — confirm.
    SPARSE_RESIDENCY = SPARSE_RESIDENCY,

    }
855
/// Parameters for a query of the external memory properties of a hypothetical buffer.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct ExternalBufferInfo {
    /// The flags that the buffer would be created with. Empty by default (see
    /// [`handle_type`](Self::handle_type)).
    pub flags: BufferCreateFlags,

    /// The usage that the buffer would be created with. Must not be empty (enforced by
    /// validation).
    pub usage: BufferUsage,

    /// The external memory handle type being queried.
    pub handle_type: ExternalMemoryHandleType,

    pub _ne: crate::NonExhaustive,
}
870
impl ExternalBufferInfo {
    /// Returns an `ExternalBufferInfo` with the specified `handle_type`, and empty `flags` and
    /// `usage`.
    #[inline]
    pub fn handle_type(handle_type: ExternalMemoryHandleType) -> Self {
        Self {
            flags: BufferCreateFlags::empty(),
            usage: BufferUsage::empty(),
            handle_type,
            _ne: crate::NonExhaustive(()),
        }
    }

    /// Validates every field against what `physical_device` supports, attaching the relevant
    /// Vulkan VUIDs to any error.
    pub(crate) fn validate(
        &self,
        physical_device: &PhysicalDevice,
    ) -> Result<(), Box<ValidationError>> {
        // Destructure so a newly added field causes a compile error here, forcing this
        // validation to be updated.
        let &Self {
            flags,
            usage,
            handle_type,
            _ne: _,
        } = self;

        flags
            .validate_physical_device(physical_device)
            .map_err(|err| {
                err.add_context("flags")
                    .set_vuids(&["VUID-VkPhysicalDeviceExternalBufferInfo-flags-parameter"])
            })?;

        usage
            .validate_physical_device(physical_device)
            .map_err(|err| {
                err.add_context("usage")
                    .set_vuids(&["VUID-VkPhysicalDeviceExternalBufferInfo-usage-parameter"])
            })?;

        // `usage` must contain at least one bit.
        if usage.is_empty() {
            return Err(Box::new(ValidationError {
                context: "usage".into(),
                problem: "is empty".into(),
                vuids: &["VUID-VkPhysicalDeviceExternalBufferInfo-usage-requiredbitmask"],
                ..Default::default()
            }));
        }

        handle_type
            .validate_physical_device(physical_device)
            .map_err(|err| {
                err.add_context("handle_type")
                    .set_vuids(&["VUID-VkPhysicalDeviceExternalBufferInfo-handleType-parameter"])
            })?;

        Ok(())
    }

    /// Converts this info into the raw ash struct used for the external-buffer capability
    /// query.
    pub(crate) fn to_vk(&self) -> ash::vk::PhysicalDeviceExternalBufferInfo<'static> {
        let &Self {
            flags,
            usage,
            handle_type,
            _ne: _,
        } = self;

        ash::vk::PhysicalDeviceExternalBufferInfo::default()
            .flags(flags.into())
            .usage(usage.into())
            .handle_type(handle_type.into())
    }
}
941
/// The external memory properties supported for buffers with a given configuration.
#[derive(Clone, Debug)]
#[non_exhaustive]
pub struct ExternalBufferProperties {
    /// The properties for external memory.
    pub external_memory_properties: ExternalMemoryProperties,
}
949
impl ExternalBufferProperties {
    /// Returns a default-initialized ash output struct for the query to fill in.
    pub(crate) fn to_mut_vk() -> ash::vk::ExternalBufferProperties<'static> {
        ash::vk::ExternalBufferProperties::default()
    }

    /// Converts the filled-in ash struct back into vulkano's representation, ignoring any
    /// remaining fields (e.g. the `pNext` chain) via `..`.
    pub(crate) fn from_vk(val_vk: &ash::vk::ExternalBufferProperties<'_>) -> Self {
        let &ash::vk::ExternalBufferProperties {
            ref external_memory_properties,
            ..
        } = val_vk;

        Self {
            external_memory_properties: ExternalMemoryProperties::from_vk(
                external_memory_properties,
            ),
        }
    }
}
968
vulkan_enum! {
    #[non_exhaustive]

    // An enumeration of all valid index types that can be used with an index buffer.
    IndexType = IndexType(i32);

    // Indices are 8-bit unsigned integers; requires the `ext_index_type_uint8` device
    // extension, as declared below.
    U8 = UINT8_EXT
    RequiresOneOf([
        RequiresAllOf([DeviceExtension(ext_index_type_uint8)]),
    ]),

    // Indices are 16-bit unsigned integers.
    U16 = UINT16,

    // Indices are 32-bit unsigned integers.
    U32 = UINT32,
}
987
988impl IndexType {
989 #[inline]
991 pub fn size(self) -> DeviceSize {
992 match self {
993 IndexType::U8 => 1,
994 IndexType::U16 => 2,
995 IndexType::U32 => 4,
996 }
997 }
998}
999
/// A buffer holding index values, paired with the type of its indices.
#[derive(Clone, Debug)]
pub enum IndexBuffer {
    /// A buffer of 8-bit indices.
    U8(Subbuffer<[u8]>),

    /// A buffer of 16-bit indices.
    U16(Subbuffer<[u16]>),

    /// A buffer of 32-bit indices.
    U32(Subbuffer<[u32]>),
}
1016
1017impl IndexBuffer {
1018 #[inline]
1020 pub fn index_type(&self) -> IndexType {
1021 match self {
1022 Self::U8(_) => IndexType::U8,
1023 Self::U16(_) => IndexType::U16,
1024 Self::U32(_) => IndexType::U32,
1025 }
1026 }
1027
1028 #[inline]
1030 pub fn as_bytes(&self) -> &Subbuffer<[u8]> {
1031 match self {
1032 IndexBuffer::U8(buffer) => buffer.as_bytes(),
1033 IndexBuffer::U16(buffer) => buffer.as_bytes(),
1034 IndexBuffer::U32(buffer) => buffer.as_bytes(),
1035 }
1036 }
1037
1038 #[inline]
1040 pub fn len(&self) -> DeviceSize {
1041 match self {
1042 IndexBuffer::U8(buffer) => buffer.len(),
1043 IndexBuffer::U16(buffer) => buffer.len(),
1044 IndexBuffer::U32(buffer) => buffer.len(),
1045 }
1046 }
1047}
1048
1049impl From<Subbuffer<[u8]>> for IndexBuffer {
1050 #[inline]
1051 fn from(value: Subbuffer<[u8]>) -> Self {
1052 Self::U8(value)
1053 }
1054}
1055
1056impl From<Subbuffer<[u16]>> for IndexBuffer {
1057 #[inline]
1058 fn from(value: Subbuffer<[u16]>) -> Self {
1059 Self::U16(value)
1060 }
1061}
1062
1063impl From<Subbuffer<[u32]>> for IndexBuffer {
1064 #[inline]
1065 fn from(value: Subbuffer<[u32]>) -> Self {
1066 Self::U32(value)
1067 }
1068}
1069
/// Compile-time assertion that `T` implements [`BufferContents`]; no value of `T` is ever
/// stored (`PhantomData`). NOTE(review): hidden from docs — presumably referenced by
/// macro-generated code; confirm against the `BufferContents` derive.
#[doc(hidden)]
pub struct AssertParamIsBufferContents<T: BufferContents + ?Sized>(PhantomData<T>);