vkobject_rs/
buffer.rs

use crate::prelude::*;
use std::{
	any::Any,
	ffi::c_void,
	fmt::{self, Debug, Formatter},
	mem::size_of,
	ops::{Index, IndexMut, Range, RangeFrom, RangeTo, RangeFull, RangeInclusive, RangeToInclusive},
	slice::from_raw_parts_mut,
	sync::{
		Arc,
		RwLock,
		RwLockWriteGuard,
	},
	vec::IntoIter,
};
use struct_iterable::Iterable;

/// The Vulkan buffer object. Like an OpenGL buffer object, it can store vertices, elements (indices), and other data.
pub struct Buffer {
	/// The `VulkanDevice` is the associated device
	pub device: Arc<VulkanDevice>,

	/// The buffer
	pub buffer: Arc<VulkanBuffer>,

	/// The device memory
	pub memory: Arc<VulkanMemory>,

	/// The usage of the buffer, not including `VkBufferUsageFlagBits::VK_BUFFER_USAGE_TRANSFER_SRC_BIT` and `VkBufferUsageFlagBits::VK_BUFFER_USAGE_TRANSFER_DST_BIT`
	pub(crate) usage: VkBufferUsageFlags,

	/// The staging buffer
	pub staging_buffer: RwLock<Option<StagingBuffer>>,
}

impl Buffer {
	/// Create a new buffer
	/// * If `data` is `Some`, the bytes are copied into a newly created staging buffer; call `upload_staging_buffer()` later to transfer them to the device-local buffer. If `data` is `None`, no staging buffer is created.
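	///
	/// A minimal usage sketch (the vertex data is hypothetical; assumes a `device: Arc<VulkanDevice>` is already created):
	/// ```ignore
	/// let vertices: Vec<f32> = vec![0.0, 0.5, -0.5, -0.5, 0.5, -0.5];
	/// let buffer = Buffer::new(device.clone(),
	///     (vertices.len() * size_of::<f32>()) as VkDeviceSize,
	///     Some(vertices.as_ptr() as *const c_void),
	///     VkBufferUsageFlagBits::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT as VkBufferUsageFlags)?;
	/// ```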
	pub fn new(device: Arc<VulkanDevice>, size: VkDeviceSize, data: Option<*const c_void>, usage: VkBufferUsageFlags) -> Result<Self, VulkanError> {
		let buffer = Arc::new(VulkanBuffer::new(device.clone(), size, usage | VkBufferUsageFlagBits::VK_BUFFER_USAGE_TRANSFER_DST_BIT as VkBufferUsageFlags)?);
		let memory = Arc::new(VulkanMemory::new(device.clone(), &buffer.get_memory_requirements()?,
			VkMemoryPropertyFlagBits::VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT as VkMemoryPropertyFlags)?);
		memory.bind_vk_buffer(buffer.get_vk_buffer())?;
		let ret = Self {
			device,
			memory,
			buffer,
			usage,
			staging_buffer: RwLock::new(None),
		};
		if let Some(data) = data {
			unsafe {ret.set_staging_data(data, 0, size as usize)?};
		}
		Ok(ret)
	}

	/// Get the `VkBuffer`
	pub(crate) fn get_vk_buffer(&self) -> VkBuffer {
		self.buffer.get_vk_buffer()
	}

	/// Create the staging buffer if it does not exist, and return the write guard to it
	pub fn ensure_staging_buffer<'a>(&'a self) -> Result<RwLockWriteGuard<'a, Option<StagingBuffer>>, VulkanError> {
		let mut lock = self.staging_buffer.write().unwrap();
		if lock.is_none() {
			*lock = Some(StagingBuffer::new(self.device.clone(), self.buffer.get_size())?);
		}
		Ok(lock)
	}

	/// Discard the staging buffer to save memory
	pub fn discard_staging_buffer(&self) {
		let mut lock = self.staging_buffer.write().unwrap();
		*lock = None;
	}

	/// Get the usage
	pub fn get_usage(&self) -> VkBufferUsageFlags {
		self.usage
	}

	/// Get the size
	pub fn get_size(&self) -> VkDeviceSize {
		self.buffer.get_size()
	}

	/// Map the staging buffer as a typed slice; the returned guard holds the staging buffer write lock until it is dropped
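	///
	/// A minimal sketch (assumes the buffer was created to hold `f32` values):
	/// ```ignore
	/// let mut guard = buffer.map_staging_buffer_as_slice_locked::<f32>()?;
	/// guard[0] = 1.0;
	/// guard.as_slice_mut().fill(0.0);
	/// ```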
	pub fn map_staging_buffer_as_slice_locked<'a, T>(&'a self) -> Result<BufferMapGuard<'a, T>, VulkanError>
	where
		T: Sized + Clone + Copy {
		BufferMapGuard::new(self.ensure_staging_buffer()?, self.get_size() as usize)
	}

	/// Get the address of the staging buffer memory data
	pub fn get_staging_buffer_address(&self) -> Result<*mut c_void, VulkanError> {
		let lock = self.ensure_staging_buffer()?;
		Ok(lock.as_ref().unwrap().get_address())
	}

	/// Write new data into the staging buffer; call `upload_staging_buffer()` afterwards to transfer it to the device-local buffer
	///
	/// # Safety
	///
	/// You must provide a valid pointer `data`, otherwise the behavior of this function is undefined.
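	///
	/// A minimal sketch (the data is hypothetical; the pointer must stay valid for the duration of the call):
	/// ```ignore
	/// let data: [f32; 4] = [1.0, 2.0, 3.0, 4.0];
	/// unsafe {
	///     buffer.set_staging_data(data.as_ptr() as *const c_void, 0, data.len() * size_of::<f32>())?;
	/// }
	/// ```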
	pub unsafe fn set_staging_data(&self, data: *const c_void, offset: VkDeviceSize, size: usize) -> Result<(), VulkanError> {
		let lock = self.ensure_staging_buffer()?;
		lock.as_ref().unwrap().set_data(data, offset, size)?;
		Ok(())
	}

	/// Retrieve the data from the staging buffer
	///
	/// # Safety
	///
	/// You must provide a valid pointer `data`, otherwise the behavior of this function is undefined.
	pub unsafe fn get_staging_data(&self, data: *mut c_void, offset: VkDeviceSize, size: usize) -> Result<(), VulkanError> {
		let lock = self.staging_buffer.read().unwrap();
		if let Some(ref staging_buffer) = *lock {
			staging_buffer.get_data(data, offset, size)
		} else {
			Err(VulkanError::NoStagingBuffer)
		}
	}

	/// Upload data from the staging buffer to the device-local buffer by recording a `vkCmdCopyBuffer()` into `cmdbuf`
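	///
	/// A typical update flow (sketch; `new_data` is a hypothetical `Vec<u8>` and `cmdbuf` must be in the recording state):
	/// ```ignore
	/// unsafe {buffer.set_staging_data(new_data.as_ptr() as *const c_void, 0, new_data.len())?};
	/// buffer.upload_staging_buffer(cmdbuf, 0, buffer.get_size())?;
	/// // Submit `cmdbuf` and wait before the GPU reads the buffer contents.
	/// ```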
	pub fn upload_staging_buffer(&self, cmdbuf: VkCommandBuffer, offset: VkDeviceSize, size: VkDeviceSize) -> Result<(), VulkanError> {
		let lock = self.staging_buffer.read().unwrap();
		if let Some(ref staging_buffer) = *lock {
			let copy_region = VkBufferCopy {
				srcOffset: offset,
				dstOffset: offset,
				size,
			};
			self.device.vkcore.vkCmdCopyBuffer(cmdbuf, staging_buffer.get_vk_buffer(), self.buffer.get_vk_buffer(), 1, &copy_region)?;
			Ok(())
		} else {
			Err(VulkanError::NoStagingBuffer)
		}
	}

	/// Upload multiple regions from the staging buffer to the device-local buffer in a single `vkCmdCopyBuffer()` call
	pub fn upload_staging_buffer_multi(&self, cmdbuf: VkCommandBuffer, regions: &[BufferRegion]) -> Result<(), VulkanError> {
		let lock = self.staging_buffer.read().unwrap();
		if let Some(ref staging_buffer) = *lock {
			let copy_regions: Vec<VkBufferCopy> = regions.iter().map(|r|VkBufferCopy {
				srcOffset: r.offset,
				dstOffset: r.offset,
				size: r.size as VkDeviceSize,
			}).collect();
			self.device.vkcore.vkCmdCopyBuffer(cmdbuf, staging_buffer.get_vk_buffer(), self.buffer.get_vk_buffer(), copy_regions.len() as u32, copy_regions.as_ptr())?;
			Ok(())
		} else {
			Err(VulkanError::NoStagingBuffer)
		}
	}

	/// Download data from the device-local buffer to the staging buffer by recording a `vkCmdCopyBuffer()` into `cmdbuf`
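	///
	/// A typical readback flow (sketch; the command buffer must be submitted and waited on between the two steps):
	/// ```ignore
	/// buffer.download_staging_buffer(cmdbuf, 0, buffer.get_size())?;
	/// // ...submit `cmdbuf` and wait for the copy to finish...
	/// let mut out = vec![0u8; buffer.get_size() as usize];
	/// unsafe {buffer.get_staging_data(out.as_mut_ptr() as *mut c_void, 0, out.len())?};
	/// ```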
	pub fn download_staging_buffer(&self, cmdbuf: VkCommandBuffer, offset: VkDeviceSize, size: VkDeviceSize) -> Result<(), VulkanError> {
		let lock = self.ensure_staging_buffer()?;
		let copy_region = VkBufferCopy {
			srcOffset: offset,
			dstOffset: offset,
			size,
		};
		self.device.vkcore.vkCmdCopyBuffer(cmdbuf, self.buffer.get_vk_buffer(), lock.as_ref().unwrap().get_vk_buffer(), 1, &copy_region)?;
		Ok(())
	}

	/// Download multiple regions from the device-local buffer to the staging buffer in a single `vkCmdCopyBuffer()` call
	pub fn download_staging_buffer_multi(&self, cmdbuf: VkCommandBuffer, regions: &[BufferRegion]) -> Result<(), VulkanError> {
		let lock = self.ensure_staging_buffer()?;
		let copy_regions: Vec<VkBufferCopy> = regions.iter().map(|r|VkBufferCopy {
			srcOffset: r.offset,
			dstOffset: r.offset,
			size: r.size as VkDeviceSize,
		}).collect();
		self.device.vkcore.vkCmdCopyBuffer(cmdbuf, self.buffer.get_vk_buffer(), lock.as_ref().unwrap().get_vk_buffer(), copy_regions.len() as u32, copy_regions.as_ptr())?;
		Ok(())
	}

	/// Create a buffer view over the whole buffer
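	///
	/// A minimal sketch (assumes the buffer was created with a texel-buffer usage flag; the format dictates how shaders read it):
	/// ```ignore
	/// let view = buffer.create_buffer_view(VkFormat::VK_FORMAT_R32G32B32A32_SFLOAT)?;
	/// ```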
	pub fn create_buffer_view(&self, format: VkFormat) -> Result<VulkanBufferView, VulkanError> {
		VulkanBufferView::new(self.buffer.clone(), format)
	}

	/// Create a buffer view over a part of the buffer
	pub fn create_buffer_view_partial(&self, range: &BufferViewRange) -> Result<VulkanBufferView, VulkanError> {
		VulkanBufferView::new_partial(self.buffer.clone(), range)
	}
}

impl Clone for Buffer {
	/// Note: this creates a brand-new buffer and copies only the CPU-side staging data (if any); the device-local contents are not transferred. Panics if the new buffer cannot be created.
	fn clone(&self) -> Self {
		Self::new(self.device.clone(), self.get_size(), self.staging_buffer.read().unwrap().as_ref().map(|b|b.get_address() as *const _), self.usage).unwrap()
	}
}

impl Debug for Buffer {
	fn fmt(&self, f: &mut Formatter) -> fmt::Result {
		f.debug_struct("Buffer")
		.field("memory", &self.memory)
		.field("buffer", &self.buffer)
		.field("usage", &self.usage)
		.field("staging_buffer", &self.staging_buffer)
		.finish()
	}
}

/// A guard that exposes the staging buffer as a typed slice, holding the staging buffer write lock until dropped
#[derive(Debug)]
pub struct BufferMapGuard<'a, T>
where
	T: Sized + Clone + Copy {
	/// The lock guard that keeps the staging buffer locked while the slice is alive
	lock_guard: RwLockWriteGuard<'a, Option<StagingBuffer>>,

	/// The slice of items
	slice: &'a mut [T],
}

impl<'a, T> BufferMapGuard<'a, T>
where
	T: Sized + Clone + Copy {
	/// Create the guard from a locked staging buffer; `size` is in bytes and is truncated to a whole number of `T`s
	pub fn new(lock_guard: RwLockWriteGuard<'a, Option<StagingBuffer>>, size: usize) -> Result<Self, VulkanError> {
		let address = lock_guard.as_ref().unwrap().get_address();
		let len = size / size_of::<T>();
		let slice = unsafe {from_raw_parts_mut(address as *mut T, len)};
		Ok(Self {
			lock_guard,
			slice,
		})
	}

	/// Operate the mapped memory as a slice
	pub fn as_slice(&self) -> &[T] {
		self.slice
	}

	/// Operate the mapped memory as a mutable slice
	pub fn as_slice_mut(&mut self) -> &mut [T] {
		self.slice
	}
}

impl<'a, T> Index<usize> for BufferMapGuard<'a, T>
where
	T: Sized + Clone + Copy {
	type Output = T;
	fn index(&self, index: usize) -> &T {
		&self.slice[index]
	}
}

impl<'a, T> IndexMut<usize> for BufferMapGuard<'a, T>
where
	T: Sized + Clone + Copy {
	fn index_mut(&mut self, index: usize) -> &mut T {
		&mut self.slice[index]
	}
}

impl<'a, T> Index<Range<usize>> for BufferMapGuard<'a, T>
where
	T: Sized + Clone + Copy {
	type Output = [T];
	fn index(&self, range: Range<usize>) -> &[T] {
		&self.slice[range]
	}
}

impl<'a, T> IndexMut<Range<usize>> for BufferMapGuard<'a, T>
where
	T: Sized + Clone + Copy {
	fn index_mut(&mut self, range: Range<usize>) -> &mut [T] {
		&mut self.slice[range]
	}
}

impl<'a, T> Index<RangeFrom<usize>> for BufferMapGuard<'a, T>
where
	T: Sized + Clone + Copy {
	type Output = [T];
	fn index(&self, range: RangeFrom<usize>) -> &[T] {
		&self.slice[range]
	}
}

impl<'a, T> IndexMut<RangeFrom<usize>> for BufferMapGuard<'a, T>
where
	T: Sized + Clone + Copy {
	fn index_mut(&mut self, range: RangeFrom<usize>) -> &mut [T] {
		&mut self.slice[range]
	}
}

impl<'a, T> Index<RangeTo<usize>> for BufferMapGuard<'a, T>
where
	T: Sized + Clone + Copy {
	type Output = [T];
	fn index(&self, range: RangeTo<usize>) -> &[T] {
		&self.slice[range]
	}
}

impl<'a, T> IndexMut<RangeTo<usize>> for BufferMapGuard<'a, T>
where
	T: Sized + Clone + Copy {
	fn index_mut(&mut self, range: RangeTo<usize>) -> &mut [T] {
		&mut self.slice[range]
	}
}

impl<'a, T> Index<RangeFull> for BufferMapGuard<'a, T>
where
	T: Sized + Clone + Copy {
	type Output = [T];
	fn index(&self, _: RangeFull) -> &[T] {
		&self.slice[..]
	}
}

impl<'a, T> IndexMut<RangeFull> for BufferMapGuard<'a, T>
where
	T: Sized + Clone + Copy {
	fn index_mut(&mut self, _: RangeFull) -> &mut [T] {
		&mut self.slice[..]
	}
}

impl<'a, T> Index<RangeInclusive<usize>> for BufferMapGuard<'a, T>
where
	T: Sized + Clone + Copy {
	type Output = [T];
	fn index(&self, range: RangeInclusive<usize>) -> &[T] {
		&self.slice[range]
	}
}

impl<'a, T> IndexMut<RangeInclusive<usize>> for BufferMapGuard<'a, T>
where
	T: Sized + Clone + Copy {
	fn index_mut(&mut self, range: RangeInclusive<usize>) -> &mut [T] {
		&mut self.slice[range]
	}
}

impl<'a, T> Index<RangeToInclusive<usize>> for BufferMapGuard<'a, T>
where
	T: Sized + Clone + Copy {
	type Output = [T];
	fn index(&self, range: RangeToInclusive<usize>) -> &[T] {
		&self.slice[range]
	}
}

impl<'a, T> IndexMut<RangeToInclusive<usize>> for BufferMapGuard<'a, T>
where
	T: Sized + Clone + Copy {
	fn index_mut(&mut self, range: RangeToInclusive<usize>) -> &mut [T] {
		&mut self.slice[range]
	}
}

/// The trait that a uniform struct must implement
pub trait UniformStructType: Copy + Clone + Sized + Default + Send + Sync + Debug + Iterable + Any {}
impl<T> UniformStructType for T where T: Copy + Clone + Sized + Default + Send + Sync + Debug + Iterable + Any {}

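/// Derive the traits needed for a struct to be used as a uniform-buffer type.
///
/// A minimal usage sketch (the struct name and fields are illustrative):
/// ```ignore
/// derive_uniform_buffer_type! {
///     pub struct CameraUniforms {
///         pub mvp_matrix: [[f32; 4]; 4],
///         pub time: f32,
///     }
/// }
/// ```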
#[macro_export]
macro_rules! derive_uniform_buffer_type {
	($item: item) => {
		#[repr(C)]
		#[derive(Iterable, Default, Debug, Clone, Copy)]
		$item
	};
}

/// The uniform buffer
#[derive(Debug, Clone)]
pub struct UniformBuffer<U>
where
	U: UniformStructType {
	/// The buffer
	pub buffer: Buffer,

	/// An instance of the uniform struct, kept so that its members can be iterated
	iterable: U,
}

impl<U> UniformBuffer<U>
where
	U: UniformStructType {
	/// Create the `UniformBuffer`, initialized with `U::default()`
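	///
	/// A minimal sketch (assumes `CameraUniforms` was defined via `derive_uniform_buffer_type!`):
	/// ```ignore
	/// let ubo = UniformBuffer::<CameraUniforms>::new(device.clone())?;
	/// ```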
	pub fn new(device: Arc<VulkanDevice>) -> Result<Self, VulkanError> {
		let def = U::default();
		let buffer = Buffer::new(device, size_of::<U>() as VkDeviceSize, Some(&def as *const U as *const c_void), VkBufferUsageFlagBits::VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT as VkBufferUsageFlags)?;
		Ok(Self {
			buffer,
			iterable: def,
		})
	}

	/// Create the staging buffer if it does not exist
	pub fn ensure_staging_buffer<'a>(&'a self) -> Result<RwLockWriteGuard<'a, Option<StagingBuffer>>, VulkanError> {
		self.buffer.ensure_staging_buffer()
	}

	/// Discard the staging buffer to save memory
	pub fn discard_staging_buffer(&self) {
		self.buffer.discard_staging_buffer()
	}

	/// Get the address of the staging buffer memory data
	pub fn get_staging_buffer_address(&self) -> Result<*mut c_void, VulkanError> {
		self.buffer.get_staging_buffer_address()
	}

	/// Flush the staging buffer contents to the GPU by recording a copy into `cmdbuf`
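	///
	/// A typical frame-update sketch (`cmdbuf` must be in the recording state; `elapsed_seconds` is illustrative):
	/// ```ignore
	/// ubo.as_mut().time = elapsed_seconds;
	/// ubo.flush(cmdbuf)?;
	/// ```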
	pub fn flush(&self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError> {
		self.buffer.upload_staging_buffer(cmdbuf, 0, self.buffer.get_size())
	}
}

impl<U> AsRef<U> for UniformBuffer<U>
where
	U: UniformStructType {
	fn as_ref(&self) -> &U {
		// The reference points into the staging buffer, which is created on demand; panics if it cannot be created.
		unsafe {&*(self.get_staging_buffer_address().unwrap() as *const U)}
	}
}

impl<U> AsMut<U> for UniformBuffer<U>
where
	U: UniformStructType {
	fn as_mut(&mut self) -> &mut U {
		unsafe {&mut *(self.get_staging_buffer_address().unwrap() as *mut U)}
	}
}

unsafe impl<U> Send for UniformBuffer<U> where U: UniformStructType {}
unsafe impl<U> Sync for UniformBuffer<U> where U: UniformStructType {}

/// The trait for the `UniformBuffer` to be able to wrap into an object
pub trait GenericUniformBuffer: IterableDataAttrib + Debug + Any + Send + Sync {
	/// Get the `VkBuffer`
	fn get_vk_buffer(&self) -> VkBuffer;

	/// Get the size of the buffer
	fn get_size(&self) -> VkDeviceSize;

	/// Get the address of the staging buffer
	fn get_staging_buffer_address(&self) -> Result<*mut c_void, VulkanError>;

	/// Upload to GPU
	fn flush(&self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError>;
}

impl<U> GenericUniformBuffer for UniformBuffer<U>
where
	U: UniformStructType {
	fn get_vk_buffer(&self) -> VkBuffer {
		self.buffer.get_vk_buffer()
	}

	fn get_size(&self) -> VkDeviceSize {
		self.buffer.get_size()
	}

	fn get_staging_buffer_address(&self) -> Result<*mut c_void, VulkanError> {
		self.buffer.get_staging_buffer_address()
	}

	fn flush(&self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError> {
		self.buffer.upload_staging_buffer(cmdbuf, 0, self.get_size())
	}
}

impl<U> IterableDataAttrib for UniformBuffer<U>
where
	U: UniformStructType {
	fn iter_members(&self) -> IntoIter<(&'static str, &dyn Any)> {
		self.iterable.iter()
	}
}

/// The trait for the `StorageBuffer` to be able to wrap into an object
pub trait GenericStorageBuffer: IterableDataAttrib + Debug + Any + Send + Sync {
	/// Get the `VkBuffer`
	fn get_vk_buffer(&self) -> VkBuffer;

	/// Get the size of the buffer
	fn get_size(&self) -> VkDeviceSize;

	/// Get the address of the staging buffer
	fn get_staging_buffer_address(&self) -> Result<*mut c_void, VulkanError>;

	/// Upload to GPU
	fn flush(&self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError>;
}

/// The trait that a storage-buffer struct must implement
pub trait StorageBufferStructType: Copy + Clone + Sized + Default + Send + Sync + Debug + Iterable + Any {}
impl<T> StorageBufferStructType for T where T: Copy + Clone + Sized + Default + Send + Sync + Debug + Iterable + Any {}

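/// Derive the traits needed for a struct to be used as a storage-buffer type.
///
/// A minimal usage sketch (the struct name and fields are illustrative):
/// ```ignore
/// derive_storage_buffer_type! {
///     pub struct ParticleState {
///         pub positions: [[f32; 4]; 256],
///     }
/// }
/// ```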
#[macro_export]
macro_rules! derive_storage_buffer_type {
	($item: item) => {
		#[repr(C)]
		#[derive(Iterable, Default, Debug, Clone, Copy)]
		$item
	};
}

/// The storage buffer
#[derive(Debug, Clone)]
pub struct StorageBuffer<S>
where
	S: StorageBufferStructType {
	/// The buffer
	pub buffer: Buffer,

	/// An instance of the storage-buffer struct, kept so that its members can be iterated
	iterable: S,
}

impl<S> StorageBuffer<S>
where
	S: StorageBufferStructType {
	/// Create the `StorageBuffer`, initialized with `S::default()`
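	///
	/// A minimal sketch (assumes `ParticleState` was defined via `derive_storage_buffer_type!`):
	/// ```ignore
	/// let ssbo = StorageBuffer::<ParticleState>::new(device.clone())?;
	/// ```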
	pub fn new(device: Arc<VulkanDevice>) -> Result<Self, VulkanError> {
		let def = S::default();
		let buffer = Buffer::new(device, size_of::<S>() as VkDeviceSize, Some(&def as *const S as *const c_void), VkBufferUsageFlagBits::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT as VkBufferUsageFlags)?;
		Ok(Self {
			buffer,
			iterable: def,
		})
	}

	/// Create the staging buffer if it does not exist
	pub fn ensure_staging_buffer<'a>(&'a self) -> Result<RwLockWriteGuard<'a, Option<StagingBuffer>>, VulkanError> {
		self.buffer.ensure_staging_buffer()
	}

	/// Discard the staging buffer to save memory
	pub fn discard_staging_buffer(&self) {
		self.buffer.discard_staging_buffer()
	}

	/// Get the address of the staging buffer memory data
	pub fn get_staging_buffer_address(&self) -> Result<*mut c_void, VulkanError> {
		self.buffer.get_staging_buffer_address()
	}

	/// Flush the staging buffer contents to the GPU by recording a copy into `cmdbuf`
	pub fn flush(&self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError> {
		self.buffer.upload_staging_buffer(cmdbuf, 0, self.buffer.get_size())
	}
}

impl<S> AsRef<S> for StorageBuffer<S>
where
	S: StorageBufferStructType {
	fn as_ref(&self) -> &S {
		unsafe {&*(self.get_staging_buffer_address().unwrap() as *const S)}
	}
}

impl<S> AsMut<S> for StorageBuffer<S>
where
	S: StorageBufferStructType {
	fn as_mut(&mut self) -> &mut S {
		unsafe {&mut *(self.get_staging_buffer_address().unwrap() as *mut S)}
	}
}

unsafe impl<S> Send for StorageBuffer<S> where S: StorageBufferStructType {}
unsafe impl<S> Sync for StorageBuffer<S> where S: StorageBufferStructType {}

impl<S> GenericStorageBuffer for StorageBuffer<S>
where
	S: StorageBufferStructType {
	fn get_vk_buffer(&self) -> VkBuffer {
		self.buffer.get_vk_buffer()
	}

	fn get_size(&self) -> VkDeviceSize {
		self.buffer.get_size()
	}

	fn get_staging_buffer_address(&self) -> Result<*mut c_void, VulkanError> {
		self.buffer.get_staging_buffer_address()
	}

	fn flush(&self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError> {
		self.buffer.upload_staging_buffer(cmdbuf, 0, self.get_size())
	}
}

impl<S> IterableDataAttrib for StorageBuffer<S>
where
	S: StorageBufferStructType {
	fn iter_members(&self) -> IntoIter<(&'static str, &dyn Any)> {
		self.iterable.iter()
	}
}