//! `vkobject_rs` — `buffer.rs`
//!
//! Vulkan buffer objects: `Buffer` (device-local buffer with an optional host-visible
//! staging buffer), plus typed `UniformBuffer<U>` / `StorageBuffer<S>` wrappers.
2use crate::prelude::*;
3use std::{
4	ffi::c_void,
5	fmt::{self, Debug, Formatter},
6	marker::PhantomData,
7	mem::size_of,
8	sync::Arc,
9};
10use struct_iterable::Iterable;
11
/// The Vulkan buffer object, same as the OpenGL buffer object, could be used to store vertices, elements(indices), and the other data.
///
/// The buffer itself lives in device-local memory; data transfer goes through an
/// optional host-visible `StagingBuffer` that can be created and discarded on demand.
pub struct Buffer {
	/// The `VulkanDevice` is the associated device
	pub device: Arc<VulkanDevice>,

	/// The buffer (device-local `VulkanBuffer`)
	pub buffer: Arc<VulkanBuffer>,

	/// The device memory backing `buffer`
	pub memory: Arc<VulkanMemory>,

	/// The usage of the buffer, not including `VkBufferUsageFlagBits::VK_BUFFER_USAGE_TRANSFER_SRC_BIT` and `VkBufferUsageFlagBits::VK_BUFFER_USAGE_TRANSFER_DST_BIT`
	pub(crate) usage: VkBufferUsageFlags,

	/// The staging buffer; `None` until a staging operation needs it or after
	/// `discard_staging_buffer()` was called
	pub staging_buffer: Option<StagingBuffer>,
}
29
30impl Buffer {
31	/// Create a new buffer
32	/// * If `data` is `None`, `cmdbuf` could be `null()` because no `vkCmdCopyBuffer()` will be issued.
33	pub fn new(device: Arc<VulkanDevice>, size: VkDeviceSize, data: Option<*const c_void>, usage: VkBufferUsageFlags) -> Result<Self, VulkanError> {
34		let buffer = Arc::new(VulkanBuffer::new(device.clone(), size, usage | VkBufferUsageFlagBits::VK_BUFFER_USAGE_TRANSFER_DST_BIT as VkBufferUsageFlags)?);
35		let memory = Arc::new(VulkanMemory::new(device.clone(), &buffer.get_memory_requirements()?,
36			VkMemoryPropertyFlagBits::VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT as VkMemoryPropertyFlags)?);
37		memory.bind_vk_buffer(buffer.get_vk_buffer())?;
38		let mut ret = Self {
39			device,
40			memory,
41			buffer,
42			usage,
43			staging_buffer: None,
44		};
45		if let Some(data) = data {
46			unsafe {ret.set_staging_data(data, 0, size as usize)?};
47		}
48		Ok(ret)
49	}
50
51	/// Get the `VkBuffer`
52	pub(crate) fn get_vk_buffer(&self) -> VkBuffer {
53		self.buffer.get_vk_buffer()
54	}
55
56	/// Create the staging buffer if not exist
57	pub fn ensure_staging_buffer(&mut self) -> Result<(), VulkanError> {
58		if self.staging_buffer.is_none() {
59			self.staging_buffer = Some(StagingBuffer::new(self.device.clone(), self.memory.get_size())?);
60		}
61		Ok(())
62	}
63
64	/// Discard the staging buffer to save memory
65	pub fn discard_staging_buffer(&mut self) {
66		self.staging_buffer = None;
67	}
68
69	/// Get the staging buffer
70	pub fn get_staging_buffer(&self) -> Option<&StagingBuffer> {
71		self.staging_buffer.as_ref()
72	}
73
74	/// Get the usage
75	pub fn get_usage(&self) -> VkBufferUsageFlags {
76		self.usage
77	}
78
79	/// Get the size
80	pub fn get_size(&self) -> VkDeviceSize {
81		self.buffer.get_size()
82	}
83
84	/// Get the address of the staging buffer memory data
85	pub fn get_staging_buffer_address(&mut self) -> Result<*mut c_void, VulkanError> {
86		self.ensure_staging_buffer()?;
87		Ok(self.staging_buffer.as_ref().unwrap().get_address())
88	}
89
90	/// Update new data to the buffer
91	///
92	/// # Safety
93	///
94	/// You must provide a valid pointer `data`, otherwise the behavior of this function is undefined.
95	pub unsafe fn set_staging_data(&mut self, data: *const c_void, offset: VkDeviceSize, size: usize) -> Result<(), VulkanError> {
96		self.ensure_staging_buffer()?;
97		self.staging_buffer.as_mut().unwrap().set_data(data, offset, size)?;
98		Ok(())
99	}
100
101	/// Retrieve the data from the staging buffer
102	///
103	/// # Safety
104	///
105	/// You must provide a valid pointer `data`, otherwise the behavior of this function is undefined.
106	pub unsafe fn get_staging_data(&mut self, data: *mut c_void, offset: VkDeviceSize, size: usize) -> Result<(), VulkanError> {
107		if let Some(ref mut staging_buffer) = self.staging_buffer {
108			staging_buffer.get_data(data, offset, size)
109		} else {
110			Err(VulkanError::NoStagingBuffer)
111		}
112	}
113
114	/// Upload the data from the staging buffer
115	pub fn upload_staging_buffer(&self, cmdbuf: VkCommandBuffer, offset: VkDeviceSize, size: VkDeviceSize) -> Result<(), VulkanError> {
116		if let Some(ref staging_buffer) = self.staging_buffer {
117			let copy_region = VkBufferCopy {
118				srcOffset: offset,
119				dstOffset: offset,
120				size: size as VkDeviceSize,
121			};
122			self.device.vkcore.vkCmdCopyBuffer(cmdbuf, staging_buffer.get_vk_buffer(), self.buffer.get_vk_buffer(), 1, &copy_region)?;
123			Ok(())
124		} else {
125			Err(VulkanError::NoStagingBuffer)
126		}
127	}
128
129	/// Upload the data from the staging buffer
130	pub fn upload_staging_buffer_multi(&self, cmdbuf: VkCommandBuffer, regions: &[BufferRegion]) -> Result<(), VulkanError> {
131		if let Some(ref staging_buffer) = self.staging_buffer {
132			let copy_regions: Vec<VkBufferCopy> = regions.iter().map(|r|VkBufferCopy {
133				srcOffset: r.offset,
134				dstOffset: r.offset,
135				size: r.size as VkDeviceSize,
136			}).collect();
137			self.device.vkcore.vkCmdCopyBuffer(cmdbuf, staging_buffer.get_vk_buffer(), self.buffer.get_vk_buffer(), copy_regions.len() as u32, copy_regions.as_ptr())?;
138			Ok(())
139		} else {
140			Err(VulkanError::NoStagingBuffer)
141		}
142	}
143
144	/// Download the data to the staging buffer
145	pub fn download_staging_buffer(&mut self, cmdbuf: VkCommandBuffer, offset: VkDeviceSize, size: VkDeviceSize) -> Result<(), VulkanError> {
146		self.ensure_staging_buffer()?;
147		let copy_region = VkBufferCopy {
148			srcOffset: offset,
149			dstOffset: offset,
150			size: size as VkDeviceSize,
151		};
152		self.device.vkcore.vkCmdCopyBuffer(cmdbuf, self.buffer.get_vk_buffer(), self.staging_buffer.as_ref().unwrap().get_vk_buffer(), 1, &copy_region)?;
153		Ok(())
154	}
155
156	/// Download the data to the staging buffer
157	pub fn download_staging_buffer_multi(&mut self, cmdbuf: VkCommandBuffer, regions: &[BufferRegion]) -> Result<(), VulkanError> {
158		self.ensure_staging_buffer()?;
159		let copy_regions: Vec<VkBufferCopy> = regions.iter().map(|r|VkBufferCopy {
160			srcOffset: r.offset,
161			dstOffset: r.offset,
162			size: r.size as VkDeviceSize,
163		}).collect();
164		self.device.vkcore.vkCmdCopyBuffer(cmdbuf, self.buffer.get_vk_buffer(), self.staging_buffer.as_ref().unwrap().get_vk_buffer(), copy_regions.len() as u32, copy_regions.as_ptr())?;
165		Ok(())
166	}
167
168	/// Map the staging buffer
169	pub fn map_staging_buffer<'a>(&'a mut self, offset: VkDeviceSize, size: usize) -> Result<MappedMemory<'a>, VulkanError> {
170		self.ensure_staging_buffer()?;
171		let staging_buffer = self.staging_buffer.as_mut().unwrap();
172		staging_buffer.memory.map(offset, size)
173	}
174
175	/// Create a buffer view
176	pub fn create_buffer_view(&self, format: VkFormat) -> Result<VulkanBufferView, VulkanError> {
177		VulkanBufferView::new(self.buffer.clone(), format)
178	}
179
180	/// Create a buffer view
181	pub fn create_buffer_view_partial(&self, range: &BufferViewRange) -> Result<VulkanBufferView, VulkanError> {
182		VulkanBufferView::new_partial(self.buffer.clone(), range)
183	}
184}
185
impl Clone for Buffer {
	/// Creates a new buffer of the same size and usage on the same device.
	/// Only the staging-buffer contents (if any) are carried over into the new
	/// buffer's staging buffer; the device-local contents are NOT copied, and the
	/// caller must record an upload to fill the new device-local buffer.
	///
	/// # Panics
	/// Panics if creating the new buffer fails (the `Result` is `unwrap()`ed
	/// because `Clone::clone` cannot return an error).
	fn clone(&self) -> Self {
		Self::new(self.device.clone(), self.get_size(), self.staging_buffer.as_ref().map(|b|b.get_address() as *const _), self.usage).unwrap()
	}
}
191
192impl Debug for Buffer {
193	fn fmt(&self, f: &mut Formatter) -> fmt::Result {
194		f.debug_struct("Buffer")
195		.field("memory", &self.memory)
196		.field("buffer", &self.buffer)
197		.field("usage", &self.usage)
198		.field("staging_buffer", &self.staging_buffer)
199		.finish()
200	}
201}
202
/// The trait that the struct of uniform must implement.
/// It is satisfied automatically (via the blanket impl below) by any type that
/// derives `Copy`, `Clone`, `Default`, `Debug` and `Iterable`, e.g. through
/// the `derive_uniform_buffer_type!` macro.
pub trait UniformStructType: Copy + Clone + Sized + Default + Debug + Iterable {}
impl<T> UniformStructType for T where T: Copy + Clone + Sized + Default + Debug + Iterable {}
206
/// Wrap a struct definition with the derives (`Iterable`, `Default`, `Debug`,
/// `Clone`, `Copy`) required for it to satisfy `UniformStructType`.
#[macro_export]
macro_rules! derive_uniform_buffer_type {
	($item: item) => {
		#[derive(Iterable, Default, Debug, Clone, Copy)]
		$item
	};
}
214
/// The uniform buffer: a `Buffer` sized for exactly one `U`, with typed access
/// to the staging data via `AsRef<U>` / `AsMut<U>`.
#[derive(Debug, Clone)]
pub struct UniformBuffer<U>
where
	U: UniformStructType {
	/// The underlying buffer
	pub buffer: Buffer,

	/// The phantom data that holds the uniform struct type
	_phantom: PhantomData<U>,
}
226
227impl<U> UniformBuffer<U>
228where
229	U: UniformStructType {
230	/// Create the `UniformBuffer`
231	pub fn new(device: Arc<VulkanDevice>) -> Result<Self, VulkanError> {
232		let def = U::default();
233		let buffer = Buffer::new(device.clone(), size_of::<U>() as VkDeviceSize, Some(&def as *const U as *const c_void), VkBufferUsageFlagBits::VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT as VkBufferUsageFlags)?;
234		Ok(Self {
235			buffer,
236			_phantom: PhantomData,
237		})
238	}
239
240	/// Flush to GPU
241	pub fn flush(&self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError> {
242		self.buffer.upload_staging_buffer(cmdbuf, 0, self.buffer.get_size())
243	}
244}
245
impl<U> AsRef<U> for UniformBuffer<U>
where
	U: UniformStructType {
	/// Borrow the staging data as a `U`.
	///
	/// Panics if the staging buffer has been discarded; `UniformBuffer::new()`
	/// always creates it.
	fn as_ref(&self) -> &U {
		// SAFETY (presumed): the staging buffer is at least `size_of::<U>()` bytes and
		// `get_address()` stays valid while `self` is borrowed — TODO confirm alignment
		// guarantees of the staging allocation for `U`.
		unsafe{&*(self.buffer.staging_buffer.as_ref().unwrap().get_address() as *const U)}
	}
}
253
impl<U> AsMut<U> for UniformBuffer<U>
where
	U: UniformStructType {
	/// Mutably borrow the staging data as a `U`; call `flush()` afterwards to upload.
	///
	/// Panics if the staging buffer has been discarded; `UniformBuffer::new()`
	/// always creates it.
	fn as_mut(&mut self) -> &mut U {
		// SAFETY (presumed): see `as_ref()`; `&mut self` guarantees exclusive access
		// on the Rust side — TODO confirm no concurrent GPU access is in flight.
		unsafe{&mut *(self.buffer.staging_buffer.as_ref().unwrap().get_address() as *mut U)}
	}
}
261
// SAFETY (presumed): the raw staging pointer is owned by the buffer and `U` is
// `Copy`; NOTE(review): `Sync` allows shared `as_ref()` across threads — confirm
// the underlying staging memory access is safe under concurrent reads.
unsafe impl<U> Send for UniformBuffer<U> where U: UniformStructType {}
unsafe impl<U> Sync for UniformBuffer<U> where U: UniformStructType {}
264
/// The trait for the `UniformBuffer` to be able to wrap into a type-erased object
pub trait GenericUniformBuffer: Debug {
	/// Get the `VkBuffer`
	fn get_vk_buffer(&self) -> VkBuffer;

	/// Get the size of the buffer in bytes
	fn get_size(&self) -> VkDeviceSize;

	/// Get the address of the staging buffer
	fn get_staging_buffer_address(&self) -> *mut c_void;

	/// Upload the staging buffer contents to the GPU
	fn flush(&self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError>;
}
279
280impl<U> GenericUniformBuffer for UniformBuffer<U>
281where
282	U: UniformStructType {
283	fn get_vk_buffer(&self) -> VkBuffer {
284		self.buffer.get_vk_buffer()
285	}
286
287	fn get_size(&self) -> VkDeviceSize {
288		self.buffer.get_size()
289	}
290
291	fn get_staging_buffer_address(&self) -> *mut c_void {
292		self.buffer.staging_buffer.as_ref().unwrap().get_address()
293	}
294
295	fn flush(&self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError> {
296		self.buffer.upload_staging_buffer(cmdbuf, 0, self.get_size() as VkDeviceSize)
297	}
298}
299
/// The trait for the `StorageBuffer` to be able to wrap into a type-erased object
pub trait GenericStorageBuffer: Debug {
	/// Get the `VkBuffer`
	fn get_vk_buffer(&self) -> VkBuffer;

	/// Get the size of the buffer in bytes
	fn get_size(&self) -> VkDeviceSize;

	/// Get the address of the staging buffer
	fn get_staging_buffer_address(&self) -> *mut c_void;

	/// Upload the staging buffer contents to the GPU
	fn flush(&self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError>;
}
314
/// The trait that the struct of a storage buffer must implement.
/// It is satisfied automatically (via the blanket impl below) by any type that
/// derives `Copy`, `Clone`, `Default`, `Debug` and `Iterable`, e.g. through
/// the `derive_storage_buffer_type!` macro.
pub trait StorageBufferStructType: Copy + Clone + Sized + Default + Debug + Iterable {}
impl<T> StorageBufferStructType for T where T: Copy + Clone + Sized + Default + Debug + Iterable {}
318
/// Wrap a struct definition with the derives (`Iterable`, `Default`, `Debug`,
/// `Clone`, `Copy`) required for it to satisfy `StorageBufferStructType`.
#[macro_export]
macro_rules! derive_storage_buffer_type {
	($item: item) => {
		#[derive(Iterable, Default, Debug, Clone, Copy)]
		$item
	};
}
326
/// The storage buffer: a `Buffer` sized for exactly one `S`, with typed access
/// to the staging data via `AsRef<S>` / `AsMut<S>`.
#[derive(Debug, Clone)]
pub struct StorageBuffer<S>
where
	S: StorageBufferStructType {
	/// The underlying buffer
	pub buffer: Buffer,

	/// The phantom data that holds the storage struct type
	_phantom: PhantomData<S>,
}
338
339impl<S> StorageBuffer<S>
340where
341	S: StorageBufferStructType {
342	/// Create the `StorageBuffer`
343	pub fn new(device: Arc<VulkanDevice>) -> Result<Self, VulkanError> {
344		let def = S::default();
345		let buffer = Buffer::new(device.clone(), size_of::<S>() as VkDeviceSize, Some(&def as *const S as *const c_void), VkBufferUsageFlagBits::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT as VkBufferUsageFlags)?;
346		Ok(Self {
347			buffer,
348			_phantom: PhantomData,
349		})
350	}
351}
352
impl<S> AsRef<S> for StorageBuffer<S>
where
	S: StorageBufferStructType {
	/// Borrow the staging data as an `S`.
	///
	/// Panics if the staging buffer has been discarded; `StorageBuffer::new()`
	/// always creates it.
	fn as_ref(&self) -> &S {
		// SAFETY (presumed): the staging buffer is at least `size_of::<S>()` bytes and
		// `get_address()` stays valid while `self` is borrowed — TODO confirm alignment
		// guarantees of the staging allocation for `S`.
		unsafe{&*(self.buffer.staging_buffer.as_ref().unwrap().get_address() as *const S)}
	}
}
360
impl<S> AsMut<S> for StorageBuffer<S>
where
	S: StorageBufferStructType {
	/// Mutably borrow the staging data as an `S`; upload afterwards to apply on GPU.
	///
	/// Panics if the staging buffer has been discarded; `StorageBuffer::new()`
	/// always creates it.
	fn as_mut(&mut self) -> &mut S {
		// SAFETY (presumed): see `as_ref()`; `&mut self` guarantees exclusive access
		// on the Rust side — TODO confirm no concurrent GPU access is in flight.
		unsafe{&mut *(self.buffer.staging_buffer.as_ref().unwrap().get_address() as *mut S)}
	}
}
368
// SAFETY (presumed): the raw staging pointer is owned by the buffer and `S` is
// `Copy`; NOTE(review): `Sync` allows shared `as_ref()` across threads — confirm
// the underlying staging memory access is safe under concurrent reads.
unsafe impl<S> Send for StorageBuffer<S> where S: StorageBufferStructType {}
unsafe impl<S> Sync for StorageBuffer<S> where S: StorageBufferStructType {}
371
372impl<S> GenericStorageBuffer for StorageBuffer<S>
373where
374	S: StorageBufferStructType {
375	fn get_vk_buffer(&self) -> VkBuffer {
376		self.buffer.get_vk_buffer()
377	}
378
379	fn get_size(&self) -> VkDeviceSize {
380		self.buffer.get_size()
381	}
382
383	fn get_staging_buffer_address(&self) -> *mut c_void {
384		self.buffer.staging_buffer.as_ref().unwrap().get_address()
385	}
386
387	fn flush(&self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError> {
388		self.buffer.upload_staging_buffer(cmdbuf, 0, self.get_size() as VkDeviceSize)
389	}
390}