// vkobject_rs/buffervec.rs

use crate::prelude::*;
use bitvec::vec::BitVec;
use std::{
	any::Any,
	cmp::min,
	ffi::c_void,
	fmt::{self, Debug, Formatter},
	marker::PhantomData,
	mem::{size_of, size_of_val},
	ops::{Index, IndexMut, Range, RangeFrom, RangeTo, RangeFull, RangeInclusive, RangeToInclusive},
	ptr::{copy, null_mut},
	slice::{from_raw_parts, from_raw_parts_mut},
	sync::Arc,
};

/// A type that can be used as an item of a `BufferVec`
pub trait BufferVecItem: Clone + Copy + Sized + Default + Send + Sync + Debug + Any {}
impl<T> BufferVecItem for T where T: Clone + Copy + Sized + Default + Send + Sync + Debug + Any {}

/// An advanced buffer object that can be used like a vector
pub struct BufferVec<T: BufferVecItem> {
	/// The buffer
	buffer: Buffer,

	/// The address of the data in the staging buffer
	staging_buffer_data_address: *mut T,

	/// The number of items in the buffer
	num_items: usize,

	/// The capacity of the buffer, in items
	capacity: usize,

	/// The bitmap indicating which cached items (in the staging buffer) were changed
	cache_modified_bitmap: BitVec,

	/// The flag indicating whether any cached data (in the staging buffer) was changed
	cache_modified: bool,

	/// The phantom data to hold the generic type `T`
	_phantom: PhantomData<T>,
}

impl<T> BufferVec<T>
where
	T: BufferVecItem {
	/// Create the `BufferVec<T>`
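	///
	/// # Example
	///
	/// A minimal usage sketch; `create_device()` is a hypothetical helper
	/// returning an `Arc<VulkanDevice>`, and the usage flag is only an
	/// assumption for illustration:
	///
	/// ```ignore
	/// let device = create_device()?;
	/// let mut vertices: BufferVec<f32> =
	/// 	BufferVec::new(device, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT as VkBufferUsageFlags)?;
	/// assert!(vertices.is_empty());
	/// ```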
	pub fn new(device: Arc<VulkanDevice>, usage: VkBufferUsageFlags) -> Result<Self, VulkanError> {
		let buffer = Buffer::new(device, 0, None, usage)?;
		Ok(Self {
			buffer,
			staging_buffer_data_address: null_mut(),
			num_items: 0,
			capacity: 0,
			cache_modified_bitmap: BitVec::new(),
			cache_modified: false,
			_phantom: PhantomData,
		})
	}

	/// Get the VkBuffer
	pub(crate) fn get_vk_buffer(&self) -> VkBuffer {
		self.buffer.get_vk_buffer()
	}

	/// Get the device
	pub fn get_device(&self) -> Arc<VulkanDevice> {
		self.buffer.device.clone()
	}

	/// Create from a slice of data
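	///
	/// # Example
	///
	/// A sketch assuming a device and a command buffer that is currently
	/// recording are already available (both are assumptions here):
	///
	/// ```ignore
	/// let data = [1.0f32, 2.0, 3.0, 4.0];
	/// let vertices = BufferVec::from(device, &data, cmdbuf, usage)?;
	/// assert_eq!(vertices.len(), 4);
	/// ```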
	pub fn from(device: Arc<VulkanDevice>, data: &[T], cmdbuf: VkCommandBuffer, usage: VkBufferUsageFlags) -> Result<Self, VulkanError> {
		let buffer = Buffer::new(device, size_of_val(data) as VkDeviceSize, Some(data.as_ptr() as *const c_void), usage)?;
		let staging_buffer_data_address = buffer.get_staging_buffer_address()? as *mut T;
		buffer.upload_staging_buffer(cmdbuf, 0, size_of_val(data) as VkDeviceSize)?;
		Ok(Self {
			buffer,
			staging_buffer_data_address,
			num_items: data.len(),
			capacity: data.len(),
			cache_modified_bitmap: BitVec::repeat(false, data.len()),
			cache_modified: false,
			_phantom: PhantomData,
		})
	}

	/// Create the `BufferVec<T>` with an initial capacity
	pub fn with_capacity(device: Arc<VulkanDevice>, capacity: usize, usage: VkBufferUsageFlags) -> Result<Self, VulkanError> {
		let buffer = Buffer::new(device, (capacity * size_of::<T>()) as VkDeviceSize, None, usage)?;
		let staging_buffer_data_address = buffer.get_staging_buffer_address()? as *mut T;
		Ok(Self {
			buffer,
			staging_buffer_data_address,
			num_items: 0,
			capacity,
			cache_modified_bitmap: BitVec::with_capacity(capacity),
			cache_modified: false,
			_phantom: PhantomData,
		})
	}

	/// Change the capacity
	/// * If the new capacity is less than the current number of items, the number of items will be reduced to the new capacity.
	pub fn change_capacity(&mut self, new_capacity: usize) -> Result<(), VulkanError> {
		let new_buffer = Buffer::new(self.buffer.device.clone(), (new_capacity * size_of::<T>()) as VkDeviceSize, None, self.buffer.get_usage())?;
		if new_capacity != 0 {
			let new_address = new_buffer.get_staging_buffer_address()? as *mut T;
			let copy_count = min(self.num_items, new_capacity);
			if !self.staging_buffer_data_address.is_null() {
				unsafe {copy(self.staging_buffer_data_address as *const T, new_address, copy_count)}
			}
			self.staging_buffer_data_address = new_address;
			// The new buffer's device memory starts empty, so every surviving item must be re-uploaded on the next `flush`
			self.cache_modified = copy_count != 0;
			self.cache_modified_bitmap.resize(copy_count, true);
			self.cache_modified_bitmap.fill(true);
		} else {
			self.staging_buffer_data_address = null_mut();
			self.cache_modified = false;
			self.cache_modified_bitmap.clear();
			self.cache_modified_bitmap.shrink_to_fit();
		}
		self.buffer = new_buffer;
		self.capacity = new_capacity;
		self.num_items = min(self.num_items, new_capacity);
		Ok(())
	}

	/// Set the data as modified or not
	/// # Safety
	///
	/// This changes how `flush` treats the data.
	/// Data marked as modified will be uploaded to the device; data marked as unmodified may still be uploaded (when it sits in a short enough gap between two modified regions), or skipped.
	/// You take control of which parts of the data are synchronized to the device, without actually knowing whether the data was really changed.
	/// * `first_index`: The index of the first item to mark.
	/// * `length`: From that index, how many items to mark.
	/// * `flag`: The value of the mark. `true` means the data is marked as changed and will be synchronized to the device on the next call to `flush`; `false` means the data is treated as unchanged and will not be synchronized to the device.
	///
	/// # Panics
	/// Panics if the range is out of bounds
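	///
	/// # Example
	///
	/// A sketch: after writing through the raw staging pointer yourself,
	/// mark the touched range so the next `flush` uploads it (`cmdbuf` is
	/// assumed to be a recording command buffer):
	///
	/// ```ignore
	/// // Suppose items 10..20 were written through a raw pointer.
	/// unsafe { buffer_vec.set_data_modified(10, 10, true) };
	/// buffer_vec.flush(cmdbuf)?;
	/// ```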
	pub unsafe fn set_data_modified(&mut self, first_index: usize, length: usize, flag: bool) {
		if length == 0 {
			return;
		}
		let last_index = first_index + length - 1;
		if first_index >= self.num_items {
			panic!("The `first_index` is {first_index} but the length of the `BufferVec` is {}", self.num_items);
		}
		if last_index >= self.num_items {
			panic!("The last index is {last_index}, which exceeds the length {}", self.num_items);
		}
		self.cache_modified |= flag;
		for i in first_index..=last_index {
			self.cache_modified_bitmap.set(i, flag);
		}
	}

	/// Change the length
	///
	/// Forces the length of the vector to `new_len`.
	///
	/// This is a low-level operation that maintains none of the normal invariants of the type.
	///
	/// # Safety
	///
	/// `new_len` must be less than or equal to `capacity()`.
	/// The elements at `old_len..new_len` must be initialized.
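	///
	/// # Example
	///
	/// A sketch of taking over externally initialized items (the write into
	/// reserved capacity is assumed to have already happened through the
	/// staging pointer):
	///
	/// ```ignore
	/// // 8 items were written externally into reserved capacity.
	/// unsafe { buffer_vec.set_len(8) };
	/// assert_eq!(buffer_vec.len(), 8);
	/// ```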
	pub unsafe fn set_len(&mut self, new_len: usize) {
		if new_len > self.num_items {
			self.cache_modified = true;
		}
		// Grow the bitmap with `true` so new items are uploaded on the next `flush`; shrinking just truncates it
		self.cache_modified_bitmap.resize(new_len, true);
		self.num_items = new_len;
	}

	/// Consume the `BufferVec<T>` and get the inner buffer
	pub fn into_inner(self) -> Buffer {
		self.buffer
	}

	/// Creates a `BufferVec<T>` directly from a buffer and a length.
	///
	/// # Safety
	///
	/// This is highly unsafe, just like the official Rust `Vec::<T>::from_raw_parts()`.
	/// * Unlike `Vec::<T>::from_raw_parts()`, the capacity does not need to be provided, since it is calculated as `buffer.get_size() / size_of::<T>()`.
	/// * `length` must be less than or equal to the calculated capacity.
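	///
	/// # Example
	///
	/// A sketch, assuming `buffer` already holds at least 16 initialized
	/// `u32` values:
	///
	/// ```ignore
	/// let vec: BufferVec<u32> = unsafe { BufferVec::from_raw_parts(buffer, 16)? };
	/// assert_eq!(vec.len(), 16);
	/// ```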
	pub unsafe fn from_raw_parts(buffer: Buffer, length: usize) -> Result<Self, VulkanError> {
		let capacity = buffer.get_size() as usize / size_of::<T>();
		let lock = buffer.ensure_staging_buffer()?;
		let staging_buffer_data_address = lock.as_ref().unwrap().get_address() as *mut T;
		drop(lock);
		Ok(Self {
			buffer,
			staging_buffer_data_address,
			num_items: length,
			capacity,
			// Mark every item as modified so the contents get uploaded on the next `flush`
			cache_modified_bitmap: BitVec::repeat(true, length),
			cache_modified: true,
			_phantom: PhantomData,
		})
	}

	/// Enlarge the capacity of the `BufferVec<T>`
	fn grow(&mut self) -> Result<(), VulkanError> {
		let mut new_capacity = ((self.capacity * 3) >> 1) + 1;
		if new_capacity < self.num_items {
			new_capacity = self.num_items;
		}
		self.change_capacity(new_capacity)
	}

	/// Push data to the buffer
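	///
	/// # Example
	///
	/// A sketch (growing happens automatically, amortized like `Vec`):
	///
	/// ```ignore
	/// buffer_vec.push(1.0f32)?;
	/// buffer_vec.push(2.0f32)?;
	/// assert_eq!(buffer_vec.pop(), 2.0f32);
	/// ```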
	pub fn push(&mut self, data: T) -> Result<(), VulkanError> {
		if self.num_items >= self.capacity {
			self.grow()?;
		}
		unsafe {*self.staging_buffer_data_address.wrapping_add(self.num_items) = data};
		self.cache_modified = true;
		self.cache_modified_bitmap.push(true);
		self.num_items += 1;
		Ok(())
	}

	/// Pop data from the buffer
	///
	/// # Panics
	/// Panics if the `BufferVec<T>` is empty.
	pub fn pop(&mut self) -> T {
		if self.num_items == 0 {
			panic!("`BufferVec::<T>::pop()` called on an empty `BufferVec<T>`.");
		}
		self.num_items -= 1;
		self.cache_modified_bitmap.pop();
		unsafe {*self.staging_buffer_data_address.wrapping_add(self.num_items)}
	}

	/// Removes and returns the element at position `index` within the vector, shifting all elements after it to the left.
	///
	/// Note: Because this shifts over the remaining elements, it has a worst-case performance of O(n). If you don't need the order of elements to be preserved, use `swap_remove` instead.
	///
	/// # Panics
	/// Panics if `index` is out of bounds.
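	///
	/// # Example
	///
	/// A sketch, assuming `buffer_vec` currently holds `[10, 20, 30]`:
	///
	/// ```ignore
	/// let first = buffer_vec.remove(0); // returns 10, leaves [20, 30]
	/// ```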
	pub fn remove(&mut self, index: usize) -> T {
		let ret = self[index];
		let from_index = index + 1;
		unsafe {copy(
			self.staging_buffer_data_address.wrapping_add(from_index),
			self.staging_buffer_data_address.wrapping_add(index),
			self.num_items - from_index)
		};
		self.num_items -= 1;
		// The shifted elements must be re-uploaded on the next `flush`
		self.cache_modified |= index < self.num_items;
		for i in index..self.num_items {
			self.cache_modified_bitmap.set(i, true);
		}
		self.cache_modified_bitmap.pop();
		ret
	}

	/// Removes an element from the vector and returns it.
	///
	/// The removed element is replaced by the last element of the vector.
	///
	/// This does not preserve the ordering of the remaining elements, but is O(1). If you need to preserve the element order, use `remove` instead.
	///
	/// # Panics
	/// Panics if `index` is out of bounds.
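	///
	/// # Example
	///
	/// A sketch, assuming `buffer_vec` currently holds `[10, 20, 30, 40]`:
	///
	/// ```ignore
	/// let second = buffer_vec.swap_remove(1); // returns 20, leaves [10, 40, 30]
	/// ```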
	pub fn swap_remove(&mut self, index: usize) -> T {
		if self.num_items > 1 {
			let last_index = self.num_items - 1;
			let last_item = unsafe {*self.staging_buffer_data_address.wrapping_add(last_index)};
			let swap_item = &mut self[index]; // Also marks `index` as modified
			let ret = *swap_item;
			if last_index != index {
				*swap_item = last_item;
			}
			self.num_items -= 1;
			self.cache_modified_bitmap.pop();
			ret
		} else {
			if index != 0 {
				panic!("Index {index} out of bounds (len() == {})", self.len());
			}
			self.pop()
		}
	}

	/// Resize the buffer, filling any new items with `new_data`
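	///
	/// # Example
	///
	/// A sketch (`0.0f32` is just an arbitrary fill value):
	///
	/// ```ignore
	/// buffer_vec.resize(1024, 0.0f32)?; // grow, filling new items with 0.0
	/// buffer_vec.resize(16, 0.0f32)?;   // shrink; the capacity is kept
	/// ```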
	pub fn resize(&mut self, new_len: usize, new_data: T) -> Result<(), VulkanError> {
		if self.num_items == new_len {
			return Ok(());
		}
		if self.capacity < new_len {
			self.change_capacity(new_len)?;
		}
		if new_len > self.num_items {
			self.cache_modified = true;
			unsafe {from_raw_parts_mut(self.staging_buffer_data_address.wrapping_add(self.num_items), new_len - self.num_items)}.fill(new_data);
		}
		// Grow the bitmap with `true` so the new items are uploaded on the next `flush`; shrinking truncates it
		self.cache_modified_bitmap.resize(new_len, true);
		self.num_items = new_len;
		Ok(())
	}

	/// Clear the buffer
	pub fn clear(&mut self) {
		self.num_items = 0;
		self.cache_modified_bitmap.clear();
	}

	/// Get the capacity
	pub fn get_capacity(&self) -> usize {
		self.capacity
	}

	/// Get the number of items in the buffer
	pub fn len(&self) -> usize {
		self.num_items
	}

	/// Check if the buffer is empty
	pub fn is_empty(&self) -> bool {
		self.num_items == 0
	}

	/// Shrink the capacity to fit the current number of items
	pub fn shrink_to_fit(&mut self) -> Result<(), VulkanError> {
		self.change_capacity(self.num_items)
	}

	/// Flush the modified parts of the staging buffer to the device memory.
	/// Modified items are batched into contiguous upload regions; runs of modified items separated by only a small gap of unmodified items (fewer than 16) are merged into one region, so the upload is issued as a few large copies instead of many tiny ones.
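	///
	/// # Example
	///
	/// A sketch (`cmdbuf` is assumed to be a recording command buffer):
	///
	/// ```ignore
	/// buffer_vec[3] = 42.0;
	/// buffer_vec[100] = 7.0;
	/// // Two distant writes produce two upload regions; nearby writes are merged.
	/// buffer_vec.flush(cmdbuf)?;
	/// ```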
	pub fn flush(&mut self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError> {
		if !self.cache_modified {
			return Ok(());
		}
		// The longest gap of unmodified items that is still merged into one upload region
		const MAX_GAP: usize = 16;
		let mut regions: Vec<BufferRegion> = Vec::new();
		// The pending region as `(first, last)` item indices, inclusive
		let mut run: Option<(usize, usize)> = None;
		let mut gap = 0;
		for (i, b) in self.cache_modified_bitmap.iter().enumerate() {
			if *b {
				// Start a new region, or extend the pending one across the gap
				run = match run {
					None => Some((i, i)),
					Some((first, _)) => Some((first, i)),
				};
				gap = 0;
			} else if let Some((first, last)) = run {
				gap += 1;
				if gap >= MAX_GAP {
					// The gap is too long to merge over; emit the pending region
					regions.push(BufferRegion {
						offset: (first * size_of::<T>()) as VkDeviceSize,
						size: ((last + 1 - first) * size_of::<T>()) as VkDeviceSize,
					});
					run = None;
				}
			}
		}
		// Emit the last pending region, if any
		if let Some((first, last)) = run {
			regions.push(BufferRegion {
				offset: (first * size_of::<T>()) as VkDeviceSize,
				size: ((last + 1 - first) * size_of::<T>()) as VkDeviceSize,
			});
		}
		self.cache_modified_bitmap.fill(false);
		if !regions.is_empty() {
			self.buffer.upload_staging_buffer_multi(cmdbuf, regions.as_ref())?;
		}
		self.cache_modified = false;
		Ok(())
	}
}

impl<T> Clone for BufferVec<T>
where
	T: BufferVecItem {
	fn clone(&self) -> Self {
		let buffer = self.buffer.clone();
		let staging_buffer_data_address = buffer.get_staging_buffer_address().unwrap() as *mut T;
		Self {
			buffer,
			staging_buffer_data_address,
			num_items: self.num_items,
			capacity: self.capacity,
			cache_modified_bitmap: self.cache_modified_bitmap.clone(),
			cache_modified: self.cache_modified,
			_phantom: self._phantom,
		}
	}
}

impl<T> Debug for BufferVec<T>
where
	T: BufferVecItem {
	fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
		f.debug_struct("BufferVec")
		.field("buffer", &self.buffer)
		.field("staging_buffer_data_address", &self.staging_buffer_data_address)
		.field("num_items", &self.num_items)
		.field("capacity", &self.capacity)
		.field("cache_modified_bitmap", &self.cache_modified_bitmap)
		.field("cache_modified", &self.cache_modified)
		.finish()
	}
}

impl<T> Index<usize> for BufferVec<T>
where
	T: BufferVecItem {
	type Output = T;
	fn index(&self, index: usize) -> &T {
		if index >= self.len() {
			panic!("Index {index:?} out of bounds (len() == {})", self.len());
		}
		unsafe {&*self.staging_buffer_data_address.wrapping_add(index)}
	}
}

impl<T> IndexMut<usize> for BufferVec<T>
where
	T: BufferVecItem {
	fn index_mut(&mut self, index: usize) -> &mut T {
		if index >= self.len() {
			panic!("Index {index:?} out of bounds (len() == {})", self.len());
		}
		self.cache_modified = true;
		self.cache_modified_bitmap.set(index, true);
		unsafe {&mut *self.staging_buffer_data_address.wrapping_add(index)}
	}
}

impl<T> Index<Range<usize>> for BufferVec<T>
where
	T: BufferVecItem {
	type Output = [T];
	fn index(&self, range: Range<usize>) -> &[T] {
		if range.start > range.end || range.end > self.len() {
			panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
		}
		unsafe {from_raw_parts(self.staging_buffer_data_address.wrapping_add(range.start), range.end - range.start)}
	}
}

impl<T> IndexMut<Range<usize>> for BufferVec<T>
where
	T: BufferVecItem {
	fn index_mut(&mut self, range: Range<usize>) -> &mut [T] {
		if range.start > range.end || range.end > self.len() {
			panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
		}
		self.cache_modified = true;
		for i in range.clone() {
			self.cache_modified_bitmap.set(i, true);
		}
		unsafe {from_raw_parts_mut(self.staging_buffer_data_address.wrapping_add(range.start), range.end - range.start)}
	}
}

impl<T> Index<RangeFrom<usize>> for BufferVec<T>
where
	T: BufferVecItem {
	type Output = [T];
	fn index(&self, range: RangeFrom<usize>) -> &[T] {
		if range.start > self.len() {
			panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
		}
		unsafe {from_raw_parts(self.staging_buffer_data_address.wrapping_add(range.start), self.len() - range.start)}
	}
}

impl<T> IndexMut<RangeFrom<usize>> for BufferVec<T>
where
	T: BufferVecItem {
	fn index_mut(&mut self, range: RangeFrom<usize>) -> &mut [T] {
		if range.start > self.len() {
			panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
		}
		self.cache_modified = true;
		for i in range.start..self.len() {
			self.cache_modified_bitmap.set(i, true);
		}
		unsafe {from_raw_parts_mut(self.staging_buffer_data_address.wrapping_add(range.start), self.len() - range.start)}
	}
}

impl<T> Index<RangeTo<usize>> for BufferVec<T>
where
	T: BufferVecItem {
	type Output = [T];
	fn index(&self, range: RangeTo<usize>) -> &[T] {
		if range.end > self.len() {
			panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
		}
		unsafe {from_raw_parts(self.staging_buffer_data_address, range.end)}
	}
}

impl<T> IndexMut<RangeTo<usize>> for BufferVec<T>
where
	T: BufferVecItem {
	fn index_mut(&mut self, range: RangeTo<usize>) -> &mut [T] {
		if range.end > self.len() {
			panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
		}
		self.cache_modified = true;
		for i in 0..range.end {
			self.cache_modified_bitmap.set(i, true);
		}
		unsafe {from_raw_parts_mut(self.staging_buffer_data_address, range.end)}
	}
}

impl<T> Index<RangeFull> for BufferVec<T>
where
	T: BufferVecItem {
	type Output = [T];
	fn index(&self, _: RangeFull) -> &[T] {
		unsafe {from_raw_parts(self.staging_buffer_data_address, self.len())}
	}
}

impl<T> IndexMut<RangeFull> for BufferVec<T>
where
	T: BufferVecItem {
	fn index_mut(&mut self, _: RangeFull) -> &mut [T] {
		self.cache_modified = true;
		self.cache_modified_bitmap.fill(true);
		unsafe {from_raw_parts_mut(self.staging_buffer_data_address, self.len())}
	}
}

impl<T> Index<RangeInclusive<usize>> for BufferVec<T>
where
	T: BufferVecItem {
	type Output = [T];
	fn index(&self, range: RangeInclusive<usize>) -> &[T] {
		if *range.start() >= self.len() || *range.end() >= self.len() {
			panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
		}
		unsafe {from_raw_parts(self.staging_buffer_data_address.wrapping_add(*range.start()), range.end() + 1 - range.start())}
	}
}

impl<T> IndexMut<RangeInclusive<usize>> for BufferVec<T>
where
	T: BufferVecItem {
	fn index_mut(&mut self, range: RangeInclusive<usize>) -> &mut [T] {
		if *range.start() >= self.len() || *range.end() >= self.len() {
			panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
		}
		self.cache_modified = true;
		for i in range.clone() {
			self.cache_modified_bitmap.set(i, true);
		}
		unsafe {from_raw_parts_mut(self.staging_buffer_data_address.wrapping_add(*range.start()), range.end() + 1 - range.start())}
	}
}

impl<T> Index<RangeToInclusive<usize>> for BufferVec<T>
where
	T: BufferVecItem {
	type Output = [T];
	fn index(&self, range: RangeToInclusive<usize>) -> &[T] {
		if range.end >= self.len() {
			panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
		}
		unsafe {from_raw_parts(self.staging_buffer_data_address, range.end + 1)}
	}
}

impl<T> IndexMut<RangeToInclusive<usize>> for BufferVec<T>
where
	T: BufferVecItem {
	fn index_mut(&mut self, range: RangeToInclusive<usize>) -> &mut [T] {
		if range.end >= self.len() {
			panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
		}
		self.cache_modified = true;
		for i in 0..=range.end {
			self.cache_modified_bitmap.set(i, true);
		}
		unsafe {from_raw_parts_mut(self.staging_buffer_data_address, range.end + 1)}
	}
}

unsafe impl<T> Send for BufferVec<T> where T: BufferVecItem {}
unsafe impl<T> Sync for BufferVec<T> where T: BufferVecItem {}

/// The trait that the element type of a texel buffer must implement
pub trait TexelBufferDataType: Copy + Clone + Sized + Default + Send + Sync + Debug + Any {}
impl<T> TexelBufferDataType for T where T: Copy + Clone + Sized + Default + Send + Sync + Debug + Any {}

pub type TexelBuffer<T> = BufferVec<T>;

/// The trait for a `TexelBuffer` to be able to be wrapped into a type-erased object
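///
/// # Example
///
/// A sketch of type erasure through this trait (`texels` is assumed to be a
/// previously created `TexelBuffer<u32>`, and `cmdbuf` a recording command
/// buffer):
///
/// ```ignore
/// let mut erased: Box<dyn GenericTexelBuffer> = Box::new(texels);
/// println!("{} bytes", erased.get_size());
/// erased.flush(cmdbuf)?;
/// ```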
pub trait GenericTexelBuffer: Debug {
	/// Get the `VkBuffer`
	fn get_vk_buffer(&self) -> VkBuffer;

	/// Get the size of the buffer in bytes
	fn get_size(&self) -> usize;

	/// Get the address of the staging buffer
	fn get_staging_buffer_address(&self) -> *mut c_void;

	/// Create a buffer view over the whole buffer
	fn create_buffer_view(&self, format: VkFormat) -> Result<VulkanBufferView, VulkanError>;

	/// Create a buffer view over a part of the buffer
	fn create_buffer_view_partial(&self, range: &BufferViewRange) -> Result<VulkanBufferView, VulkanError>;

	/// Upload to GPU
	fn flush(&mut self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError>;
}

impl<T> GenericTexelBuffer for TexelBuffer<T>
where
	T: TexelBufferDataType {
	fn get_vk_buffer(&self) -> VkBuffer {
		self.buffer.get_vk_buffer()
	}

	fn get_size(&self) -> usize {
		self.capacity * size_of::<T>()
	}

	fn get_staging_buffer_address(&self) -> *mut c_void {
		self.staging_buffer_data_address as *mut c_void
	}

	fn create_buffer_view(&self, format: VkFormat) -> Result<VulkanBufferView, VulkanError> {
		self.buffer.create_buffer_view(format)
	}

	fn create_buffer_view_partial(&self, range: &BufferViewRange) -> Result<VulkanBufferView, VulkanError> {
		self.buffer.create_buffer_view_partial(range)
	}

	fn flush(&mut self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError> {
		self.flush(cmdbuf)
	}
}