use crate::prelude::*;
use bitvec::vec::BitVec;
use std::{
	cmp::min,
	ffi::c_void,
	fmt::{self, Debug, Formatter},
	marker::PhantomData,
	mem::{size_of, size_of_val},
	ops::{Index, IndexMut, Range, RangeFrom, RangeTo, RangeFull, RangeInclusive, RangeToInclusive},
	ptr::{copy, null_mut},
	slice::{from_raw_parts, from_raw_parts_mut},
	sync::Arc,
};

/// The trait for types that can be used as items of a `BufferVec<T>`
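///
/// # Examples
///
/// Any plain-old-data type qualifies through the blanket impl below.
/// A sketch (the `Vertex` layout here is illustrative, not part of the crate):
///
/// ```ignore
/// #[repr(C)]
/// #[derive(Copy, Clone, Default, Debug)]
/// struct Vertex {
/// 	position: [f32; 3],
/// 	uv: [f32; 2],
/// }
/// // `Vertex` is `Copy + Sized + Default + Debug`, so it is a `BufferVecItem`.
/// ```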
pub trait BufferVecItem: Copy + Sized + Default + Debug {}
impl<T> BufferVecItem for T where T: Copy + Sized + Default + Debug {}

/// A buffer object that can be used like a growable vector, backed by a staging buffer that tracks which items need to be uploaded to the device
pub struct BufferVec<T: BufferVecItem> {
	/// The buffer
	buffer: Buffer,

	/// The address of the data in the staging buffer
	staging_buffer_data_address: *mut T,

	/// The number of items in the buffer
	num_items: usize,

	/// The capacity of the buffer, in items
	capacity: usize,

	/// The bitmap marking which items in the staging buffer were modified since the last flush
	cache_modified_bitmap: BitVec,

	/// Whether any data in the staging buffer was modified since the last flush
	cache_modified: bool,

	/// The phantom data to hold the generic type `T`
	_phantom: PhantomData<T>,
}

impl<T> BufferVec<T>
where
	T: BufferVecItem {
	/// Create an empty `BufferVec<T>` with no capacity
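	///
	/// # Examples
	///
	/// A minimal sketch (assuming an initialized `device: Arc<VulkanDevice>`;
	/// the usage flag is the raw Vulkan constant):
	///
	/// ```ignore
	/// let mut positions: BufferVec<[f32; 3]> = BufferVec::new(
	/// 	device.clone(),
	/// 	VK_BUFFER_USAGE_VERTEX_BUFFER_BIT as VkBufferUsageFlags)?;
	/// assert!(positions.is_empty());
	/// ```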
	pub fn new(device: Arc<VulkanDevice>, usage: VkBufferUsageFlags) -> Result<Self, VulkanError> {
		let buffer = Buffer::new(device, 0, None, usage)?;
		Ok(Self {
			buffer,
			staging_buffer_data_address: null_mut(),
			num_items: 0,
			capacity: 0,
			cache_modified_bitmap: BitVec::new(),
			cache_modified: false,
			_phantom: PhantomData,
		})
	}

	/// Get the `VkBuffer`
	pub(crate) fn get_vk_buffer(&self) -> VkBuffer {
		self.buffer.get_vk_buffer()
	}

	/// Create a `BufferVec<T>` from a slice of data, uploading it to the device via `cmdbuf`
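	///
	/// # Examples
	///
	/// A minimal sketch (assuming an initialized `device` and a command buffer
	/// `cmdbuf` that is currently being recorded):
	///
	/// ```ignore
	/// let indices: [u32; 6] = [0, 1, 2, 2, 3, 0];
	/// let index_buffer = BufferVec::from(
	/// 	device.clone(),
	/// 	&indices,
	/// 	cmdbuf,
	/// 	VK_BUFFER_USAGE_INDEX_BUFFER_BIT as VkBufferUsageFlags)?;
	/// assert_eq!(index_buffer.len(), 6);
	/// ```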
	pub fn from(device: Arc<VulkanDevice>, data: &[T], cmdbuf: VkCommandBuffer, usage: VkBufferUsageFlags) -> Result<Self, VulkanError> {
		let mut buffer = Buffer::new(device, size_of_val(data) as VkDeviceSize, Some(data.as_ptr() as *const c_void), usage)?;
		let staging_buffer_data_address = buffer.get_staging_buffer_address()? as *mut T;
		buffer.upload_staging_buffer(cmdbuf, 0, size_of_val(data) as VkDeviceSize)?;
		Ok(Self {
			buffer,
			staging_buffer_data_address,
			num_items: data.len(),
			capacity: data.len(),
			// The data was just uploaded, so no item is pending a flush
			cache_modified_bitmap: BitVec::repeat(false, data.len()),
			cache_modified: false,
			_phantom: PhantomData,
		})
	}

	/// Create the `BufferVec<T>` with an initial capacity, in items
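	///
	/// # Examples
	///
	/// A sketch: pre-allocating room for 1024 items avoids reallocations while
	/// pushing (assuming an initialized `device` and a suitable `usage`):
	///
	/// ```ignore
	/// let mut particles: BufferVec<[f32; 4]> = BufferVec::with_capacity(device.clone(), 1024, usage)?;
	/// assert_eq!(particles.get_capacity(), 1024);
	/// assert_eq!(particles.len(), 0);
	/// ```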
	pub fn with_capacity(device: Arc<VulkanDevice>, capacity: usize, usage: VkBufferUsageFlags) -> Result<Self, VulkanError> {
		let mut buffer = Buffer::new(device, (capacity * size_of::<T>()) as VkDeviceSize, None, usage)?;
		let staging_buffer_data_address = buffer.get_staging_buffer_address()? as *mut T;
		Ok(Self {
			buffer,
			staging_buffer_data_address,
			num_items: 0,
			capacity,
			cache_modified_bitmap: BitVec::with_capacity(capacity),
			// No items yet, so there is nothing to flush
			cache_modified: false,
			_phantom: PhantomData,
		})
	}

	/// Change the capacity
	/// * If the new capacity is less than the current number of items, the vector is truncated to the new capacity.
	/// * The surviving items are copied into a new buffer and marked for re-upload on the next `flush()`.
	pub fn change_capacity(&mut self, new_capacity: usize) -> Result<(), VulkanError> {
		let mut new_buffer = Buffer::new(self.buffer.device.clone(), (new_capacity * size_of::<T>()) as VkDeviceSize, None, self.buffer.get_usage())?;
		let new_num_items = min(self.num_items, new_capacity);
		if new_capacity != 0 {
			let new_address = new_buffer.get_staging_buffer_address()? as *mut T;
			// Copy only the items that fit into the new buffer
			if new_num_items != 0 {
				unsafe {copy(self.staging_buffer_data_address as *const T, new_address, new_num_items)}
			}
			self.staging_buffer_data_address = new_address;
			// The new device-side buffer holds no data yet, so every surviving item must be re-uploaded
			self.cache_modified = new_num_items != 0;
			self.cache_modified_bitmap = BitVec::repeat(true, new_num_items);
		} else {
			self.staging_buffer_data_address = null_mut();
			self.cache_modified = false;
			self.cache_modified_bitmap.clear();
			self.cache_modified_bitmap.shrink_to_fit();
		}
		self.buffer = new_buffer;
		self.capacity = new_capacity;
		self.num_items = new_num_items;
		Ok(())
	}

	/// Force the length of the vector to `new_len`.
	///
	/// This is a low-level operation that maintains none of the normal invariants of the type.
	///
	/// # Safety
	///
	/// * `new_len` must be less than or equal to `get_capacity()`.
	/// * The elements at `old_len..new_len` must be initialized.
	pub unsafe fn set_len(&mut self, new_len: usize) {
		if new_len > self.num_items {
			// The newly exposed items must be uploaded on the next `flush()`
			self.cache_modified_bitmap.resize(new_len, true);
			self.cache_modified = true;
		} else {
			self.cache_modified_bitmap.truncate(new_len);
		}
		self.num_items = new_len;
	}

	/// Consume the `BufferVec<T>` and return the inner buffer
	pub fn into_inner(self) -> Buffer {
		self.buffer
	}

	/// Creates a `BufferVec<T>` directly from a buffer and a length.
	///
	/// # Safety
	///
	/// This is highly unsafe, just like `Vec::<T>::from_raw_parts()`.
	/// * Unlike `Vec::<T>::from_raw_parts()`, the capacity does not need to be provided, since it is calculated as `buffer.get_size() / size_of::<T>()`.
	/// * `length` must be less than or equal to the calculated capacity.
	/// * The first `length` items of the buffer must be initialized.
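	///
	/// # Examples
	///
	/// A sketch (assuming an initialized `device` and a suitable `usage`):
	///
	/// ```ignore
	/// // A buffer of 16 bytes holds a capacity of 4 `u32` items.
	/// let buffer = Buffer::new(device.clone(), 16, None, usage)?;
	/// let vec: BufferVec<u32> = unsafe {BufferVec::from_raw_parts(buffer, 0)?};
	/// assert_eq!(vec.get_capacity(), 4);
	/// ```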
	pub unsafe fn from_raw_parts(mut buffer: Buffer, length: usize) -> Result<Self, VulkanError> {
		let capacity = buffer.get_size() as usize / size_of::<T>();
		buffer.ensure_staging_buffer()?;
		let staging_buffer_data_address = buffer.get_staging_buffer_address()? as *mut T;
		Ok(Self {
			buffer,
			staging_buffer_data_address,
			num_items: length,
			capacity,
			// The device-side contents are unknown, so mark every item for upload
			cache_modified_bitmap: BitVec::repeat(true, length),
			cache_modified: true,
			_phantom: PhantomData,
		})
	}

	/// Enlarge the capacity of the `BufferVec<T>`
	fn grow(&mut self) -> Result<(), VulkanError> {
		// Grow by roughly 1.5x, with a minimum of one item
		let mut new_capacity = ((self.capacity * 3) >> 1) + 1;
		if new_capacity < self.num_items {
			new_capacity = self.num_items;
		}
		self.change_capacity(new_capacity)
	}

	/// Push data to the end of the buffer, growing it if necessary
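	///
	/// # Examples
	///
	/// A sketch (assuming an initialized `vec: BufferVec<u32>`):
	///
	/// ```ignore
	/// vec.push(42)?;
	/// assert_eq!(vec[vec.len() - 1], 42);
	/// assert_eq!(vec.pop(), 42);
	/// ```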
	pub fn push(&mut self, data: T) -> Result<(), VulkanError> {
		if self.num_items >= self.capacity {
			self.grow()?;
		}
		unsafe {*self.staging_buffer_data_address.wrapping_add(self.num_items) = data};
		self.cache_modified = true;
		self.cache_modified_bitmap.push(true);
		self.num_items += 1;
		Ok(())
	}

	/// Pop data from the end of the buffer
	///
	/// # Panics
	/// Panics if the `BufferVec<T>` is empty.
	pub fn pop(&mut self) -> T {
		if self.num_items == 0 {
			panic!("`BufferVec::<T>::pop()` called on an empty `BufferVec<T>`.");
		}
		self.num_items -= 1;
		self.cache_modified_bitmap.pop();
		unsafe {*self.staging_buffer_data_address.wrapping_add(self.num_items)}
	}

	/// Removes and returns the element at position `index` within the vector, shifting all elements after it to the left.
	///
	/// Note: Because this shifts over the remaining elements, it has a worst-case performance of O(n). If you don't need the order of elements to be preserved, use `swap_remove` instead.
	///
	/// # Panics
	/// Panics if `index` is out of bounds.
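	///
	/// # Examples
	///
	/// A sketch (assuming an initialized, empty `vec: BufferVec<u32>`):
	///
	/// ```ignore
	/// for i in 0..4 {
	/// 	vec.push(i)?;
	/// }
	/// assert_eq!(vec.remove(1), 1);
	/// assert_eq!(&vec[..], &[0, 2, 3]); // Order is preserved
	/// ```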
	pub fn remove(&mut self, index: usize) -> T {
		let ret = self[index];
		let from_index = index + 1;
		unsafe {copy(
			self.staging_buffer_data_address.wrapping_add(from_index),
			self.staging_buffer_data_address.wrapping_add(index),
			self.num_items - from_index)
		};
		self.num_items -= 1;
		if index < self.num_items {
			// The shifted tail must be re-uploaded on the next `flush()`
			self.cache_modified = true;
		}
		for i in index..self.num_items {
			self.cache_modified_bitmap.set(i, true);
		}
		self.cache_modified_bitmap.pop();
		ret
	}

	/// Removes an element from the vector and returns it.
	///
	/// The removed element is replaced by the last element of the vector.
	///
	/// This does not preserve ordering of the remaining elements, but is O(1). If you need to preserve the element order, use `remove` instead.
	///
	/// # Panics
	/// Panics if `index` is out of bounds.
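	///
	/// # Examples
	///
	/// A sketch (assuming an initialized, empty `vec: BufferVec<u32>`):
	///
	/// ```ignore
	/// for i in 0..4 {
	/// 	vec.push(i)?;
	/// }
	/// assert_eq!(vec.swap_remove(1), 1);
	/// assert_eq!(&vec[..], &[0, 3, 2]); // The last item replaced the removed one
	/// ```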
	pub fn swap_remove(&mut self, index: usize) -> T {
		if self.num_items > 1 {
			let last_index = self.num_items - 1;
			// Read the last item, then overwrite the removed slot with it
			let last_item = unsafe {*self.staging_buffer_data_address.wrapping_add(last_index)};
			let swap_item = &mut self[index];
			let ret = *swap_item;
			if last_index != index {
				*swap_item = last_item;
			}
			self.num_items -= 1;
			self.cache_modified_bitmap.pop();
			ret
		} else {
			if index != 0 {
				panic!("Index {index} out of bounds (len() == {})", self.len());
			}
			self.pop()
		}
	}

	/// Resize the buffer to `new_len` items, filling any new items with `new_data`
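	///
	/// # Examples
	///
	/// A sketch (assuming an initialized, empty `vec: BufferVec<u32>`):
	///
	/// ```ignore
	/// vec.resize(3, 7)?;
	/// assert_eq!(&vec[..], &[7, 7, 7]);
	/// vec.resize(1, 0)?;
	/// assert_eq!(&vec[..], &[7]);
	/// ```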
	pub fn resize(&mut self, new_len: usize, new_data: T) -> Result<(), VulkanError> {
		if self.num_items == new_len {
			return Ok(());
		}
		if self.capacity < new_len {
			self.change_capacity(new_len)?;
		}
		if new_len > self.num_items {
			self.cache_modified = true;
			unsafe {from_raw_parts_mut(self.staging_buffer_data_address.wrapping_add(self.num_items), new_len - self.num_items)}.fill(new_data);
			// The newly appended items must be uploaded on the next `flush()`
			self.cache_modified_bitmap.resize(new_len, true);
		} else {
			self.cache_modified_bitmap.truncate(new_len);
		}
		self.num_items = new_len;
		Ok(())
	}

	/// Clear the buffer
	pub fn clear(&mut self) {
		self.num_items = 0;
		self.cache_modified_bitmap.clear();
	}

	/// Get the capacity, in items
	pub fn get_capacity(&self) -> usize {
		self.capacity
	}

	/// Get the number of items in the buffer
	pub fn len(&self) -> usize {
		self.num_items
	}

	/// Check if the buffer is empty
	pub fn is_empty(&self) -> bool {
		self.num_items == 0
	}

	/// Shrink the capacity to match the number of items
	pub fn shrink_to_fit(&mut self) -> Result<(), VulkanError> {
		self.change_capacity(self.num_items)
	}

	/// Flush the modified parts of the staging buffer to the device memory
	/// * Runs of modified items are merged into copy regions; runs separated by fewer than 16 unmodified items are merged into one region to reduce the number of copy commands.
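	///
	/// # Examples
	///
	/// A typical per-frame sketch (assuming a `vec: BufferVec<f32>` with at least
	/// two items and a command buffer `cmdbuf` that is currently being recorded):
	///
	/// ```ignore
	/// vec[0] = 1.0; // Marks item 0 as modified
	/// vec[1] = 2.0; // Marks item 1 as modified
	/// vec.flush(cmdbuf)?; // Records copies for the modified regions only
	/// // Submit `cmdbuf` before the device reads the buffer.
	/// ```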
	pub fn flush(&mut self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError> {
		if !self.cache_modified {
			return Ok(());
		}
		const MAX_GAP: usize = 16;
		let mut si = 0; // Start index of the pending region
		let mut ei = 0; // Last modified index of the pending region
		let mut gap = 0; // Unmodified items seen since `ei`
		let mut pending = false; // Is there a region waiting to be flushed?
		let mut regions: Vec<BufferRegion> = Vec::new();
		for (i, b) in self.cache_modified_bitmap.iter().enumerate() {
			if *b {
				if !pending {
					pending = true;
					si = i;
				}
				ei = i;
				gap = 0;
			} else if pending {
				gap += 1;
				// If the gap grows too large, flush the pending region instead of merging across it
				if gap == MAX_GAP {
					regions.push(BufferRegion {
						offset: (si * size_of::<T>()) as VkDeviceSize,
						size: ((ei + 1 - si) * size_of::<T>()) as VkDeviceSize,
					});
					pending = false;
				}
			}
		}
		// Flush the last pending region, if any
		if pending {
			regions.push(BufferRegion {
				offset: (si * size_of::<T>()) as VkDeviceSize,
				size: ((ei + 1 - si) * size_of::<T>()) as VkDeviceSize,
			});
		}
		self.cache_modified_bitmap.fill(false);
		if !regions.is_empty() {
			self.buffer.upload_staging_buffer_multi(cmdbuf, regions.as_ref())?;
		}
		self.cache_modified = false;
		Ok(())
	}
}

impl<T> Clone for BufferVec<T>
where
	T: BufferVecItem {
	fn clone(&self) -> Self {
		let mut buffer = self.buffer.clone();
		// An empty clone has no staging buffer to point into
		let staging_buffer_data_address = if self.capacity != 0 {
			buffer.get_staging_buffer_address().unwrap() as *mut T
		} else {
			null_mut()
		};
		Self {
			buffer,
			staging_buffer_data_address,
			num_items: self.num_items,
			capacity: self.capacity,
			cache_modified_bitmap: self.cache_modified_bitmap.clone(),
			cache_modified: self.cache_modified,
			_phantom: PhantomData,
		}
	}
}

impl<T> Debug for BufferVec<T>
where
	T: BufferVecItem {
	fn fmt(&self, f: &mut Formatter) -> fmt::Result {
		f.debug_struct("BufferVec")
		.field("buffer", &self.buffer)
		.field("staging_buffer_data_address", &self.staging_buffer_data_address)
		.field("num_items", &self.num_items)
		.field("capacity", &self.capacity)
		.field("cache_modified_bitmap", &self.cache_modified_bitmap)
		.field("cache_modified", &self.cache_modified)
		.finish()
	}
}

impl<T> Index<usize> for BufferVec<T>
where
	T: BufferVecItem {
	type Output = T;
	fn index(&self, index: usize) -> &T {
		if index >= self.len() {
			panic!("Index {index} out of bounds (len() == {})", self.len());
		}
		unsafe {&*self.staging_buffer_data_address.wrapping_add(index)}
	}
}

impl<T> IndexMut<usize> for BufferVec<T>
where
	T: BufferVecItem {
	fn index_mut(&mut self, index: usize) -> &mut T {
		if index >= self.len() {
			panic!("Index {index} out of bounds (len() == {})", self.len());
		}
		self.cache_modified = true;
		self.cache_modified_bitmap.set(index, true);
		unsafe {&mut *self.staging_buffer_data_address.wrapping_add(index)}
	}
}

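// All of the range-indexing impls below follow the same pattern as `Index<usize>`
// and `IndexMut<usize>` above: immutable indexing hands out a slice of the staging
// buffer, while mutable indexing additionally marks the touched items in
// `cache_modified_bitmap` so the next `flush()` re-uploads exactly those regions.
// A sketch (assuming a `vec: BufferVec<f32>` with at least four items):
//
// 	vec[1..3].fill(0.0); // Marks items 1 and 2 as modified
// 	let first = vec[0];  // Read-only access marks nothing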
impl<T> Index<Range<usize>> for BufferVec<T>
where
	T: BufferVecItem {
	type Output = [T];
	fn index(&self, range: Range<usize>) -> &[T] {
		if range.start > range.end || range.end > self.len() {
			panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
		}
		unsafe {from_raw_parts(self.staging_buffer_data_address.wrapping_add(range.start), range.end - range.start)}
	}
}

impl<T> IndexMut<Range<usize>> for BufferVec<T>
where
	T: BufferVecItem {
	fn index_mut(&mut self, range: Range<usize>) -> &mut [T] {
		if range.start > range.end || range.end > self.len() {
			panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
		}
		self.cache_modified = true;
		for i in range.clone() {
			self.cache_modified_bitmap.set(i, true);
		}
		unsafe {from_raw_parts_mut(self.staging_buffer_data_address.wrapping_add(range.start), range.end - range.start)}
	}
}

impl<T> Index<RangeFrom<usize>> for BufferVec<T>
where
	T: BufferVecItem {
	type Output = [T];
	fn index(&self, range: RangeFrom<usize>) -> &[T] {
		if range.start > self.len() {
			panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
		}
		unsafe {from_raw_parts(self.staging_buffer_data_address.wrapping_add(range.start), self.len() - range.start)}
	}
}

impl<T> IndexMut<RangeFrom<usize>> for BufferVec<T>
where
	T: BufferVecItem {
	fn index_mut(&mut self, range: RangeFrom<usize>) -> &mut [T] {
		if range.start > self.len() {
			panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
		}
		self.cache_modified = true;
		for i in range.start..self.len() {
			self.cache_modified_bitmap.set(i, true);
		}
		unsafe {from_raw_parts_mut(self.staging_buffer_data_address.wrapping_add(range.start), self.len() - range.start)}
	}
}

impl<T> Index<RangeTo<usize>> for BufferVec<T>
where
	T: BufferVecItem {
	type Output = [T];
	fn index(&self, range: RangeTo<usize>) -> &[T] {
		if range.end > self.len() {
			panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
		}
		unsafe {from_raw_parts(self.staging_buffer_data_address, range.end)}
	}
}

impl<T> IndexMut<RangeTo<usize>> for BufferVec<T>
where
	T: BufferVecItem {
	fn index_mut(&mut self, range: RangeTo<usize>) -> &mut [T] {
		if range.end > self.len() {
			panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
		}
		self.cache_modified = true;
		for i in 0..range.end {
			self.cache_modified_bitmap.set(i, true);
		}
		unsafe {from_raw_parts_mut(self.staging_buffer_data_address, range.end)}
	}
}

impl<T> Index<RangeFull> for BufferVec<T>
where
	T: BufferVecItem {
	type Output = [T];
	fn index(&self, _: RangeFull) -> &[T] {
		unsafe {from_raw_parts(self.staging_buffer_data_address, self.len())}
	}
}

impl<T> IndexMut<RangeFull> for BufferVec<T>
where
	T: BufferVecItem {
	fn index_mut(&mut self, _: RangeFull) -> &mut [T] {
		self.cache_modified = true;
		self.cache_modified_bitmap.fill(true);
		unsafe {from_raw_parts_mut(self.staging_buffer_data_address, self.len())}
	}
}

impl<T> Index<RangeInclusive<usize>> for BufferVec<T>
where
	T: BufferVecItem {
	type Output = [T];
	fn index(&self, range: RangeInclusive<usize>) -> &[T] {
		if *range.start() >= self.len() || *range.end() >= self.len() {
			panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
		}
		unsafe {from_raw_parts(self.staging_buffer_data_address.wrapping_add(*range.start()), range.end() + 1 - range.start())}
	}
}

impl<T> IndexMut<RangeInclusive<usize>> for BufferVec<T>
where
	T: BufferVecItem {
	fn index_mut(&mut self, range: RangeInclusive<usize>) -> &mut [T] {
		if *range.start() >= self.len() || *range.end() >= self.len() {
			panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
		}
		self.cache_modified = true;
		for i in range.clone() {
			self.cache_modified_bitmap.set(i, true);
		}
		unsafe {from_raw_parts_mut(self.staging_buffer_data_address.wrapping_add(*range.start()), range.end() + 1 - range.start())}
	}
}

impl<T> Index<RangeToInclusive<usize>> for BufferVec<T>
where
	T: BufferVecItem {
	type Output = [T];
	fn index(&self, range: RangeToInclusive<usize>) -> &[T] {
		if range.end >= self.len() {
			panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
		}
		unsafe {from_raw_parts(self.staging_buffer_data_address, range.end + 1)}
	}
}

impl<T> IndexMut<RangeToInclusive<usize>> for BufferVec<T>
where
	T: BufferVecItem {
	fn index_mut(&mut self, range: RangeToInclusive<usize>) -> &mut [T] {
		if range.end >= self.len() {
			panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
		}
		self.cache_modified = true;
		for i in 0..=range.end {
			self.cache_modified_bitmap.set(i, true);
		}
		unsafe {from_raw_parts_mut(self.staging_buffer_data_address, range.end + 1)}
	}
}

/// The trait for the data types that can be stored in a `TexelBuffer`
pub trait TexelBufferDataType: Copy + Clone + Sized + Default + Debug {}
impl<T> TexelBufferDataType for T where T: Copy + Clone + Sized + Default + Debug {}

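/// A texel buffer: a `BufferVec<T>` whose contents are read through a buffer view
///
/// # Examples
///
/// A minimal sketch (assuming an initialized `device`, a recording `cmdbuf`,
/// and a suitable `usage`; the format constant is from the raw Vulkan API):
///
/// ```ignore
/// let mut texels: TexelBuffer<[f32; 4]> = TexelBuffer::new(device.clone(), usage)?;
/// texels.push([1.0, 0.0, 0.0, 1.0])?;
/// texels.flush(cmdbuf)?;
/// let view = texels.create_buffer_view(VK_FORMAT_R32G32B32A32_SFLOAT)?;
/// ```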
pub type TexelBuffer<T> = BufferVec<T>;

/// The trait for the `TexelBuffer` to be able to be wrapped into a trait object
pub trait GenericTexelBuffer: Debug {
	/// Get the `VkBuffer`
	fn get_vk_buffer(&self) -> VkBuffer;

	/// Get the size of the buffer, in bytes
	fn get_size(&self) -> usize;

	/// Get the address of the staging buffer
	fn get_staging_buffer_address(&self) -> *mut c_void;

	/// Create a buffer view covering the whole buffer
	fn create_buffer_view(&self, format: VkFormat) -> Result<VulkanBufferView, VulkanError>;

	/// Create a buffer view covering a part of the buffer
	fn create_buffer_view_partial(&self, range: &BufferViewRange) -> Result<VulkanBufferView, VulkanError>;

	/// Upload the modified staging-buffer data to the GPU
	fn flush(&mut self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError>;
}

impl<T> GenericTexelBuffer for TexelBuffer<T>
where
	T: TexelBufferDataType {
	fn get_vk_buffer(&self) -> VkBuffer {
		self.buffer.get_vk_buffer()
	}

	fn get_size(&self) -> usize {
		self.capacity * size_of::<T>()
	}

	fn get_staging_buffer_address(&self) -> *mut c_void {
		self.staging_buffer_data_address as *mut c_void
	}

	fn create_buffer_view(&self, format: VkFormat) -> Result<VulkanBufferView, VulkanError> {
		self.buffer.create_buffer_view(format)
	}

	fn create_buffer_view_partial(&self, range: &BufferViewRange) -> Result<VulkanBufferView, VulkanError> {
		self.buffer.create_buffer_view_partial(range)
	}

	fn flush(&mut self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError> {
		// Calls the inherent `BufferVec::flush` (inherent methods take precedence), not this trait method
		self.flush(cmdbuf)
	}
}