use crate::prelude::*;
use bitvec::vec::BitVec;
use std::{
    any::Any,
    cmp::min,
    ffi::c_void,
    fmt::{self, Debug, Formatter},
    marker::PhantomData,
    mem::{size_of, size_of_val},
    ops::{Index, IndexMut, Range, RangeFrom, RangeTo, RangeFull, RangeInclusive, RangeToInclusive},
    ptr::{copy, null_mut},
    slice::{from_raw_parts, from_raw_parts_mut},
    sync::Arc,
};

pub trait BufferVecItem: Clone + Copy + Sized + Default + Send + Sync + Debug + Any {}
impl<T> BufferVecItem for T where T: Clone + Copy + Sized + Default + Send + Sync + Debug + Any {}

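/// A growable vector backed by a GPU buffer. Element writes land in a mapped,
/// host-visible staging buffer and are tracked per item in a dirty bitmap;
/// `flush` then records uploads for only the modified regions.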
pub struct BufferVec<T: BufferVecItem> {
    /// The underlying device buffer together with its staging buffer.
    buffer: Buffer,

    /// Pointer to the mapped staging memory, viewed as an array of `T`.
    staging_buffer_data_address: *mut T,

    /// The number of live items.
    num_items: usize,

    /// How many items fit before the buffer must be reallocated.
    capacity: usize,

    /// One bit per allocated slot (its length tracks `capacity`); a set bit
    /// marks an item that must be re-uploaded.
    cache_modified_bitmap: BitVec,

    /// True when anything has been modified since the last `flush`.
    cache_modified: bool,

    _phantom: PhantomData<T>,
}

impl<T> BufferVec<T>
where
    T: BufferVecItem {
    /// Creates an empty `BufferVec` with no allocated capacity.
    pub fn new(device: Arc<VulkanDevice>, usage: VkBufferUsageFlags) -> Result<Self, VulkanError> {
        let buffer = Buffer::new(device, 0, None, usage)?;
        Ok(Self {
            buffer,
            staging_buffer_data_address: null_mut(),
            num_items: 0,
            capacity: 0,
            cache_modified_bitmap: BitVec::new(),
            cache_modified: false,
            _phantom: PhantomData,
        })
    }

    pub(crate) fn get_vk_buffer(&self) -> VkBuffer {
        self.buffer.get_vk_buffer()
    }

    pub fn get_device(&self) -> Arc<VulkanDevice> {
        self.buffer.device.clone()
    }

    /// Builds a `BufferVec` from a slice and records the initial upload into `cmdbuf`.
    pub fn from(device: Arc<VulkanDevice>, data: &[T], cmdbuf: VkCommandBuffer, usage: VkBufferUsageFlags) -> Result<Self, VulkanError> {
        let buffer = Buffer::new(device, size_of_val(data) as VkDeviceSize, Some(data.as_ptr() as *const c_void), usage)?;
        let staging_buffer_data_address = buffer.get_staging_buffer_address()? as *mut T;
        buffer.upload_staging_buffer(cmdbuf, 0, size_of_val(data) as VkDeviceSize)?;
        Ok(Self {
            buffer,
            staging_buffer_data_address,
            num_items: data.len(),
            capacity: data.len(),
            cache_modified_bitmap: BitVec::repeat(false, data.len()),
            cache_modified: false,
            _phantom: PhantomData,
        })
    }

    /// Creates an empty `BufferVec` with room for `capacity` items.
    pub fn with_capacity(device: Arc<VulkanDevice>, capacity: usize, usage: VkBufferUsageFlags) -> Result<Self, VulkanError> {
        // `Buffer::new` takes a size in bytes, not in items.
        let buffer = Buffer::new(device, (capacity * size_of::<T>()) as VkDeviceSize, None, usage)?;
        let staging_buffer_data_address = buffer.get_staging_buffer_address()? as *mut T;
        Ok(Self {
            buffer,
            staging_buffer_data_address,
            num_items: 0,
            capacity,
            // The bitmap always spans the whole capacity.
            cache_modified_bitmap: BitVec::repeat(false, capacity),
            cache_modified: true,
            _phantom: PhantomData,
        })
    }

    /// Reallocates the underlying buffer to hold `new_capacity` items, preserving data.
    pub fn change_capacity(&mut self, new_capacity: usize) -> Result<(), VulkanError> {
        let new_buffer = Buffer::new(self.buffer.device.clone(), (new_capacity * size_of::<T>()) as VkDeviceSize, None, self.buffer.get_usage())?;
        if new_capacity != 0 {
            let new_address = new_buffer.get_staging_buffer_address()? as *mut T;
            // Copy only as many items as both the old and the new buffer can hold.
            let items_to_copy = min(self.capacity, new_capacity);
            if items_to_copy != 0 {
                unsafe {copy(self.staging_buffer_data_address as *const T, new_address, items_to_copy)}
            }
            self.staging_buffer_data_address = new_address;
            self.cache_modified = true;
            self.cache_modified_bitmap.resize(new_capacity, false);
            // The device-side buffer is brand new, so every surviving item must be re-uploaded.
            for i in 0..min(self.num_items, new_capacity) {
                self.cache_modified_bitmap.set(i, true);
            }
        } else {
            self.staging_buffer_data_address = null_mut();
            self.cache_modified = false;
            self.cache_modified_bitmap.clear();
            self.cache_modified_bitmap.shrink_to_fit();
        }
        self.buffer = new_buffer;
        self.capacity = new_capacity;
        self.num_items = min(self.num_items, new_capacity);
        Ok(())
    }

    /// Marks `length` items starting at `first_index` as modified (or clean, if `flag` is false).
    ///
    /// # Safety
    /// The caller must ensure the marked range reflects the actual state of the staging data,
    /// or the device-side copy can go out of sync.
    pub unsafe fn set_data_modified(&mut self, first_index: usize, length: usize, flag: bool) {
        if length == 0 {
            return;
        }
        let last_index = first_index + length - 1;
        if first_index >= self.num_items {
            panic!("The `first_index` is {first_index} but the length of the `BufferVec` is {}", self.num_items);
        }
        if last_index >= self.num_items {
            panic!("The last index is {last_index}, which exceeds the length {}", self.num_items);
        }
        self.cache_modified |= flag;
        for i in first_index..=last_index {
            self.cache_modified_bitmap.set(i, flag);
        }
    }

    /// Sets the length directly; items grown into are marked dirty.
    ///
    /// # Safety
    /// `new_len` must not exceed the capacity, and the slots grown into must
    /// already hold valid `T` values.
    pub unsafe fn set_len(&mut self, new_len: usize) {
        if new_len > self.num_items {
            self.cache_modified = true;
            for i in self.num_items..new_len {
                self.cache_modified_bitmap.set(i, true);
            }
        }
        self.num_items = new_len;
    }

    /// Consumes the `BufferVec`, returning the underlying `Buffer`.
    pub fn into_inner(self) -> Buffer {
        self.buffer
    }

    /// Wraps an existing `Buffer`, treating its contents as `length` items of `T`.
    ///
    /// # Safety
    /// The buffer must actually contain at least `length` valid items of `T`.
    pub unsafe fn from_raw_parts(buffer: Buffer, length: usize) -> Result<Self, VulkanError> {
        let capacity = buffer.get_size() as usize / size_of::<T>();
        let lock = buffer.ensure_staging_buffer()?;
        let staging_buffer_data_address = lock.as_ref().unwrap().get_address() as *mut T;
        drop(lock);
        Ok(Self {
            buffer,
            staging_buffer_data_address,
            num_items: length,
            capacity,
            // Span the whole capacity; mark the live items dirty so the next
            // `flush` uploads the staging contents.
            cache_modified_bitmap: {
                let mut bv = BitVec::repeat(true, length);
                bv.resize(capacity, false);
                bv
            },
            cache_modified: true,
            _phantom: PhantomData,
        })
    }

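    // Growth policy: roughly 1.5x per reallocation (`old * 3 / 2 + 1`), giving the
    // capacity sequence 0 -> 1 -> 2 -> 4 -> 7 -> 11 -> 17 -> ... and amortized O(1) `push`.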
    fn grow(&mut self) -> Result<(), VulkanError> {
        let mut new_capacity = ((self.capacity * 3) >> 1) + 1;
        if new_capacity < self.num_items {
            new_capacity = self.num_items;
        }
        self.change_capacity(new_capacity)
    }

    pub fn push(&mut self, data: T) -> Result<(), VulkanError> {
        if self.num_items >= self.capacity {
            self.grow()?;
        }
        unsafe {*self.staging_buffer_data_address.wrapping_add(self.num_items) = data};
        self.cache_modified = true;
        // The bitmap spans the whole capacity, so mark the slot rather than pushing a new bit.
        self.cache_modified_bitmap.set(self.num_items, true);
        self.num_items += 1;
        Ok(())
    }

    pub fn pop(&mut self) -> T {
        if self.num_items == 0 {
            panic!("`BufferVec::<T>::pop()` called on an empty `BufferVec<T>`.");
        }
        self.num_items -= 1;
        // Clear the slot's dirty bit; the bitmap keeps spanning the whole capacity.
        self.cache_modified_bitmap.set(self.num_items, false);
        unsafe {*self.staging_buffer_data_address.wrapping_add(self.num_items)}
    }

    /// Removes and returns the item at `index`, shifting everything after it left.
    pub fn remove(&mut self, index: usize) -> T {
        let ret = self[index];
        let from_index = index + 1;
        unsafe {copy(
            self.staging_buffer_data_address.wrapping_add(from_index),
            self.staging_buffer_data_address.wrapping_add(index),
            self.num_items - from_index)
        };
        self.num_items -= 1;
        if index < self.num_items {
            // The shifted tail must be re-uploaded.
            self.cache_modified = true;
        }
        for i in index..self.num_items {
            self.cache_modified_bitmap.set(i, true);
        }
        self.cache_modified_bitmap.set(self.num_items, false);
        ret
    }

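    // Unlike `remove`, `swap_remove` is O(1): it overwrites `index` with the last
    // item instead of shifting the whole tail, so only one slot is marked dirty.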
    pub fn swap_remove(&mut self, index: usize) -> T {
        if self.num_items > 1 {
            let last_index = self.num_items - 1;
            let last_item = unsafe {&mut *self.staging_buffer_data_address.wrapping_add(last_index)};
            let swap_item = &mut self[index];
            let ret = *swap_item;
            if last_index != index {
                *swap_item = *last_item;
            }
            self.num_items -= 1;
            self.cache_modified_bitmap.set(self.num_items, false);
            ret
        } else {
            if index != 0 {
                panic!("Index {index} out of bounds (len() == {})", self.len());
            }
            self.pop()
        }
    }

    /// Resizes to `new_len` items, filling any new slots with `new_data`.
    pub fn resize(&mut self, new_len: usize, new_data: T) -> Result<(), VulkanError> {
        if self.num_items == new_len {
            return Ok(());
        }
        if self.capacity < new_len {
            self.change_capacity(new_len)?;
        }
        if new_len > self.num_items {
            self.cache_modified = true;
            unsafe {from_raw_parts_mut(self.staging_buffer_data_address.wrapping_add(self.num_items), new_len - self.num_items)}.fill(new_data);
            for i in self.num_items..new_len {
                self.cache_modified_bitmap.set(i, true);
            }
        } else {
            // Shrinking: clear the dirty bits of the dropped tail.
            for i in new_len..self.num_items {
                self.cache_modified_bitmap.set(i, false);
            }
        }
        self.num_items = new_len;
        Ok(())
    }

    /// Removes all items. Capacity and the underlying buffer are kept.
    pub fn clear(&mut self) {
        self.num_items = 0;
        // Dropped items need no upload.
        self.cache_modified_bitmap.fill(false);
        self.cache_modified = false;
    }

    pub fn get_capacity(&self) -> usize {
        self.capacity
    }

    pub fn len(&self) -> usize {
        self.num_items
    }

    pub fn is_empty(&self) -> bool {
        self.num_items == 0
    }

    pub fn shrink_to_fit(&mut self) -> Result<(), VulkanError> {
        self.change_capacity(self.num_items)
    }

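    // How `flush` batches uploads, on a small example (MAX_GAP shortened to 2
    // here; the real constant below is 16):
    //
    //   dirty bits: 1 1 0 1 0 0 0 1
    //   regions:    items 0..=3  (the one-bit gap at index 2 is absorbed)
    //               items 7..=7  (the gap starting at index 4 reaches MAX_GAP,
    //                             so the first region is closed)
    //
    // Merging across short gaps uploads a few clean bytes in exchange for far
    // fewer copy regions per command buffer.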
    /// Records uploads for every modified region into `cmdbuf` and clears the dirty state.
    pub fn flush(&mut self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError> {
        if !self.cache_modified {
            return Ok(());
        }
        // Merge runs of dirty items separated by fewer than `MAX_GAP` clean items
        // into a single upload region.
        const MAX_GAP: usize = 16;
        let mut regions: Vec<BufferRegion> = Vec::new();
        let mut start: Option<usize> = None; // first index of the pending region
        let mut end = 0; // last dirty index seen in the pending region
        let mut gap = 0; // clean items seen since `end`
        for (i, b) in self.cache_modified_bitmap.iter().enumerate() {
            if *b {
                if start.is_none() {
                    start = Some(i);
                }
                end = i;
                gap = 0;
            } else if let Some(si) = start {
                gap += 1;
                if gap >= MAX_GAP {
                    // The gap is too large to be worth bridging; close the region.
                    regions.push(BufferRegion {
                        offset: (si * size_of::<T>()) as VkDeviceSize,
                        size: ((end + 1 - si) * size_of::<T>()) as VkDeviceSize,
                    });
                    start = None;
                }
            }
        }
        if let Some(si) = start {
            regions.push(BufferRegion {
                offset: (si * size_of::<T>()) as VkDeviceSize,
                size: ((end + 1 - si) * size_of::<T>()) as VkDeviceSize,
            });
        }
        self.cache_modified_bitmap.fill(false);
        if !regions.is_empty() {
            self.buffer.upload_staging_buffer_multi(cmdbuf, regions.as_ref())?;
        }
        self.cache_modified = false;
        Ok(())
    }
}

impl<T> Clone for BufferVec<T>
where
    T: BufferVecItem {
    fn clone(&self) -> Self {
        let buffer = self.buffer.clone();
        let staging_buffer_data_address = buffer.get_staging_buffer_address().unwrap() as *mut T;
        Self {
            buffer,
            staging_buffer_data_address,
            num_items: self.num_items,
            capacity: self.capacity,
            cache_modified_bitmap: self.cache_modified_bitmap.clone(),
            cache_modified: self.cache_modified,
            _phantom: PhantomData,
        }
    }
}

impl<T> Debug for BufferVec<T>
where
    T: BufferVecItem {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.debug_struct("BufferVec")
            .field("buffer", &self.buffer)
            .field("staging_buffer_data_address", &self.staging_buffer_data_address)
            .field("num_items", &self.num_items)
            .field("capacity", &self.capacity)
            .field("cache_modified_bitmap", &self.cache_modified_bitmap)
            .field("cache_modified", &self.cache_modified)
            .finish()
    }
}

impl<T> Index<usize> for BufferVec<T>
where
    T: BufferVecItem {
    type Output = T;
    fn index(&self, index: usize) -> &T {
        if index >= self.len() {
            panic!("Index {index} out of bounds (len() == {})", self.len());
        }
        unsafe {&*self.staging_buffer_data_address.wrapping_add(index)}
    }
}

impl<T> IndexMut<usize> for BufferVec<T>
where
    T: BufferVecItem {
    fn index_mut(&mut self, index: usize) -> &mut T {
        if index >= self.len() {
            panic!("Index {index} out of bounds (len() == {})", self.len());
        }
        self.cache_modified = true;
        self.cache_modified_bitmap.set(index, true);
        unsafe {&mut *self.staging_buffer_data_address.wrapping_add(index)}
    }
}

impl<T> Index<Range<usize>> for BufferVec<T>
where
    T: BufferVecItem {
    type Output = [T];
    fn index(&self, range: Range<usize>) -> &[T] {
        // Both ends must be in bounds; joining the checks with `&&` would let a
        // bad start slip through whenever the end happened to be valid.
        if range.start > range.end || range.end > self.len() {
            panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
        }
        unsafe {from_raw_parts(self.staging_buffer_data_address.wrapping_add(range.start), range.end - range.start)}
    }
}

impl<T> IndexMut<Range<usize>> for BufferVec<T>
where
    T: BufferVecItem {
    fn index_mut(&mut self, range: Range<usize>) -> &mut [T] {
        if range.start > range.end || range.end > self.len() {
            panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
        }
        self.cache_modified = true;
        for i in range.clone() {
            self.cache_modified_bitmap.set(i, true);
        }
        unsafe {from_raw_parts_mut(self.staging_buffer_data_address.wrapping_add(range.start), range.end - range.start)}
    }
}

impl<T> Index<RangeFrom<usize>> for BufferVec<T>
where
    T: BufferVecItem {
    type Output = [T];
    fn index(&self, range: RangeFrom<usize>) -> &[T] {
        // `start == len` yields an empty slice, matching `std` slice semantics.
        if range.start > self.len() {
            panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
        }
        unsafe {from_raw_parts(self.staging_buffer_data_address.wrapping_add(range.start), self.len() - range.start)}
    }
}

impl<T> IndexMut<RangeFrom<usize>> for BufferVec<T>
where
    T: BufferVecItem {
    fn index_mut(&mut self, range: RangeFrom<usize>) -> &mut [T] {
        if range.start > self.len() {
            panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
        }
        self.cache_modified = true;
        for i in range.start..self.len() {
            self.cache_modified_bitmap.set(i, true);
        }
        unsafe {from_raw_parts_mut(self.staging_buffer_data_address.wrapping_add(range.start), self.len() - range.start)}
    }
}

impl<T> Index<RangeTo<usize>> for BufferVec<T>
where
    T: BufferVecItem {
    type Output = [T];
    fn index(&self, range: RangeTo<usize>) -> &[T] {
        if range.end > self.len() {
            panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
        }
        unsafe {from_raw_parts(self.staging_buffer_data_address, range.end)}
    }
}

impl<T> IndexMut<RangeTo<usize>> for BufferVec<T>
where
    T: BufferVecItem {
    fn index_mut(&mut self, range: RangeTo<usize>) -> &mut [T] {
        if range.end > self.len() {
            panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
        }
        self.cache_modified = true;
        for i in 0..range.end {
            self.cache_modified_bitmap.set(i, true);
        }
        unsafe {from_raw_parts_mut(self.staging_buffer_data_address, range.end)}
    }
}

impl<T> Index<RangeFull> for BufferVec<T>
where
    T: BufferVecItem {
    type Output = [T];
    fn index(&self, _: RangeFull) -> &[T] {
        unsafe {from_raw_parts(self.staging_buffer_data_address, self.len())}
    }
}

impl<T> IndexMut<RangeFull> for BufferVec<T>
where
    T: BufferVecItem {
    fn index_mut(&mut self, _: RangeFull) -> &mut [T] {
        self.cache_modified = true;
        // Mark only the live items, not the unused capacity slots.
        for i in 0..self.len() {
            self.cache_modified_bitmap.set(i, true);
        }
        unsafe {from_raw_parts_mut(self.staging_buffer_data_address, self.len())}
    }
}

impl<T> Index<RangeInclusive<usize>> for BufferVec<T>
where
    T: BufferVecItem {
    type Output = [T];
    fn index(&self, range: RangeInclusive<usize>) -> &[T] {
        if *range.start() >= self.len() || *range.end() >= self.len() {
            panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
        }
        unsafe {from_raw_parts(self.staging_buffer_data_address.wrapping_add(*range.start()), range.end() + 1 - range.start())}
    }
}

impl<T> IndexMut<RangeInclusive<usize>> for BufferVec<T>
where
    T: BufferVecItem {
    fn index_mut(&mut self, range: RangeInclusive<usize>) -> &mut [T] {
        if *range.start() >= self.len() || *range.end() >= self.len() {
            panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
        }
        self.cache_modified = true;
        for i in range.clone() {
            self.cache_modified_bitmap.set(i, true);
        }
        unsafe {from_raw_parts_mut(self.staging_buffer_data_address.wrapping_add(*range.start()), range.end() + 1 - range.start())}
    }
}

impl<T> Index<RangeToInclusive<usize>> for BufferVec<T>
where
    T: BufferVecItem {
    type Output = [T];
    fn index(&self, range: RangeToInclusive<usize>) -> &[T] {
        if range.end >= self.len() {
            panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
        }
        unsafe {from_raw_parts(self.staging_buffer_data_address, range.end + 1)}
    }
}

impl<T> IndexMut<RangeToInclusive<usize>> for BufferVec<T>
where
    T: BufferVecItem {
    fn index_mut(&mut self, range: RangeToInclusive<usize>) -> &mut [T] {
        if range.end >= self.len() {
            panic!("Slice range {range:?} out of bounds (len() == {})", self.len());
        }
        self.cache_modified = true;
        for i in 0..=range.end {
            self.cache_modified_bitmap.set(i, true);
        }
        unsafe {from_raw_parts_mut(self.staging_buffer_data_address, range.end + 1)}
    }
}

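// SAFETY: `staging_buffer_data_address` points into host-visible memory owned by
// `buffer`, which lives as long as the `BufferVec` itself, and `BufferVecItem`
// already requires `T: Send + Sync`. (Safety rationale inferred from the code;
// the original did not state one.)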
unsafe impl<T> Send for BufferVec<T> where T: BufferVecItem {}
unsafe impl<T> Sync for BufferVec<T> where T: BufferVecItem {}

pub trait TexelBufferDataType: Copy + Clone + Sized + Default + Send + Sync + Debug + Any {}
impl<T> TexelBufferDataType for T where T: Copy + Clone + Sized + Default + Send + Sync + Debug + Any {}

pub type TexelBuffer<T> = BufferVec<T>;

/// Object-safe access to a `TexelBuffer` regardless of its item type.
pub trait GenericTexelBuffer: Debug {
    /// Returns the underlying `VkBuffer` handle.
    fn get_vk_buffer(&self) -> VkBuffer;

    /// Returns the buffer size in bytes.
    fn get_size(&self) -> usize;

    /// Returns the mapped staging-buffer address.
    fn get_staging_buffer_address(&self) -> *mut c_void;

    /// Creates a buffer view over the whole buffer with the given format.
    fn create_buffer_view(&self, format: VkFormat) -> Result<VulkanBufferView, VulkanError>;

    /// Creates a buffer view over part of the buffer.
    fn create_buffer_view_partial(&self, range: &BufferViewRange) -> Result<VulkanBufferView, VulkanError>;

    /// Uploads all modified regions to the device.
    fn flush(&mut self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError>;
}

impl<T> GenericTexelBuffer for TexelBuffer<T>
where
    T: TexelBufferDataType {
    fn get_vk_buffer(&self) -> VkBuffer {
        self.buffer.get_vk_buffer()
    }

    fn get_size(&self) -> usize {
        self.capacity * size_of::<T>()
    }

    fn get_staging_buffer_address(&self) -> *mut c_void {
        self.staging_buffer_data_address as *mut c_void
    }

    fn create_buffer_view(&self, format: VkFormat) -> Result<VulkanBufferView, VulkanError> {
        self.buffer.create_buffer_view(format)
    }

    fn create_buffer_view_partial(&self, range: &BufferViewRange) -> Result<VulkanBufferView, VulkanError> {
        self.buffer.create_buffer_view_partial(range)
    }

    fn flush(&mut self, cmdbuf: VkCommandBuffer) -> Result<(), VulkanError> {
        // Call the inherent method explicitly so this cannot recurse into the trait method.
        BufferVec::flush(self, cmdbuf)
    }
}
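
// A minimal usage sketch (illustrative, not part of the API): it assumes the
// caller already has a `VulkanDevice`, a recording command buffer, and an
// appropriate `usage` flag; the function and variable names are placeholders.
#[allow(dead_code)]
fn buffer_vec_usage_sketch(
    device: Arc<VulkanDevice>,
    cmdbuf: VkCommandBuffer,
    usage: VkBufferUsageFlags,
) -> Result<(), VulkanError> {
    let mut values: BufferVec<f32> = BufferVec::new(device, usage)?;
    for i in 0..8 {
        values.push(i as f32)?; // each push marks its slot dirty
    }
    values[3] = 42.0; // `IndexMut` marks index 3 dirty
    values.flush(cmdbuf)?; // records an upload covering only the dirty regions
    Ok(())
}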