1
2use crate::prelude::*;
3use bitvec::vec::BitVec;
4use std::{
5 fmt::Debug,
6 marker::PhantomData,
7 mem::{size_of, size_of_val},
8 ops::{Deref, DerefMut, Index, IndexMut, Range, RangeFrom, RangeTo, RangeFull, RangeInclusive, RangeToInclusive},
9 rc::Rc,
10};
11
/// Marker trait for element types storable in a [`BufferVec`].
///
/// Any `Copy + Sized + Default + Debug` type qualifies automatically via the
/// blanket impl below, so this is purely a bound alias — never implemented by hand.
pub trait BufferVecItem: Copy + Sized + Default + Debug {}
impl<T> BufferVecItem for T where T: Copy + Sized + Default + Debug {}
15
/// A `Vec`-like interface over a GL [`Buffer`]: indexed get/set of items of
/// type `T`, bulk slice transfer, resizing, and RAII write-back guards.
pub trait BufferVec<T: BufferVecItem>: Debug + Clone + From<Buffer> {
    /// Borrow the underlying GL buffer object.
    fn get_buffer(&self) -> &Buffer;

    /// Number of items currently stored (logical length, not capacity).
    fn len(&self) -> usize;

    /// Number of items the underlying buffer can hold without reallocating.
    fn capacity(&self) -> usize;

    /// Change the logical length to `new_len`, growing storage if needed;
    /// `value` is the fill value for newly created items.
    fn resize(&mut self, new_len: usize, value: T);

    /// Release spare storage so that capacity matches the logical length.
    fn shrink_to_fit(&mut self);

    /// Read a copy of the item at `index`.
    fn get(&self, index: usize) -> T;

    /// Overwrite the item at `index` with `*data`.
    fn set(&mut self, index: usize, data: &T);

    /// Read `len` items starting at `start_index` into a new `Vec`.
    fn get_slice_of_data(&self, start_index: usize, len: usize) -> Vec<T>;

    /// Write `data` into the buffer starting at `start_index`.
    fn set_slice_of_data(&mut self, start_index: usize, data: &[T]);

    /// Mutable access to one item via an RAII guard; the (possibly modified)
    /// value is written back when the guard drops.
    fn get_mut(&mut self, index: usize) -> BufferVecItemRefMut<Self, T> {
        BufferVecItemRefMut::new(self, index)
    }

    /// Mutable access to a sub-range via an RAII guard; the slice is written
    /// back when the guard drops. The variants below mirror the std range types.
    fn get_mut_slice(&mut self, range: Range<usize>) -> BufferVecSliceRefMut<Self, T> {
        BufferVecSliceRefMut::new_range(self, range)
    }

    /// Guard over `range.start..len()`.
    fn get_mut_slice_range_from(&mut self, range: RangeFrom<usize>) -> BufferVecSliceRefMut<Self, T> {
        BufferVecSliceRefMut::new_range_from(self, range)
    }

    /// Guard over `0..range.end`.
    fn get_mut_slice_range_to(&mut self, range: RangeTo<usize>) -> BufferVecSliceRefMut<Self, T> {
        BufferVecSliceRefMut::new_range_to(self, range)
    }

    /// Guard over the entire contents.
    fn get_mut_slice_range_full(&mut self, range: RangeFull) -> BufferVecSliceRefMut<Self, T> {
        BufferVecSliceRefMut::new_range_full(self, range)
    }

    /// Guard over `*range.start()..=*range.end()`.
    fn get_mut_slice_range_inclusive(&mut self, range: RangeInclusive<usize>) -> BufferVecSliceRefMut<Self, T> {
        BufferVecSliceRefMut::new_range_inclusive(self, range)
    }

    /// Guard over `0..=range.end`.
    fn get_mut_slice_range_to_inclusive(&mut self, range: RangeToInclusive<usize>) -> BufferVecSliceRefMut<Self, T> {
        BufferVecSliceRefMut::new_range_to_inclusive(self, range)
    }

    /// Push any pending cached modifications to the GL buffer.
    /// Default is a no-op for implementations that write through immediately.
    fn flush(&mut self) {}

    /// `true` when the logical length is zero.
    fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Change the GL binding target used by subsequent binds.
    fn set_target(&mut self, target: BufferTarget);

    /// Bind the underlying buffer to its current target.
    fn bind<'a>(&'a self) -> BufferBind<'a> {
        self.get_buffer().bind()
    }

    /// Bind the underlying buffer to an explicit `target`.
    fn bind_to<'a>(&'a self, target: BufferTarget) -> BufferBind<'a> {
        self.get_buffer().bind_to(target)
    }
}
101
/// RAII guard for mutable access to a single [`BufferVec`] item.
///
/// Construction snapshots the item into `item`; `Deref`/`DerefMut` operate on
/// that cached copy, and `Drop` writes it back to `buffer[index]`.
#[derive(Debug)]
pub struct BufferVecItemRefMut<'a, B, T>
where
    B: BufferVec<T>,
    T: BufferVecItem {
    // Cached copy of the item, read at construction time.
    item: T,
    // Position of the item inside `buffer`.
    index: usize,
    // Destination for the write-back on drop.
    buffer: &'a mut B
}
112
/// RAII guard for mutable access to a contiguous range of [`BufferVec`] items.
///
/// Construction snapshots the range into `slice`; `Deref`/`DerefMut` operate
/// on that cached copy, and `Drop` writes it back starting at `start_index`.
#[derive(Debug)]
pub struct BufferVecSliceRefMut<'a, B, T>
where
    B: BufferVec<T>,
    T: BufferVecItem {
    // Cached copy of the addressed range, read at construction time.
    slice: Vec<T>,
    // Buffer index of the first cached item.
    start_index: usize,
    // Destination for the write-back on drop.
    buffer: &'a mut B
}
123
124impl<'a, B, T> BufferVecItemRefMut<'a, B, T>
125where
126 B: BufferVec<T>,
127 T: BufferVecItem {
128 fn new(buffer: &'a mut B, index: usize) -> Self {
129 let item = buffer.get(index);
130 Self {
131 item,
132 index,
133 buffer,
134 }
135 }
136}
137
138impl<'a, B, T> Deref for BufferVecItemRefMut<'a, B, T>
139where
140 B: BufferVec<T>,
141 T: BufferVecItem {
142 type Target = T;
143 fn deref(&self) -> &T {
144 &self.item
145 }
146}
147
148impl<'a, B, T> DerefMut for BufferVecItemRefMut<'a, B, T>
149where
150 B: BufferVec<T>,
151 T: BufferVecItem {
152 fn deref_mut(&mut self) -> &mut T {
153 &mut self.item
154 }
155}
156
157impl<'a, B, T> Drop for BufferVecItemRefMut<'a, B, T>
158where
159 B: BufferVec<T>,
160 T: BufferVecItem {
161 fn drop(&mut self) {
162 self.buffer.set(self.index, &self.item)
163 }
164}
165
166impl<'a, B, T> BufferVecSliceRefMut<'a, B, T>
167where
168 B: BufferVec<T>,
169 T: BufferVecItem {
170 fn new_range(buffer: &'a mut B, range: Range<usize>) -> Self {
171 let slice = buffer.get_slice_of_data(range.start, range.end - range.start);
172 Self {
173 slice,
174 start_index: range.start,
175 buffer,
176 }
177 }
178
179 fn new_range_from(buffer: &'a mut B, range: RangeFrom<usize>) -> Self {
180 let slice = buffer.get_slice_of_data(range.start, buffer.len() - range.start);
181 Self {
182 slice,
183 start_index: range.start,
184 buffer,
185 }
186 }
187
188 fn new_range_to(buffer: &'a mut B, range: RangeTo<usize>) -> Self {
189 let slice = buffer.get_slice_of_data(0, range.end);
190 Self {
191 slice,
192 start_index: 0,
193 buffer,
194 }
195 }
196
197 fn new_range_full(buffer: &'a mut B, _: RangeFull) -> Self {
198 let slice = buffer.get_slice_of_data(0, buffer.len());
199 Self {
200 slice,
201 start_index: 0,
202 buffer,
203 }
204 }
205
206 fn new_range_inclusive(buffer: &'a mut B, range: RangeInclusive<usize>) -> Self {
207 let slice = buffer.get_slice_of_data(*range.start(), *range.end() + 1 - *range.start());
208 Self {
209 slice,
210 start_index: *range.start(),
211 buffer,
212 }
213 }
214
215 fn new_range_to_inclusive(buffer: &'a mut B, range: RangeToInclusive<usize>) -> Self {
216 let slice = buffer.get_slice_of_data(0, range.end + 1);
217 Self {
218 slice,
219 start_index: 0,
220 buffer,
221 }
222 }
223}
224
225impl<'a, B, T> Deref for BufferVecSliceRefMut<'a, B, T>
226where
227 B: BufferVec<T>,
228 T: BufferVecItem {
229 type Target = [T];
230 fn deref(&self) -> &[T] {
231 &self.slice
232 }
233}
234
235impl<'a, B, T> DerefMut for BufferVecSliceRefMut<'a, B, T>
236where
237 B: BufferVec<T>,
238 T: BufferVecItem {
239 fn deref_mut(&mut self) -> &mut [T] {
240 &mut self.slice
241 }
242}
243
244impl<'a, B, T> Drop for BufferVecSliceRefMut<'a, B, T>
245where
246 B: BufferVec<T>,
247 T: BufferVecItem {
248 fn drop(&mut self) {
249 self.buffer.set_slice_of_data(self.start_index, &self.slice[..])
250 }
251}
252
/// Write-through [`BufferVec`]: every `get`/`set` maps the GL buffer directly,
/// with no CPU-side cache.
#[derive(Debug, Clone)]
pub struct BufferVecStatic<T: BufferVecItem> {
    // Shared handle to the GL context wrapper.
    pub glcore: Rc<GLCore>,
    // The wrapped GL buffer object.
    buffer: Buffer,
    // Logical number of items (see `len`).
    num_items: usize,
    // Item capacity derived from the buffer's byte size.
    capacity: usize,
    // Ties the element type `T` to the struct without storing a `T`.
    _item_type: PhantomData<T>,
}
262
263impl<T: BufferVecItem> BufferVecStatic<T> {
265 pub fn get_name(&self) -> u32 {
267 self.buffer.get_name()
268 }
269
270 pub fn new(glcore: Rc<GLCore>, buffer: Buffer) -> Self {
272 let capacity = buffer.size() / size_of::<T>();
273 Self {
274 glcore,
275 buffer,
276 num_items: 0,
277 capacity,
278 _item_type: PhantomData,
279 }
280 }
281}
282
283impl<T: BufferVecItem> BufferVec<T> for BufferVecStatic<T> {
284 fn get_buffer(&self) -> &Buffer {
285 &self.buffer
286 }
287
288 fn capacity(&self) -> usize {
289 self.capacity
290 }
291
292 fn len(&self) -> usize {
293 self.num_items
294 }
295
296 fn resize(&mut self, new_len: usize, value: T) {
297 let new_size = new_len * size_of::<T>();
298 if new_size > self.capacity {
299 self.buffer.resize(new_len * size_of::<T>(), value);
300 }
301 self.num_items = new_len;
302 }
303
304 fn shrink_to_fit(&mut self) {
305 self.capacity = self.num_items;
306 self.buffer.resize(self.capacity * size_of::<T>(), T::default());
307 }
308
309 fn get(&self, index: usize) -> T {
310 let offset = index * size_of::<T>();
311 let bind = self.buffer.bind();
312 let (map, addr) = bind.map_ranged(offset, size_of::<T>(), MapAccess::WriteOnly);
313 let addr = addr as *mut T;
314 let ret = unsafe { *addr };
315 map.unmap();
316 ret
317 }
318
319 fn set(&mut self, index: usize, data: &T) {
320 let offset = index * size_of::<T>();
321 let bind = self.buffer.bind();
322 let (map, addr) = bind.map_ranged(offset, size_of::<T>(), MapAccess::WriteOnly);
323 let addr = addr as *mut T;
324 unsafe {
325 *addr = *data;
326 }
327 map.unmap();
328 }
329
330 fn get_slice_of_data(&self, start_index: usize, len: usize) -> Vec<T> {
331 let offset = start_index * size_of::<T>();
332 let end_index = start_index + len;
333 let bind = self.buffer.bind();
334 let (map, addr) = bind.map_ranged(offset, len * size_of::<T>(), MapAccess::WriteOnly);
335 let addr = addr as *mut T;
336 let mut ret: Vec<T> = Vec::with_capacity(len);
337 for i in start_index..end_index {
338 ret.push(unsafe {*addr.wrapping_add(i)});
339 }
340 map.unmap();
341 ret
342 }
343
344 fn set_slice_of_data(&mut self, index: usize, data: &[T]) {
345 let offset = index * size_of::<T>();
346 let bind = self.buffer.bind();
347 let (map, addr) = bind.map_ranged(offset, size_of_val(data), MapAccess::WriteOnly);
348 let addr = addr as *mut T;
349 for (i, item) in data.iter().enumerate() {
350 unsafe { *addr.wrapping_add(i) = *item; };
351 }
352 map.unmap();
353 }
354
355 fn set_target(&mut self, target: BufferTarget) {
356 self.buffer.set_target(target)
357 }
358}
359
360impl<T: BufferVecItem> From<BufferVecStatic<T>> for Buffer {
361 fn from(val: BufferVecStatic<T>) -> Self {
362 val.buffer
363 }
364}
365
366impl<T: BufferVecItem> From<Buffer> for BufferVecStatic<T> {
367 fn from(val: Buffer) -> Self {
368 let capacity = val.size() / size_of::<T>();
369 BufferVecStatic {
370 glcore: val.glcore.clone(),
371 buffer: val,
372 num_items: 0,
373 capacity,
374 _item_type: PhantomData,
375 }
376 }
377}
378
/// Write-back-cached [`BufferVec`]: reads and writes hit a CPU-side `cache`,
/// and `flush` uploads coalesced dirty ranges to the GL buffer.
#[derive(Debug, Clone)]
pub struct BufferVecDynamic<T: BufferVecItem> {
    // Shared handle to the GL context wrapper.
    pub glcore: Rc<GLCore>,
    // The wrapped write-through buffer that uploads actually go to.
    buffer: BufferVecStatic<T>,
    // Logical number of items (see `len`).
    num_items: usize,
    // Item capacity of the underlying buffer.
    capacity: usize,
    // CPU-side copy of the buffer contents.
    cache: Vec<T>,
    // One bit per cached item: `true` = modified since the last flush.
    cache_modified_bitmap: BitVec,
    // Fast-path flag: `false` means no bit in the bitmap is set.
    cache_modified: bool,
}
390
391impl<T: BufferVecItem> BufferVecDynamic<T> {
392 pub fn get_name(&self) -> u32 {
394 self.buffer.get_name()
395 }
396
397 pub fn new(buffer: BufferVecStatic<T>) -> Self {
399 let capacity = buffer.capacity();
400 let mut cache_modified_bitmap = BitVec::new();
401 let cache = buffer.get_slice_of_data(0, capacity);
402 cache_modified_bitmap.resize(capacity, false);
403 let num_items = buffer.len();
404 Self {
405 glcore: buffer.glcore.clone(),
406 buffer,
407 cache,
408 cache_modified_bitmap,
409 cache_modified: false,
410 num_items,
411 capacity
412 }
413 }
414}
415
416impl<T: BufferVecItem> BufferVec<T> for BufferVecDynamic<T> {
417 fn get_buffer(&self) -> &Buffer {
418 self.buffer.get_buffer()
419 }
420
421 fn len(&self) -> usize {
422 self.num_items
423 }
424
425 fn capacity(&self) -> usize {
426 self.capacity
427 }
428
429 fn resize(&mut self, new_len: usize, value: T) {
430 self.cache.resize(new_len, value);
431 self.num_items = new_len;
432 if new_len > self.capacity {
433 self.cache_modified_bitmap.clear(); self.cache_modified_bitmap.resize(new_len, false);
435 self.buffer.resize(new_len, value);
436 self.capacity = new_len;
437 self.cache_modified = false;
438 } else {
439 self.cache_modified_bitmap.resize(new_len, false);
440 }
441 }
442
443 fn shrink_to_fit(&mut self) {
444 if self.capacity > self.num_items {
445 self.cache.shrink_to_fit();
446 self.cache_modified_bitmap.clear(); self.cache_modified_bitmap.resize(self.num_items, false);
448 self.buffer.resize(self.num_items, T::default());
449 self.capacity = self.num_items;
450 self.cache_modified = false;
451 }
452 }
453
454 fn get(&self, index: usize) -> T {
455 self.cache[index]
456 }
457
458 fn set(&mut self, index: usize, data: &T) {
459 self.cache[index] = *data;
460 self.cache_modified = true;
461 self.cache_modified_bitmap.set(index, true);
462 }
463
464 fn get_slice_of_data(&self, start_index: usize, len: usize) -> Vec<T> {
465 let end_index = start_index + len;
466 self.cache[start_index..end_index].to_vec()
467 }
468
469 fn set_slice_of_data(&mut self, start_index: usize, data: &[T]) {
470 let end_index = start_index + data.len();
471 self.cache_modified = true;
472 for i in start_index..end_index {
473 self.cache[i] = data[i - start_index];
474 self.cache_modified_bitmap.set(i, true);
475 }
476 }
477
478 fn set_target(&mut self, target: BufferTarget) {
479 self.buffer.set_target(target)
480 }
481
482 fn flush(&mut self) {
483 if !self.cache_modified {
484 return;
485 }
486
487 const MAXIMUM_GAP: usize = 16;
488
489 let mut is_in: bool = false;
490 let mut start_index: usize = 0;
491 let mut end_index: usize = 0;
492 let mut gap_length: usize = 0;
493 for i in 0..self.num_items {
494 if self.cache_modified_bitmap[i] {
495 if !is_in {
496 is_in = true;
497 start_index = i;
498 }
499 gap_length = 0;
500 end_index = i;
501 self.cache_modified_bitmap.set(i, false);
502 } else if is_in {
503 if gap_length < MAXIMUM_GAP {
504 gap_length += 1;
505 } else {
506 self.buffer.set_slice_of_data(0, &self.cache[start_index..=end_index]);
507 is_in = false;
508 }
509 }
510 }
511 if is_in {
512 self.buffer.set_slice_of_data(0, &self.cache[start_index..=end_index]);
513 }
514
515 self.cache_modified = false;
516 }
517}
518
519impl<T: BufferVecItem> From<BufferVecStatic<T>> for BufferVecDynamic<T> {
520 fn from(val: BufferVecStatic<T>) -> Self {
521 BufferVecDynamic::new(val)
522 }
523}
524
525impl<T: BufferVecItem> From<BufferVecDynamic<T>> for BufferVecStatic<T> {
526 fn from(mut val: BufferVecDynamic<T>) -> Self {
527 val.flush();
528 val.buffer
529 }
530}
531
532impl<T: BufferVecItem> From<BufferVecDynamic<T>> for Buffer {
533 fn from(val: BufferVecDynamic<T>) -> Self {
534 val.buffer.into()
535 }
536}
537
538impl<T: BufferVecItem> From<Buffer> for BufferVecDynamic<T> {
539 fn from(val: Buffer) -> Self {
540 let ab: BufferVecStatic<T> = val.into();
541 ab.into()
542 }
543}
544
545impl<T: BufferVecItem> Index<usize> for BufferVecDynamic<T> {
546 type Output = T;
547 fn index(&self, i: usize) -> &T {
548 &self.cache[i]
549 }
550}
551
552impl<T: BufferVecItem> IndexMut<usize> for BufferVecDynamic<T> {
553 fn index_mut(&mut self, i: usize) -> &mut T {
554 self.cache_modified = true;
555 self.cache_modified_bitmap.set(i, true);
556 &mut self.cache[i]
557 }
558}
559
560impl<T: BufferVecItem> Index<Range<usize>> for BufferVecDynamic<T> {
561 type Output = [T];
562 fn index(&self, r: Range<usize>) -> &[T] {
563 &self.cache[r]
564 }
565}
566
567impl<T: BufferVecItem> IndexMut<Range<usize>> for BufferVecDynamic<T> {
568 fn index_mut(&mut self, r: Range<usize>) -> &mut [T] {
569 self.cache_modified = true;
570 for i in r.start..r.end {
571 self.cache_modified_bitmap.set(i, true);
572 }
573 &mut self.cache[r]
574 }
575}
576
577impl<T: BufferVecItem> Index<RangeFrom<usize>> for BufferVecDynamic<T> {
578 type Output = [T];
579 fn index(&self, r: RangeFrom<usize>) -> &[T] {
580 &self.cache[r]
581 }
582}
583
584impl<T: BufferVecItem> IndexMut<RangeFrom<usize>> for BufferVecDynamic<T> {
585 fn index_mut(&mut self, r: RangeFrom<usize>) -> &mut [T] {
586 self.cache_modified = true;
587 for i in r.start..self.num_items {
588 self.cache_modified_bitmap.set(i, true);
589 }
590 &mut self.cache[r]
591 }
592}
593
594impl<T: BufferVecItem> Index<RangeTo<usize>> for BufferVecDynamic<T> {
595 type Output = [T];
596 fn index(&self, r: RangeTo<usize>) -> &[T] {
597 &self.cache[r]
598 }
599}
600
601impl<T: BufferVecItem> IndexMut<RangeTo<usize>> for BufferVecDynamic<T> {
602 fn index_mut(&mut self, r: RangeTo<usize>) -> &mut [T] {
603 self.cache_modified = true;
604 for i in 0..r.end {
605 self.cache_modified_bitmap.set(i, true);
606 }
607 &mut self.cache[r]
608 }
609}
610
611impl<T: BufferVecItem> Index<RangeFull> for BufferVecDynamic<T> {
612 type Output = [T];
613 fn index(&self, r: RangeFull) -> &[T] {
614 &self.cache[r]
615 }
616}
617
618impl<T: BufferVecItem> IndexMut<RangeFull> for BufferVecDynamic<T> {
619 fn index_mut(&mut self, r: RangeFull) -> &mut [T] {
620 self.cache_modified = true;
621 for i in 0..self.num_items {
622 self.cache_modified_bitmap.set(i, true);
623 }
624 &mut self.cache[r]
625 }
626}
627
628impl<T: BufferVecItem> Index<RangeInclusive<usize>> for BufferVecDynamic<T> {
629 type Output = [T];
630 fn index(&self, r: RangeInclusive<usize>) -> &[T] {
631 &self.cache[r]
632 }
633}
634
635impl<T: BufferVecItem> IndexMut<RangeInclusive<usize>> for BufferVecDynamic<T> {
636 fn index_mut(&mut self, r: RangeInclusive<usize>) -> &mut [T] {
637 self.cache_modified = true;
638 for i in *r.start()..=*r.end() {
639 self.cache_modified_bitmap.set(i, true);
640 }
641 &mut self.cache[r]
642 }
643}
644
645impl<T: BufferVecItem> Index<RangeToInclusive<usize>> for BufferVecDynamic<T> {
646 type Output = [T];
647 fn index(&self, r: RangeToInclusive<usize>) -> &[T] {
648 &self.cache[r]
649 }
650}
651
652impl<T: BufferVecItem> IndexMut<RangeToInclusive<usize>> for BufferVecDynamic<T> {
653 fn index_mut(&mut self, r: RangeToInclusive<usize>) -> &mut [T] {
654 self.cache_modified = true;
655 for i in 0..=r.end {
656 self.cache_modified_bitmap.set(i, true);
657 }
658 &mut self.cache[r]
659 }
660}