1use std::fmt;
2use std::mem;
3use std::borrow::Cow;
4use crate::utils::range::RangeArgument;
5use std::marker::PhantomData;
6
7use crate::texture::{PixelValue, Texture1dDataSink};
8use crate::gl;
9
10use crate::backend::Facade;
11use crate::BufferExt;
12use crate::BufferSliceExt;
13use crate::GlObject;
14
15use crate::context::Context;
16use crate::context::CommandContext;
17use std::rc::Rc;
18use crate::ContextExt;
19
20use crate::buffer::BufferType;
21use crate::buffer::BufferMode;
22use crate::buffer::BufferCreationError;
23use crate::buffer::Content;
24use crate::buffer::fences::Fences;
25use crate::buffer::fences::Inserter;
26use crate::buffer::alloc::Alloc;
27use crate::buffer::alloc::Mapping;
28use crate::buffer::alloc::ReadMapping;
29use crate::buffer::alloc::WriteMapping;
30use crate::buffer::alloc::ReadError;
31use crate::buffer::alloc::CopyError;
32use crate::field::Field;
33
/// Typed handle to an OpenGL buffer allocation. `T` is the content type and may be
/// unsized (e.g. `[u8]`).
pub struct Buffer<T: ?Sized> where T: Content {
    // `None` only after the allocation has been moved out — either by conversion
    // into a `BufferAny` (see `From<Buffer<T>>`) or during `drop`.
    alloc: Option<Alloc>,
    // Fences tracking pending GPU accesses; `None` under the same conditions as `alloc`.
    fence: Option<Fences>,
    // Owns no `T` data directly; ties the buffer to its content type.
    marker: PhantomData<T>,
}
42
43impl<T: ?Sized> GlObject for Buffer<T> where T: Content {
44 type Id = gl::types::GLuint;
45
46 #[inline]
47 fn get_id(&self) -> gl::types::GLuint {
48 self.alloc.as_ref().unwrap().get_id()
49 }
50}
51
52impl<T: ?Sized> Buffer<T> where T: Content {
53 pub fn new<F: ?Sized>(facade: &F, data: &T, ty: BufferType, mode: BufferMode)
56 -> Result<Buffer<T>, BufferCreationError>
57 where F: Facade
58 {
59 Alloc::new(facade, data, ty, mode)
60 .map(|buffer| {
61 Buffer {
62 alloc: Some(buffer),
63 fence: Some(Fences::new()),
64 marker: PhantomData,
65 }
66 })
67 }
68
69 pub fn empty_unsized<F: ?Sized>(facade: &F, ty: BufferType, size: usize, mode: BufferMode)
71 -> Result<Buffer<T>, BufferCreationError> where F: Facade
72 {
73 assert!(<T as Content>::is_size_suitable(size));
74
75 Alloc::empty(facade, ty, size, mode)
76 .map(|buffer| {
77 Buffer {
78 alloc: Some(buffer),
79 fence: Some(Fences::new()),
80 marker: PhantomData,
81 }
82 })
83 }
84
85 #[inline]
87 pub fn get_context(&self) -> &Rc<Context> {
88 self.alloc.as_ref().unwrap().get_context()
89 }
90
91 #[inline]
93 pub fn get_size(&self) -> usize {
94 self.alloc.as_ref().unwrap().get_size()
95 }
96
97 #[inline]
99 pub fn is_persistent(&self) -> bool {
100 self.alloc.as_ref().unwrap().uses_persistent_mapping()
101 }
102
103 pub fn write(&self, data: &T) {
117 assert!(mem::size_of_val(data) == self.get_size());
118
119 self.fence.as_ref().unwrap().wait(&mut self.alloc.as_ref().unwrap().get_context().make_current(),
120 0 .. self.get_size());
121 unsafe { self.alloc.as_ref().unwrap().upload(0, data); }
122 }
123
124 #[inline]
139 pub fn invalidate(&self) {
140 self.alloc.as_ref().unwrap().invalidate(0, self.get_size());
141 }
142
143 pub fn read(&self) -> Result<T::Owned, ReadError> {
145 self.fence.as_ref().unwrap().wait(&mut self.alloc.as_ref().unwrap().get_context().make_current(),
146 0 .. self.get_size());
147
148 unsafe {
149 self.alloc.as_ref().unwrap().read::<T>(0 .. self.get_size())
150 }
151 }
152
153 pub fn map(&mut self) -> Mapping<'_, T> {
165 self.fence.as_ref().unwrap().wait(&mut self.alloc.as_ref().unwrap().get_context().make_current(),
166 0 .. self.get_size());
167 let size = self.get_size();
168 unsafe { self.alloc.as_mut().unwrap().map(0 .. size) }
169 }
170
171 pub fn map_read(&mut self) -> ReadMapping<'_, T> {
182 self.fence.as_ref().unwrap().wait(&mut self.alloc.as_ref().unwrap().get_context().make_current(),
183 0 .. self.get_size());
184 let size = self.get_size();
185 unsafe { self.alloc.as_mut().unwrap().map_read(0 .. size) }
186 }
187
188 pub fn map_write(&mut self) -> WriteMapping<'_, T> {
200 self.fence.as_ref().unwrap().wait(&mut self.alloc.as_ref().unwrap().get_context().make_current(),
201 0 .. self.get_size());
202 let size = self.get_size();
203 unsafe { self.alloc.as_mut().unwrap().map_write(0 .. size) }
204 }
205
206 pub fn copy_to<'a, S>(&self, target: S) -> Result<(), CopyError>
213 where S: Into<BufferSlice<'a, T>>, T: 'a
214 {
215 let target = target.into();
216 let alloc = self.alloc.as_ref().unwrap();
217
218 alloc.copy_to(0 .. self.get_size(), &target.alloc, target.get_offset_bytes())?;
219
220 if let Some(inserter) = self.as_slice().add_fence() {
221 let mut ctxt = alloc.get_context().make_current();
222 inserter.insert(&mut ctxt);
223 }
224
225 if let Some(inserter) = target.add_fence() {
226 let mut ctxt = alloc.get_context().make_current();
227 inserter.insert(&mut ctxt);
228 }
229
230 Ok(())
231 }
232
233 #[inline]
252 pub unsafe fn slice_custom<R>(&self, f: Field<R>) -> BufferSlice<'_, R>
253 where
254 R: Content,
255 {
256 self.as_slice().slice_custom(f)
257 }
258
259 #[inline]
264 pub unsafe fn slice_custom_mut<R>(&mut self, f: Field<R>) -> BufferMutSlice<'_, R>
265 where
266 R: Content,
267 {
268 self.as_mut_slice().slice_custom(f)
269 }
270
271 #[inline]
276 pub fn as_slice(&self) -> BufferSlice<'_, T> {
277 BufferSlice {
278 alloc: self.alloc.as_ref().unwrap(),
279 bytes_start: 0,
280 bytes_end: self.get_size(),
281 fence: self.fence.as_ref().unwrap(),
282 marker: PhantomData,
283 }
284 }
285
286 #[inline]
291 pub fn as_mut_slice(&mut self) -> BufferMutSlice<'_, T> {
292 let size = self.get_size();
293
294 BufferMutSlice {
295 alloc: self.alloc.as_mut().unwrap(),
296 bytes_start: 0,
297 bytes_end: size,
298 fence: self.fence.as_ref().unwrap(),
299 marker: PhantomData,
300 }
301 }
302
303 pub fn as_slice_any(&self) -> BufferAnySlice<'_> {
308 let size = self.get_size();
309
310 BufferAnySlice {
311 alloc: self.alloc.as_ref().unwrap(),
312 bytes_start: 0,
313 bytes_end: self.get_size(),
314 elements_size: <T as Content>::get_elements_size(),
315 fence: self.fence.as_ref().unwrap(),
316 }
317 }
318}
319
320impl<T> Buffer<T> where T: Content + Copy {
321 pub fn empty<F: ?Sized>(facade: &F, ty: BufferType, mode: BufferMode)
323 -> Result<Buffer<T>, BufferCreationError> where F: Facade
324 {
325 Alloc::empty(facade, ty, mem::size_of::<T>(), mode)
326 .map(|buffer| {
327 Buffer {
328 alloc: Some(buffer),
329 fence: Some(Fences::new()),
330 marker: PhantomData,
331 }
332 })
333 }
334}
335
336impl<T> Buffer<[T]> where [T]: Content, T: Copy {
337 pub fn empty_array<F: ?Sized>(facade: &F, ty: BufferType, len: usize, mode: BufferMode)
339 -> Result<Buffer<[T]>, BufferCreationError> where F: Facade
340 {
341 Alloc::empty(facade, ty, len * mem::size_of::<T>(), mode)
342 .map(|buffer| {
343 Buffer {
344 alloc: Some(buffer),
345 fence: Some(Fences::new()),
346 marker: PhantomData,
347 }
348 })
349 }
350
351 #[inline]
353 pub fn len(&self) -> usize {
354 self.alloc.as_ref().unwrap().get_size() / mem::size_of::<T>()
355 }
356
357 #[inline]
362 pub fn slice<R: RangeArgument<usize>>(&self, range: R) -> Option<BufferSlice<'_, [T]>> {
363 self.as_slice().slice(range)
364 }
365
366 #[inline]
371 pub fn slice_mut<R: RangeArgument<usize>>(&mut self, range: R) -> Option<BufferMutSlice<'_, [T]>> {
372 self.as_mut_slice().slice(range)
373 }
374}
375
376impl<T> Buffer<[T]> where T: PixelValue {
377 #[inline]
379 pub fn read_as_texture_1d<S>(&self) -> Result<S, ReadError> where S: Texture1dDataSink<T> {
380 let data = self.read()?;
381 Ok(S::from_raw(Cow::Owned(data), self.len() as u32))
382 }
383}
384
385impl<T: ?Sized> fmt::Debug for Buffer<T> where T: Content {
386 #[inline]
387 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
388 write!(fmt, "{:?}", self.alloc.as_ref().unwrap())
389 }
390}
391
impl<T: ?Sized> Drop for Buffer<T> where T: Content {
    #[inline]
    fn drop(&mut self) {
        // Both fields are `None` if the buffer was converted into a `BufferAny`,
        // which takes ownership of the allocation and fences; in that case there
        // is nothing left to clean up here.
        if let (Some(alloc), Some(mut fence)) = (self.alloc.take(), self.fence.take()) {
            fence.clean(&mut alloc.get_context().make_current());
        }
    }
}
400
// Every method forwards to the underlying `Alloc`; range-taking bindings always
// cover the whole allocation (`0 .. alloc.get_size()`), since a `Buffer` is never
// a sub-range.
impl<T: ?Sized> BufferExt for Buffer<T> where T: Content {
    #[inline]
    fn get_offset_bytes(&self) -> usize {
        // A whole buffer always starts at the beginning of its allocation.
        0
    }

    #[inline]
    fn prepare_for_vertex_attrib_array(&self, ctxt: &mut CommandContext<'_>) {
        let alloc = self.alloc.as_ref().unwrap();
        alloc.prepare_for_vertex_attrib_array(ctxt);
    }

    #[inline]
    fn prepare_for_element_array(&self, ctxt: &mut CommandContext<'_>) {
        let alloc = self.alloc.as_ref().unwrap();
        alloc.prepare_for_element_array(ctxt);
    }

    #[inline]
    fn bind_to_element_array(&self, ctxt: &mut CommandContext<'_>) {
        let alloc = self.alloc.as_ref().unwrap();
        alloc.bind_to_element_array(ctxt);
    }

    #[inline]
    fn prepare_and_bind_for_pixel_pack(&self, ctxt: &mut CommandContext<'_>) {
        let alloc = self.alloc.as_ref().unwrap();
        alloc.prepare_and_bind_for_pixel_pack(ctxt);
    }

    #[inline]
    fn unbind_pixel_pack(ctxt: &mut CommandContext<'_>) {
        Alloc::unbind_pixel_pack(ctxt)
    }

    #[inline]
    fn prepare_and_bind_for_pixel_unpack(&self, ctxt: &mut CommandContext<'_>) {
        let alloc = self.alloc.as_ref().unwrap();
        alloc.prepare_and_bind_for_pixel_unpack(ctxt);
    }

    #[inline]
    fn unbind_pixel_unpack(ctxt: &mut CommandContext<'_>) {
        Alloc::unbind_pixel_unpack(ctxt)
    }

    #[inline]
    fn prepare_and_bind_for_query(&self, ctxt: &mut CommandContext<'_>) {
        let alloc = self.alloc.as_ref().unwrap();
        alloc.prepare_and_bind_for_query(ctxt);
    }

    #[inline]
    fn unbind_query(ctxt: &mut CommandContext<'_>) {
        Alloc::unbind_query(ctxt)
    }

    #[inline]
    fn prepare_and_bind_for_draw_indirect(&self, ctxt: &mut CommandContext<'_>) {
        let alloc = self.alloc.as_ref().unwrap();
        alloc.prepare_and_bind_for_draw_indirect(ctxt);
    }

    #[inline]
    fn prepare_and_bind_for_dispatch_indirect(&self, ctxt: &mut CommandContext<'_>) {
        let alloc = self.alloc.as_ref().unwrap();
        alloc.prepare_and_bind_for_dispatch_indirect(ctxt);
    }

    #[inline]
    fn prepare_and_bind_for_uniform(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        let alloc = self.alloc.as_ref().unwrap();
        alloc.prepare_and_bind_for_uniform(ctxt, index, 0 .. alloc.get_size());
    }

    #[inline]
    fn prepare_and_bind_for_shared_storage(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        let alloc = self.alloc.as_ref().unwrap();
        alloc.prepare_and_bind_for_shared_storage(ctxt, index, 0 .. alloc.get_size());
    }

    #[inline]
    fn prepare_and_bind_for_atomic_counter(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        let alloc = self.alloc.as_ref().unwrap();
        alloc.prepare_and_bind_for_atomic_counter(ctxt, index, 0 .. alloc.get_size());
    }

    #[inline]
    fn bind_to_transform_feedback(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        let alloc = self.alloc.as_ref().unwrap();
        alloc.bind_to_transform_feedback(ctxt, index, 0 .. alloc.get_size());
    }
}
494
/// Shared view over a byte range of a `Buffer`'s allocation.
#[derive(Copy, Clone)]
pub struct BufferSlice<'a, T: ?Sized> where T: Content {
    // Borrowed allocation; the slice never outlives the buffer.
    alloc: &'a Alloc,
    // Byte range covered by this slice, relative to the start of the allocation.
    bytes_start: usize,
    bytes_end: usize,
    // Fences of the parent buffer, shared by all slices of it.
    fence: &'a Fences,
    marker: PhantomData<&'a T>,
}
504
impl<'a, T: ?Sized> BufferSlice<'a, T> where T: Content + 'a {
    /// Returns the size in bytes of this slice.
    #[inline]
    pub fn get_size(&self) -> usize {
        self.bytes_end - self.bytes_start
    }

    /// Returns the context the buffer belongs to.
    #[inline]
    pub fn get_context(&self) -> &Rc<Context> {
        self.alloc.get_context()
    }

    /// Uploads `data` into the slice's byte range, after waiting on pending fences.
    ///
    /// # Panics
    ///
    /// Panics if the size of `data` differs from the size of the slice.
    pub fn write(&self, data: &T) {
        assert_eq!(mem::size_of_val(data), self.get_size());

        // Wait for any pending GPU access to this range before overwriting it.
        self.fence.wait(&mut self.alloc.get_context().make_current(),
                        self.bytes_start .. self.bytes_end);
        unsafe { self.alloc.upload(self.bytes_start, data); }
    }

    /// Invalidates the bytes covered by this slice.
    #[inline]
    pub fn invalidate(&self) {
        self.alloc.invalidate(self.bytes_start, self.get_size());
    }

    /// Reads the content of the slice, after waiting on pending fences.
    pub fn read(&self) -> Result<T::Owned, ReadError> {
        self.fence.wait(&mut self.alloc.get_context().make_current(),
                        self.bytes_start .. self.bytes_end);

        unsafe {
            self.alloc.read::<T>(self.bytes_start .. self.bytes_end)
        }
    }

    /// Copies the content of this slice into `target`, then fences both ranges.
    pub fn copy_to<S>(&self, target: S) -> Result<(), CopyError>
        where S: Into<BufferSlice<'a, T>>
    {
        let target = target.into();

        self.alloc.copy_to(self.bytes_start .. self.bytes_end, &target.alloc,
                           target.get_offset_bytes())?;

        // Source fence: later accesses to this range wait for the copy.
        if let Some(inserter) = self.add_fence() {
            let mut ctxt = self.alloc.get_context().make_current();
            inserter.insert(&mut ctxt);
        }

        // Destination fence.
        if let Some(inserter) = target.add_fence() {
            let mut ctxt = self.alloc.get_context().make_current();
            inserter.insert(&mut ctxt);
        }

        Ok(())
    }

    /// Builds a sub-slice for the field described by `f`.
    ///
    /// # Safety
    ///
    /// The caller must ensure the field's offset/size actually describe a value of
    /// type `R` inside this slice's content.
    #[inline]
    pub unsafe fn slice_custom<R>(&self, f: Field<R>) -> BufferSlice<'a, R>
    where
        R: Content,
    {
        let size = f.size();
        let result = f.offs();

        // Both checks needed: the first guards the second against wrap-around.
        assert!(result <= self.get_size());
        assert!(result + size <= self.get_size());

        BufferSlice {
            alloc: self.alloc,
            bytes_start: self.bytes_start + result,
            bytes_end: self.bytes_start + result + size,
            fence: self.fence,
            marker: PhantomData,
        }
    }

    /// Builds a type-erased slice over the same byte range.
    #[inline]
    pub fn as_slice_any(&self) -> BufferAnySlice<'a> {
        BufferAnySlice {
            alloc: self.alloc,
            bytes_start: self.bytes_start,
            bytes_end: self.bytes_end,
            elements_size: <T as Content>::get_elements_size(),
            fence: self.fence,
        }
    }
}
642
643impl<'a, T> BufferSlice<'a, [T]> where [T]: Content + 'a {
644 #[inline]
646 pub fn len(&self) -> usize {
647 (self.bytes_end - self.bytes_start) / mem::size_of::<T>()
648 }
649
650 #[inline]
655 pub fn slice<R: RangeArgument<usize>>(&self, range: R) -> Option<BufferSlice<'a, [T]>> {
656 if range.start().map_or(0, |e| *e) > self.len() || range.end().map_or(0, |e| *e) > self.len() {
657 return None;
658 }
659
660 Some(BufferSlice {
661 alloc: self.alloc,
662 bytes_start: self.bytes_start + range.start().map_or(0, |e| *e) * mem::size_of::<T>(),
663 bytes_end: self.bytes_start + range.end().map_or(self.len(), |e| *e) * mem::size_of::<T>(),
664 fence: self.fence,
665 marker: PhantomData,
666 })
667 }
668}
669
670impl<'a, T> BufferSlice<'a, [T]> where T: PixelValue + 'a {
671 #[inline]
673 pub fn read_as_texture_1d<S>(&self) -> Result<S, ReadError> where S: Texture1dDataSink<T> {
674 let data = self.read()?;
675 Ok(S::from_raw(Cow::Owned(data), self.len() as u32))
676 }
677}
678
679impl<'a, T: ?Sized> fmt::Debug for BufferSlice<'a, T> where T: Content {
680 #[inline]
681 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
682 write!(fmt, "{:?}", self.alloc)
683 }
684}
685
686impl<'a, T: ?Sized> From<BufferMutSlice<'a, T>> for BufferSlice<'a, T> where T: Content + 'a {
687 #[inline]
688 fn from(s: BufferMutSlice<'a, T>) -> BufferSlice<'a, T> {
689 BufferSlice {
690 alloc: s.alloc,
691 bytes_start: s.bytes_start,
692 bytes_end: s.bytes_end,
693 fence: s.fence,
694 marker: PhantomData,
695 }
696 }
697}
698
699impl<'a, T: ?Sized> From<&'a Buffer<T>> for BufferSlice<'a, T> where T: Content + 'a {
700 #[inline]
701 fn from(b: &'a Buffer<T>) -> BufferSlice<'a, T> {
702 b.as_slice()
703 }
704}
705
706impl<'a, T: ?Sized> From<&'a mut Buffer<T>> for BufferSlice<'a, T> where T: Content + 'a {
707 #[inline]
708 fn from(b: &'a mut Buffer<T>) -> BufferSlice<'a, T> {
709 b.as_slice()
710 }
711}
712
713impl<'a, T: ?Sized> BufferSliceExt<'a> for BufferSlice<'a, T> where T: Content {
714 #[inline]
715 fn add_fence(&self) -> Option<Inserter<'a>> {
716 if !self.alloc.uses_persistent_mapping() {
717 return None;
718 }
719
720 Some(self.fence.inserter(self.bytes_start .. self.bytes_end))
721 }
722}
723
// Pure delegation to the underlying `Alloc`.
impl<'a, T: ?Sized> BufferExt for BufferSlice<'a, T> where T: Content {
    #[inline]
    fn get_offset_bytes(&self) -> usize {
        self.bytes_start
    }

    #[inline]
    fn prepare_for_vertex_attrib_array(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_for_vertex_attrib_array(ctxt);
    }

    #[inline]
    fn prepare_for_element_array(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_for_element_array(ctxt);
    }

    #[inline]
    fn bind_to_element_array(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.bind_to_element_array(ctxt);
    }

    #[inline]
    fn prepare_and_bind_for_pixel_pack(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_pixel_pack(ctxt);
    }

    #[inline]
    fn unbind_pixel_pack(ctxt: &mut CommandContext<'_>) {
        Alloc::unbind_pixel_pack(ctxt)
    }

    #[inline]
    fn prepare_and_bind_for_pixel_unpack(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_pixel_unpack(ctxt);
    }

    #[inline]
    fn unbind_pixel_unpack(ctxt: &mut CommandContext<'_>) {
        Alloc::unbind_pixel_unpack(ctxt)
    }

    #[inline]
    fn prepare_and_bind_for_query(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_query(ctxt);
    }

    #[inline]
    fn unbind_query(ctxt: &mut CommandContext<'_>) {
        Alloc::unbind_query(ctxt)
    }

    #[inline]
    fn prepare_and_bind_for_draw_indirect(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_draw_indirect(ctxt);
    }

    #[inline]
    fn prepare_and_bind_for_dispatch_indirect(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_dispatch_indirect(ctxt);
    }

    // NOTE(review): the indexed bindings below bind the WHOLE allocation
    // (0 .. alloc.get_size()), not just this slice's byte range — confirm this
    // is intentional for slices.
    #[inline]
    fn prepare_and_bind_for_uniform(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        self.alloc.prepare_and_bind_for_uniform(ctxt, index, 0 .. self.alloc.get_size());
    }

    #[inline]
    fn prepare_and_bind_for_shared_storage(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        self.alloc.prepare_and_bind_for_shared_storage(ctxt, index, 0 .. self.alloc.get_size());
    }

    #[inline]
    fn prepare_and_bind_for_atomic_counter(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        self.alloc.prepare_and_bind_for_atomic_counter(ctxt, index, 0 .. self.alloc.get_size());
    }

    #[inline]
    fn bind_to_transform_feedback(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        self.alloc.bind_to_transform_feedback(ctxt, index, 0 .. self.alloc.get_size());
    }
}
805
/// Exclusive (mutable) view over a byte range of a `Buffer`'s allocation.
pub struct BufferMutSlice<'a, T: ?Sized> where T: Content {
    // Exclusive borrow of the allocation.
    alloc: &'a mut Alloc,
    // Byte range covered by this slice, relative to the start of the allocation.
    bytes_start: usize,
    bytes_end: usize,
    // Fences of the parent buffer.
    fence: &'a Fences,
    marker: PhantomData<T>,
}
814
815impl<'a, T: ?Sized> BufferMutSlice<'a, T> where T: Content + 'a {
816 #[inline]
818 pub fn get_size(&self) -> usize {
819 self.bytes_end - self.bytes_start
820 }
821
822 #[inline]
834 pub fn map(self) -> Mapping<'a, T> {
835 self.fence.wait(&mut self.alloc.get_context().make_current(),
836 self.bytes_start .. self.bytes_end);
837 unsafe { self.alloc.map(self.bytes_start .. self.bytes_end) }
838 }
839
840 #[inline]
851 pub fn map_read(self) -> ReadMapping<'a, T> {
852 self.fence.wait(&mut self.alloc.get_context().make_current(),
853 self.bytes_start .. self.bytes_end);
854 unsafe { self.alloc.map_read(self.bytes_start .. self.bytes_end) }
855 }
856
857 #[inline]
868 pub fn map_write(self) -> WriteMapping<'a, T> {
869 self.fence.wait(&mut self.alloc.get_context().make_current(),
870 self.bytes_start .. self.bytes_end);
871 unsafe { self.alloc.map_write(self.bytes_start .. self.bytes_end) }
872 }
873
874 #[inline]
888 pub fn write(&self, data: &T) {
889 self.fence.wait(&mut self.alloc.get_context().make_current(),
890 self.bytes_start .. self.bytes_end);
891 unsafe { self.alloc.upload(self.bytes_start, data); }
892 }
893
894 #[inline]
904 pub fn invalidate(&self) {
905 self.alloc.invalidate(self.bytes_start, self.get_size());
906 }
907
908 #[inline]
910 pub fn read(&self) -> Result<T::Owned, ReadError> {
911 unsafe {
912 self.alloc.read::<T>(self.bytes_start .. self.bytes_end)
913 }
914 }
915
916 pub fn copy_to<S>(&self, target: S) -> Result<(), CopyError>
922 where S: Into<BufferSlice<'a, T>>
923 {
924 let target = target.into();
925
926 self.alloc.copy_to(self.bytes_start .. self.bytes_end, &target.alloc,
927 target.get_offset_bytes())?;
928
929 if let Some(inserter) = self.add_fence() {
930 let mut ctxt = self.alloc.get_context().make_current();
931 inserter.insert(&mut ctxt);
932 }
933
934 if let Some(inserter) = self.add_fence() {
935 let mut ctxt = self.alloc.get_context().make_current();
936 inserter.insert(&mut ctxt);
937 }
938
939 Ok(())
940 }
941
942 #[inline]
960 pub unsafe fn slice_custom<R>(self, f: Field<R>) -> BufferMutSlice<'a, R>
961 where
962 R: Content,
963 {
964 let size = f.size();
965 let result = f.offs();
966
967 assert!(result <= self.get_size());
968 assert!(result + size <= self.get_size());
969
970 BufferMutSlice {
971 alloc: self.alloc,
972 bytes_start: self.bytes_start + result,
973 bytes_end: self.bytes_start + result + size,
974 fence: self.fence,
975 marker: PhantomData,
976 }
977 }
978
979 #[inline]
984 pub fn as_slice_any(self) -> BufferAnySlice<'a> {
985 BufferAnySlice {
986 alloc: self.alloc,
987 bytes_start: self.bytes_start,
988 bytes_end: self.bytes_end,
989 elements_size: <T as Content>::get_elements_size(),
990 fence: self.fence,
991 }
992 }
993}
994
995impl<'a, T> BufferMutSlice<'a, [T]> where [T]: Content, T: Copy + 'a {
996 #[inline]
998 pub fn len(&self) -> usize {
999 (self.bytes_end - self.bytes_start) / mem::size_of::<T>()
1000 }
1001
1002 #[inline]
1007 pub fn slice<R: RangeArgument<usize>>(self, range: R) -> Option<BufferMutSlice<'a, [T]>> {
1008 if range.start().map_or(0, |e| *e) > self.len() || range.end().map_or(0, |e| *e) > self.len() {
1009 return None;
1010 }
1011
1012 let len = self.len();
1013 Some(BufferMutSlice {
1014 alloc: self.alloc,
1015 bytes_start: self.bytes_start + range.start().map_or(0, |e| *e) * mem::size_of::<T>(),
1016 bytes_end: self.bytes_start + range.end().map_or(len, |e| *e) * mem::size_of::<T>(),
1017 fence: self.fence,
1018 marker: PhantomData,
1019 })
1020 }
1021}
1022
1023impl<'a, T> BufferMutSlice<'a, [T]> where T: PixelValue + 'a {
1024 #[inline]
1026 pub fn read_as_texture_1d<S>(&self) -> Result<S, ReadError> where S: Texture1dDataSink<T> {
1027 let data = self.read()?;
1028 Ok(S::from_raw(Cow::Owned(data), self.len() as u32))
1029 }
1030}
1031
1032impl<'a, T: ?Sized> BufferSliceExt<'a> for BufferMutSlice<'a, T> where T: Content {
1033 #[inline]
1034 fn add_fence(&self) -> Option<Inserter<'a>> {
1035 if !self.alloc.uses_persistent_mapping() {
1036 return None;
1037 }
1038
1039 Some(self.fence.inserter(self.bytes_start .. self.bytes_end))
1040 }
1041}
1042
1043impl<'a, T: ?Sized> fmt::Debug for BufferMutSlice<'a, T> where T: Content {
1044 #[inline]
1045 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
1046 write!(fmt, "{:?}", self.alloc)
1047 }
1048}
1049
1050impl<'a, T: ?Sized> From<&'a mut Buffer<T>> for BufferMutSlice<'a, T> where T: Content + 'a {
1051 #[inline]
1052 fn from(b: &'a mut Buffer<T>) -> BufferMutSlice<'a, T> {
1053 b.as_mut_slice()
1054 }
1055}
1056
/// Type-erased buffer: owns the allocation and fences but has lost its content type.
pub struct BufferAny {
    alloc: Alloc,
    // Total size in bytes, captured when the typed buffer was erased.
    size: usize,
    // Size in bytes of one element of the original content type.
    elements_size: usize,
    fence: Fences,
}
1066
impl BufferAny {
    /// Builds a type-erased slice covering the whole buffer.
    #[inline]
    pub fn as_slice_any(&self) -> BufferAnySlice<'_> {
        BufferAnySlice {
            alloc: &self.alloc,
            bytes_start: 0,
            bytes_end: self.size,
            elements_size: self.elements_size,
            fence: &self.fence,
        }
    }

    /// Reinterprets the buffer as a mutable slice of content type `T`.
    ///
    /// # Safety
    ///
    /// The caller must ensure the buffer's bytes are actually valid for `T`;
    /// only the element size is checked here.
    #[inline]
    pub unsafe fn as_typed_slice_mut<T: ?Sized + Content>(&mut self) -> BufferMutSlice<'_, T> {
        assert_eq!(<T as Content>::get_elements_size(), self.elements_size);
        BufferMutSlice {
            alloc: &mut self.alloc,
            bytes_start: 0,
            bytes_end: self.size,
            fence: &self.fence,
            marker: PhantomData,
        }
    }

    /// Reinterprets the buffer as a shared slice of content type `T`.
    ///
    /// # Safety
    ///
    /// Same contract as `as_typed_slice_mut`.
    #[inline]
    pub unsafe fn as_typed_slice<T: ?Sized + Content>(&self) -> BufferSlice<'_, T> {
        assert_eq!(<T as Content>::get_elements_size(), self.elements_size);
        BufferSlice {
            alloc: &self.alloc,
            bytes_start: 0,
            bytes_end: self.size,
            fence: &self.fence,
            marker: PhantomData,
        }
    }

    /// Returns the size in bytes of each element.
    #[inline]
    pub fn get_elements_size(&self) -> usize {
        self.elements_size
    }

    /// Returns the number of elements in the buffer.
    #[inline]
    pub fn get_elements_count(&self) -> usize {
        self.size / self.elements_size
    }

    /// Returns the context the buffer belongs to.
    #[inline]
    pub fn get_context(&self) -> &Rc<Context> {
        self.alloc.get_context()
    }

    /// Returns the size in bytes of the buffer.
    #[inline]
    pub fn get_size(&self) -> usize {
        self.size
    }

    /// Invalidates the whole content of the buffer.
    #[inline]
    pub fn invalidate(&self) {
        self.alloc.invalidate(0, self.size);
    }

    /// Reads the whole content of the buffer as `T`, after waiting on fences.
    ///
    /// # Safety
    ///
    /// The caller must ensure the buffer's bytes are valid for `T`.
    #[inline]
    pub unsafe fn read<T>(&self) -> Result<T::Owned, ReadError> where T: Content {
        self.fence.wait(&mut self.alloc.get_context().make_current(), 0 .. self.get_size());
        self.alloc.read::<T>(0 .. self.get_size())
    }
}
1157
1158impl<T: ?Sized> From<Buffer<T>> for BufferAny where T: Content + Send + 'static {
1159 #[inline]
1160 fn from(mut buffer: Buffer<T>) -> BufferAny {
1161 let size = buffer.get_size();
1162
1163 BufferAny {
1164 alloc: buffer.alloc.take().unwrap(),
1165 size,
1166 elements_size: <T as Content>::get_elements_size(),
1167 fence: buffer.fence.take().unwrap(),
1168 }
1169 }
1170}
1171
1172impl Drop for BufferAny {
1173 #[inline]
1174 fn drop(&mut self) {
1175 self.fence.clean(&mut self.alloc.get_context().make_current());
1176 }
1177}
1178
1179impl fmt::Debug for BufferAny {
1180 #[inline]
1181 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
1182 write!(fmt, "{:?}", self.alloc)
1183 }
1184}
1185
// Pure delegation to the underlying `Alloc`; indexed bindings cover the whole
// allocation.
impl BufferExt for BufferAny {
    #[inline]
    fn get_offset_bytes(&self) -> usize {
        // A `BufferAny` always covers its whole allocation.
        0
    }

    #[inline]
    fn prepare_for_vertex_attrib_array(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_for_vertex_attrib_array(ctxt);
    }

    #[inline]
    fn prepare_for_element_array(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_for_element_array(ctxt);
    }

    #[inline]
    fn bind_to_element_array(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.bind_to_element_array(ctxt);
    }

    #[inline]
    fn prepare_and_bind_for_pixel_pack(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_pixel_pack(ctxt);
    }

    #[inline]
    fn unbind_pixel_pack(ctxt: &mut CommandContext<'_>) {
        Alloc::unbind_pixel_pack(ctxt)
    }

    #[inline]
    fn prepare_and_bind_for_pixel_unpack(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_pixel_unpack(ctxt);
    }

    #[inline]
    fn unbind_pixel_unpack(ctxt: &mut CommandContext<'_>) {
        Alloc::unbind_pixel_unpack(ctxt)
    }

    #[inline]
    fn prepare_and_bind_for_query(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_query(ctxt);
    }

    #[inline]
    fn unbind_query(ctxt: &mut CommandContext<'_>) {
        Alloc::unbind_query(ctxt)
    }

    #[inline]
    fn prepare_and_bind_for_draw_indirect(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_draw_indirect(ctxt);
    }

    #[inline]
    fn prepare_and_bind_for_dispatch_indirect(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_dispatch_indirect(ctxt);
    }

    #[inline]
    fn prepare_and_bind_for_uniform(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        self.alloc.prepare_and_bind_for_uniform(ctxt, index, 0 .. self.alloc.get_size());
    }

    #[inline]
    fn prepare_and_bind_for_shared_storage(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        self.alloc.prepare_and_bind_for_shared_storage(ctxt, index, 0 .. self.alloc.get_size());
    }

    #[inline]
    fn prepare_and_bind_for_atomic_counter(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        self.alloc.prepare_and_bind_for_atomic_counter(ctxt, index, 0 .. self.alloc.get_size());
    }

    #[inline]
    fn bind_to_transform_feedback(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        self.alloc.bind_to_transform_feedback(ctxt, index, 0 .. self.alloc.get_size());
    }
}
1267
/// Type-erased shared view over a byte range of an allocation.
#[derive(Copy, Clone)]
pub struct BufferAnySlice<'a> {
    alloc: &'a Alloc,
    // Byte range covered by this slice.
    bytes_start: usize,
    bytes_end: usize,
    // Size in bytes of one element of the erased content type.
    elements_size: usize,
    fence: &'a Fences,
}
1277
1278impl<'a> GlObject for BufferAnySlice<'a> {
1279 type Id = gl::types::GLuint;
1280
1281 #[inline]
1282 fn get_id(&self) -> gl::types::GLuint {
1283 self.alloc.get_id()
1284 }
1285}
1286
1287impl<'a> BufferAnySlice<'a> {
1288 #[inline]
1290 pub fn get_size(&self) -> usize {
1291 self.bytes_end - self.bytes_start
1292 }
1293
1294 #[inline]
1297 pub fn get_elements_size(&self) -> usize {
1298 self.elements_size
1299 }
1300
1301 #[inline]
1304 pub fn get_elements_count(&self) -> usize {
1305 self.get_size() / self.elements_size
1306 }
1307
1308 #[inline]
1313 pub fn invalidate(&self) {
1314 self.alloc.invalidate(self.bytes_start, self.get_size());
1315 }
1316
1317 #[inline]
1319 pub fn get_context(&self) -> &Rc<Context> {
1320 self.alloc.get_context()
1321 }
1322}
1323
1324impl<'a> fmt::Debug for BufferAnySlice<'a> {
1325 #[inline]
1326 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
1327 write!(fmt, "{:?}", self.alloc)
1328 }
1329}
1330
1331impl<'a> BufferSliceExt<'a> for BufferAnySlice<'a> {
1332 #[inline]
1333 fn add_fence(&self) -> Option<Inserter<'a>> {
1334 if !self.alloc.uses_persistent_mapping() {
1335 return None;
1336 }
1337
1338 Some(self.fence.inserter(self.bytes_start .. self.bytes_end))
1339 }
1340}
1341
// Pure delegation to the underlying `Alloc`.
impl<'a> BufferExt for BufferAnySlice<'a> {
    #[inline]
    fn get_offset_bytes(&self) -> usize {
        self.bytes_start
    }

    #[inline]
    fn prepare_for_vertex_attrib_array(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_for_vertex_attrib_array(ctxt);
    }

    #[inline]
    fn prepare_for_element_array(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_for_element_array(ctxt);
    }

    #[inline]
    fn bind_to_element_array(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.bind_to_element_array(ctxt);
    }

    #[inline]
    fn prepare_and_bind_for_pixel_pack(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_pixel_pack(ctxt);
    }

    #[inline]
    fn unbind_pixel_pack(ctxt: &mut CommandContext<'_>) {
        Alloc::unbind_pixel_pack(ctxt)
    }

    #[inline]
    fn prepare_and_bind_for_pixel_unpack(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_pixel_unpack(ctxt);
    }

    #[inline]
    fn unbind_pixel_unpack(ctxt: &mut CommandContext<'_>) {
        Alloc::unbind_pixel_unpack(ctxt)
    }

    #[inline]
    fn prepare_and_bind_for_query(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_query(ctxt);
    }

    #[inline]
    fn unbind_query(ctxt: &mut CommandContext<'_>) {
        Alloc::unbind_query(ctxt)
    }

    #[inline]
    fn prepare_and_bind_for_draw_indirect(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_draw_indirect(ctxt);
    }

    #[inline]
    fn prepare_and_bind_for_dispatch_indirect(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_dispatch_indirect(ctxt);
    }

    // NOTE(review): the indexed bindings below bind the WHOLE allocation
    // (0 .. alloc.get_size()), not just this slice's byte range — confirm this
    // is intentional for slices.
    #[inline]
    fn prepare_and_bind_for_uniform(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        self.alloc.prepare_and_bind_for_uniform(ctxt, index, 0 .. self.alloc.get_size());
    }

    #[inline]
    fn prepare_and_bind_for_shared_storage(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        self.alloc.prepare_and_bind_for_shared_storage(ctxt, index, 0 .. self.alloc.get_size());
    }

    #[inline]
    fn prepare_and_bind_for_atomic_counter(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        self.alloc.prepare_and_bind_for_atomic_counter(ctxt, index, 0 .. self.alloc.get_size());
    }

    #[inline]
    fn bind_to_transform_feedback(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        self.alloc.bind_to_transform_feedback(ctxt, index, 0 .. self.alloc.get_size());
    }
}