// glium/buffer/view.rs

1use std::fmt;
2use std::mem;
3use std::borrow::Cow;
4use crate::utils::range::RangeArgument;
5use std::marker::PhantomData;
6
7use crate::texture::{PixelValue, Texture1dDataSink};
8use crate::gl;
9
10use crate::backend::Facade;
11use crate::BufferExt;
12use crate::BufferSliceExt;
13use crate::GlObject;
14
15use crate::context::Context;
16use crate::context::CommandContext;
17use std::rc::Rc;
18use crate::ContextExt;
19
20use crate::buffer::BufferType;
21use crate::buffer::BufferMode;
22use crate::buffer::BufferCreationError;
23use crate::buffer::Content;
24use crate::buffer::fences::Fences;
25use crate::buffer::fences::Inserter;
26use crate::buffer::alloc::Alloc;
27use crate::buffer::alloc::Mapping;
28use crate::buffer::alloc::ReadMapping;
29use crate::buffer::alloc::WriteMapping;
30use crate::buffer::alloc::ReadError;
31use crate::buffer::alloc::CopyError;
32use crate::field::Field;
33
/// Represents a view of a buffer.
pub struct Buffer<T: ?Sized> where T: Content {
    // The underlying OpenGL allocation. Always `Some` until `drop`.
    // TODO: this `Option` is here because we have a destructor and need to be able to move out
    alloc: Option<Alloc>,
    // Fences tracking pending GPU accesses to byte ranges of the buffer
    // (only inserted for persistent-mapped allocations — see `add_fence`).
    // Always `Some` until `drop`.
    // TODO: this `Option` is here because we have a destructor and need to be able to move out
    fence: Option<Fences>,
    // Ties the content type `T` to the buffer without storing a `T`.
    marker: PhantomData<T>,
}
42
43impl<T: ?Sized> GlObject for Buffer<T> where T: Content {
44    type Id = gl::types::GLuint;
45
46    #[inline]
47    fn get_id(&self) -> gl::types::GLuint {
48        self.alloc.as_ref().unwrap().get_id()
49    }
50}
51
52impl<T: ?Sized> Buffer<T> where T: Content {
53    /// Builds a new buffer containing the given data. The size of the buffer is equal to the size
54    /// of the data.
55    pub fn new<F: ?Sized>(facade: &F, data: &T, ty: BufferType, mode: BufferMode)
56                  -> Result<Buffer<T>, BufferCreationError>
57                  where F: Facade
58    {
59        Alloc::new(facade, data, ty, mode)
60            .map(|buffer| {
61                Buffer {
62                    alloc: Some(buffer),
63                    fence: Some(Fences::new()),
64                    marker: PhantomData,
65                }
66            })
67    }
68
69    /// Builds a new buffer of the given size.
70    pub fn empty_unsized<F: ?Sized>(facade: &F, ty: BufferType, size: usize, mode: BufferMode)
71                            -> Result<Buffer<T>, BufferCreationError> where F: Facade
72    {
73        assert!(<T as Content>::is_size_suitable(size));
74
75        Alloc::empty(facade, ty, size, mode)
76            .map(|buffer| {
77                Buffer {
78                    alloc: Some(buffer),
79                    fence: Some(Fences::new()),
80                    marker: PhantomData,
81                }
82            })
83    }
84
85    /// Returns the context corresponding to this buffer.
86    #[inline]
87    pub fn get_context(&self) -> &Rc<Context> {
88        self.alloc.as_ref().unwrap().get_context()
89    }
90
91    /// Returns the size in bytes of this buffer.
92    #[inline]
93    pub fn get_size(&self) -> usize {
94        self.alloc.as_ref().unwrap().get_size()
95    }
96
97    /// Returns true if this buffer uses persistent mapping.
98    #[inline]
99    pub fn is_persistent(&self) -> bool {
100        self.alloc.as_ref().unwrap().uses_persistent_mapping()
101    }
102
103    /// Uploads some data in this buffer.
104    ///
105    /// # Implementation
106    ///
107    /// - For persistent-mapped buffers, waits untils the data is no longer used by the GPU then
108    ///   memcpies the data to the mapping.
109    /// - For immutable buffers, creates a temporary buffer that contains the data then calls
110    ///   `glCopyBufferSubData` to copy from the temporary buffer to the real one.
111    /// - For other types, calls `glBufferSubData`.
112    ///
113    /// # Panic
114    ///
115    /// Panics if the length of `data` is different from the length of this buffer.
116    pub fn write(&self, data: &T) {
117        assert!(mem::size_of_val(data) == self.get_size());
118
119        self.fence.as_ref().unwrap().wait(&mut self.alloc.as_ref().unwrap().get_context().make_current(),
120                                          0 .. self.get_size());
121        unsafe { self.alloc.as_ref().unwrap().upload(0, data); }
122    }
123
124    /// Invalidates the content of the buffer. The data becomes undefined.
125    ///
126    /// You should call this if you only use parts of a buffer. For example if you want to use
127    /// the first half of the buffer, you invalidate the whole buffer then write the first half.
128    ///
129    /// This operation is a no-op if the backend doesn't support it and for persistent-mapped
130    /// buffers.
131    ///
132    /// # Implementation
133    ///
134    /// Calls `glInvalidateBufferData` if supported. Otherwise, calls `glBufferData` with a null
135    /// pointer for data. If `glBufferStorage` has been used to create the buffer and
136    /// `glInvalidateBufferData` is not supported, does nothing.
137    ///
138    #[inline]
139    pub fn invalidate(&self) {
140        self.alloc.as_ref().unwrap().invalidate(0, self.get_size());
141    }
142
143    /// Reads the content of the buffer.
144    pub fn read(&self) -> Result<T::Owned, ReadError> {
145        self.fence.as_ref().unwrap().wait(&mut self.alloc.as_ref().unwrap().get_context().make_current(),
146                                          0 .. self.get_size());
147
148        unsafe {
149            self.alloc.as_ref().unwrap().read::<T>(0 .. self.get_size())
150        }
151    }
152
153    /// Maps the buffer in memory for both reading and writing.
154    ///
155    /// # Implementation
156    ///
157    /// - For persistent-mapped buffers, waits until the data is no longer accessed by the GPU then
158    ///   returns a pointer to the existing mapping.
159    /// - For immutable buffers, creates a temporary buffer containing the data of the buffer and
160    ///   maps it. When the mapping object is destroyed, copies the content of the temporary buffer
161    ///   to the real buffer.
162    /// - For other types, calls `glMapBuffer` or `glMapSubBuffer`.
163    ///
164    pub fn map(&mut self) -> Mapping<'_, T> {
165        self.fence.as_ref().unwrap().wait(&mut self.alloc.as_ref().unwrap().get_context().make_current(),
166                                          0 .. self.get_size());
167        let size = self.get_size();
168        unsafe { self.alloc.as_mut().unwrap().map(0 .. size) }
169    }
170
171    /// Maps the buffer in memory for reading.
172    ///
173    /// # Implementation
174    ///
175    /// - For persistent-mapped buffers, waits until the data is no longer accessed by the GPU then
176    ///   returns a pointer to the existing mapping.
177    /// - For immutable buffers, creates a temporary buffer containing the data of the buffer and
178    ///   maps it.
179    /// - For other types, calls `glMapBuffer` or `glMapSubBuffer`.
180    ///
181    pub fn map_read(&mut self) -> ReadMapping<'_, T> {
182        self.fence.as_ref().unwrap().wait(&mut self.alloc.as_ref().unwrap().get_context().make_current(),
183                                          0 .. self.get_size());
184        let size = self.get_size();
185        unsafe { self.alloc.as_mut().unwrap().map_read(0 .. size) }
186    }
187
188    /// Maps the buffer in memory for writing only.
189    ///
190    /// # Implementation
191    ///
192    /// - For persistent-mapped buffers, waits until the data is no longer accessed by the GPU then
193    ///   returns a pointer to the existing mapping.
194    /// - For immutable buffers, creates a temporary buffer and
195    ///   maps it. When the mapping object is destroyed, copies the content of the temporary buffer
196    ///   to the real buffer.
197    /// - For other types, calls `glMapBuffer` or `glMapSubBuffer`.
198    ///
199    pub fn map_write(&mut self) -> WriteMapping<'_, T> {
200        self.fence.as_ref().unwrap().wait(&mut self.alloc.as_ref().unwrap().get_context().make_current(),
201                                          0 .. self.get_size());
202        let size = self.get_size();
203        unsafe { self.alloc.as_mut().unwrap().map_write(0 .. size) }
204    }
205
206    /// Copies the content of the buffer to another buffer.
207    ///
208    /// # Panic
209    ///
210    /// Panics if `T` is unsized and the other buffer is too small.
211    ///
212    pub fn copy_to<'a, S>(&self, target: S) -> Result<(), CopyError>
213                          where S: Into<BufferSlice<'a, T>>, T: 'a
214    {
215        let target = target.into();
216        let alloc = self.alloc.as_ref().unwrap();
217
218        alloc.copy_to(0 .. self.get_size(), &target.alloc, target.get_offset_bytes())?;
219
220        if let Some(inserter) = self.as_slice().add_fence() {
221            let mut ctxt = alloc.get_context().make_current();
222            inserter.insert(&mut ctxt);
223        }
224
225        if let Some(inserter) = target.add_fence() {
226            let mut ctxt = alloc.get_context().make_current();
227            inserter.insert(&mut ctxt);
228        }
229
230        Ok(())
231    }
232
233    /// Builds a slice that contains an element from inside the buffer.
234    ///
235    /// This method builds an object that represents a slice of the buffer. No actual operation
236    /// OpenGL is performed.
237    ///
238    /// # Example
239    ///
240    /// ```no_run
241    /// #[derive(Copy, Clone)]
242    /// struct BufferContent {
243    ///     value1: u16,
244    ///     value2: u16,
245    /// }
246    ///
247    /// # fn example(buffer: glium::buffer::Buffer<BufferContent>) {
248    /// let slice = unsafe { buffer.slice_custom(glium::field!(BufferContent, value2)) };
249    /// # }
250    /// ```
251    #[inline]
252    pub unsafe fn slice_custom<R>(&self, f: Field<R>) -> BufferSlice<'_, R>
253    where
254        R: Content,
255    {
256        self.as_slice().slice_custom(f)
257    }
258
259    /// Same as `slice_custom` but returns a mutable slice.
260    ///
261    /// This method builds an object that represents a slice of the buffer. No actual operation
262    /// OpenGL is performed.
263    #[inline]
264    pub unsafe fn slice_custom_mut<R>(&mut self, f: Field<R>) -> BufferMutSlice<'_, R>
265    where
266        R: Content,
267    {
268        self.as_mut_slice().slice_custom(f)
269    }
270
271    /// Builds a slice containing the whole subbuffer.
272    ///
273    /// This method builds an object that represents a slice of the buffer. No actual operation
274    /// OpenGL is performed.
275    #[inline]
276    pub fn as_slice(&self) -> BufferSlice<'_, T> {
277        BufferSlice {
278            alloc: self.alloc.as_ref().unwrap(),
279            bytes_start: 0,
280            bytes_end: self.get_size(),
281            fence: self.fence.as_ref().unwrap(),
282            marker: PhantomData,
283        }
284    }
285
286    /// Builds a slice containing the whole subbuffer.
287    ///
288    /// This method builds an object that represents a slice of the buffer. No actual operation
289    /// OpenGL is performed.
290    #[inline]
291    pub fn as_mut_slice(&mut self) -> BufferMutSlice<'_, T> {
292        let size = self.get_size();
293
294        BufferMutSlice {
295            alloc: self.alloc.as_mut().unwrap(),
296            bytes_start: 0,
297            bytes_end: size,
298            fence: self.fence.as_ref().unwrap(),
299            marker: PhantomData,
300        }
301    }
302
303    /// Builds a slice-any containing the whole subbuffer.
304    ///
305    /// This method builds an object that represents a slice of the buffer. No actual operation
306    /// OpenGL is performed.
307    pub fn as_slice_any(&self) -> BufferAnySlice<'_> {
308        let size = self.get_size();
309
310        BufferAnySlice {
311            alloc: self.alloc.as_ref().unwrap(),
312            bytes_start: 0,
313            bytes_end: self.get_size(),
314            elements_size: <T as Content>::get_elements_size(),
315            fence: self.fence.as_ref().unwrap(),
316        }
317    }
318}
319
320impl<T> Buffer<T> where T: Content + Copy {
321    /// Builds a new buffer of the given size.
322    pub fn empty<F: ?Sized>(facade: &F, ty: BufferType, mode: BufferMode)
323                    -> Result<Buffer<T>, BufferCreationError> where F: Facade
324    {
325        Alloc::empty(facade, ty, mem::size_of::<T>(), mode)
326            .map(|buffer| {
327                Buffer {
328                    alloc: Some(buffer),
329                    fence: Some(Fences::new()),
330                    marker: PhantomData,
331                }
332            })
333    }
334}
335
336impl<T> Buffer<[T]> where [T]: Content, T: Copy {
337    /// Builds a new buffer of the given size.
338    pub fn empty_array<F: ?Sized>(facade: &F, ty: BufferType, len: usize, mode: BufferMode)
339                          -> Result<Buffer<[T]>, BufferCreationError> where F: Facade
340    {
341        Alloc::empty(facade, ty, len * mem::size_of::<T>(), mode)
342            .map(|buffer| {
343                Buffer {
344                    alloc: Some(buffer),
345                    fence: Some(Fences::new()),
346                    marker: PhantomData,
347                }
348            })
349    }
350
351    /// Returns the number of elements in this buffer.
352    #[inline]
353    pub fn len(&self) -> usize {
354        self.alloc.as_ref().unwrap().get_size() / mem::size_of::<T>()
355    }
356
357    /// Builds a slice of this subbuffer. Returns `None` if out of range.
358    ///
359    /// This method builds an object that represents a slice of the buffer. No actual operation
360    /// OpenGL is performed.
361    #[inline]
362    pub fn slice<R: RangeArgument<usize>>(&self, range: R) -> Option<BufferSlice<'_, [T]>> {
363        self.as_slice().slice(range)
364    }
365
366    /// Builds a slice of this subbuffer. Returns `None` if out of range.
367    ///
368    /// This method builds an object that represents a slice of the buffer. No actual operation
369    /// OpenGL is performed.
370    #[inline]
371    pub fn slice_mut<R: RangeArgument<usize>>(&mut self, range: R) -> Option<BufferMutSlice<'_, [T]>> {
372        self.as_mut_slice().slice(range)
373    }
374}
375
376impl<T> Buffer<[T]> where T: PixelValue {
377    /// Reads the content of the buffer.
378    #[inline]
379    pub fn read_as_texture_1d<S>(&self) -> Result<S, ReadError> where S: Texture1dDataSink<T> {
380        let data = self.read()?;
381        Ok(S::from_raw(Cow::Owned(data), self.len() as u32))
382    }
383}
384
385impl<T: ?Sized> fmt::Debug for Buffer<T> where T: Content {
386    #[inline]
387    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
388        write!(fmt, "{:?}", self.alloc.as_ref().unwrap())
389    }
390}
391
392impl<T: ?Sized> Drop for Buffer<T> where T: Content {
393    #[inline]
394    fn drop(&mut self) {
395        if let (Some(alloc), Some(mut fence)) = (self.alloc.take(), self.fence.take()) {
396            fence.clean(&mut alloc.get_context().make_current());
397        }
398    }
399}
400
401impl<T: ?Sized> BufferExt for Buffer<T> where T: Content {
402    #[inline]
403    fn get_offset_bytes(&self) -> usize {
404        0
405    }
406
407    #[inline]
408    fn prepare_for_vertex_attrib_array(&self, ctxt: &mut CommandContext<'_>) {
409        let alloc = self.alloc.as_ref().unwrap();
410        alloc.prepare_for_vertex_attrib_array(ctxt);
411    }
412
413    #[inline]
414    fn prepare_for_element_array(&self, ctxt: &mut CommandContext<'_>) {
415        let alloc = self.alloc.as_ref().unwrap();
416        alloc.prepare_for_element_array(ctxt);
417    }
418
419    #[inline]
420    fn bind_to_element_array(&self, ctxt: &mut CommandContext<'_>) {
421        let alloc = self.alloc.as_ref().unwrap();
422        alloc.bind_to_element_array(ctxt);
423    }
424
425    #[inline]
426    fn prepare_and_bind_for_pixel_pack(&self, ctxt: &mut CommandContext<'_>) {
427        let alloc = self.alloc.as_ref().unwrap();
428        alloc.prepare_and_bind_for_pixel_pack(ctxt);
429    }
430
431    #[inline]
432    fn unbind_pixel_pack(ctxt: &mut CommandContext<'_>) {
433        Alloc::unbind_pixel_pack(ctxt)
434    }
435
436    #[inline]
437    fn prepare_and_bind_for_pixel_unpack(&self, ctxt: &mut CommandContext<'_>) {
438        let alloc = self.alloc.as_ref().unwrap();
439        alloc.prepare_and_bind_for_pixel_unpack(ctxt);
440    }
441
442    #[inline]
443    fn unbind_pixel_unpack(ctxt: &mut CommandContext<'_>) {
444        Alloc::unbind_pixel_unpack(ctxt)
445    }
446
447    #[inline]
448    fn prepare_and_bind_for_query(&self, ctxt: &mut CommandContext<'_>) {
449        let alloc = self.alloc.as_ref().unwrap();
450        alloc.prepare_and_bind_for_query(ctxt);
451    }
452
453    #[inline]
454    fn unbind_query(ctxt: &mut CommandContext<'_>) {
455        Alloc::unbind_query(ctxt)
456    }
457
458    #[inline]
459    fn prepare_and_bind_for_draw_indirect(&self, ctxt: &mut CommandContext<'_>) {
460        let alloc = self.alloc.as_ref().unwrap();
461        alloc.prepare_and_bind_for_draw_indirect(ctxt);
462    }
463
464    #[inline]
465    fn prepare_and_bind_for_dispatch_indirect(&self, ctxt: &mut CommandContext<'_>) {
466        let alloc = self.alloc.as_ref().unwrap();
467        alloc.prepare_and_bind_for_dispatch_indirect(ctxt);
468    }
469
470    #[inline]
471    fn prepare_and_bind_for_uniform(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
472        let alloc = self.alloc.as_ref().unwrap();
473        alloc.prepare_and_bind_for_uniform(ctxt, index, 0 .. alloc.get_size());
474    }
475
476    #[inline]
477    fn prepare_and_bind_for_shared_storage(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
478        let alloc = self.alloc.as_ref().unwrap();
479        alloc.prepare_and_bind_for_shared_storage(ctxt, index, 0 .. alloc.get_size());
480    }
481
482    #[inline]
483    fn prepare_and_bind_for_atomic_counter(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
484        let alloc = self.alloc.as_ref().unwrap();
485        alloc.prepare_and_bind_for_atomic_counter(ctxt, index, 0 .. alloc.get_size());
486    }
487
488    #[inline]
489    fn bind_to_transform_feedback(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
490        let alloc = self.alloc.as_ref().unwrap();
491        alloc.bind_to_transform_feedback(ctxt, index, 0 .. alloc.get_size());
492    }
493}
494
/// Represents a sub-part of a buffer.
#[derive(Copy, Clone)]
pub struct BufferSlice<'a, T: ?Sized> where T: Content {
    // Borrow of the underlying allocation.
    alloc: &'a Alloc,
    // Byte offset of the start of the slice inside the allocation.
    bytes_start: usize,
    // Byte offset one-past-the-end of the slice inside the allocation.
    bytes_end: usize,
    // Fences of the parent buffer, shared by all slices of that buffer.
    fence: &'a Fences,
    // Ties the content type `T` and lifetime to the slice without storing a `T`.
    marker: PhantomData<&'a T>,
}
504
505impl<'a, T: ?Sized> BufferSlice<'a, T> where T: Content + 'a {
506    /// Returns the size in bytes of this slice.
507    #[inline]
508    pub fn get_size(&self) -> usize {
509        self.bytes_end - self.bytes_start
510    }
511
512    /// Returns the context corresponding to this buffer.
513    #[inline]
514    pub fn get_context(&self) -> &Rc<Context> {
515        self.alloc.get_context()
516    }
517
518    /// Uploads some data in this buffer.
519    ///
520    /// # Implementation
521    ///
522    /// - For persistent-mapped buffers, waits untils the data is no longer used by the GPU then
523    ///   memcpies the data to the mapping.
524    /// - For immutable buffers, creates a temporary buffer that contains the data then calls
525    ///   `glCopyBufferSubData` to copy from the temporary buffer to the real one.
526    /// - For other types, calls `glBufferSubData`.
527    ///
528    /// # Panic
529    ///
530    /// Panics if the length of `data` is different from the length of this buffer.
531    pub fn write(&self, data: &T) {
532        assert_eq!(mem::size_of_val(data), self.get_size());
533
534        self.fence.wait(&mut self.alloc.get_context().make_current(),
535                        self.bytes_start .. self.bytes_end);
536        unsafe { self.alloc.upload(self.bytes_start, data); }
537    }
538
539    /// Invalidates the content of the slice. The data becomes undefined.
540    ///
541    /// This operation is a no-op if the backend doesn't support it and for persistent-mapped
542    /// buffers.
543    ///
544    /// # Implementation
545    ///
546    /// Calls `glInvalidateBufferSubData` if supported.
547    ///
548    #[inline]
549    pub fn invalidate(&self) {
550        self.alloc.invalidate(self.bytes_start, self.get_size());
551    }
552
553    /// Reads the content of the buffer.
554    pub fn read(&self) -> Result<T::Owned, ReadError> {
555        self.fence.wait(&mut self.alloc.get_context().make_current(),
556                        self.bytes_start .. self.bytes_end);
557
558        unsafe {
559            self.alloc.read::<T>(self.bytes_start .. self.bytes_end)
560        }
561    }
562
563    /// Copies the content of this slice to another slice.
564    ///
565    /// # Panic
566    ///
567    /// Panics if `T` is unsized and the other buffer is too small.
568    pub fn copy_to<S>(&self, target: S) -> Result<(), CopyError>
569                      where S: Into<BufferSlice<'a, T>>
570    {
571        let target = target.into();
572
573        self.alloc.copy_to(self.bytes_start .. self.bytes_end, &target.alloc,
574                           target.get_offset_bytes())?;
575
576        if let Some(inserter) = self.add_fence() {
577            let mut ctxt = self.alloc.get_context().make_current();
578            inserter.insert(&mut ctxt);
579        }
580
581        if let Some(inserter) = target.add_fence() {
582            let mut ctxt = self.alloc.get_context().make_current();
583            inserter.insert(&mut ctxt);
584        }
585
586        Ok(())
587    }
588
589    /// Builds a slice that contains an element from inside the buffer.
590    ///
591    /// This method builds an object that represents a slice of the buffer. No actual operation
592    /// OpenGL is performed.
593    ///
594    /// # Example
595    ///
596    /// ```no_run
597    /// #[derive(Copy, Clone)]
598    /// struct BufferContent {
599    ///     value1: u16,
600    ///     value2: u16,
601    /// }
602    ///
603    /// # fn example(buffer: glium::buffer::Buffer<BufferContent>) {
604    /// let slice = unsafe { buffer.slice_custom(glium::field!(BufferContent, value2)) };
605    /// # }
606    /// ```
607    #[inline]
608    pub unsafe fn slice_custom<R>(&self, f: Field<R>) -> BufferSlice<'a, R>
609    where
610        R: Content,
611    {
612        let size = f.size();
613        let result = f.offs();
614
615        assert!(result <= self.get_size());
616        assert!(result + size <= self.get_size());
617
618        BufferSlice {
619            alloc: self.alloc,
620            bytes_start: self.bytes_start + result,
621            bytes_end: self.bytes_start + result + size,
622            fence: self.fence,
623            marker: PhantomData,
624        }
625    }
626
627    /// Builds a slice-any containing the whole subbuffer.
628    ///
629    /// This method builds an object that represents a slice of the buffer. No actual operation
630    /// OpenGL is performed.
631    #[inline]
632    pub fn as_slice_any(&self) -> BufferAnySlice<'a> {
633        BufferAnySlice {
634            alloc: self.alloc,
635            bytes_start: self.bytes_start,
636            bytes_end: self.bytes_end,
637            elements_size: <T as Content>::get_elements_size(),
638            fence: self.fence,
639        }
640    }
641}
642
643impl<'a, T> BufferSlice<'a, [T]> where [T]: Content + 'a {
644    /// Returns the number of elements in this slice.
645    #[inline]
646    pub fn len(&self) -> usize {
647        (self.bytes_end - self.bytes_start) / mem::size_of::<T>()
648    }
649
650    /// Builds a subslice of this slice. Returns `None` if out of range.
651    ///
652    /// This method builds an object that represents a slice of the buffer. No actual operation
653    /// OpenGL is performed.
654    #[inline]
655    pub fn slice<R: RangeArgument<usize>>(&self, range: R) -> Option<BufferSlice<'a, [T]>> {
656        if range.start().map_or(0, |e| *e) > self.len() || range.end().map_or(0, |e| *e) > self.len() {
657            return None;
658        }
659
660        Some(BufferSlice {
661            alloc: self.alloc,
662            bytes_start: self.bytes_start + range.start().map_or(0, |e| *e) * mem::size_of::<T>(),
663            bytes_end: self.bytes_start + range.end().map_or(self.len(), |e| *e) * mem::size_of::<T>(),
664            fence: self.fence,
665            marker: PhantomData,
666        })
667    }
668}
669
670impl<'a, T> BufferSlice<'a, [T]> where T: PixelValue + 'a {
671    /// Reads the content of the buffer.
672    #[inline]
673    pub fn read_as_texture_1d<S>(&self) -> Result<S, ReadError> where S: Texture1dDataSink<T> {
674        let data = self.read()?;
675        Ok(S::from_raw(Cow::Owned(data), self.len() as u32))
676    }
677}
678
679impl<'a, T: ?Sized> fmt::Debug for BufferSlice<'a, T> where T: Content {
680    #[inline]
681    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
682        write!(fmt, "{:?}", self.alloc)
683    }
684}
685
686impl<'a, T: ?Sized> From<BufferMutSlice<'a, T>> for BufferSlice<'a, T> where T: Content + 'a {
687    #[inline]
688    fn from(s: BufferMutSlice<'a, T>) -> BufferSlice<'a, T> {
689        BufferSlice {
690            alloc: s.alloc,
691            bytes_start: s.bytes_start,
692            bytes_end: s.bytes_end,
693            fence: s.fence,
694            marker: PhantomData,
695        }
696    }
697}
698
699impl<'a, T: ?Sized> From<&'a Buffer<T>> for BufferSlice<'a, T> where T: Content + 'a {
700    #[inline]
701    fn from(b: &'a Buffer<T>) -> BufferSlice<'a, T> {
702        b.as_slice()
703    }
704}
705
706impl<'a, T: ?Sized> From<&'a mut Buffer<T>> for BufferSlice<'a, T> where T: Content + 'a {
707    #[inline]
708    fn from(b: &'a mut Buffer<T>) -> BufferSlice<'a, T> {
709        b.as_slice()
710    }
711}
712
713impl<'a, T: ?Sized> BufferSliceExt<'a> for BufferSlice<'a, T> where T: Content {
714    #[inline]
715    fn add_fence(&self) -> Option<Inserter<'a>> {
716        if !self.alloc.uses_persistent_mapping() {
717            return None;
718        }
719
720        Some(self.fence.inserter(self.bytes_start .. self.bytes_end))
721    }
722}
723
724impl<'a, T: ?Sized> BufferExt for BufferSlice<'a, T> where T: Content {
725    #[inline]
726    fn get_offset_bytes(&self) -> usize {
727        self.bytes_start
728    }
729
730    #[inline]
731    fn prepare_for_vertex_attrib_array(&self, ctxt: &mut CommandContext<'_>) {
732        self.alloc.prepare_for_vertex_attrib_array(ctxt);
733    }
734
735    #[inline]
736    fn prepare_for_element_array(&self, ctxt: &mut CommandContext<'_>) {
737        self.alloc.prepare_for_element_array(ctxt);
738    }
739
740    #[inline]
741    fn bind_to_element_array(&self, ctxt: &mut CommandContext<'_>) {
742        self.alloc.bind_to_element_array(ctxt);
743    }
744
745    #[inline]
746    fn prepare_and_bind_for_pixel_pack(&self, ctxt: &mut CommandContext<'_>) {
747        self.alloc.prepare_and_bind_for_pixel_pack(ctxt);
748    }
749
750    #[inline]
751    fn unbind_pixel_pack(ctxt: &mut CommandContext<'_>) {
752        Alloc::unbind_pixel_pack(ctxt)
753    }
754
755    #[inline]
756    fn prepare_and_bind_for_pixel_unpack(&self, ctxt: &mut CommandContext<'_>) {
757        self.alloc.prepare_and_bind_for_pixel_unpack(ctxt);
758    }
759
760    #[inline]
761    fn unbind_pixel_unpack(ctxt: &mut CommandContext<'_>) {
762        Alloc::unbind_pixel_unpack(ctxt)
763    }
764
765    #[inline]
766    fn prepare_and_bind_for_query(&self, ctxt: &mut CommandContext<'_>) {
767        self.alloc.prepare_and_bind_for_query(ctxt);
768    }
769
770    #[inline]
771    fn unbind_query(ctxt: &mut CommandContext<'_>) {
772        Alloc::unbind_query(ctxt)
773    }
774
775    #[inline]
776    fn prepare_and_bind_for_draw_indirect(&self, ctxt: &mut CommandContext<'_>) {
777        self.alloc.prepare_and_bind_for_draw_indirect(ctxt);
778    }
779
780    #[inline]
781    fn prepare_and_bind_for_dispatch_indirect(&self, ctxt: &mut CommandContext<'_>) {
782        self.alloc.prepare_and_bind_for_dispatch_indirect(ctxt);
783    }
784
785    #[inline]
786    fn prepare_and_bind_for_uniform(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
787        self.alloc.prepare_and_bind_for_uniform(ctxt, index, 0 .. self.alloc.get_size());
788    }
789
790    #[inline]
791    fn prepare_and_bind_for_shared_storage(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
792        self.alloc.prepare_and_bind_for_shared_storage(ctxt, index, 0 .. self.alloc.get_size());
793    }
794
795    #[inline]
796    fn prepare_and_bind_for_atomic_counter(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
797        self.alloc.prepare_and_bind_for_atomic_counter(ctxt, index, 0 .. self.alloc.get_size());
798    }
799
800    #[inline]
801    fn bind_to_transform_feedback(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
802        self.alloc.bind_to_transform_feedback(ctxt, index, 0 .. self.alloc.get_size());
803    }
804}
805
/// Represents a sub-part of a buffer.
pub struct BufferMutSlice<'a, T: ?Sized> where T: Content {
    // Exclusive borrow of the underlying allocation (needed for mapping).
    alloc: &'a mut Alloc,
    // Byte offset of the start of the slice inside the allocation.
    bytes_start: usize,
    // Byte offset one-past-the-end of the slice inside the allocation.
    bytes_end: usize,
    // Fences of the parent buffer, shared by all slices of that buffer.
    fence: &'a Fences,
    // Ties the content type `T` to the slice without storing a `T`.
    marker: PhantomData<T>,
}
814
815impl<'a, T: ?Sized> BufferMutSlice<'a, T> where T: Content + 'a {
816    /// Returns the size in bytes of this slice.
817    #[inline]
818    pub fn get_size(&self) -> usize {
819        self.bytes_end - self.bytes_start
820    }
821
822    /// Maps the buffer in memory for both reading and writing.
823    ///
824    /// # Implementation
825    ///
826    /// - For persistent-mapped buffers, waits until the data is no longer accessed by the GPU then
827    ///   returns a pointer to the existing mapping.
828    /// - For immutable buffers, creates a temporary buffer containing the data of the buffer and
829    ///   maps it. When the mapping object is destroyed, copies the content of the temporary buffer
830    ///   to the real buffer.
831    /// - For other types, calls `glMapBuffer` or `glMapSubBuffer`.
832    ///
833    #[inline]
834    pub fn map(self) -> Mapping<'a, T> {
835        self.fence.wait(&mut self.alloc.get_context().make_current(),
836                        self.bytes_start .. self.bytes_end);
837        unsafe { self.alloc.map(self.bytes_start .. self.bytes_end) }
838    }
839
840    /// Maps the buffer in memory for reading.
841    ///
842    /// # Implementation
843    ///
844    /// - For persistent-mapped buffers, waits until the data is no longer accessed by the GPU then
845    ///   returns a pointer to the existing mapping.
846    /// - For immutable buffers, creates a temporary buffer containing the data of the buffer and
847    ///   maps it.
848    /// - For other types, calls `glMapBuffer` or `glMapSubBuffer`.
849    ///
850    #[inline]
851    pub fn map_read(self) -> ReadMapping<'a, T> {
852        self.fence.wait(&mut self.alloc.get_context().make_current(),
853                        self.bytes_start .. self.bytes_end);
854        unsafe { self.alloc.map_read(self.bytes_start .. self.bytes_end) }
855    }
856
    /// Maps the buffer in memory for writing only.
    ///
    /// # Implementation
    ///
    /// - For persistent-mapped buffers, waits until the data is no longer accessed by the GPU then
    ///   returns a pointer to the existing mapping.
    /// - For immutable buffers, creates a temporary buffer and maps it. When the mapping object
    ///   is destroyed, copies the content of the temporary buffer to the real buffer.
    /// - For other types, calls `glMapBuffer` or `glMapSubBuffer`.
    ///
    #[inline]
    pub fn map_write(self) -> WriteMapping<'a, T> {
        // Even for write-only access the GPU must be done with the range first, otherwise
        // the CPU write could race an in-flight GPU read.
        self.fence.wait(&mut self.alloc.get_context().make_current(),
                        self.bytes_start .. self.bytes_end);
        // SAFETY: range assumed in-bounds (established at slice construction); GPU
        // synchronized by the fence wait above.
        unsafe { self.alloc.map_write(self.bytes_start .. self.bytes_end) }
    }
873
874    /// Uploads some data in this buffer.
875    ///
876    /// # Implementation
877    ///
878    /// - For persistent-mapped buffers, waits untils the data is no longer used by the GPU then
879    ///   memcpies the data to the mapping.
880    /// - For immutable buffers, creates a temporary buffer that contains the data then calls
881    ///   `glCopyBufferSubData` to copy from the temporary buffer to the real one.
882    /// - For other types, calls `glBufferSubData`.
883    ///
884    /// # Panic
885    ///
886    /// Panics if the length of `data` is different from the length of this buffer.
887    #[inline]
888    pub fn write(&self, data: &T) {
889        self.fence.wait(&mut self.alloc.get_context().make_current(),
890                        self.bytes_start .. self.bytes_end);
891        unsafe { self.alloc.upload(self.bytes_start, data); }
892    }
893
894    /// Invalidates the content of the slice. The data becomes undefined.
895    ///
896    /// This operation is a no-op if the backend doesn't support it and for persistent-mapped
897    /// buffers.
898    ///
899    /// # Implementation
900    ///
901    /// Calls `glInvalidateBufferSubData` if supported.
902    ///
903    #[inline]
904    pub fn invalidate(&self) {
905        self.alloc.invalidate(self.bytes_start, self.get_size());
906    }
907
908    /// Reads the content of the buffer.
909    #[inline]
910    pub fn read(&self) -> Result<T::Owned, ReadError> {
911        unsafe {
912            self.alloc.read::<T>(self.bytes_start .. self.bytes_end)
913        }
914    }
915
916    /// Copies the content of this slice to another slice.
917    ///
918    /// # Panic
919    ///
920    /// Panics if `T` is unsized and the other buffer is too small.
921    pub fn copy_to<S>(&self, target: S) -> Result<(), CopyError>
922                      where S: Into<BufferSlice<'a, T>>
923    {
924        let target = target.into();
925
926        self.alloc.copy_to(self.bytes_start .. self.bytes_end, &target.alloc,
927                           target.get_offset_bytes())?;
928
929        if let Some(inserter) = self.add_fence() {
930            let mut ctxt = self.alloc.get_context().make_current();
931            inserter.insert(&mut ctxt);
932        }
933
934        if let Some(inserter) = self.add_fence() {
935            let mut ctxt = self.alloc.get_context().make_current();
936            inserter.insert(&mut ctxt);
937        }
938
939        Ok(())
940    }
941
942    /// Builds a slice that contains an element from inside the buffer.
943    ///
944    /// This method builds an object that represents a slice of the buffer. No actual operation
945    /// OpenGL is performed.
946    ///
947    /// # Example
948    ///
949    /// ```no_run
950    /// #[derive(Copy, Clone)]
951    /// struct BufferContent {
952    ///     value1: u16,
953    ///     value2: u16,
954    /// }
955    /// # let buffer: glium::buffer::BufferSlice<BufferContent> =
956    /// #                                                   unsafe { std::mem::zeroed() };
957    /// let slice = unsafe { buffer.slice_custom(glium::field!(BufferContent, value2)) };
958    /// ```
959    #[inline]
960    pub unsafe fn slice_custom<R>(self, f: Field<R>) -> BufferMutSlice<'a, R>
961    where
962        R: Content,
963    {
964        let size = f.size();
965        let result = f.offs();
966
967        assert!(result <= self.get_size());
968        assert!(result + size <= self.get_size());
969
970        BufferMutSlice {
971            alloc: self.alloc,
972            bytes_start: self.bytes_start + result,
973            bytes_end: self.bytes_start + result + size,
974            fence: self.fence,
975            marker: PhantomData,
976        }
977    }
978
979    /// Builds a slice-any containing the whole subbuffer.
980    ///
981    /// This method builds an object that represents a slice of the buffer. No actual operation
982    /// OpenGL is performed.
983    #[inline]
984    pub fn as_slice_any(self) -> BufferAnySlice<'a> {
985        BufferAnySlice {
986            alloc: self.alloc,
987            bytes_start: self.bytes_start,
988            bytes_end: self.bytes_end,
989            elements_size: <T as Content>::get_elements_size(),
990            fence: self.fence,
991        }
992    }
993}
994
995impl<'a, T> BufferMutSlice<'a, [T]> where [T]: Content, T: Copy + 'a {
996    /// Returns the number of elements in this slice.
997    #[inline]
998    pub fn len(&self) -> usize {
999        (self.bytes_end - self.bytes_start) / mem::size_of::<T>()
1000    }
1001
1002    /// Builds a subslice of this slice. Returns `None` if out of range.
1003    ///
1004    /// This method builds an object that represents a slice of the buffer. No actual operation
1005    /// OpenGL is performed.
1006    #[inline]
1007    pub fn slice<R: RangeArgument<usize>>(self, range: R) -> Option<BufferMutSlice<'a, [T]>> {
1008        if range.start().map_or(0, |e| *e) > self.len() || range.end().map_or(0, |e| *e) > self.len() {
1009            return None;
1010        }
1011
1012        let len = self.len();
1013        Some(BufferMutSlice {
1014            alloc: self.alloc,
1015            bytes_start: self.bytes_start + range.start().map_or(0, |e| *e) * mem::size_of::<T>(),
1016            bytes_end: self.bytes_start + range.end().map_or(len, |e| *e) * mem::size_of::<T>(),
1017            fence: self.fence,
1018            marker: PhantomData,
1019        })
1020    }
1021}
1022
1023impl<'a, T> BufferMutSlice<'a, [T]> where T: PixelValue + 'a {
1024    /// Reads the content of the buffer.
1025    #[inline]
1026    pub fn read_as_texture_1d<S>(&self) -> Result<S, ReadError> where S: Texture1dDataSink<T> {
1027        let data = self.read()?;
1028        Ok(S::from_raw(Cow::Owned(data), self.len() as u32))
1029    }
1030}
1031
1032impl<'a, T: ?Sized> BufferSliceExt<'a> for BufferMutSlice<'a, T> where T: Content {
1033    #[inline]
1034    fn add_fence(&self) -> Option<Inserter<'a>> {
1035        if !self.alloc.uses_persistent_mapping() {
1036            return None;
1037        }
1038
1039        Some(self.fence.inserter(self.bytes_start .. self.bytes_end))
1040    }
1041}
1042
1043impl<'a, T: ?Sized> fmt::Debug for BufferMutSlice<'a, T> where T: Content {
1044    #[inline]
1045    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
1046        write!(fmt, "{:?}", self.alloc)
1047    }
1048}
1049
impl<'a, T: ?Sized> From<&'a mut Buffer<T>> for BufferMutSlice<'a, T> where T: Content + 'a {
    /// Borrows the entire buffer as a mutable slice.
    #[inline]
    fn from(b: &'a mut Buffer<T>) -> BufferMutSlice<'a, T> {
        b.as_mut_slice()
    }
}
1056
/// Represents a sub-part of a buffer.
///
/// Doesn't contain any information about the content, contrary to `Buffer`.
pub struct BufferAny {
    // Owned OpenGL allocation backing this buffer.
    alloc: Alloc,
    // Total size of the buffer, in bytes.
    size: usize,
    // Size in bytes of a single element.
    elements_size: usize,
    // Fences used to synchronize CPU access with GPU usage (relevant for
    // persistently-mapped buffers; see the `add_fence` impls).
    fence: Fences,
}
1066
1067impl BufferAny {
1068    /// Builds a slice-any containing the whole subbuffer.
1069    #[inline]
1070    pub fn as_slice_any(&self) -> BufferAnySlice<'_> {
1071        BufferAnySlice {
1072            alloc: &self.alloc,
1073            bytes_start: 0,
1074            bytes_end: self.size,
1075            elements_size: self.elements_size,
1076            fence: &self.fence,
1077        }
1078    }
1079
1080    /// Builds a mutable typed slice containing the whole subbuffer, without checking the type.
1081    #[inline]
1082    pub unsafe fn as_typed_slice_mut<T: ?Sized + Content>(&mut self) -> BufferMutSlice<'_, T> {
1083        assert_eq!(<T as Content>::get_elements_size(), self.elements_size);
1084        BufferMutSlice {
1085            alloc: &mut self.alloc,
1086            bytes_start: 0,
1087            bytes_end: self.size,
1088            fence: &self.fence,
1089            marker: PhantomData,
1090        }
1091    }
1092
1093    /// Builds a typed slice containing the whole subbuffer, without checking the type.
1094    #[inline]
1095    pub unsafe fn as_typed_slice<T: ?Sized + Content>(&self) -> BufferSlice<'_, T> {
1096        assert_eq!(<T as Content>::get_elements_size(), self.elements_size);
1097        BufferSlice {
1098            alloc: &self.alloc,
1099            bytes_start: 0,
1100            bytes_end: self.size,
1101            fence: &self.fence,
1102            marker: PhantomData,
1103        }
1104    }
1105
1106    /// Returns the size in bytes of each element in the buffer.
1107    // TODO: clumsy, remove this function
1108    #[inline]
1109    pub fn get_elements_size(&self) -> usize {
1110        self.elements_size
1111    }
1112
1113    /// Returns the number of elements in the buffer.
1114    // TODO: clumsy, remove this function
1115    #[inline]
1116    pub fn get_elements_count(&self) -> usize {
1117        self.size / self.elements_size
1118    }
1119
1120    /// Returns the context corresponding to this buffer.
1121    #[inline]
1122    pub fn get_context(&self) -> &Rc<Context> {
1123        self.alloc.get_context()
1124    }
1125
1126    /// Returns the number of bytes in this subbuffer.
1127    #[inline]
1128    pub fn get_size(&self) -> usize {
1129        self.size
1130    }
1131
1132    /// Invalidates the content of the buffer. The data becomes undefined.
1133    ///
1134    /// This operation is a no-op if the backend doesn't support it and for persistent-mapped
1135    /// buffers.
1136    #[inline]
1137    pub fn invalidate(&self) {
1138        self.alloc.invalidate(0, self.size);
1139    }
1140
1141    /// UNSTABLE. This function can be removed at any moment without any further notice.
1142    ///
1143    /// Considers that the buffer is filled with elements of type `T` and reads them.
1144    ///
1145    /// # Panic
1146    ///
1147    /// Panics if the size of the buffer is not a multiple of the size of the data.
1148    /// For example, trying to read some `(u8, u8, u8, u8)`s from a buffer of 7 bytes will panic.
1149    ///
1150    #[inline]
1151    pub unsafe fn read<T>(&self) -> Result<T::Owned, ReadError> where T: Content {
1152        // TODO: add check
1153        self.fence.wait(&mut self.alloc.get_context().make_current(), 0 .. self.get_size());
1154        self.alloc.read::<T>(0 .. self.get_size())
1155    }
1156}
1157
1158impl<T: ?Sized> From<Buffer<T>> for BufferAny where T: Content + Send + 'static {
1159    #[inline]
1160    fn from(mut buffer: Buffer<T>) -> BufferAny {
1161        let size = buffer.get_size();
1162
1163        BufferAny {
1164            alloc: buffer.alloc.take().unwrap(),
1165            size,
1166            elements_size: <T as Content>::get_elements_size(),
1167            fence: buffer.fence.take().unwrap(),
1168        }
1169    }
1170}
1171
impl Drop for BufferAny {
    #[inline]
    fn drop(&mut self) {
        // Destroy any outstanding fences before the allocation itself is freed.
        self.fence.clean(&mut self.alloc.get_context().make_current());
    }
}
1178
1179impl fmt::Debug for BufferAny {
1180    #[inline]
1181    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
1182        write!(fmt, "{:?}", self.alloc)
1183    }
1184}
1185
// Pure delegation to the underlying `Alloc`: `BufferAny` always covers the whole
// allocation, so binding ranges span `0 .. alloc.get_size()`.
impl BufferExt for BufferAny {
    // The buffer starts at the beginning of its allocation.
    #[inline]
    fn get_offset_bytes(&self) -> usize {
        0
    }

    #[inline]
    fn prepare_for_vertex_attrib_array(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_for_vertex_attrib_array(ctxt);
    }

    #[inline]
    fn prepare_for_element_array(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_for_element_array(ctxt);
    }

    #[inline]
    fn bind_to_element_array(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.bind_to_element_array(ctxt);
    }

    #[inline]
    fn prepare_and_bind_for_pixel_pack(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_pixel_pack(ctxt);
    }

    #[inline]
    fn unbind_pixel_pack(ctxt: &mut CommandContext<'_>) {
        Alloc::unbind_pixel_pack(ctxt)
    }

    #[inline]
    fn prepare_and_bind_for_pixel_unpack(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_pixel_unpack(ctxt);
    }

    #[inline]
    fn unbind_pixel_unpack(ctxt: &mut CommandContext<'_>) {
        Alloc::unbind_pixel_unpack(ctxt)
    }

    #[inline]
    fn prepare_and_bind_for_query(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_query(ctxt);
    }

    #[inline]
    fn unbind_query(ctxt: &mut CommandContext<'_>) {
        Alloc::unbind_query(ctxt)
    }

    #[inline]
    fn prepare_and_bind_for_draw_indirect(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_draw_indirect(ctxt);
    }

    #[inline]
    fn prepare_and_bind_for_dispatch_indirect(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_dispatch_indirect(ctxt);
    }

    // NOTE(review): the indexed bindings below use `self.alloc.get_size()` rather than
    // `self.size` — presumably identical for a whole-buffer view; confirm.
    #[inline]
    fn prepare_and_bind_for_uniform(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        self.alloc.prepare_and_bind_for_uniform(ctxt, index, 0 .. self.alloc.get_size());
    }

    #[inline]
    fn prepare_and_bind_for_shared_storage(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        self.alloc.prepare_and_bind_for_shared_storage(ctxt, index, 0 .. self.alloc.get_size());
    }

    #[inline]
    fn prepare_and_bind_for_atomic_counter(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        self.alloc.prepare_and_bind_for_atomic_counter(ctxt, index, 0 .. self.alloc.get_size());
    }

    #[inline]
    fn bind_to_transform_feedback(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        self.alloc.bind_to_transform_feedback(ctxt, index, 0 .. self.alloc.get_size());
    }
}
1267
/// Slice of a `Buffer` without any type info.
#[derive(Copy, Clone)]
pub struct BufferAnySlice<'a> {
    // Borrowed underlying OpenGL allocation.
    alloc: &'a Alloc,
    // Start of the slice within the allocation, in bytes.
    bytes_start: usize,
    // End (exclusive) of the slice within the allocation, in bytes.
    bytes_end: usize,
    // Size in bytes of a single element.
    elements_size: usize,
    // Fences guarding GPU access to the buffer (used with persistent mapping).
    fence: &'a Fences,
}
1277
impl<'a> GlObject for BufferAnySlice<'a> {
    type Id = gl::types::GLuint;

    /// Returns the OpenGL name of the underlying buffer object.
    #[inline]
    fn get_id(&self) -> gl::types::GLuint {
        self.alloc.get_id()
    }
}
1286
impl<'a> BufferAnySlice<'a> {
    /// Returns the number of bytes in this slice.
    #[inline]
    pub fn get_size(&self) -> usize {
        self.bytes_end - self.bytes_start
    }

    /// Returns the size in bytes of each element in the buffer.
    // TODO: clumsy, remove this function
    #[inline]
    pub fn get_elements_size(&self) -> usize {
        self.elements_size
    }

    /// Returns the number of elements in the buffer.
    // TODO: clumsy, remove this function
    #[inline]
    pub fn get_elements_count(&self) -> usize {
        self.get_size() / self.elements_size
    }

    /// Invalidates the content of the slice. The data becomes undefined.
    ///
    /// This operation is a no-op if the backend doesn't support it and for persistent-mapped
    /// buffers.
    #[inline]
    pub fn invalidate(&self) {
        // Only this slice's byte range is invalidated, not the whole allocation.
        self.alloc.invalidate(self.bytes_start, self.get_size());
    }

    /// Returns the context corresponding to this buffer.
    #[inline]
    pub fn get_context(&self) -> &Rc<Context> {
        self.alloc.get_context()
    }
}
1323
1324impl<'a> fmt::Debug for BufferAnySlice<'a> {
1325    #[inline]
1326    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
1327        write!(fmt, "{:?}", self.alloc)
1328    }
1329}
1330
1331impl<'a> BufferSliceExt<'a> for BufferAnySlice<'a> {
1332    #[inline]
1333    fn add_fence(&self) -> Option<Inserter<'a>> {
1334        if !self.alloc.uses_persistent_mapping() {
1335            return None;
1336        }
1337
1338        Some(self.fence.inserter(self.bytes_start .. self.bytes_end))
1339    }
1340}
1341
// Pure delegation to the underlying `Alloc`.
impl<'a> BufferExt for BufferAnySlice<'a> {
    // Offset of the slice inside its allocation, in bytes.
    #[inline]
    fn get_offset_bytes(&self) -> usize {
        self.bytes_start
    }

    #[inline]
    fn prepare_for_vertex_attrib_array(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_for_vertex_attrib_array(ctxt);
    }

    #[inline]
    fn prepare_for_element_array(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_for_element_array(ctxt);
    }

    #[inline]
    fn bind_to_element_array(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.bind_to_element_array(ctxt);
    }

    #[inline]
    fn prepare_and_bind_for_pixel_pack(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_pixel_pack(ctxt);
    }

    #[inline]
    fn unbind_pixel_pack(ctxt: &mut CommandContext<'_>) {
        Alloc::unbind_pixel_pack(ctxt)
    }

    #[inline]
    fn prepare_and_bind_for_pixel_unpack(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_pixel_unpack(ctxt);
    }

    #[inline]
    fn unbind_pixel_unpack(ctxt: &mut CommandContext<'_>) {
        Alloc::unbind_pixel_unpack(ctxt)
    }

    #[inline]
    fn prepare_and_bind_for_query(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_query(ctxt);
    }

    #[inline]
    fn unbind_query(ctxt: &mut CommandContext<'_>) {
        Alloc::unbind_query(ctxt)
    }

    #[inline]
    fn prepare_and_bind_for_draw_indirect(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_draw_indirect(ctxt);
    }

    #[inline]
    fn prepare_and_bind_for_dispatch_indirect(&self, ctxt: &mut CommandContext<'_>) {
        self.alloc.prepare_and_bind_for_dispatch_indirect(ctxt);
    }

    // NOTE(review): the indexed bindings below bind `0 .. alloc.get_size()` — the whole
    // allocation, not this slice's `bytes_start .. bytes_end` — confirm this is intentional.
    #[inline]
    fn prepare_and_bind_for_uniform(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        self.alloc.prepare_and_bind_for_uniform(ctxt, index, 0 .. self.alloc.get_size());
    }

    #[inline]
    fn prepare_and_bind_for_shared_storage(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        self.alloc.prepare_and_bind_for_shared_storage(ctxt, index, 0 .. self.alloc.get_size());
    }

    #[inline]
    fn prepare_and_bind_for_atomic_counter(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        self.alloc.prepare_and_bind_for_atomic_counter(ctxt, index, 0 .. self.alloc.get_size());
    }

    #[inline]
    fn bind_to_transform_feedback(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint) {
        self.alloc.bind_to_transform_feedback(ctxt, index, 0 .. self.alloc.get_size());
    }
}