use crate::backend::Facade;
use crate::context::CommandContext;
use crate::context::Context;
use crate::version::Version;
use crate::CapabilitiesSource;
use crate::ContextExt;
use crate::gl;
use std::os::raw;
use std::error::Error;
use std::{fmt, mem, ptr};
use std::cell::Cell;
use std::rc::Rc;
use std::ops::{Deref, DerefMut, Range};
use crate::GlObject;
use crate::TransformFeedbackSessionExt;

use crate::buffer::{Content, BufferType, BufferMode, BufferCreationError};
use crate::vertex::TransformFeedbackSession;
use crate::vertex_array_object::VertexAttributesSystem;

use crate::version::Api;

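/// Error that can happen when reading from a buffer.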
#[derive(Debug, Copy, Clone)]
pub enum ReadError {
    /// The backend doesn't support reading from a buffer.
    NotSupported,

    /// The context has been lost; reading from the buffer would return garbage data.
    ContextLost,
}

impl fmt::Display for ReadError {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        use self::ReadError::*;
        let desc = match *self {
            NotSupported => "The backend doesn't support reading from a buffer",
            ContextLost => "The context has been lost. Reading from the buffer would return garbage data",
        };
        fmt.write_str(desc)
    }
}

impl Error for ReadError {}

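/// Error that can happen when copying between two buffers.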
#[derive(Debug, Copy, Clone)]
pub enum CopyError {
    /// The backend doesn't support copying between buffers.
    NotSupported,
}

impl fmt::Display for CopyError {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        use self::CopyError::*;
        let desc = match *self {
            NotSupported => "The backend doesn't support copying between buffers",
        };
        fmt.write_str(desc)
    }
}

impl Error for CopyError {}

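/// A buffer allocated with OpenGL, together with the client-side state used to manage it.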
pub struct Alloc {
    context: Rc<Context>,

    /// OpenGL identifier of the buffer.
    id: gl::types::GLuint,

    /// Type of the buffer.
    ty: BufferType,

    /// Size in bytes of the buffer.
    size: usize,

    /// A pointer to the persistent mapping of this buffer in memory, if there is one.
    persistent_mapping: Option<*mut raw::c_void>,

    /// True if the buffer's storage is immutable (it can never be reallocated).
    immutable: bool,

    /// The mode that was passed when creating the buffer.
    creation_mode: BufferMode,

    /// True if the buffer was created with `glBufferStorage` or an equivalent.
    created_with_buffer_storage: bool,

    /// True if the buffer is currently mapped with something else than persistent mapping.
    mapped: Cell<bool>,

    /// ID of the draw call during which the buffer was last written from a shader.
    latest_shader_write: Cell<u64>,
}

impl Alloc {
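    /// Builds a new buffer containing the given data. The size of the buffer is equal to the
    /// size of the data.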
    pub fn new<D: ?Sized, F: ?Sized>(facade: &F, data: &D, ty: BufferType, mode: BufferMode)
                                     -> Result<Alloc, BufferCreationError>
        where D: Content, F: Facade
    {
        let mut ctxt = facade.get_context().make_current();

        let size = mem::size_of_val(data);

        let (id, immutable, created_with_buffer_storage, persistent_mapping) = unsafe {
            create_buffer(&mut ctxt, size, Some(data), ty, mode)
        }?;

        Ok(Alloc {
            context: facade.get_context().clone(),
            id,
            ty,
            size,
            persistent_mapping,
            immutable,
            created_with_buffer_storage,
            creation_mode: mode,
            mapped: Cell::new(false),
            latest_shader_write: Cell::new(0),
        })
    }

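    /// Builds a new empty buffer of the given size.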
    pub fn empty<F: ?Sized>(facade: &F, ty: BufferType, size: usize, mode: BufferMode)
                            -> Result<Alloc, BufferCreationError> where F: Facade
    {
        let mut ctxt = facade.get_context().make_current();

        let (id, immutable, created_with_buffer_storage, persistent_mapping) = unsafe {
            create_buffer::<()>(&mut ctxt, size, None, ty, mode)
        }?;

        Ok(Alloc {
            context: facade.get_context().clone(),
            id,
            ty,
            size,
            persistent_mapping,
            immutable,
            created_with_buffer_storage,
            creation_mode: mode,
            mapped: Cell::new(false),
            latest_shader_write: Cell::new(0),
        })
    }

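    /// Returns the context used to create this buffer.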
    #[inline]
    pub fn get_context(&self) -> &Rc<Context> {
        &self.context
    }

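    /// Returns the size in bytes of the buffer.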
    #[inline]
    pub fn get_size(&self) -> usize {
        self.size
    }

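    /// Returns true if the buffer is persistently mapped in memory.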
    #[inline]
    pub fn uses_persistent_mapping(&self) -> bool {
        self.persistent_mapping.is_some()
    }

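    /// Changes the type of the buffer. Returns `Err` if this is forbidden.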
    pub fn set_type(mut self, ty: BufferType) -> Result<Alloc, Alloc> {
        self.ty = ty;
        Ok(self)
    }

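    /// Makes sure that the buffer isn't mapped, and unmaps it if it is.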
    fn assert_unmapped(&self, ctxt: &mut CommandContext<'_>) {
        if self.mapped.get() {
            unsafe { unmap_buffer(ctxt, self.id, self.ty) };
            self.mapped.set(false);
        }
    }

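    /// Ensures that the buffer isn't used by the current transform feedback session.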
    #[inline]
    fn assert_not_transform_feedback(&self, ctxt: &mut CommandContext<'_>) {
        TransformFeedbackSession::ensure_buffer_out_of_transform_feedback(ctxt, self.id);
    }

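    /// Calls `glMemoryBarrier` if a shader has written to the buffer since the last barrier.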
    fn barrier_for_buffer_update(&self, ctxt: &mut CommandContext<'_>) {
        if self.latest_shader_write.get() >= ctxt.state.latest_memory_barrier_buffer_update {
            unsafe { ctxt.gl.MemoryBarrier(gl::BUFFER_UPDATE_BARRIER_BIT); }
            ctxt.state.latest_memory_barrier_buffer_update = ctxt.state.next_draw_call_id;
        }
    }

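    /// Makes sure that the buffer can be used as a source of vertex attributes.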
    pub fn prepare_for_vertex_attrib_array(&self, ctxt: &mut CommandContext<'_>) {
        self.assert_unmapped(ctxt);
        self.assert_not_transform_feedback(ctxt);

        if self.latest_shader_write.get() >= ctxt.state.latest_memory_barrier_vertex_attrib_array {
            unsafe { ctxt.gl.MemoryBarrier(gl::VERTEX_ATTRIB_ARRAY_BARRIER_BIT); }
            ctxt.state.latest_memory_barrier_vertex_attrib_array = ctxt.state.next_draw_call_id;
        }
    }

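    /// Makes sure that the buffer can be used as a source of indices.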
    pub fn prepare_for_element_array(&self, ctxt: &mut CommandContext<'_>) {
        self.assert_unmapped(ctxt);
        self.assert_not_transform_feedback(ctxt);

        if self.latest_shader_write.get() >= ctxt.state.latest_memory_barrier_element_array {
            unsafe { ctxt.gl.MemoryBarrier(gl::ELEMENT_ARRAY_BARRIER_BIT); }
            ctxt.state.latest_memory_barrier_element_array = ctxt.state.next_draw_call_id;
        }
    }

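    /// Binds the buffer to `GL_ELEMENT_ARRAY_BUFFER` directly; this binding is part of the
    /// current VAO, so the shared state cache is not consulted.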
    pub fn bind_to_element_array(&self, ctxt: &mut CommandContext<'_>) {
        if ctxt.version >= &Version(Api::Gl, 1, 5) ||
           ctxt.version >= &Version(Api::GlEs, 2, 0)
        {
            unsafe { ctxt.gl.BindBuffer(gl::ELEMENT_ARRAY_BUFFER, self.id); }
        } else if ctxt.extensions.gl_arb_vertex_buffer_object {
            unsafe { ctxt.gl.BindBufferARB(gl::ELEMENT_ARRAY_BUFFER, self.id); }
        } else {
            unreachable!();
        }
    }

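    /// Binds the buffer to `GL_PIXEL_PACK_BUFFER`, issuing a pixel-buffer memory barrier
    /// first if needed.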
    pub fn prepare_and_bind_for_pixel_pack(&self, ctxt: &mut CommandContext<'_>) {
        self.assert_unmapped(ctxt);
        self.assert_not_transform_feedback(ctxt);

        if self.latest_shader_write.get() >= ctxt.state.latest_memory_barrier_pixel_buffer {
            unsafe { ctxt.gl.MemoryBarrier(gl::PIXEL_BUFFER_BARRIER_BIT); }
            ctxt.state.latest_memory_barrier_pixel_buffer = ctxt.state.next_draw_call_id;
        }

        unsafe { bind_buffer(ctxt, self.id, BufferType::PixelPackBuffer); }
    }

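    /// Unbinds whatever buffer is currently bound to `GL_PIXEL_PACK_BUFFER`.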
    #[inline]
    pub fn unbind_pixel_pack(ctxt: &mut CommandContext<'_>) {
        unsafe { bind_buffer(ctxt, 0, BufferType::PixelPackBuffer); }
    }

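    /// Binds the buffer to `GL_PIXEL_UNPACK_BUFFER`, issuing a pixel-buffer memory barrier
    /// first if needed.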
    pub fn prepare_and_bind_for_pixel_unpack(&self, ctxt: &mut CommandContext<'_>) {
        self.assert_unmapped(ctxt);
        self.assert_not_transform_feedback(ctxt);

        if self.latest_shader_write.get() >= ctxt.state.latest_memory_barrier_pixel_buffer {
            unsafe { ctxt.gl.MemoryBarrier(gl::PIXEL_BUFFER_BARRIER_BIT); }
            ctxt.state.latest_memory_barrier_pixel_buffer = ctxt.state.next_draw_call_id;
        }

        unsafe { bind_buffer(ctxt, self.id, BufferType::PixelUnpackBuffer); }
    }

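    /// Unbinds whatever buffer is currently bound to `GL_PIXEL_UNPACK_BUFFER`.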
    #[inline]
    pub fn unbind_pixel_unpack(ctxt: &mut CommandContext<'_>) {
        unsafe { bind_buffer(ctxt, 0, BufferType::PixelUnpackBuffer); }
    }

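    /// Binds the buffer to `GL_QUERY_BUFFER`, issuing a query-buffer memory barrier first
    /// if needed. Panics if query buffers are not supported.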
    pub fn prepare_and_bind_for_query(&self, ctxt: &mut CommandContext<'_>) {
        assert!(ctxt.version >= &Version(Api::Gl, 4, 4) ||
                ctxt.extensions.gl_arb_query_buffer_object ||
                ctxt.extensions.gl_amd_query_buffer_object);

        self.assert_unmapped(ctxt);
        self.assert_not_transform_feedback(ctxt);

        if self.latest_shader_write.get() >= ctxt.state.latest_memory_barrier_query_buffer {
            unsafe { ctxt.gl.MemoryBarrier(gl::QUERY_BUFFER_BARRIER_BIT); }
            ctxt.state.latest_memory_barrier_query_buffer = ctxt.state.next_draw_call_id;
        }

        unsafe { bind_buffer(ctxt, self.id, BufferType::QueryBuffer); }
    }

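    /// Unbinds whatever buffer is currently bound to `GL_QUERY_BUFFER`.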
    #[inline]
    pub fn unbind_query(ctxt: &mut CommandContext<'_>) {
        unsafe { bind_buffer(ctxt, 0, BufferType::QueryBuffer); }
    }

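    /// Binds the buffer to `GL_DRAW_INDIRECT_BUFFER`, issuing a command memory barrier
    /// first if needed.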
    pub fn prepare_and_bind_for_draw_indirect(&self, ctxt: &mut CommandContext<'_>) {
        self.assert_unmapped(ctxt);
        self.assert_not_transform_feedback(ctxt);

        if self.latest_shader_write.get() >= ctxt.state.latest_memory_barrier_command {
            unsafe { ctxt.gl.MemoryBarrier(gl::COMMAND_BARRIER_BIT); }
            ctxt.state.latest_memory_barrier_command = ctxt.state.next_draw_call_id;
        }

        unsafe { bind_buffer(ctxt, self.id, BufferType::DrawIndirectBuffer); }
    }

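    /// Binds the buffer to `GL_DISPATCH_INDIRECT_BUFFER`, issuing a command memory barrier
    /// first if needed.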
    pub fn prepare_and_bind_for_dispatch_indirect(&self, ctxt: &mut CommandContext<'_>) {
        self.assert_unmapped(ctxt);
        self.assert_not_transform_feedback(ctxt);

        if self.latest_shader_write.get() >= ctxt.state.latest_memory_barrier_command {
            unsafe { ctxt.gl.MemoryBarrier(gl::COMMAND_BARRIER_BIT); }
            ctxt.state.latest_memory_barrier_command = ctxt.state.next_draw_call_id;
        }

        unsafe { bind_buffer(ctxt, self.id, BufferType::DispatchIndirectBuffer); }
    }

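    /// Binds the buffer range to the indexed `GL_UNIFORM_BUFFER` bind point.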
    pub fn prepare_and_bind_for_uniform(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint,
                                        range: Range<usize>)
    {
        self.assert_unmapped(ctxt);
        self.assert_not_transform_feedback(ctxt);

        if self.latest_shader_write.get() >= ctxt.state.latest_memory_barrier_uniform {
            unsafe { ctxt.gl.MemoryBarrier(gl::UNIFORM_BARRIER_BIT); }
            ctxt.state.latest_memory_barrier_uniform = ctxt.state.next_draw_call_id;
        }

        self.indexed_bind(ctxt, BufferType::UniformBuffer, index, range);
    }

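    /// Binds the buffer range to the indexed `GL_SHADER_STORAGE_BUFFER` bind point and
    /// records the upcoming draw call as a shader write.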
    pub fn prepare_and_bind_for_shared_storage(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint,
                                               range: Range<usize>)
    {
        self.assert_unmapped(ctxt);
        self.assert_not_transform_feedback(ctxt);

        if self.latest_shader_write.get() >= ctxt.state.latest_memory_barrier_shader_storage {
            unsafe { ctxt.gl.MemoryBarrier(gl::SHADER_STORAGE_BARRIER_BIT); }
            ctxt.state.latest_memory_barrier_shader_storage = ctxt.state.next_draw_call_id;
        }

        self.indexed_bind(ctxt, BufferType::ShaderStorageBuffer, index, range);

        self.latest_shader_write.set(ctxt.state.next_draw_call_id);
    }

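    /// Binds the buffer range to the indexed `GL_ATOMIC_COUNTER_BUFFER` bind point and
    /// records the upcoming draw call as a shader write.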
    pub fn prepare_and_bind_for_atomic_counter(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint,
                                               range: Range<usize>)
    {
        self.assert_unmapped(ctxt);
        self.assert_not_transform_feedback(ctxt);

        if self.latest_shader_write.get() >= ctxt.state.latest_memory_barrier_atomic_counter {
            unsafe { ctxt.gl.MemoryBarrier(gl::ATOMIC_COUNTER_BARRIER_BIT); }
            ctxt.state.latest_memory_barrier_atomic_counter = ctxt.state.next_draw_call_id;
        }

        self.indexed_bind(ctxt, BufferType::AtomicCounterBuffer, index, range);

        self.latest_shader_write.set(ctxt.state.next_draw_call_id);
    }

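    /// Binds the buffer range to the indexed `GL_TRANSFORM_FEEDBACK_BUFFER` bind point.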
    #[inline]
    pub fn bind_to_transform_feedback(&self, ctxt: &mut CommandContext<'_>, index: gl::types::GLuint,
                                      range: Range<usize>)
    {
        self.indexed_bind(ctxt, BufferType::TransformFeedbackBuffer, index, range);
    }

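    /// Unmaps the buffer if necessary, then binds it to the given bind point.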
    #[inline]
    fn bind(&self, ctxt: &mut CommandContext<'_>, ty: BufferType) {
        self.assert_unmapped(ctxt);
        unsafe { bind_buffer(ctxt, self.id, ty); }
    }

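    /// Unmaps the buffer if necessary, then binds a range of it to the given indexed bind point.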
    #[inline]
    fn indexed_bind(&self, ctxt: &mut CommandContext<'_>, ty: BufferType,
                    index: gl::types::GLuint, range: Range<usize>)
    {
        self.assert_unmapped(ctxt);
        unsafe { indexed_bind_buffer(ctxt, self.id, ty, index, range); }
    }

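    /// Uploads data to the buffer. The data must fit inside the buffer.
    ///
    /// # Safety
    ///
    /// The type `D` must be compatible with the actual content of the buffer.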
    pub unsafe fn upload<D: ?Sized>(&self, offset_bytes: usize, data: &D)
        where D: Content
    {
        assert!(offset_bytes + mem::size_of_val(data) <= self.size);

        if self.persistent_mapping.is_some() {
            let mapping = Mapping { mapping: self.map_shared(offset_bytes .. offset_bytes + mem::size_of_val(data), false, true) };
            ptr::copy_nonoverlapping(data.to_void_ptr() as *const u8, <D as Content>::to_void_ptr(&mapping) as *mut u8, mem::size_of_val(data));

        } else if self.immutable {
            let mut ctxt = self.context.make_current();
            self.barrier_for_buffer_update(&mut ctxt);

            self.assert_unmapped(&mut ctxt);
            self.assert_not_transform_feedback(&mut ctxt);

            let (tmp_buffer, _, _, _) = create_buffer(&mut ctxt, mem::size_of_val(data), Some(data),
                                                      BufferType::CopyReadBuffer,
                                                      BufferMode::Dynamic).unwrap();
            copy_buffer(&mut ctxt, tmp_buffer, 0, self.id, offset_bytes, mem::size_of_val(data)).unwrap();
            destroy_buffer(&mut ctxt, tmp_buffer);

        } else {
            assert!(offset_bytes < self.size);

            let mut ctxt = self.context.make_current();
            self.barrier_for_buffer_update(&mut ctxt);

            let invalidate_all = offset_bytes == 0 && mem::size_of_val(data) == self.size;

            self.assert_unmapped(&mut ctxt);
            self.assert_not_transform_feedback(&mut ctxt);

            if invalidate_all && (ctxt.version >= &Version(Api::Gl, 4, 3) ||
                                  ctxt.extensions.gl_arb_invalidate_subdata)
            {
                ctxt.gl.InvalidateBufferData(self.id);
            }

            if ctxt.version >= &Version(Api::Gl, 4, 5) {
                ctxt.gl.NamedBufferSubData(self.id, offset_bytes as gl::types::GLintptr,
                                           mem::size_of_val(data) as gl::types::GLsizeiptr,
                                           data.to_void_ptr() as *const _)

            } else if ctxt.extensions.gl_ext_direct_state_access {
                ctxt.gl.NamedBufferSubDataEXT(self.id, offset_bytes as gl::types::GLintptr,
                                              mem::size_of_val(data) as gl::types::GLsizeiptr,
                                              data.to_void_ptr() as *const _)

            } else if ctxt.version >= &Version(Api::Gl, 1, 5) ||
                      ctxt.version >= &Version(Api::GlEs, 2, 0)
            {
                let bind = bind_buffer(&mut ctxt, self.id, self.ty);
                ctxt.gl.BufferSubData(bind, offset_bytes as gl::types::GLintptr,
                                      mem::size_of_val(data) as gl::types::GLsizeiptr,
                                      data.to_void_ptr() as *const _);

            } else if ctxt.extensions.gl_arb_vertex_buffer_object {
                let bind = bind_buffer(&mut ctxt, self.id, self.ty);
                ctxt.gl.BufferSubDataARB(bind, offset_bytes as gl::types::GLintptr,
                                         mem::size_of_val(data) as gl::types::GLsizeiptr,
                                         data.to_void_ptr() as *const _);

            } else {
                unreachable!();
            }
        }
    }

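    /// Invalidates the content of the given byte range, hinting the driver that the data
    /// inside it no longer needs to be preserved.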
    pub fn invalidate(&self, offset: usize, size: usize) {
        assert!(offset + size <= self.size);

        let is_whole_buffer = offset == 0 && size == self.size;

        let mut ctxt = self.context.make_current();
        self.assert_unmapped(&mut ctxt);
        self.assert_not_transform_feedback(&mut ctxt);

        if self.persistent_mapping.is_none() &&
           (ctxt.version >= &Version(Api::Gl, 4, 3) || ctxt.extensions.gl_arb_invalidate_subdata)
        {
            if is_whole_buffer {
                unsafe { ctxt.gl.InvalidateBufferData(self.id) };
            } else {
                unsafe { ctxt.gl.InvalidateBufferSubData(self.id, offset as gl::types::GLintptr,
                                                         size as gl::types::GLsizeiptr) };
            }

        } else if !self.created_with_buffer_storage && is_whole_buffer {
            let flags = match self.creation_mode {
                BufferMode::Default | BufferMode::Immutable => gl::STATIC_DRAW,
                BufferMode::Persistent | BufferMode::Dynamic => gl::DYNAMIC_DRAW,
            };

            if ctxt.version >= &Version(Api::Gl, 1, 5) ||
               ctxt.version >= &Version(Api::GlEs, 2, 0)
            {
                unsafe {
                    let bind = bind_buffer(&mut ctxt, self.id, self.ty);
                    ctxt.gl.BufferData(bind, size as gl::types::GLsizeiptr,
                                       ptr::null(), flags);
                }

            } else if ctxt.extensions.gl_arb_vertex_buffer_object {
                unsafe {
                    let bind = bind_buffer(&mut ctxt, self.id, self.ty);
                    ctxt.gl.BufferDataARB(bind, size as gl::types::GLsizeiptr,
                                          ptr::null(), flags);
                }

            } else {
                unreachable!();
            }
        }
    }

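    /// Returns a mapping that doesn't require exclusive access to the buffer: either the
    /// existing persistent mapping, or a mapped temporary buffer that is copied back on drop.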
    unsafe fn map_shared<D: ?Sized>(&self, bytes_range: Range<usize>, read: bool, write: bool)
                                    -> MappingImpl<'_, D> where D: Content
    {
        if let Some(existing_mapping) = self.persistent_mapping {
            let mut ctxt = self.context.make_current();
            self.barrier_for_buffer_update(&mut ctxt);

            let data = (existing_mapping as *mut u8).add(bytes_range.start);
            let data = Content::ref_from_ptr(data as *mut (),
                                             bytes_range.end - bytes_range.start).unwrap();

            MappingImpl::PersistentMapping {
                buffer: self,
                offset_bytes: bytes_range.start,
                data,
                needs_flushing: write,
            }

        } else {
            let size_bytes = bytes_range.end - bytes_range.start;

            let mut ctxt = self.context.make_current();

            let temporary_buffer = {
                let (temporary_buffer, _, _, _) = create_buffer::<D>(&mut ctxt, size_bytes,
                                                                     None, BufferType::CopyWriteBuffer,
                                                                     BufferMode::Dynamic).unwrap();
                temporary_buffer
            };

            let ptr = {
                self.assert_unmapped(&mut ctxt);
                self.assert_not_transform_feedback(&mut ctxt);

                if read {
                    copy_buffer(&mut ctxt, self.id, bytes_range.start,
                                temporary_buffer, 0, size_bytes).unwrap();
                }

                map_buffer(&mut ctxt, temporary_buffer, self.ty, 0 .. size_bytes, true, true)
                    .expect("Buffer mapping is not supported by the backend")
            };

            let data = match Content::ref_from_ptr(ptr, bytes_range.end - bytes_range.start) {
                Some(data) => data,
                None => {
                    unmap_buffer(&mut ctxt, temporary_buffer, self.ty);
                    panic!("Wrong bytes range");
                }
            };

            MappingImpl::TemporaryBuffer {
                original_buffer: self,
                original_buffer_offset: bytes_range.start,
                temporary_buffer,
                temporary_buffer_data: data,
                needs_flushing: write,
            }
        }
    }

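    /// Implementation of the mapping functions: picks the persistent mapping, a temporary
    /// buffer, or a regular `glMapBufferRange` depending on how the buffer was created.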
    unsafe fn map_impl<D: ?Sized>(&mut self, bytes_range: Range<usize>, read: bool, write: bool)
                                  -> MappingImpl<'_, D> where D: Content
    {
        if self.persistent_mapping.is_some() || self.immutable {
            self.map_shared(bytes_range, read, write)

        } else {
            let data = {
                let mut ctxt = self.context.make_current();

                let ptr = {
                    self.assert_unmapped(&mut ctxt);
                    self.assert_not_transform_feedback(&mut ctxt);
                    self.barrier_for_buffer_update(&mut ctxt);
                    let ptr = map_buffer(&mut ctxt, self.id, self.ty, bytes_range.clone(),
                                         read, write)
                        .expect("Buffer mapping is not supported by the backend");
                    self.mapped.set(true);
                    ptr
                };

                match Content::ref_from_ptr(ptr, bytes_range.end - bytes_range.start) {
                    Some(data) => data,
                    None => {
                        unmap_buffer(&mut ctxt, self.id, self.ty);
                        panic!("Wrong bytes range");
                    }
                }
            };

            MappingImpl::RegularMapping {
                buffer: self,
                data,
                needs_flushing: write,
            }
        }
    }

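    /// Maps a range of the buffer for both reading and writing.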
    #[inline]
    pub unsafe fn map<D: ?Sized>(&mut self, bytes_range: Range<usize>)
                                 -> Mapping<'_, D> where D: Content
    {
        Mapping {
            mapping: self.map_impl(bytes_range, true, true)
        }
    }

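    /// Maps a range of the buffer for reading only.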
    #[inline]
    pub unsafe fn map_read<D: ?Sized>(&mut self, bytes_range: Range<usize>)
                                      -> ReadMapping<'_, D> where D: Content
    {
        ReadMapping {
            mapping: self.map_impl(bytes_range, true, false)
        }
    }

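    /// Maps a range of the buffer for writing only.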
    #[inline]
    pub unsafe fn map_write<D: ?Sized>(&mut self, bytes_range: Range<usize>)
                                       -> WriteMapping<'_, D> where D: Content
    {
        WriteMapping {
            mapping: self.map_impl(bytes_range, false, true)
        }
    }

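    /// Reads the content of the given byte range into regular memory. Returns an error if
    /// reading is not supported by the backend or if the context has been lost.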
    pub unsafe fn read<D: ?Sized>(&self, range: Range<usize>)
                                  -> Result<D::Owned, ReadError>
        where D: Content
    {
        let size_to_read = range.end - range.start;

        if self.persistent_mapping.is_some() {
            let mapping = ReadMapping { mapping: self.map_shared(range, true, false) };
            <D as Content>::read(size_to_read, |output| {
                ptr::copy_nonoverlapping(<D as Content>::to_void_ptr(&mapping) as *const u8, output as *mut D as *mut u8, size_to_read);
                Ok(())
            })

        } else {
            let mut ctxt = self.context.make_current();

            if ctxt.state.lost_context {
                return Err(ReadError::ContextLost);
            }

            self.assert_unmapped(&mut ctxt);
            self.barrier_for_buffer_update(&mut ctxt);

            <D as Content>::read(size_to_read, |output| {
                if ctxt.version >= &Version(Api::Gl, 4, 5) {
                    ctxt.gl.GetNamedBufferSubData(self.id, range.start as gl::types::GLintptr,
                                                  size_to_read as gl::types::GLsizeiptr,
                                                  output as *mut _ as *mut _);

                } else if ctxt.version >= &Version(Api::Gl, 1, 5) {
                    let bind = bind_buffer(&mut ctxt, self.id, self.ty);
                    ctxt.gl.GetBufferSubData(bind, range.start as gl::types::GLintptr,
                                             size_to_read as gl::types::GLsizeiptr,
                                             output as *mut _ as *mut _);

                } else if ctxt.extensions.gl_arb_vertex_buffer_object {
                    let bind = bind_buffer(&mut ctxt, self.id, self.ty);
                    ctxt.gl.GetBufferSubDataARB(bind, range.start as gl::types::GLintptr,
                                                size_to_read as gl::types::GLsizeiptr,
                                                output as *mut _ as *mut _);

                } else if ctxt.version >= &Version(Api::GlEs, 1, 0) {
                    return Err(ReadError::NotSupported);

                } else {
                    unreachable!()
                }

                Ok(())
            })
        }
    }

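    /// Copies a range of this buffer into the target buffer at the given offset.
    ///
    /// Panics if the ranges are out of bounds.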
    pub fn copy_to(&self, range: Range<usize>, target: &Alloc, dest_offset: usize)
                   -> Result<(), CopyError>
    {
        assert!(range.end >= range.start);
        assert!(range.end <= self.size);
        assert!(dest_offset + range.end - range.start <= target.size);

        let mut ctxt = self.context.make_current();

        unsafe {
            copy_buffer(&mut ctxt, self.id, range.start, target.id, dest_offset,
                        range.end - range.start)
        }
    }
}

impl fmt::Debug for Alloc {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        write!(fmt, "Buffer #{} (size: {} bytes)", self.id, self.size)
    }
}

impl Drop for Alloc {
    fn drop(&mut self) {
        unsafe {
            let mut ctxt = self.context.make_current();
            self.assert_unmapped(&mut ctxt);
            self.assert_not_transform_feedback(&mut ctxt);
            VertexAttributesSystem::purge_buffer(&mut ctxt, self.id);
            destroy_buffer(&mut ctxt, self.id);
        }
    }
}

impl GlObject for Alloc {
    type Id = gl::types::GLuint;

    #[inline]
    fn get_id(&self) -> gl::types::GLuint {
        self.id
    }
}

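/// A mapping of a buffer. Private object that the public mapping types wrap around.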
enum MappingImpl<'b, D: ?Sized> {
    PersistentMapping {
        buffer: &'b Alloc,
        offset_bytes: usize,
        data: *mut D,
        needs_flushing: bool,
    },

    TemporaryBuffer {
        original_buffer: &'b Alloc,
        original_buffer_offset: usize,
        temporary_buffer: gl::types::GLuint,
        temporary_buffer_data: *mut D,
        needs_flushing: bool,
    },

    RegularMapping {
        buffer: &'b mut Alloc,
        data: *mut D,
        needs_flushing: bool,
    },
}

unsafe impl<'a, D: ?Sized> Sync for MappingImpl<'a, D> where D: Send + Sync {}

impl<'a, D: ?Sized> Drop for MappingImpl<'a, D> {
    fn drop(&mut self) {
        match *self {
            MappingImpl::PersistentMapping { buffer, offset_bytes, data, needs_flushing } => {
                let mut ctxt = buffer.context.make_current();
                unsafe {
                    if needs_flushing {
                        flush_range(&mut ctxt, buffer.id, buffer.ty,
                                    offset_bytes .. offset_bytes + mem::size_of_val(&*data));
                    }
                }
            },

            MappingImpl::TemporaryBuffer { original_buffer, original_buffer_offset,
                                           temporary_buffer, temporary_buffer_data,
                                           needs_flushing } =>
            {
                let mut ctxt = original_buffer.context.make_current();
                original_buffer.barrier_for_buffer_update(&mut ctxt);

                unsafe {
                    if needs_flushing {
                        flush_range(&mut ctxt, temporary_buffer, original_buffer.ty,
                                    0 .. mem::size_of_val(&*temporary_buffer_data));
                    }
                    unmap_buffer(&mut ctxt, temporary_buffer, original_buffer.ty);
                    if needs_flushing {
                        copy_buffer(&mut ctxt, temporary_buffer, 0, original_buffer.id,
                                    original_buffer_offset, mem::size_of_val(&*temporary_buffer_data)).unwrap();
                    }

                    destroy_buffer(&mut ctxt, temporary_buffer);
                }
            },

            MappingImpl::RegularMapping { ref mut buffer, data, needs_flushing } => {
                let mut ctxt = buffer.context.make_current();

                unsafe {
                    if needs_flushing {
                        flush_range(&mut ctxt, buffer.id, buffer.ty,
                                    0 .. mem::size_of_val(&*data));
                    }
                    unmap_buffer(&mut ctxt, buffer.id, buffer.ty);
                }

                buffer.mapped.set(false);
            },
        }
    }
}

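/// A mapping of a buffer for reading and writing.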
pub struct Mapping<'b, D: ?Sized> where D: Content {
    mapping: MappingImpl<'b, D>,
}

impl<'a, D: ?Sized> Deref for Mapping<'a, D> where D: Content {
    type Target = D;

    #[inline]
    fn deref(&self) -> &D {
        match self.mapping {
            MappingImpl::PersistentMapping { data, .. } => {
                unsafe { &*data }
            },

            MappingImpl::TemporaryBuffer { temporary_buffer_data, .. } => {
                unsafe { &*temporary_buffer_data }
            },

            MappingImpl::RegularMapping { data, .. } => {
                unsafe { &*data }
            },
        }
    }
}

impl<'a, D: ?Sized> DerefMut for Mapping<'a, D> where D: Content {
    #[inline]
    fn deref_mut(&mut self) -> &mut D {
        match self.mapping {
            MappingImpl::PersistentMapping { data, .. } => {
                unsafe { &mut *data }
            },

            MappingImpl::TemporaryBuffer { temporary_buffer_data, .. } => {
                unsafe { &mut *temporary_buffer_data }
            },

            MappingImpl::RegularMapping { data, .. } => {
                unsafe { &mut *data }
            },
        }
    }
}

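/// A mapping of a buffer for reading only.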
pub struct ReadMapping<'b, D: ?Sized> where D: Content {
    mapping: MappingImpl<'b, D>,
}

impl<'a, D: ?Sized> Deref for ReadMapping<'a, D> where D: Content {
    type Target = D;

    #[inline]
    fn deref(&self) -> &D {
        match self.mapping {
            MappingImpl::PersistentMapping { data, .. } => {
                unsafe { &*data }
            },

            MappingImpl::TemporaryBuffer { temporary_buffer_data, .. } => {
                unsafe { &*temporary_buffer_data }
            },

            MappingImpl::RegularMapping { data, .. } => {
                unsafe { &*data }
            },
        }
    }
}

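/// A mapping of a buffer for writing only.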
pub struct WriteMapping<'b, D: ?Sized> where D: Content {
    mapping: MappingImpl<'b, D>,
}

impl<'b, D: ?Sized> WriteMapping<'b, D> where D: Content {
    #[inline]
    fn get_slice(&mut self) -> &mut D {
        match self.mapping {
            MappingImpl::PersistentMapping { data, .. } => {
                unsafe { &mut *data }
            },

            MappingImpl::TemporaryBuffer { temporary_buffer_data, .. } => {
                unsafe { &mut *temporary_buffer_data }
            },

            MappingImpl::RegularMapping { data, .. } => {
                unsafe { &mut *data }
            },
        }
    }
}

impl<'b, D> WriteMapping<'b, D> where D: Content + Copy {
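    /// Writes the whole content of the mapping.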
    #[inline]
    pub fn write(&mut self, value: D) {
        let slice = self.get_slice();
        *slice = value;
    }
}

impl<'b, D> WriteMapping<'b, [D]> where [D]: Content, D: Copy {
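    /// Returns the length of the mapped slice.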
    #[inline]
    pub fn len(&self) -> usize {
        match self.mapping {
            MappingImpl::PersistentMapping { data, .. } => unsafe { (&*data).len() },
            MappingImpl::TemporaryBuffer { temporary_buffer_data, .. } => unsafe { (&*temporary_buffer_data).len() },
            MappingImpl::RegularMapping { data, .. } => unsafe { (&*data).len() },
        }
    }

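    /// Changes an element of the mapped slice. Panics if `index` is out of range.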
    #[inline]
    pub fn set(&mut self, index: usize, value: D) {
        let slice = self.get_slice();
        slice[index] = value;
    }
}

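/// Returns true if the backend supports reading the content of a buffer.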
pub fn is_buffer_read_supported<C: ?Sized>(ctxt: &C) -> bool where C: CapabilitiesSource {
    if ctxt.get_version() >= &Version(Api::Gl, 4, 5) {
        true

    } else if ctxt.get_version() >= &Version(Api::Gl, 1, 5) {
        true

    } else if ctxt.get_extensions().gl_arb_vertex_buffer_object {
        true

    } else if ctxt.get_version() >= &Version(Api::GlEs, 1, 0) {
        false

    } else {
        unreachable!();
    }
}

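/// Creates a new buffer. Returns `(id, immutable, created_with_buffer_storage,
/// persistent_mapping_ptr)`.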
unsafe fn create_buffer<D: ?Sized>(mut ctxt: &mut CommandContext<'_>, size: usize, data: Option<&D>,
                                   ty: BufferType, mode: BufferMode)
                                   -> Result<(gl::types::GLuint, bool, bool, Option<*mut raw::c_void>),
                                             BufferCreationError>
    where D: Content
{
    if !is_buffer_type_supported(ctxt, ty) {
        return Err(BufferCreationError::BufferTypeNotSupported);
    }

    if let Some(data) = data {
        assert!(mem::size_of_val(data) == size);
    }

    let id = {
        let mut id: gl::types::GLuint = 0;
        if ctxt.version >= &Version(Api::Gl, 4, 5) || ctxt.extensions.gl_arb_direct_state_access {
            ctxt.gl.CreateBuffers(1, &mut id);
        } else if ctxt.version >= &Version(Api::Gl, 1, 5) ||
                  ctxt.version >= &Version(Api::GlEs, 2, 0)
        {
            ctxt.gl.GenBuffers(1, &mut id);
        } else if ctxt.extensions.gl_arb_vertex_buffer_object {
            ctxt.gl.GenBuffersARB(1, &mut id);
        } else {
            unreachable!();
        }
        id
    };

    let data_ptr = if let Some(data) = data {
        if size == 0 {
            // a zero-sized allocation is turned into a 1-byte one below,
            // so the data pointer must be null
            ptr::null()
        } else {
            data.to_void_ptr()
        }
    } else {
        ptr::null()
    };

    // a size of 0 is replaced by 1 to avoid zero-sized allocations
    let size = match size {
        0 => 1,
        a => a
    };

    let mutable_storage_flags = match mode {
        BufferMode::Persistent | BufferMode::Dynamic => gl::DYNAMIC_DRAW,
        BufferMode::Default | BufferMode::Immutable => gl::STATIC_DRAW,
    };

    let immutable_storage_flags = match mode {
        BufferMode::Default => gl::DYNAMIC_STORAGE_BIT | gl::MAP_READ_BIT | gl::MAP_WRITE_BIT,
        BufferMode::Dynamic => gl::DYNAMIC_STORAGE_BIT | gl::CLIENT_STORAGE_BIT | gl::MAP_READ_BIT | gl::MAP_WRITE_BIT,
        BufferMode::Persistent => gl::MAP_PERSISTENT_BIT | gl::MAP_READ_BIT | gl::MAP_WRITE_BIT,
        BufferMode::Immutable => 0,
    };

    let could_be_immutable = match mode {
        BufferMode::Default | BufferMode::Dynamic => false,
        BufferMode::Immutable | BufferMode::Persistent => true,
    };

    // size of the buffer actually allocated by the driver
    let mut obtained_size: gl::types::GLint = 0;

    // whether the buffer ends up with immutable storage
    let immutable: bool;

    // whether the buffer was allocated with `glBufferStorage`
    let created_with_buffer_storage: bool;

    if ctxt.version >= &Version(Api::Gl, 4, 5) || ctxt.extensions.gl_arb_direct_state_access {
        ctxt.gl.NamedBufferStorage(id, size as gl::types::GLsizeiptr,
                                   data_ptr as *const _,
                                   immutable_storage_flags);
        ctxt.gl.GetNamedBufferParameteriv(id, gl::BUFFER_SIZE, &mut obtained_size);
        immutable = could_be_immutable;
        created_with_buffer_storage = true;

    } else if ctxt.extensions.gl_arb_buffer_storage &&
              ctxt.extensions.gl_ext_direct_state_access
    {
        ctxt.gl.NamedBufferStorageEXT(id, size as gl::types::GLsizeiptr,
                                      data_ptr as *const _,
                                      immutable_storage_flags);
        ctxt.gl.GetNamedBufferParameterivEXT(id, gl::BUFFER_SIZE, &mut obtained_size);
        immutable = could_be_immutable;
        created_with_buffer_storage = true;

    } else if ctxt.version >= &Version(Api::Gl, 4, 4) ||
              ctxt.extensions.gl_arb_buffer_storage
    {
        let bind = bind_buffer(&mut ctxt, id, ty);
        ctxt.gl.BufferStorage(bind, size as gl::types::GLsizeiptr,
                              data_ptr as *const _,
                              immutable_storage_flags);
        ctxt.gl.GetBufferParameteriv(bind, gl::BUFFER_SIZE, &mut obtained_size);
        immutable = could_be_immutable;
        created_with_buffer_storage = true;

    } else if ctxt.extensions.gl_ext_buffer_storage {
        let bind = bind_buffer(&mut ctxt, id, ty);
        ctxt.gl.BufferStorageEXT(bind, size as gl::types::GLsizeiptr,
                                 data_ptr as *const _,
                                 immutable_storage_flags);
        ctxt.gl.GetBufferParameteriv(bind, gl::BUFFER_SIZE, &mut obtained_size);
        immutable = could_be_immutable;
        created_with_buffer_storage = true;

    } else if ctxt.version >= &Version(Api::Gl, 1, 5) ||
              ctxt.version >= &Version(Api::GlEs, 2, 0)
    {
        let bind = bind_buffer(&mut ctxt, id, ty);
        ctxt.gl.BufferData(bind, size as gl::types::GLsizeiptr,
                           data_ptr as *const _, mutable_storage_flags);
        ctxt.gl.GetBufferParameteriv(bind, gl::BUFFER_SIZE, &mut obtained_size);
        immutable = false;
        created_with_buffer_storage = false;

    } else if ctxt.extensions.gl_arb_vertex_buffer_object {
        let bind = bind_buffer(&mut ctxt, id, ty);
        ctxt.gl.BufferDataARB(bind, size as gl::types::GLsizeiptr,
                              data_ptr as *const _, mutable_storage_flags);
        ctxt.gl.GetBufferParameterivARB(bind, gl::BUFFER_SIZE, &mut obtained_size);
        immutable = false;
        created_with_buffer_storage = false;

    } else {
        unreachable!();
    }

    if size != obtained_size as usize {
        if ctxt.version >= &Version(Api::Gl, 1, 5) ||
           ctxt.version >= &Version(Api::GlEs, 2, 0)
        {
            ctxt.gl.DeleteBuffers(1, [id].as_ptr());
        } else if ctxt.extensions.gl_arb_vertex_buffer_object {
            ctxt.gl.DeleteBuffersARB(1, [id].as_ptr());
        } else {
            unreachable!();
        }

        return Err(BufferCreationError::OutOfMemory);
    }

    let persistent_mapping = if let BufferMode::Persistent = mode {
        if immutable {
            let ptr = if ctxt.version >= &Version(Api::Gl, 4, 5) {
                ctxt.gl.MapNamedBufferRange(id, 0, size as gl::types::GLsizeiptr,
                                            gl::MAP_READ_BIT | gl::MAP_WRITE_BIT |
                                            gl::MAP_PERSISTENT_BIT | gl::MAP_FLUSH_EXPLICIT_BIT)

            } else if ctxt.version >= &Version(Api::Gl, 3, 0) ||
                      ctxt.extensions.gl_arb_map_buffer_range
            {
                let bind = bind_buffer(&mut ctxt, id, ty);
                ctxt.gl.MapBufferRange(bind, 0, size as gl::types::GLsizeiptr,
                                       gl::MAP_READ_BIT | gl::MAP_WRITE_BIT |
                                       gl::MAP_PERSISTENT_BIT | gl::MAP_FLUSH_EXPLICIT_BIT)
            } else {
                unreachable!();
            };

            if ptr.is_null() {
                let error = crate::get_gl_error(ctxt);
                panic!("glMapBufferRange returned null (error: {:?})", error);
            }

            Some(ptr)

        } else {
            None
        }
    } else {
        None
    };

    Ok((id, immutable, created_with_buffer_storage, persistent_mapping))
}

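/// Returns true if a buffer of the given type can be created on this context.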
fn is_buffer_type_supported(ctxt: &mut CommandContext<'_>, ty: BufferType) -> bool {
    match ty {
        BufferType::ArrayBuffer | BufferType::ElementArrayBuffer => true,

        BufferType::PixelPackBuffer | BufferType::PixelUnpackBuffer => {
            ctxt.version >= &Version(Api::Gl, 2, 1) || ctxt.version >= &Version(Api::GlEs, 3, 0) ||
            ctxt.extensions.gl_arb_pixel_buffer_object || ctxt.extensions.gl_nv_pixel_buffer_object
        },

        BufferType::UniformBuffer => {
            ctxt.version >= &Version(Api::Gl, 3, 1) || ctxt.version >= &Version(Api::GlEs, 3, 0) ||
            ctxt.extensions.gl_arb_uniform_buffer_object
        },

        BufferType::CopyReadBuffer => {
            ctxt.version >= &Version(Api::Gl, 3, 1) || ctxt.extensions.gl_arb_copy_buffer ||
            ctxt.version >= &Version(Api::GlEs, 3, 0) || ctxt.extensions.gl_nv_copy_buffer
        },

        BufferType::CopyWriteBuffer => {
            ctxt.version >= &Version(Api::Gl, 3, 1) || ctxt.extensions.gl_arb_copy_buffer ||
            ctxt.version >= &Version(Api::GlEs, 3, 0) || ctxt.extensions.gl_nv_copy_buffer
        },

        BufferType::DrawIndirectBuffer => {
            ctxt.version >= &Version(Api::Gl, 4, 3) || ctxt.extensions.gl_arb_multi_draw_indirect ||
            ctxt.extensions.gl_ext_multi_draw_indirect
        },

        BufferType::DispatchIndirectBuffer => {
            ctxt.version >= &Version(Api::Gl, 4, 3) || ctxt.version >= &Version(Api::GlEs, 3, 1) ||
            ctxt.extensions.gl_arb_compute_shader
        },

        BufferType::TextureBuffer => {
            ctxt.version >= &Version(Api::Gl, 3, 0) ||
            ctxt.extensions.gl_arb_texture_buffer_object ||
            ctxt.extensions.gl_ext_texture_buffer_object ||
            ctxt.extensions.gl_ext_texture_buffer || ctxt.extensions.gl_oes_texture_buffer
        },

        BufferType::QueryBuffer => {
            ctxt.version >= &Version(Api::Gl, 4, 4) ||
            ctxt.extensions.gl_arb_query_buffer_object ||
            ctxt.extensions.gl_amd_query_buffer_object
        },

        BufferType::ShaderStorageBuffer => {
            ctxt.version >= &Version(Api::Gl, 4, 3) ||
            ctxt.extensions.gl_arb_shader_storage_buffer_object ||
            ctxt.extensions.gl_nv_shader_storage_buffer_object
        },

        BufferType::TransformFeedbackBuffer => {
            ctxt.version >= &Version(Api::Gl, 3, 0) ||
            ctxt.extensions.gl_ext_transform_feedback ||
            ctxt.extensions.gl_nv_transform_feedback
        },

        BufferType::AtomicCounterBuffer => {
            ctxt.version >= &Version(Api::Gl, 4, 2) ||
            ctxt.extensions.gl_arb_shader_atomic_counters ||
            ctxt.extensions.gl_nv_shader_atomic_counters
        },
    }
}

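/// Binds a buffer to the binding point corresponding to its type, updating the state cache,
/// and returns the `GLenum` of the binding point.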
unsafe fn bind_buffer(ctxt: &mut CommandContext<'_>, id: gl::types::GLuint, ty: BufferType)
                      -> gl::types::GLenum
{
    macro_rules! check {
        ($ctxt:expr, $input_id:expr, $input_ty:expr, $check:ident, $state_var:ident) => (
            if $input_ty == BufferType::$check {
                let en = $input_ty.to_glenum();

                if ctxt.state.$state_var != $input_id {
                    ctxt.state.$state_var = $input_id;

                    if ctxt.version >= &Version(Api::Gl, 1, 5) ||
                       ctxt.version >= &Version(Api::GlEs, 2, 0)
                    {
                        ctxt.gl.BindBuffer(en, id);
                    } else if ctxt.extensions.gl_arb_vertex_buffer_object {
                        ctxt.gl.BindBufferARB(en, id);
                    } else {
                        unreachable!();
                    }
                }

                return en;
            }
        );
    }

    check!(ctxt, id, ty, ArrayBuffer, array_buffer_binding);
    check!(ctxt, id, ty, PixelPackBuffer, pixel_pack_buffer_binding);
    check!(ctxt, id, ty, PixelUnpackBuffer, pixel_unpack_buffer_binding);
    check!(ctxt, id, ty, UniformBuffer, uniform_buffer_binding);
    check!(ctxt, id, ty, CopyReadBuffer, copy_read_buffer_binding);
    check!(ctxt, id, ty, CopyWriteBuffer, copy_write_buffer_binding);
    check!(ctxt, id, ty, DispatchIndirectBuffer, dispatch_indirect_buffer_binding);
    check!(ctxt, id, ty, DrawIndirectBuffer, draw_indirect_buffer_binding);
    check!(ctxt, id, ty, QueryBuffer, query_buffer_binding);
    check!(ctxt, id, ty, TextureBuffer, texture_buffer_binding);
    check!(ctxt, id, ty, AtomicCounterBuffer, atomic_counter_buffer_binding);
    check!(ctxt, id, ty, ShaderStorageBuffer, shader_storage_buffer_binding);

    if ty == BufferType::ElementArrayBuffer {
        // the element array buffer binding is part of the VAO state, so the
        // current VAO's cached binding is hijacked instead of the shared cache
        VertexAttributesSystem::hijack_current_element_array_buffer(ctxt);

        if ctxt.version >= &Version(Api::Gl, 1, 5) ||
           ctxt.version >= &Version(Api::GlEs, 2, 0)
        {
            ctxt.gl.BindBuffer(gl::ELEMENT_ARRAY_BUFFER, id);
        } else if ctxt.extensions.gl_arb_vertex_buffer_object {
            ctxt.gl.BindBufferARB(gl::ELEMENT_ARRAY_BUFFER, id);
        } else {
            unreachable!();
        }

        return gl::ELEMENT_ARRAY_BUFFER;
    }

    if ty == BufferType::TransformFeedbackBuffer {
        debug_assert!(ctxt.capabilities.max_indexed_transform_feedback_buffer >= 1);

        // transform feedback buffer bindings are tracked through the indexed
        // bind points, so slot 0 of the indexed state is updated here
        if ctxt.state.indexed_transform_feedback_buffer_bindings[0].buffer != id {
            ctxt.state.indexed_transform_feedback_buffer_bindings[0].buffer = id;

            if ctxt.version >= &Version(Api::Gl, 1, 5) ||
               ctxt.version >= &Version(Api::GlEs, 2, 0)
            {
                ctxt.gl.BindBuffer(gl::TRANSFORM_FEEDBACK_BUFFER, id);
            } else if ctxt.extensions.gl_arb_vertex_buffer_object {
                ctxt.gl.BindBufferARB(gl::TRANSFORM_FEEDBACK_BUFFER, id);
            } else {
                unreachable!();
            }
        }

        return gl::TRANSFORM_FEEDBACK_BUFFER;
    }

    unreachable!();
}

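/// Binds a range of a buffer to an indexed bind point (uniform, transform feedback, atomic
/// counter or shader storage), updating the state cache.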
unsafe fn indexed_bind_buffer(ctxt: &mut CommandContext<'_>, id: gl::types::GLuint, ty: BufferType,
                              index: gl::types::GLuint, range: Range<usize>)
{
    let offset = range.start as gl::types::GLintptr;
    let size = (range.end - range.start) as gl::types::GLsizeiptr;

    macro_rules! check {
        ($ctxt:expr, $input_id:expr, $input_ty:expr, $input_index:expr, $check:ident,
         $state_var:ident, $max:ident) =>
        (
            if $input_ty == BufferType::$check {
                let en = $input_ty.to_glenum();

                if $input_index >= ctxt.capabilities.$max as gl::types::GLuint {
                    panic!("Indexed buffer out of range");
                }

                if ctxt.state.$state_var.len() <= $input_index as usize {
                    for _ in ctxt.state.$state_var.len() .. $input_index as usize + 1 {
                        ctxt.state.$state_var.push(Default::default());
                    }
                }

1501
1502 let unit = &mut ctxt.state.$state_var[$input_index as usize];
1503 if unit.buffer != $input_id || unit.offset != offset || unit.size != size {
1504 unit.buffer = $input_id;
1505 unit.offset = offset;
1506 unit.size = size;
1507
1508 if ctxt.version >= &Version(Api::Gl, 3, 0) ||
1509 ctxt.version >= &Version(Api::GlEs, 3, 0)
1510 {
1511 ctxt.gl.BindBufferRange(en, $input_index, id, offset, size);
1512 } else if ctxt.extensions.gl_ext_transform_feedback {
1513 ctxt.gl.BindBufferRangeEXT(en, $input_index, id, offset, size);
1514 } else {
1515 panic!("The backend doesn't support indexed buffer bind points");
1516 }
1517 }
1518
1519 return;
1520 }
1521 );
1522 }
1523
1524 check!(ctxt, id, ty, index, UniformBuffer, indexed_uniform_buffer_bindings,
1525 max_indexed_uniform_buffer);
1526 check!(ctxt, id, ty, index, TransformFeedbackBuffer, indexed_transform_feedback_buffer_bindings,
1527 max_indexed_transform_feedback_buffer);
1528 check!(ctxt, id, ty, index, AtomicCounterBuffer, indexed_atomic_counter_buffer_bindings,
1529 max_indexed_atomic_counter_buffer);
1530 check!(ctxt, id, ty, index, ShaderStorageBuffer, indexed_shader_storage_buffer_bindings,
1531 max_indexed_shader_storage_buffer);
1532
1533 panic!();
1534}
1535
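/// Copies `size` bytes from `source` at `source_offset` to `dest` at `dest_offset`.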
unsafe fn copy_buffer(ctxt: &mut CommandContext<'_>, source: gl::types::GLuint,
                      source_offset: usize, dest: gl::types::GLuint, dest_offset: usize,
                      size: usize) -> Result<(), CopyError>
{
    if ctxt.version >= &Version(Api::Gl, 4, 5) || ctxt.extensions.gl_arb_direct_state_access {
        ctxt.gl.CopyNamedBufferSubData(source, dest, source_offset as gl::types::GLintptr,
                                       dest_offset as gl::types::GLintptr,
                                       size as gl::types::GLsizeiptr);

    } else if ctxt.extensions.gl_ext_direct_state_access {
        ctxt.gl.NamedCopyBufferSubDataEXT(source, dest, source_offset as gl::types::GLintptr,
                                          dest_offset as gl::types::GLintptr,
                                          size as gl::types::GLsizeiptr);

    } else if ctxt.version >= &Version(Api::Gl, 3, 1) || ctxt.version >= &Version(Api::GlEs, 3, 0)
           || ctxt.extensions.gl_arb_copy_buffer || ctxt.extensions.gl_nv_copy_buffer
    {
        fn find_bind_point(ctxt: &mut CommandContext<'_>, id: gl::types::GLuint)
                           -> Option<gl::types::GLenum>
        {
            if ctxt.state.array_buffer_binding == id {
                Some(gl::ARRAY_BUFFER)
            } else if ctxt.state.pixel_pack_buffer_binding == id {
                Some(gl::PIXEL_PACK_BUFFER)
            } else if ctxt.state.pixel_unpack_buffer_binding == id {
                Some(gl::PIXEL_UNPACK_BUFFER)
            } else if ctxt.state.uniform_buffer_binding == id {
                Some(gl::UNIFORM_BUFFER)
            } else if ctxt.state.copy_read_buffer_binding == id {
                Some(gl::COPY_READ_BUFFER)
            } else if ctxt.state.copy_write_buffer_binding == id {
                Some(gl::COPY_WRITE_BUFFER)
            } else {
                None
            }
        }

        let source_bind_point = match find_bind_point(ctxt, source) {
            Some(p) => p,
            None => {
                // if the destination is bound to COPY_READ, bind the source to
                // COPY_WRITE instead to avoid stealing the destination's bind point
                if ctxt.state.copy_read_buffer_binding == dest {
                    bind_buffer(ctxt, source, BufferType::CopyWriteBuffer)
                } else {
                    bind_buffer(ctxt, source, BufferType::CopyReadBuffer)
                }
            }
        };

        let dest_bind_point = match find_bind_point(ctxt, dest) {
            Some(p) => p,
            None => bind_buffer(ctxt, dest, BufferType::CopyWriteBuffer)
        };

        if ctxt.version >= &Version(Api::Gl, 3, 1) || ctxt.version >= &Version(Api::GlEs, 3, 0)
           || ctxt.extensions.gl_arb_copy_buffer
        {
            ctxt.gl.CopyBufferSubData(source_bind_point, dest_bind_point,
                                      source_offset as gl::types::GLintptr,
                                      dest_offset as gl::types::GLintptr,
                                      size as gl::types::GLsizeiptr);
        } else if ctxt.extensions.gl_nv_copy_buffer {
            ctxt.gl.CopyBufferSubDataNV(source_bind_point, dest_bind_point,
                                        source_offset as gl::types::GLintptr,
                                        dest_offset as gl::types::GLintptr,
                                        size as gl::types::GLsizeiptr);
        } else {
            unreachable!();
        }

    } else {
        return Err(CopyError::NotSupported);
    }

    Ok(())
}

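/// Deletes a buffer, clearing every cached binding that still refers to it first.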
unsafe fn destroy_buffer(ctxt: &mut CommandContext<'_>, id: gl::types::GLuint) {
    if ctxt.state.array_buffer_binding == id {
        ctxt.state.array_buffer_binding = 0;
    }

    if ctxt.state.pixel_pack_buffer_binding == id {
        ctxt.state.pixel_pack_buffer_binding = 0;
    }

    if ctxt.state.pixel_unpack_buffer_binding == id {
        ctxt.state.pixel_unpack_buffer_binding = 0;
    }

    if ctxt.state.uniform_buffer_binding == id {
        ctxt.state.uniform_buffer_binding = 0;
    }

    if ctxt.state.copy_read_buffer_binding == id {
        ctxt.state.copy_read_buffer_binding = 0;
    }

    if ctxt.state.copy_write_buffer_binding == id {
        ctxt.state.copy_write_buffer_binding = 0;
    }

    if ctxt.state.dispatch_indirect_buffer_binding == id {
        ctxt.state.dispatch_indirect_buffer_binding = 0;
    }

    if ctxt.state.draw_indirect_buffer_binding == id {
        ctxt.state.draw_indirect_buffer_binding = 0;
    }

    if ctxt.state.query_buffer_binding == id {
        ctxt.state.query_buffer_binding = 0;
    }

    if ctxt.state.texture_buffer_binding == id {
        ctxt.state.texture_buffer_binding = 0;
    }

    if ctxt.state.atomic_counter_buffer_binding == id {
        ctxt.state.atomic_counter_buffer_binding = 0;
    }

    if ctxt.state.shader_storage_buffer_binding == id {
        ctxt.state.shader_storage_buffer_binding = 0;
    }

    for point in ctxt.state.indexed_atomic_counter_buffer_bindings.iter_mut() {
        if point.buffer == id {
            point.buffer = 0;
        }
    }

    for point in ctxt.state.indexed_shader_storage_buffer_bindings.iter_mut() {
        if point.buffer == id {
            point.buffer = 0;
        }
    }

    for point in ctxt.state.indexed_uniform_buffer_bindings.iter_mut() {
        if point.buffer == id {
            point.buffer = 0;
        }
    }

    for point in ctxt.state.indexed_transform_feedback_buffer_bindings.iter_mut() {
        if point.buffer == id {
            point.buffer = 0;
        }
    }

    if ctxt.version >= &Version(Api::Gl, 1, 5) ||
       ctxt.version >= &Version(Api::GlEs, 2, 0)
    {
        ctxt.gl.DeleteBuffers(1, [id].as_ptr());
    } else if ctxt.extensions.gl_arb_vertex_buffer_object {
        ctxt.gl.DeleteBuffersARB(1, [id].as_ptr());
    } else {
        unreachable!();
    }
}

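/// Flushes a mapped range of a buffer with `glFlushMappedBufferRange` or an equivalent.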
unsafe fn flush_range(mut ctxt: &mut CommandContext<'_>, id: gl::types::GLuint, ty: BufferType,
                      range: Range<usize>)
{
    if ctxt.version >= &Version(Api::Gl, 4, 5) || ctxt.extensions.gl_arb_direct_state_access {
        ctxt.gl.FlushMappedNamedBufferRange(id, range.start as gl::types::GLintptr,
                                            (range.end - range.start) as gl::types::GLsizeiptr);

    } else if ctxt.extensions.gl_ext_direct_state_access {
        ctxt.gl.FlushMappedNamedBufferRangeEXT(id, range.start as gl::types::GLintptr,
                                               (range.end - range.start) as gl::types::GLsizeiptr);

    } else if ctxt.version >= &Version(Api::Gl, 3, 0) ||
              ctxt.version >= &Version(Api::GlEs, 3, 0) ||
              ctxt.extensions.gl_arb_map_buffer_range
    {
        let bind = bind_buffer(&mut ctxt, id, ty);
        ctxt.gl.FlushMappedBufferRange(bind, range.start as gl::types::GLintptr,
                                       (range.end - range.start) as gl::types::GLsizeiptr)

    } else {
        unreachable!();
    }
}

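/// Maps a range of a buffer, returning `None` if mapping is not supported by the backend.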
unsafe fn map_buffer(mut ctxt: &mut CommandContext<'_>, id: gl::types::GLuint, ty: BufferType,
                     range: Range<usize>, read: bool, write: bool) -> Option<*mut ()>
{
    let flags = match (read, write) {
        (true, true) => gl::MAP_FLUSH_EXPLICIT_BIT | gl::MAP_READ_BIT | gl::MAP_WRITE_BIT,
        (true, false) => gl::MAP_READ_BIT,
        (false, true) => gl::MAP_FLUSH_EXPLICIT_BIT | gl::MAP_WRITE_BIT,
        (false, false) => 0,
    };

    if ctxt.version >= &Version(Api::Gl, 4, 5) {
        Some(ctxt.gl.MapNamedBufferRange(id, range.start as gl::types::GLintptr,
                                         (range.end - range.start) as gl::types::GLsizeiptr,
                                         flags) as *mut ())

    } else if ctxt.version >= &Version(Api::Gl, 3, 0) ||
              ctxt.version >= &Version(Api::GlEs, 3, 0) ||
              ctxt.extensions.gl_arb_map_buffer_range
    {
        let bind = bind_buffer(&mut ctxt, id, ty);
        Some(ctxt.gl.MapBufferRange(bind, range.start as gl::types::GLintptr,
                                    (range.end - range.start) as gl::types::GLsizeiptr,
                                    flags) as *mut ())

    } else {
        None
    }
}

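/// Unmaps a previously-mapped buffer.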
unsafe fn unmap_buffer(mut ctxt: &mut CommandContext<'_>, id: gl::types::GLuint, ty: BufferType) {
    if ctxt.version >= &Version(Api::Gl, 4, 5) {
        ctxt.gl.UnmapNamedBuffer(id);

    } else if ctxt.version >= &Version(Api::Gl, 1, 5) ||
              ctxt.version >= &Version(Api::GlEs, 3, 0)
    {
        let bind = bind_buffer(&mut ctxt, id, ty);
        ctxt.gl.UnmapBuffer(bind);

    } else if ctxt.extensions.gl_arb_vertex_buffer_object {
        let bind = bind_buffer(&mut ctxt, id, ty);
        ctxt.gl.UnmapBufferARB(bind);

    } else {
        unreachable!();
    }
}