use super::super::shader_source::{OPTIMIZED_SHADERS, UNOPTIMIZED_SHADERS};
use api::{ColorF, ImageDescriptor, ImageFormat};
use api::{MixBlendMode, ImageBufferKind, VoidPtrToSizeFn};
use api::{CrashAnnotator, CrashAnnotation, CrashAnnotatorGuard};
use api::units::*;
use gl_context_loader::{gl, GenericGlContext, GlType, GLenum, GLuint, GLsync, GLint, GLvoid};
use euclid::default::Transform3D;
use crate::render_api::MemoryReport;
use crate::internal_types::{FastHashMap, RenderTargetInfo, Swizzle, SwizzleSettings};
use crate::util::round_up_to_multiple;
use log::Level;
use smallvec::SmallVec;
use std::{
    borrow::Cow,
    cell::{Cell, RefCell},
    cmp,
    collections::hash_map::Entry,
    marker::PhantomData,
    mem,
    num::NonZeroUsize,
    os::raw::c_void,
    ops::Add,
    path::PathBuf,
    ptr,
    rc::Rc,
    slice,
    sync::Arc,
    thread,
};
use webrender_build::shader::{
    ProgramSourceDigest, ShaderKind, ShaderVersion, build_shader_main_string,
    build_shader_prefix_string, do_build_shader_string, shader_source_from_file,
};
use malloc_size_of::MallocSizeOfOps;

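/// A sequential identifier for GPU frames, used to track when a texture was
/// last used.
///
/// A small sketch of the arithmetic this type supports (marked `ignore`
/// since the doctest path depends on how the crate is built): ids advance by
/// plain `usize` addition and compare with the derived orderings.
///
/// ```ignore
/// let id = GpuFrameId::new(7);
/// assert_eq!(id + 2, GpuFrameId::new(9));
/// ```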
#[derive(Debug, Copy, Clone, PartialEq, Ord, Eq, PartialOrd)]
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
pub struct GpuFrameId(usize);

impl GpuFrameId {
    pub fn new(value: usize) -> Self {
        GpuFrameId(value)
    }
}

impl Add<usize> for GpuFrameId {
    type Output = GpuFrameId;

    fn add(self, other: usize) -> GpuFrameId {
        GpuFrameId(self.0 + other)
    }
}

pub struct TextureSlot(pub usize);

// In some places we need to temporarily bind a texture to any slot.
const DEFAULT_TEXTURE: TextureSlot = TextureSlot(0);

#[repr(u32)]
pub enum DepthFunction {
    Always = gl::ALWAYS,
    Less = gl::LESS,
    LessEqual = gl::LEQUAL,
}

#[repr(u32)]
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
pub enum TextureFilter {
    Nearest,
    Linear,
    Trilinear,
}

/// A pair of texture formats: the one the driver stores internally, and the
/// one presented to uploads and downloads. They differ when BGRA data is
/// stored in RGBA textures and swizzled at sampling time.
#[derive(Clone, Debug)]
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
pub struct TextureFormatPair<T> {
    pub internal: T,
    pub external: T,
}

impl<T: Copy> From<T> for TextureFormatPair<T> {
    fn from(value: T) -> Self {
        TextureFormatPair {
            internal: value,
            external: value,
        }
    }
}

#[derive(Debug)]
pub enum VertexAttributeKind {
    F32,
    U8Norm,
    U16Norm,
    I32,
    U16,
}

#[derive(Debug)]
pub struct VertexAttribute {
    pub name: &'static str,
    pub count: u32,
    pub kind: VertexAttributeKind,
}

#[derive(Debug)]
pub struct VertexDescriptor {
    pub vertex_attributes: &'static [VertexAttribute],
    pub instance_attributes: &'static [VertexAttribute],
}

enum FBOTarget {
    Read,
    Draw,
}

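/// How texture data gets uploaded to the GPU.
///
/// A rough sketch of the trade-off: `Immediate` hands the caller's buffer
/// straight to `glTexSubImage2D`, while `PixelBuffer` stages the data in a
/// pixel buffer object (created with the given usage hint) so the driver can
/// schedule the copy asynchronously. For example, data that changes every
/// frame might use `UploadMethod::PixelBuffer(VertexUsageHint::Stream)`.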
#[derive(Debug, Clone)]
pub enum UploadMethod {
    /// Upload directly from the caller's buffer with `glTexSubImage2D`.
    Immediate,
    /// Stage the data through a pixel buffer object.
    PixelBuffer(VertexUsageHint),
}

pub unsafe trait Texel: Copy {}
unsafe impl Texel for u8 {}
unsafe impl Texel for f32 {}

fn depth_target_size_in_bytes(dimensions: &DeviceIntSize) -> usize {
    // We cannot query the depth target's exact footprint, so assume the
    // worst case of a packed 24-bit depth / 8-bit stencil target: 4 bytes
    // per pixel.
    let pixels = dimensions.width * dimensions.height;
    (pixels as usize) * 4
}

pub fn get_gl_target(target: ImageBufferKind) -> GLuint {
    match target {
        ImageBufferKind::Texture2D => gl::TEXTURE_2D,
        ImageBufferKind::TextureRect => gl::TEXTURE_RECTANGLE,
        ImageBufferKind::TextureExternal => gl::TEXTURE_EXTERNAL_OES,
    }
}

pub fn from_gl_target(target: GLuint) -> ImageBufferKind {
    match target {
        gl::TEXTURE_2D => ImageBufferKind::Texture2D,
        gl::TEXTURE_RECTANGLE => ImageBufferKind::TextureRect,
        gl::TEXTURE_EXTERNAL_OES => ImageBufferKind::TextureExternal,
        _ => panic!("Unexpected target {:?}", target),
    }
}

fn supports_extension(extensions: &[String], extension: &str) -> bool {
    extensions.iter().any(|s| s == extension)
}

fn get_shader_version(gl: &GenericGlContext) -> ShaderVersion {
    match gl.get_type() {
        GlType::Gl => ShaderVersion::Gl,
        GlType::GlEs => ShaderVersion::Gles,
    }
}

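/// Returns the unoptimized shader source for `shader_name`.
///
/// When `base_path` is set (the device's `resource_override_path` option),
/// the source is re-read from `<base_path>/<shader_name>.glsl` on every call,
/// which is what makes live shader editing possible; otherwise the source
/// baked into the binary at build time is returned.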
pub fn get_unoptimized_shader_source(shader_name: &str, base_path: Option<&PathBuf>) -> Cow<'static, str> {
    if let Some(ref base) = base_path {
        let shader_path = base.join(&format!("{}.glsl", shader_name));
        Cow::Owned(shader_source_from_file(&shader_path))
    } else {
        Cow::Borrowed(
            UNOPTIMIZED_SHADERS
                .get(shader_name)
                .expect("Shader not found")
                .source
        )
    }
}

pub trait FileWatcherHandler: Send {
    fn file_changed(&self, path: PathBuf);
}

impl VertexAttributeKind {
    fn size_in_bytes(&self) -> u32 {
        match *self {
            VertexAttributeKind::F32 => 4,
            VertexAttributeKind::U8Norm => 1,
            VertexAttributeKind::U16Norm => 2,
            VertexAttributeKind::I32 => 4,
            VertexAttributeKind::U16 => 2,
        }
    }
}

impl VertexAttribute {
    fn size_in_bytes(&self) -> u32 {
        self.count * self.kind.size_in_bytes()
    }

    fn bind_to_vao(
        &self,
        attr_index: GLuint,
        divisor: GLuint,
        stride: GLint,
        offset: GLuint,
        gl: &GenericGlContext,
    ) {
        gl.enable_vertex_attrib_array(attr_index);
        gl.vertex_attrib_divisor(attr_index, divisor);

        match self.kind {
            VertexAttributeKind::F32 => {
                gl.vertex_attrib_pointer(
                    attr_index,
                    self.count as GLint,
                    gl::FLOAT,
                    false,
                    stride,
                    offset,
                );
            }
            VertexAttributeKind::U8Norm => {
                gl.vertex_attrib_pointer(
                    attr_index,
                    self.count as GLint,
                    gl::UNSIGNED_BYTE,
                    true,
                    stride,
                    offset,
                );
            }
            VertexAttributeKind::U16Norm => {
                gl.vertex_attrib_pointer(
                    attr_index,
                    self.count as GLint,
                    gl::UNSIGNED_SHORT,
                    true,
                    stride,
                    offset,
                );
            }
            VertexAttributeKind::I32 => {
                gl.vertex_attrib_i_pointer(
                    attr_index,
                    self.count as GLint,
                    gl::INT,
                    stride,
                    offset,
                );
            }
            VertexAttributeKind::U16 => {
                gl.vertex_attrib_i_pointer(
                    attr_index,
                    self.count as GLint,
                    gl::UNSIGNED_SHORT,
                    stride,
                    offset,
                );
            }
        }
    }
}

impl VertexDescriptor {
    fn instance_stride(&self) -> u32 {
        self.instance_attributes
            .iter()
            .map(|attr| attr.size_in_bytes())
            .sum()
    }

    fn bind_attributes(
        attributes: &[VertexAttribute],
        start_index: usize,
        divisor: u32,
        gl: &GenericGlContext,
        vbo: VBOId,
    ) {
        vbo.bind(gl);

        let stride: u32 = attributes
            .iter()
            .map(|attr| attr.size_in_bytes())
            .sum();

        let mut offset = 0;
        for (i, attr) in attributes.iter().enumerate() {
            let attr_index = (start_index + i) as GLuint;
            attr.bind_to_vao(attr_index, divisor, stride as _, offset, gl);
            offset += attr.size_in_bytes();
        }
    }

    fn bind(&self, gl: &GenericGlContext, main: VBOId, instance: VBOId, instance_divisor: u32) {
        Self::bind_attributes(self.vertex_attributes, 0, 0, gl, main);

        if !self.instance_attributes.is_empty() {
            Self::bind_attributes(
                self.instance_attributes,
                self.vertex_attributes.len(),
                instance_divisor,
                gl,
                instance,
            );
        }
    }
}

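// Illustrative sketch only (not used by the renderer): how a
// `VertexDescriptor` is typically declared. The attribute names and layout
// here are hypothetical; real descriptors live in the vertex-type modules.
// Per-vertex data advances every vertex (divisor 0 in `bind`), per-instance
// data advances once per instance.
#[allow(dead_code)]
const EXAMPLE_VERTEX_DESCRIPTOR: VertexDescriptor = VertexDescriptor {
    vertex_attributes: &[
        // A 2-component float position, 8 bytes per vertex.
        VertexAttribute { name: "aPosition", count: 2, kind: VertexAttributeKind::F32 },
    ],
    instance_attributes: &[
        // A normalized RGBA8 color, 4 bytes per instance.
        VertexAttribute { name: "aColor", count: 4, kind: VertexAttributeKind::U8Norm },
    ],
};
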
impl VBOId {
    fn bind(&self, gl: &GenericGlContext) {
        gl.bind_buffer(gl::ARRAY_BUFFER, self.0);
    }
}

impl IBOId {
    fn bind(&self, gl: &GenericGlContext) {
        gl.bind_buffer(gl::ELEMENT_ARRAY_BUFFER, self.0);
    }
}

impl FBOId {
    fn bind(&self, gl: &GenericGlContext, target: FBOTarget) {
        let target = match target {
            FBOTarget::Read => gl::READ_FRAMEBUFFER,
            FBOTarget::Draw => gl::DRAW_FRAMEBUFFER,
        };
        gl.bind_framebuffer(target, self.0);
    }
}

pub struct Stream<'a> {
    attributes: &'a [VertexAttribute],
    vbo: VBOId,
}

pub struct VBO<V> {
    id: GLuint,
    target: GLenum,
    allocated_count: usize,
    marker: PhantomData<V>,
}

impl<V> VBO<V> {
    pub fn allocated_count(&self) -> usize {
        self.allocated_count
    }

    pub fn stream_with<'a>(&self, attributes: &'a [VertexAttribute]) -> Stream<'a> {
        debug_assert_eq!(
            mem::size_of::<V>(),
            attributes.iter().map(|a| a.size_in_bytes() as usize).sum::<usize>()
        );
        Stream {
            attributes,
            vbo: VBOId(self.id),
        }
    }
}

impl<T> Drop for VBO<T> {
    fn drop(&mut self) {
        debug_assert!(thread::panicking() || self.id == 0);
    }
}

#[cfg_attr(feature = "replay", derive(Clone))]
#[derive(Debug)]
pub struct ExternalTexture {
    id: GLuint,
    target: GLuint,
    swizzle: Swizzle,
    uv_rect: TexelRect,
}

impl ExternalTexture {
    pub fn new(
        id: u32,
        target: ImageBufferKind,
        swizzle: Swizzle,
        uv_rect: TexelRect,
    ) -> Self {
        ExternalTexture {
            id,
            target: get_gl_target(target),
            swizzle,
            uv_rect,
        }
    }

    #[cfg(feature = "replay")]
    pub fn internal_id(&self) -> GLuint {
        self.id
    }

    pub fn get_uv_rect(&self) -> TexelRect {
        self.uv_rect
    }
}

bitflags! {
    #[derive(Default)]
    pub struct TextureFlags: u32 {
        /// This texture is allocated as part of the shared texture cache.
        const IS_SHARED_TEXTURE_CACHE = 1 << 0;
    }
}

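/// A GPU texture owned by the `Device`, together with the FBOs that make it
/// usable as a render target.
///
/// `fbo` is populated when the texture is a render target; `fbo_with_depth`
/// is additionally populated when a depth buffer was requested, in which
/// case the depth RBO is shared per-dimension via the device's
/// `depth_targets` map. Dropping a `Texture` whose GL id is still live trips
/// a debug assertion: textures must be freed through the device.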
#[derive(Debug)]
pub struct Texture {
    id: GLuint,
    target: GLuint,
    format: ImageFormat,
    size: DeviceIntSize,
    filter: TextureFilter,
    flags: TextureFlags,
    /// The swizzle active on the last sampling bind, tracked so redundant
    /// `glTexParameter` calls can be skipped.
    active_swizzle: Cell<Swizzle>,
    /// Framebuffer for rendering to this texture, if it is a render target.
    fbo: Option<FBOId>,
    /// Framebuffer with a depth attachment, if one was requested.
    fbo_with_depth: Option<FBOId>,
    last_frame_used: GpuFrameId,
}

impl Texture {
    pub fn get_dimensions(&self) -> DeviceIntSize {
        self.size
    }

    pub fn get_format(&self) -> ImageFormat {
        self.format
    }

    pub fn get_filter(&self) -> TextureFilter {
        self.filter
    }

    pub fn get_target(&self) -> ImageBufferKind {
        from_gl_target(self.target)
    }

    pub fn supports_depth(&self) -> bool {
        self.fbo_with_depth.is_some()
    }

    pub fn last_frame_used(&self) -> GpuFrameId {
        self.last_frame_used
    }

    pub fn used_in_frame(&self, frame_id: GpuFrameId) -> bool {
        self.last_frame_used == frame_id
    }

    pub fn is_render_target(&self) -> bool {
        self.fbo.is_some()
    }

    /// Returns true if this texture was used within `threshold` frames of
    /// the current frame.
    pub fn used_recently(&self, current_frame_id: GpuFrameId, threshold: usize) -> bool {
        self.last_frame_used + threshold >= current_frame_id
    }

    /// Returns the flags for this texture.
    pub fn flags(&self) -> &TextureFlags {
        &self.flags
    }

    /// Returns a mutable borrow of the flags for this texture.
    pub fn flags_mut(&mut self) -> &mut TextureFlags {
        &mut self.flags
    }

    /// Returns the number of bytes allocated for this texture's pixel data,
    /// excluding any mipmap levels.
    pub fn size_in_bytes(&self) -> usize {
        let bpp = self.format.bytes_per_pixel() as usize;
        let w = self.size.width as usize;
        let h = self.size.height as usize;
        bpp * w * h
    }

    #[cfg(feature = "replay")]
    pub fn into_external(mut self) -> ExternalTexture {
        let ext = ExternalTexture {
            id: self.id,
            target: self.target,
            swizzle: Swizzle::default(),
            uv_rect: TexelRect::new(
                0.0,
                0.0,
                self.size.width as f32,
                self.size.height as f32,
            ),
        };
        // Ownership of the GL id moves to the ExternalTexture; zero it out
        // so the Drop assertion below does not fire.
        self.id = 0;
        ext
    }
}

impl Drop for Texture {
    fn drop(&mut self) {
        debug_assert!(thread::panicking() || self.id == 0);
    }
}

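/// A compiled and (eventually) linked shader program.
///
/// The `u_*` fields cache uniform locations looked up once at link time (see
/// `link_program`). `is_initialized` is false for programs that have been
/// created but not yet linked, which lets compilation be deferred or served
/// from the program binary cache.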
pub struct Program {
    id: GLuint,
    u_transform: GLint,
    u_mode: GLint,
    u_texture_size: GLint,
    source_info: ProgramSourceInfo,
    is_initialized: bool,
}

impl Program {
    pub fn is_initialized(&self) -> bool {
        self.is_initialized
    }
}

impl Drop for Program {
    fn drop(&mut self) {
        debug_assert!(
            thread::panicking() || self.id == 0,
            "renderer::deinit not called"
        );
    }
}

pub struct CustomVAO {
    id: GLuint,
}

impl Drop for CustomVAO {
    fn drop(&mut self) {
        debug_assert!(
            thread::panicking() || self.id == 0,
            "renderer::deinit not called"
        );
    }
}

pub struct VAO {
    id: GLuint,
    ibo_id: IBOId,
    main_vbo_id: VBOId,
    instance_vbo_id: VBOId,
    instance_stride: usize,
    instance_divisor: u32,
    owns_vertices_and_indices: bool,
}

impl Drop for VAO {
    fn drop(&mut self) {
        debug_assert!(
            thread::panicking() || self.id == 0,
            "renderer::deinit not called"
        );
    }
}

#[derive(Debug)]
pub struct PBO {
    id: GLuint,
    reserved_size: usize,
}

impl PBO {
    pub fn get_reserved_size(&self) -> usize {
        self.reserved_size
    }
}

impl Drop for PBO {
    fn drop(&mut self) {
        debug_assert!(
            thread::panicking() || self.id == 0,
            "renderer::deinit not called or PBO not returned to pool"
        );
    }
}

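/// RAII guard around a PBO that has been mapped for reading.
///
/// `data` borrows the mapped buffer; when the guard is dropped, the buffer
/// is unmapped and unbound from `GL_PIXEL_PACK_BUFFER`, so the slice cannot
/// outlive the guard (the `'a` lifetime enforces this).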
pub struct BoundPBO<'a> {
    device: &'a mut Device,
    pub data: &'a [u8],
}

impl<'a> Drop for BoundPBO<'a> {
    fn drop(&mut self) {
        self.device.gl.unmap_buffer(gl::PIXEL_PACK_BUFFER);
        self.device.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, 0);
    }
}

#[derive(PartialEq, Eq, Hash, Debug, Copy, Clone)]
pub struct FBOId(GLuint);

#[derive(PartialEq, Eq, Hash, Debug, Copy, Clone)]
pub struct RBOId(GLuint);

#[derive(PartialEq, Eq, Hash, Debug, Copy, Clone)]
pub struct VBOId(GLuint);

#[derive(PartialEq, Eq, Hash, Debug, Copy, Clone)]
struct IBOId(GLuint);

#[derive(Clone, Debug)]
enum ProgramSourceType {
    Unoptimized,
    Optimized(ShaderVersion),
}

#[derive(Clone, Debug)]
pub struct ProgramSourceInfo {
    base_filename: &'static str,
    features: Vec<&'static str>,
    full_name_cstr: Rc<std::ffi::CString>,
    source_type: ProgramSourceType,
    digest: ProgramSourceDigest,
}

impl ProgramSourceInfo {
    fn new(
        device: &Device,
        name: &'static str,
        features: &[&'static str],
    ) -> Self {
        // Compute the digest that identifies this program in the binary
        // cache: it covers the renderer name plus the digests of the vertex
        // and fragment sources.
        use std::collections::hash_map::DefaultHasher;
        use std::hash::Hasher;

        let mut hasher = DefaultHasher::new();
        let gl_version = get_shader_version(&*device.gl());

        hasher.write(device.capabilities.renderer_name.as_bytes());

        let full_name = Self::make_full_name(name, features);

        let optimized_source = if device.use_optimized_shaders {
            OPTIMIZED_SHADERS.get(&(gl_version, &full_name)).or_else(|| {
                warn!("Missing optimized shader source for {}", &full_name);
                None
            })
        } else {
            None
        };

        let source_type = match optimized_source {
            Some(source_and_digest) => {
                // Optimized shader sources are pre-generated at build time
                // along with their digests. In debug builds, verify the
                // stored digest against a fresh hash of the sources.
                if cfg!(debug_assertions) {
                    let mut h = DefaultHasher::new();
                    h.write(source_and_digest.vert_source.as_bytes());
                    h.write(source_and_digest.frag_source.as_bytes());
                    let d: ProgramSourceDigest = h.into();
                    let digest = d.to_string();
                    debug_assert_eq!(digest, source_and_digest.digest);
                    hasher.write(digest.as_bytes());
                } else {
                    hasher.write(source_and_digest.digest.as_bytes());
                }

                ProgramSourceType::Optimized(gl_version)
            }
            None => {
                let override_path = device.resource_override_path.as_ref();
                let source_and_digest = UNOPTIMIZED_SHADERS.get(&name).expect("Shader not found");

                // The prefix (version, feature defines, etc.) is generated
                // at run time, so it must always be hashed here.
                build_shader_prefix_string(
                    gl_version,
                    &features,
                    ShaderKind::Vertex,
                    &name,
                    &mut |s| hasher.write(s.as_bytes()),
                );

                // If the source may be overridden from disk, hash the real
                // main source; otherwise the build-time digest suffices.
                if override_path.is_some() || cfg!(debug_assertions) {
                    let mut h = DefaultHasher::new();
                    build_shader_main_string(
                        &name,
                        &|f| get_unoptimized_shader_source(f, override_path),
                        &mut |s| h.write(s.as_bytes())
                    );
                    let d: ProgramSourceDigest = h.into();
                    let digest = format!("{}", d);
                    debug_assert!(override_path.is_some() || digest == source_and_digest.digest);
                    hasher.write(digest.as_bytes());
                } else {
                    hasher.write(source_and_digest.digest.as_bytes());
                }

                ProgramSourceType::Unoptimized
            }
        };

        ProgramSourceInfo {
            base_filename: name,
            features: features.to_vec(),
            full_name_cstr: Rc::new(std::ffi::CString::new(full_name).unwrap()),
            source_type,
            digest: hasher.into(),
        }
    }

    fn compute_source(&self, device: &Device, kind: ShaderKind) -> String {
        let full_name = self.full_name();
        match self.source_type {
            ProgramSourceType::Optimized(gl_version) => {
                let shader = OPTIMIZED_SHADERS
                    .get(&(gl_version, &full_name))
                    .unwrap_or_else(|| panic!("Missing optimized shader source for {}", full_name));

                match kind {
                    ShaderKind::Vertex => shader.vert_source.to_string(),
                    ShaderKind::Fragment => shader.frag_source.to_string(),
                }
            },
            ProgramSourceType::Unoptimized => {
                let mut src = String::new();
                device.build_shader_string(
                    &self.features,
                    kind,
                    self.base_filename,
                    |s| src.push_str(s),
                );
                src
            }
        }
    }

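    /// Builds the canonical "full name" of a shader: the base filename with
    /// any enabled features appended. For example (hypothetical names), a
    /// base of `"brush_solid"` with features `["ALPHA_PASS"]` composes to
    /// `"brush_solid_ALPHA_PASS"`, matching the `format!` below.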
    fn make_full_name(base_filename: &'static str, features: &[&'static str]) -> String {
        if features.is_empty() {
            base_filename.to_string()
        } else {
            format!("{}_{}", base_filename, features.join("_"))
        }
    }

    fn full_name(&self) -> String {
        Self::make_full_name(self.base_filename, &self.features)
    }
}

#[cfg_attr(feature = "serialize_program", derive(Deserialize, Serialize))]
pub struct ProgramBinary {
    bytes: Vec<u8>,
    format: GLenum,
    source_digest: ProgramSourceDigest,
}

impl ProgramBinary {
    fn new(bytes: Vec<u8>,
           format: GLenum,
           source_digest: ProgramSourceDigest) -> Self {
        ProgramBinary {
            bytes,
            format,
            source_digest,
        }
    }

    /// Returns a reference to the source digest hash.
    pub fn source_digest(&self) -> &ProgramSourceDigest {
        &self.source_digest
    }
}

pub trait ProgramCacheObserver {
    fn save_shaders_to_disk(&self, entries: Vec<Arc<ProgramBinary>>);
    fn set_startup_shaders(&self, entries: Vec<Arc<ProgramBinary>>);
    fn try_load_shader_from_disk(&self, digest: &ProgramSourceDigest, program_cache: &Rc<ProgramCache>);
    fn notify_program_binary_failed(&self, program_binary: &Arc<ProgramBinary>);
}

struct ProgramCacheEntry {
    /// The program binary.
    binary: Arc<ProgramBinary>,
    /// True if the binary has been linked, i.e. used for rendering.
    linked: bool,
}

pub struct ProgramCache {
    entries: RefCell<FastHashMap<ProgramSourceDigest, ProgramCacheEntry>>,

    /// Optional trait object that lets the client application handle
    /// changes to the cache (e.g. persisting binaries to disk).
    program_cache_handler: Option<Box<dyn ProgramCacheObserver>>,

    /// Program binaries that have not yet been saved to the disk cache.
    pending_entries: RefCell<Vec<Arc<ProgramBinary>>>,
}

impl ProgramCache {
    pub fn new(program_cache_observer: Option<Box<dyn ProgramCacheObserver>>) -> Rc<Self> {
        Rc::new(
            ProgramCache {
                entries: RefCell::new(FastHashMap::default()),
                program_cache_handler: program_cache_observer,
                pending_entries: RefCell::new(Vec::default()),
            }
        )
    }

    /// Save any new program binaries to the disk cache, and if startup has
    /// just completed, hand the observer the list of linked shaders to load
    /// eagerly on the next startup.
    fn update_disk_cache(&self, startup_complete: bool) {
        if let Some(ref handler) = self.program_cache_handler {
            if !self.pending_entries.borrow().is_empty() {
                let pending_entries = self.pending_entries.replace(Vec::default());
                handler.save_shaders_to_disk(pending_entries);
            }

            if startup_complete {
                let startup_shaders = self.entries.borrow().values()
                    .filter(|e| e.linked).map(|e| e.binary.clone())
                    .collect::<Vec<_>>();
                handler.set_startup_shaders(startup_shaders);
            }
        }
    }

    /// Add a new ProgramBinary to the cache, typically after compiling and
    /// linking a new program.
    fn add_new_program_binary(&self, program_binary: Arc<ProgramBinary>) {
        self.pending_entries.borrow_mut().push(program_binary.clone());

        let digest = program_binary.source_digest.clone();
        let entry = ProgramCacheEntry {
            binary: program_binary,
            linked: true,
        };
        self.entries.borrow_mut().insert(digest, entry);
    }

    /// Load a ProgramBinary into the cache, typically when restoring it
    /// from disk.
    #[cfg(feature = "serialize_program")]
    pub fn load_program_binary(&self, program_binary: Arc<ProgramBinary>) {
        let digest = program_binary.source_digest.clone();
        let entry = ProgramCacheEntry {
            binary: program_binary,
            linked: false,
        };
        self.entries.borrow_mut().insert(digest, entry);
    }

    /// Returns the number of bytes allocated for the cached program binaries.
    pub fn report_memory(&self, op: VoidPtrToSizeFn) -> usize {
        self.entries.borrow().values()
            .map(|e| unsafe { op(e.binary.bytes.as_ptr() as *const c_void ) })
            .sum()
    }
}

#[derive(Debug, Copy, Clone)]
pub enum VertexUsageHint {
    Static,
    Dynamic,
    Stream,
}

impl VertexUsageHint {
    fn to_gl(&self) -> GLuint {
        match *self {
            VertexUsageHint::Static => gl::STATIC_DRAW,
            VertexUsageHint::Dynamic => gl::DYNAMIC_DRAW,
            VertexUsageHint::Stream => gl::STREAM_DRAW,
        }
    }
}

#[derive(Copy, Clone, Debug)]
pub struct UniformLocation(GLint);

impl UniformLocation {
    pub const INVALID: Self = UniformLocation(-1);
}

#[derive(Debug)]
pub struct Capabilities {
    /// Whether multisampled render targets are supported.
    pub supports_multisampling: bool,
    /// Whether the function `glCopyImageSubData` is available.
    pub supports_copy_image_sub_data: bool,
    /// Whether float textures can be bound to framebuffers.
    pub supports_color_buffer_float: bool,
    /// Whether persistently mapped buffers, via `glBufferStorage`, are supported.
    pub supports_buffer_storage: bool,
    /// Whether advanced blend equations are supported.
    pub supports_advanced_blend_equation: bool,
    /// Whether dual-source blending is supported.
    pub supports_dual_source_blending: bool,
    /// Whether KHR_debug is supported for getting debug messages from the driver.
    pub supports_khr_debug: bool,
    /// Whether texture units can be configured to swizzle on sampling.
    pub supports_texture_swizzle: bool,
    /// Whether the driver supports uploading to textures from a non-zero
    /// offset within a PBO.
    pub supports_nonzero_pbo_offsets: bool,
    /// Whether the driver supports specifying the texture usage up front.
    pub supports_texture_usage: bool,
    /// Whether offscreen render targets can be partially updated.
    pub supports_render_target_partial_update: bool,
    /// Whether shader storage buffer objects (SSBOs) can be used.
    pub supports_shader_storage_object: bool,
    /// Whether the device enforces batched texture uploads (`Some(true)`),
    /// forbids them (`Some(false)`), or leaves the choice configurable (`None`).
    pub requires_batched_texture_uploads: Option<bool>,
    /// Whether clearing an alpha render target is reliable on this device.
    pub supports_alpha_target_clears: bool,
    /// Whether uploading to R8 format textures is reliable on this device.
    pub supports_r8_texture_upload: bool,
    /// Whether clip masking is handled natively (software backend).
    pub uses_native_clip_mask: bool,
    /// Whether antialiasing is handled natively (software backend).
    pub uses_native_antialiasing: bool,
    /// Whether GL_OES_EGL_image_external_essl3 is supported.
    pub supports_image_external_essl3: bool,
    /// Whether the VAO must be rebound after a vertex buffer is orphaned.
    pub requires_vao_rebind_after_orphaning: bool,
    /// The name of the renderer, as reported by GL.
    pub renderer_name: String,
}

#[derive(Clone, Debug)]
pub enum ShaderError {
    Compilation(String, String), // name, error message
    Link(String, String),        // name, error message
}

/// A depth renderbuffer shared between render targets of the same size.
struct SharedDepthTarget {
    /// The renderbuffer object id.
    rbo_id: RBOId,
    /// Reference count. When this drops to zero, the RBO is deleted.
    refcount: usize,
}

#[cfg(debug_assertions)]
impl Drop for SharedDepthTarget {
    fn drop(&mut self) {
        debug_assert!(thread::panicking() || self.refcount == 0);
    }
}

/// When to allocate textures with `glTexStorage*` rather than `glTexImage*`.
#[derive(PartialEq, Debug)]
enum TexStorageUsage {
    Never,
    NonBGRA8,
    Always,
}

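/// The stride alignment that texture uploads through a PBO must respect,
/// expressed either as a fixed byte count or as a pixel count that scales
/// with the format's bytes-per-pixel.
///
/// Worked example: `StrideAlignment::Pixels(64)` with a 4-byte-per-pixel
/// format such as BGRA8 yields a 256-byte row alignment, matching the
/// `num_bytes` computation below.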
#[derive(Copy, Clone, Debug)]
pub enum StrideAlignment {
    Bytes(NonZeroUsize),
    Pixels(NonZeroUsize),
}

impl StrideAlignment {
    pub fn num_bytes(&self, format: ImageFormat) -> NonZeroUsize {
        match *self {
            Self::Bytes(bytes) => bytes,
            Self::Pixels(pixels) => {
                assert!(format.bytes_per_pixel() > 0);
                NonZeroUsize::new(pixels.get() * format.bytes_per_pixel() as usize).unwrap()
            }
        }
    }
}

/// Number of high depth bits kept in reserve when computing how many
/// distinct depth ids are available (see `max_depth_ids`).
const RESERVE_DEPTH_BITS: i32 = 2;

pub struct Device {
    gl: Rc<gl_context_loader::GenericGlContext>,

    /// If non-None, `gl` points to a profiling wrapper, and this points to
    /// the underlying context.
    base_gl: Option<Rc<gl_context_loader::GenericGlContext>>,

    // Cached device state, used to skip redundant GL calls.
    bound_textures: [GLuint; 16],
    bound_program: GLuint,
    bound_program_name: Rc<std::ffi::CString>,
    bound_vao: GLuint,
    bound_read_fbo: (FBOId, DeviceIntPoint),
    bound_draw_fbo: FBOId,
    program_mode_id: UniformLocation,
    default_read_fbo: FBOId,
    default_draw_fbo: FBOId,

    /// Whether the current draw target has a depth attachment.
    depth_available: bool,

    upload_method: UploadMethod,
    use_batched_texture_uploads: bool,
    /// Whether to use draw calls instead of regular blitting commands for
    /// texture copies.
    use_draw_calls_for_texture_copy: bool,

    // HW or API capabilities
    capabilities: Capabilities,

    color_formats: TextureFormatPair<ImageFormat>,
    bgra_formats: TextureFormatPair<GLuint>,
    bgra_pixel_type: GLuint,
    swizzle_settings: SwizzleSettings,
    depth_format: GLuint,

    /// Map from texture dimensions to shared depth buffers for render
    /// targets. Render targets often have the same width/height, so sharing
    /// these saves memory.
    depth_targets: FastHashMap<DeviceIntSize, SharedDepthTarget>,

    // debug
    inside_frame: bool,
    crash_annotator: Option<Box<dyn CrashAnnotator>>,
    annotate_draw_call_crashes: bool,

    // resources
    resource_override_path: Option<PathBuf>,

    /// Whether to use shaders that have been optimized at build time.
    use_optimized_shaders: bool,

    max_texture_size: i32,
    cached_programs: Option<Rc<ProgramCache>>,

    /// Frame counter. This is used to map between CPU and GPU frames.
    frame_id: GpuFrameId,

    /// When to use glTexStorage*. It guarantees that mipmaps will not be
    /// generated, but is not supported everywhere, or for every format.
    texture_storage_usage: TexStorageUsage,

    /// Required stride alignment for pixel transfers, needed either for
    /// correctness (driver bugs) or to stay on the driver's fast path.
    required_pbo_stride: StrideAlignment,

    /// Whether the source strings passed to glShaderSource() must be
    /// null-terminated, to work around driver bugs.
    requires_null_terminated_shader_source: bool,

    /// Whether any texture bound to GL_TEXTURE_EXTERNAL_OES must be unbound
    /// before binding to GL_TEXTURE_2D, to work around an emulator bug.
    requires_texture_external_unbind: bool,

    // GL extensions
    extensions: Vec<String>,

    /// Dumps the source of the shader with the given name.
    dump_shader_source: Option<String>,

    surface_origin_is_top_left: bool,

    /// Whether a shader program is bound and ready for a draw call.
    #[cfg(debug_assertions)]
    shader_is_ready: bool,
}

/// Contains the parameters necessary to bind a draw target.
#[derive(Clone, Copy, Debug)]
pub enum DrawTarget {
    /// Use the device's default draw target, with the provided dimensions,
    /// which are used to set the viewport.
    Default {
        /// Target rectangle to draw.
        rect: FramebufferIntRect,
        /// Total size of the target.
        total_size: FramebufferIntSize,
        surface_origin_is_top_left: bool,
    },
    /// Use the provided texture.
    Texture {
        /// Size of the texture in pixels.
        dimensions: DeviceIntSize,
        /// Whether to draw with the texture's associated depth target.
        with_depth: bool,
        /// FBO that corresponds to the selected depth mode.
        fbo_id: FBOId,
        /// Native GL texture ID.
        id: GLuint,
        /// Native GL texture target.
        target: GLuint,
    },
    /// Use an FBO attached to an external texture.
    External {
        fbo: FBOId,
        size: FramebufferIntSize,
    },
    /// An OS compositor surface.
    NativeSurface {
        offset: DeviceIntPoint,
        external_fbo_id: u32,
        dimensions: DeviceIntSize,
    },
}

impl DrawTarget {
    pub fn new_default(size: DeviceIntSize, surface_origin_is_top_left: bool) -> Self {
        let total_size = device_size_as_framebuffer_size(size);
        DrawTarget::Default {
            rect: total_size.into(),
            total_size,
            surface_origin_is_top_left,
        }
    }

    /// Returns true if this draw target corresponds to the default framebuffer.
    pub fn is_default(&self) -> bool {
        match *self {
            DrawTarget::Default {..} => true,
            _ => false,
        }
    }

    pub fn from_texture(
        texture: &Texture,
        with_depth: bool,
    ) -> Self {
        let fbo_id = if with_depth {
            texture.fbo_with_depth.unwrap()
        } else {
            texture.fbo.unwrap()
        };

        DrawTarget::Texture {
            dimensions: texture.get_dimensions(),
            fbo_id,
            with_depth,
            id: texture.id,
            target: texture.target,
        }
    }

    /// Returns the dimensions of this draw target.
    pub fn dimensions(&self) -> DeviceIntSize {
        match *self {
            DrawTarget::Default { total_size, .. } => total_size.cast_unit(),
            DrawTarget::Texture { dimensions, .. } => dimensions,
            DrawTarget::External { size, .. } => size.cast_unit(),
            DrawTarget::NativeSurface { dimensions, .. } => dimensions,
        }
    }

    pub fn to_framebuffer_rect(&self, device_rect: DeviceIntRect) -> FramebufferIntRect {
        let mut fb_rect = device_rect_as_framebuffer_rect(&device_rect);
        match *self {
            DrawTarget::Default { ref rect, surface_origin_is_top_left, .. } => {
                // Perform a Y flip if the surface origin is bottom-left.
                if !surface_origin_is_top_left {
                    let w = fb_rect.width();
                    let h = fb_rect.height();
                    fb_rect.min.x = fb_rect.min.x + rect.min.x;
                    fb_rect.min.y = rect.max.y - fb_rect.max.y;
                    fb_rect.max.x = fb_rect.min.x + w;
                    fb_rect.max.y = fb_rect.min.y + h;
                }
            }
            DrawTarget::Texture { .. } | DrawTarget::External { .. } | DrawTarget::NativeSurface { .. } => (),
        }
        fb_rect
    }
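
    // Worked example of the flip above: on a 100px-tall bottom-left-origin
    // surface (rect spanning y = 0..100), a device rect covering y = 10..30
    // maps to fb y.min = 100 - 30 = 70 and y.max = 70 + 20 = 90, i.e. the
    // same band measured from the opposite edge.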

    pub fn surface_origin_is_top_left(&self) -> bool {
        match *self {
            DrawTarget::Default { surface_origin_is_top_left, .. } => surface_origin_is_top_left,
            DrawTarget::Texture { .. } | DrawTarget::External { .. } | DrawTarget::NativeSurface { .. } => true,
        }
    }

    /// Given a scissor rect, convert it to the right coordinate space for
    /// this draw target. If no scissor rect was supplied, returns a rect
    /// that encloses the entire render target.
    pub fn build_scissor_rect(
        &self,
        scissor_rect: Option<DeviceIntRect>,
    ) -> FramebufferIntRect {
        let dimensions = self.dimensions();

        match scissor_rect {
            Some(scissor_rect) => match *self {
                DrawTarget::Default { ref rect, .. } => {
                    self.to_framebuffer_rect(scissor_rect)
                        .intersection(rect)
                        .unwrap_or_else(FramebufferIntRect::zero)
                }
                DrawTarget::NativeSurface { offset, .. } => {
                    device_rect_as_framebuffer_rect(&scissor_rect.translate(offset.to_vector()))
                }
                DrawTarget::Texture { .. } | DrawTarget::External { .. } => {
                    device_rect_as_framebuffer_rect(&scissor_rect)
                }
            }
            None => {
                FramebufferIntRect::from_size(
                    device_size_as_framebuffer_size(dimensions),
                )
            }
        }
    }
}

/// Contains the parameters necessary to bind a read target.
#[derive(Clone, Copy, Debug)]
pub enum ReadTarget {
    /// Use the device's default read target.
    Default,
    /// Use the provided texture.
    Texture {
        /// ID of the FBO to read from.
        fbo_id: FBOId,
    },
    /// Use an FBO attached to an external texture.
    External {
        fbo: FBOId,
    },
    /// An FBO bound to a native (OS compositor) surface.
    NativeSurface {
        fbo_id: FBOId,
        offset: DeviceIntPoint,
    },
}

impl ReadTarget {
    pub fn from_texture(
        texture: &Texture,
    ) -> Self {
        ReadTarget::Texture {
            fbo_id: texture.fbo.unwrap(),
        }
    }

    fn offset(&self) -> DeviceIntPoint {
        match *self {
            ReadTarget::Default |
            ReadTarget::Texture { .. } |
            ReadTarget::External { .. } => {
                DeviceIntPoint::zero()
            }

            ReadTarget::NativeSurface { offset, .. } => {
                offset
            }
        }
    }
}

impl From<DrawTarget> for ReadTarget {
    fn from(t: DrawTarget) -> Self {
        match t {
            DrawTarget::Default { .. } => {
                ReadTarget::Default
            }
            DrawTarget::NativeSurface { external_fbo_id, offset, .. } => {
                ReadTarget::NativeSurface {
                    fbo_id: FBOId(external_fbo_id),
                    offset,
                }
            }
            DrawTarget::Texture { fbo_id, .. } => {
                ReadTarget::Texture { fbo_id }
            }
            DrawTarget::External { fbo, .. } => {
                ReadTarget::External { fbo }
            }
        }
    }
}

impl Device {
    pub fn new(
        gl: Rc<gl_context_loader::GenericGlContext>,
        crash_annotator: Option<Box<dyn CrashAnnotator>>,
        resource_override_path: Option<PathBuf>,
        use_optimized_shaders: bool,
        upload_method: UploadMethod,
        cached_programs: Option<Rc<ProgramCache>>,
        allow_texture_storage_support: bool,
        allow_texture_swizzling: bool,
        dump_shader_source: Option<String>,
        surface_origin_is_top_left: bool,
        _panic_on_gl_error: bool,
    ) -> Device {
        let mut max_texture_size = [0];
        unsafe {
            gl.get_integer_v(gl::MAX_TEXTURE_SIZE, &mut max_texture_size);
        }

        // Cap the max texture size at 16384: some drivers report larger
        // limits but become unstable with very large textures.
        let max_texture_size = max_texture_size[0].min(16384);

        let renderer_name = gl.get_string(gl::RENDERER);
        info!("Renderer: {}", renderer_name);
        info!("Max texture size: {}", max_texture_size);

        let mut extension_count = [0];
        unsafe {
            gl.get_integer_v(gl::NUM_EXTENSIONS, &mut extension_count);
        }
        let extension_count = extension_count[0] as GLuint;
        let mut extensions = Vec::new();
        for i in 0 .. extension_count {
            extensions.push(gl.get_string_i(gl::EXTENSIONS, i));
        }

        if supports_extension(&extensions, "GL_ANGLE_provoking_vertex") {
            gl.provoking_vertex_angle(gl::FIRST_VERTEX_CONVENTION);
        }

        let supports_texture_usage = supports_extension(&extensions, "GL_ANGLE_texture_usage");

        let supports_gles_bgra = supports_extension(&extensions, "GL_EXT_texture_format_BGRA8888");

        // Detect the Android emulator, which needs several driver
        // workarounds below.
        let is_emulator = renderer_name.starts_with("Android Emulator");
        let avoid_tex_image = is_emulator;
        let mut gl_version = [0; 2];
        unsafe {
            gl.get_integer_v(gl::MAJOR_VERSION, &mut gl_version[0..1]);
            gl.get_integer_v(gl::MINOR_VERSION, &mut gl_version[1..2]);
        }

        let supports_texture_storage = allow_texture_storage_support && !cfg!(target_os = "macos") &&
            match gl.get_type() {
                GlType::Gl => supports_extension(&extensions, "GL_ARB_texture_storage"),
                GlType::GlEs => true,
            };
        let supports_texture_swizzle = allow_texture_swizzling &&
            match gl.get_type() {
                GlType::Gl => gl_version >= [3, 3] ||
                    supports_extension(&extensions, "GL_ARB_texture_swizzle"),
                GlType::GlEs => true,
            };

        let (color_formats, bgra_formats, bgra_pixel_type, bgra8_sampling_swizzle, texture_storage_usage) = match gl.get_type() {
            // There is `glTexStorage`, use it and expect RGBA on the input.
            GlType::Gl if supports_texture_storage && supports_texture_swizzle => (
                TextureFormatPair::from(ImageFormat::RGBA8),
                TextureFormatPair { internal: gl::RGBA8, external: gl::RGBA },
                gl::UNSIGNED_BYTE,
                Swizzle::Bgra, // pretend it's RGBA, rely on swizzling
                TexStorageUsage::Always,
            ),
            // There is no `glTexStorage`, upload with `glTexImage` and BGRA input.
            GlType::Gl => (
                TextureFormatPair { internal: ImageFormat::BGRA8, external: ImageFormat::BGRA8 },
                TextureFormatPair { internal: gl::RGBA, external: gl::BGRA },
                gl::UNSIGNED_INT_8_8_8_8_REV,
                Swizzle::Rgba, // no swizzling
                TexStorageUsage::Never,
            ),
            // BGRA is storable with `glTexStorage`, so use both.
            GlType::GlEs if supports_gles_bgra
                && supports_extension(&extensions, "GL_EXT_texture_storage") =>
            (
                TextureFormatPair::from(ImageFormat::BGRA8),
                TextureFormatPair { internal: gl::BGRA8_EXT, external: gl::BGRA_EXT },
                gl::UNSIGNED_BYTE,
                Swizzle::Rgba, // no swizzling
                TexStorageUsage::Always,
            ),
            // BGRA is not storable, but swizzling is available: store as
            // RGBA and swizzle at sampling time.
            GlType::GlEs if supports_texture_swizzle => (
                TextureFormatPair::from(ImageFormat::RGBA8),
                TextureFormatPair { internal: gl::RGBA8, external: gl::RGBA },
                gl::UNSIGNED_BYTE,
                Swizzle::Bgra, // pretend it's RGBA, rely on swizzling
                TexStorageUsage::Always,
            ),
            // BGRA can be used with `glTexImage` (but not `glTexStorage`).
            GlType::GlEs if supports_gles_bgra && !avoid_tex_image => (
                TextureFormatPair::from(ImageFormat::BGRA8),
                TextureFormatPair::from(gl::BGRA_EXT),
                gl::UNSIGNED_BYTE,
                Swizzle::Rgba, // no swizzling
                TexStorageUsage::NonBGRA8,
            ),
            GlType::GlEs => {
                warn!("Neither BGRA nor texture swizzling is supported. Images may render incorrectly.");
                (
                    TextureFormatPair::from(ImageFormat::RGBA8),
                    TextureFormatPair { internal: gl::RGBA8, external: gl::RGBA },
                    gl::UNSIGNED_BYTE,
                    Swizzle::Rgba,
                    TexStorageUsage::Always,
                )
            }
        };

        let is_software_webrender = renderer_name.starts_with("Software WebRender");
        let upload_method = if is_software_webrender {
            // The software backend copies on the CPU anyway, so PBO staging
            // would only add overhead.
            UploadMethod::Immediate
        } else {
            upload_method
        };
        let depth_format = gl::DEPTH_COMPONENT24;

        info!("GL texture cache {:?}, bgra {:?} swizzle {:?}, texture storage {:?}, depth {:?}",
            color_formats, bgra_formats, bgra8_sampling_swizzle, texture_storage_usage, depth_format);

        // glCopyImageSubData is problematic on Mali drivers, so fall back
        // to blits there.
        let supports_copy_image_sub_data = if renderer_name.starts_with("Mali") {
            false
        } else {
            supports_extension(&extensions, "GL_EXT_copy_image") ||
            supports_extension(&extensions, "GL_ARB_copy_image")
        };

        // The x86 PowerVR Rogue G6430 misrenders with color-buffer-float,
        // so treat it as unsupported there.
        let is_x86_powervr_rogue_g6430 = renderer_name.starts_with("PowerVR Rogue G6430")
            && cfg!(target_arch = "x86");
        let supports_color_buffer_float = match gl.get_type() {
            GlType::Gl => true,
            GlType::GlEs if is_x86_powervr_rogue_g6430 => false,
            GlType::GlEs => supports_extension(&extensions, "GL_EXT_color_buffer_float"),
        };

        let is_adreno = renderer_name.starts_with("Adreno");

        // Buffer storage is disabled on Adreno due to driver issues.
        let supports_buffer_storage = if is_adreno {
            false
        } else {
            supports_extension(&extensions, "GL_EXT_buffer_storage") ||
            supports_extension(&extensions, "GL_ARB_buffer_storage")
        };

        // KHR_blend_equation_advanced is advertised but unreliable on
        // Adreno, so it is disabled there.
        let supports_advanced_blend_equation =
            supports_extension(&extensions, "GL_KHR_blend_equation_advanced") &&
            !is_adreno;

        let supports_dual_source_blending = match gl.get_type() {
            GlType::Gl => supports_extension(&extensions,"GL_ARB_blend_func_extended") &&
                supports_extension(&extensions,"GL_ARB_explicit_attrib_location"),
            GlType::GlEs => supports_extension(&extensions,"GL_EXT_blend_func_extended"),
        };

        // Software webrender relies on the unoptimized shader sources.
        let use_optimized_shaders = use_optimized_shaders && !is_software_webrender;

        // On the Android emulator, shader sources must be null-terminated.
        let requires_null_terminated_shader_source = is_emulator;

        // The Android emulator gets confused if a texture stays bound to
        // GL_TEXTURE_EXTERNAL_OES while GL_TEXTURE_2D is bound.
        let requires_texture_external_unbind = is_emulator;

        let is_macos = cfg!(target_os = "macos");
        let is_angle = renderer_name.starts_with("ANGLE");
        let is_adreno_3xx = renderer_name.starts_with("Adreno (TM) 3");

        // Stride alignment required (or preferred, for performance) by
        // various drivers when uploading texture data through a PBO.
        let required_pbo_stride = if is_adreno_3xx {
            StrideAlignment::Bytes(NonZeroUsize::new(128).unwrap())
        } else if is_adreno {
            StrideAlignment::Pixels(NonZeroUsize::new(64).unwrap())
        } else if is_macos {
            StrideAlignment::Bytes(NonZeroUsize::new(256).unwrap())
        } else if is_angle {
            StrideAlignment::Bytes(NonZeroUsize::new(1).unwrap())
        } else {
            // 4-byte alignment, matching UNPACK_ALIGNMENT, is a safe default.
            StrideAlignment::Bytes(NonZeroUsize::new(4).unwrap())
        };

        // macOS requires PBO uploads to start at offset zero.
        let supports_nonzero_pbo_offsets = !is_macos;

        // Partial render target updates are problematic on Mali-G and
        // Mali-T devices.
        let is_mali_g = renderer_name.starts_with("Mali-G");
        let is_mali_t = renderer_name.starts_with("Mali-T");
        let supports_render_target_partial_update = !is_mali_g && !is_mali_t;

        let supports_shader_storage_object = match gl.get_type() {
            GlType::Gl => supports_extension(&extensions, "GL_ARB_shader_storage_buffer_object"),
            GlType::GlEs => gl_version >= [3, 1],
        };

        // The software backend handles clip masks and antialiasing natively.
        let uses_native_clip_mask = is_software_webrender;
        let uses_native_antialiasing = is_software_webrender;

        let supports_image_external_essl3 = supports_extension(&extensions, "GL_OES_EGL_image_external_essl3");

        let mut requires_batched_texture_uploads = None;
        if is_software_webrender {
            // No benefit to batching texture uploads with swgl.
            requires_batched_texture_uploads = Some(false);
        } else if is_mali_g {
            // On Mali-Gxx the driver really struggles with many small
            // texture uploads, and handles fewer, larger uploads better.
            requires_batched_texture_uploads = Some(true);
        }

        // Clearing alpha render targets appears unreliable on Mali-Txxx.
        let supports_alpha_target_clears = !is_mali_t;

        // R8 texture uploads misbehave on AMD Radeon RX on Linux.
        let supports_r8_texture_upload = if cfg!(target_os = "linux")
            && renderer_name.starts_with("AMD Radeon RX")
        {
            false
        } else {
            true
        };

        // Adreno 3xx requires the VAO to be rebound after a vertex buffer
        // is orphaned.
        let requires_vao_rebind_after_orphaning = is_adreno_3xx;

        Device {
            gl,
            base_gl: None,
            crash_annotator,
            annotate_draw_call_crashes: false,
            resource_override_path,
            use_optimized_shaders,
            upload_method,
            use_batched_texture_uploads: requires_batched_texture_uploads.unwrap_or(false),
            use_draw_calls_for_texture_copy: false,

            inside_frame: false,

            capabilities: Capabilities {
                supports_multisampling: false,
                supports_copy_image_sub_data,
                supports_color_buffer_float,
                supports_buffer_storage,
                supports_advanced_blend_equation,
                supports_dual_source_blending,
                supports_khr_debug: false,
                supports_texture_swizzle,
                supports_nonzero_pbo_offsets,
                supports_texture_usage,
                supports_render_target_partial_update,
                supports_shader_storage_object,
                requires_batched_texture_uploads,
                supports_alpha_target_clears,
                supports_r8_texture_upload,
                uses_native_clip_mask,
                uses_native_antialiasing,
                supports_image_external_essl3,
                requires_vao_rebind_after_orphaning,
                renderer_name,
            },

            color_formats,
            bgra_formats,
            bgra_pixel_type,
            swizzle_settings: SwizzleSettings {
                bgra8_sampling_swizzle,
            },
            depth_format,

            depth_targets: FastHashMap::default(),

            bound_textures: [0; 16],
            bound_program: 0,
            bound_program_name: Rc::new(std::ffi::CString::new("").unwrap()),
            bound_vao: 0,
            bound_read_fbo: (FBOId(0), DeviceIntPoint::zero()),
            bound_draw_fbo: FBOId(0),
            program_mode_id: UniformLocation::INVALID,
            default_read_fbo: FBOId(0),
            default_draw_fbo: FBOId(0),

            depth_available: true,

            max_texture_size,
            cached_programs,
            frame_id: GpuFrameId(0),
            extensions,
            texture_storage_usage,
            requires_null_terminated_shader_source,
            requires_texture_external_unbind,
            required_pbo_stride,
            dump_shader_source,
            surface_origin_is_top_left,

            #[cfg(debug_assertions)]
            shader_is_ready: false,
        }
    }

    pub fn gl(&self) -> &GenericGlContext {
        &*self.gl
    }

    pub fn rc_gl(&self) -> &Rc<gl_context_loader::GenericGlContext> {
        &self.gl
    }

    /// Ensures that the maximum texture size reported by the device does
    /// not exceed `size`.
    pub fn clamp_max_texture_size(&mut self, size: i32) {
        self.max_texture_size = self.max_texture_size.min(size);
    }

    /// Returns the limit on texture dimensions (width or height).
    pub fn max_texture_size(&self) -> i32 {
        self.max_texture_size
    }

    pub fn surface_origin_is_top_left(&self) -> bool {
        self.surface_origin_is_top_left
    }

    pub fn get_capabilities(&self) -> &Capabilities {
        &self.capabilities
    }

    pub fn preferred_color_formats(&self) -> TextureFormatPair<ImageFormat> {
        self.color_formats.clone()
    }

    pub fn swizzle_settings(&self) -> Option<SwizzleSettings> {
        if self.capabilities.supports_texture_swizzle {
            Some(self.swizzle_settings)
        } else {
            None
        }
    }

    pub fn depth_bits(&self) -> i32 {
        match self.depth_format {
            gl::DEPTH_COMPONENT16 => 16,
            gl::DEPTH_COMPONENT24 => 24,
            _ => panic!("Unknown depth format {:?}", self.depth_format),
        }
    }

    /// The maximum number of distinct depth ids that can be used in a pass,
    /// keeping `RESERVE_DEPTH_BITS` bits of headroom.
    pub fn max_depth_ids(&self) -> i32 {
        1 << (self.depth_bits() - RESERVE_DEPTH_BITS)
    }

    pub fn ortho_near_plane(&self) -> f32 {
        -self.max_depth_ids() as f32
    }

    pub fn ortho_far_plane(&self) -> f32 {
        (self.max_depth_ids() - 1) as f32
    }
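
    // Worked example for the three methods above: with the 24-bit depth
    // format selected in `new()` and RESERVE_DEPTH_BITS = 2, max_depth_ids()
    // is 1 << 22 = 4_194_304, so the orthographic near/far planes span
    // -4_194_304.0 to 4_194_303.0.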

    pub fn required_pbo_stride(&self) -> StrideAlignment {
        self.required_pbo_stride
    }

    pub fn upload_method(&self) -> &UploadMethod {
        &self.upload_method
    }

    pub fn use_batched_texture_uploads(&self) -> bool {
        self.use_batched_texture_uploads
    }

    pub fn use_draw_calls_for_texture_copy(&self) -> bool {
        self.use_draw_calls_for_texture_copy
    }

    pub fn set_use_batched_texture_uploads(&mut self, enabled: bool) {
        // If the device pins the batching mode, ignore the request.
        if self.capabilities.requires_batched_texture_uploads.is_some() {
            return;
        }
        self.use_batched_texture_uploads = enabled;
    }

    pub fn set_use_draw_calls_for_texture_copy(&mut self, enabled: bool) {
        self.use_draw_calls_for_texture_copy = enabled;
    }

    /// Resets the cached GL state to known defaults, rebinding zero
    /// textures, VAO, and framebuffers so the cached bindings stay in sync
    /// with the driver.
    pub fn reset_state(&mut self) {
        for i in 0 .. self.bound_textures.len() {
            self.bound_textures[i] = 0;
            self.gl.active_texture(gl::TEXTURE0 + i as GLuint);
            self.gl.bind_texture(gl::TEXTURE_2D, 0);
        }

        self.bound_vao = 0;
        self.gl.bind_vertex_array(0);

        self.bound_read_fbo = (self.default_read_fbo, DeviceIntPoint::zero());
        self.gl.bind_framebuffer(gl::READ_FRAMEBUFFER, self.default_read_fbo.0);

        self.bound_draw_fbo = self.default_draw_fbo;
        self.gl.bind_framebuffer(gl::DRAW_FRAMEBUFFER, self.bound_draw_fbo.0);
    }

    #[cfg(debug_assertions)]
    fn print_shader_errors(source: &str, log: &str) {
        // Hacky way to extract the offending lines from the driver's log.
        if !log.starts_with("0:") && !log.starts_with("0(") {
            return;
        }
        let end_pos = match log[2..].chars().position(|c| !c.is_digit(10)) {
            Some(pos) => 2 + pos,
            None => return,
        };
        let base_line_number = match log[2 .. end_pos].parse::<usize>() {
            Ok(number) if number >= 2 => number - 2,
            _ => return,
        };
        for (line, prefix) in source.lines().skip(base_line_number).zip(&["|",">","|"]) {
            error!("{}\t{}", prefix, line);
        }
    }

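    /// Compiles a single vertex or fragment shader and returns its GL id.
    ///
    /// A sketch of typical use inside this module (marked `ignore` since it
    /// needs a live GL context):
    ///
    /// ```ignore
    /// let id = device.compile_shader("my_shader", gl::VERTEX_SHADER, &source)?;
    /// ```
    ///
    /// On failure, the driver's info log is printed and returned inside
    /// `ShaderError::Compilation`.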
1907 pub fn compile_shader(
1908 &self,
1909 name: &str,
1910 shader_type: GLenum,
1911 source: &String,
1912 ) -> Result<GLuint, ShaderError> {
1913 debug!("compile {}", name);
1914 let id = self.gl.create_shader(shader_type);
1915
1916 let mut new_source = Cow::from(source.as_str());
1917 if self.requires_null_terminated_shader_source {
1920 new_source.to_mut().push('\0');
1921 }
1922
1923 self.gl.shader_source(id, &[new_source.as_bytes()]);
1924 self.gl.compile_shader(id);
1925 let log = self.gl.get_shader_info_log(id);
1926 let mut status = [0];
1927 unsafe {
1928 self.gl.get_shader_iv(id, gl::COMPILE_STATUS, &mut status);
1929 }
1930 if status[0] == 0 {
1931 let type_str = match shader_type {
1932 gl::VERTEX_SHADER => "vertex",
1933 gl::FRAGMENT_SHADER => "fragment",
1934 _ => panic!("Unexpected shader type {:x}", shader_type),
1935 };
1936 error!("Failed to compile {} shader: {}\n{}", type_str, name, log);
1937 #[cfg(debug_assertions)]
1938 Self::print_shader_errors(source, &log);
1939 Err(ShaderError::Compilation(name.to_string(), log))
1940 } else {
1941 if !log.is_empty() {
1942 warn!("Warnings detected on shader: {}\n{}", name, log);
1943 }
1944 Ok(id)
1945 }
1946 }
1947
1948 pub fn begin_frame(&mut self) -> GpuFrameId {
1949 debug_assert!(!self.inside_frame);
1950 self.inside_frame = true;
1951 #[cfg(debug_assertions)]
1952 {
1953 self.shader_is_ready = false;
1954 }
1955
1956 let mut default_read_fbo = [0];
1958 unsafe {
1959 self.gl.get_integer_v(gl::READ_FRAMEBUFFER_BINDING, &mut default_read_fbo);
1960 }
1961 self.default_read_fbo = FBOId(default_read_fbo[0] as GLuint);
1962 let mut default_draw_fbo = [0];
1963 unsafe {
1964 self.gl.get_integer_v(gl::DRAW_FRAMEBUFFER_BINDING, &mut default_draw_fbo);
1965 }
1966 self.default_draw_fbo = FBOId(default_draw_fbo[0] as GLuint);
1967
1968 self.bound_program = 0;
1970 self.program_mode_id = UniformLocation::INVALID;
1971 self.gl.use_program(0);
1972
1973 self.reset_state();
1975
1976 self.gl.pixel_store_i(gl::UNPACK_ALIGNMENT, 1);
1978 self.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, 0);
1979
1980 self.gl.active_texture(gl::TEXTURE0);
1982
1983 self.frame_id
1984 }
1985
1986 fn bind_texture_impl(
1987 &mut self, slot: TextureSlot, id: GLuint, target: GLenum, set_swizzle: Option<Swizzle>
1988 ) {
1989 debug_assert!(self.inside_frame);
1990
1991 if self.bound_textures[slot.0] != id || set_swizzle.is_some() {
1992 self.gl.active_texture(gl::TEXTURE0 + slot.0 as GLuint);
1993 if target == gl::TEXTURE_2D && self.requires_texture_external_unbind {
1996 self.gl.bind_texture(gl::TEXTURE_EXTERNAL_OES, 0);
1997 }
1998 self.gl.bind_texture(target, id);
1999 if let Some(swizzle) = set_swizzle {
2000 if self.capabilities.supports_texture_swizzle {
2001 let components = match swizzle {
2002 Swizzle::Rgba => [gl::RED, gl::GREEN, gl::BLUE, gl::ALPHA],
2003 Swizzle::Bgra => [gl::BLUE, gl::GREEN, gl::RED, gl::ALPHA],
2004 };
2005 self.gl.tex_parameter_i(target, gl::TEXTURE_SWIZZLE_R, components[0] as i32);
2006 self.gl.tex_parameter_i(target, gl::TEXTURE_SWIZZLE_G, components[1] as i32);
2007 self.gl.tex_parameter_i(target, gl::TEXTURE_SWIZZLE_B, components[2] as i32);
2008 self.gl.tex_parameter_i(target, gl::TEXTURE_SWIZZLE_A, components[3] as i32);
2009 } else {
2010 debug_assert_eq!(swizzle, Swizzle::default());
2011 }
2012 }
2013 self.gl.active_texture(gl::TEXTURE0);
2014 self.bound_textures[slot.0] = id;
2015 }
2016 }
2017
2018 pub fn bind_texture<S>(&mut self, slot: S, texture: &Texture, swizzle: Swizzle)
2019 where
2020 S: Into<TextureSlot>,
2021 {
2022 let old_swizzle = texture.active_swizzle.replace(swizzle);
2023 let set_swizzle = if old_swizzle != swizzle {
2024 Some(swizzle)
2025 } else {
2026 None
2027 };
2028 self.bind_texture_impl(slot.into(), texture.id, texture.target, set_swizzle);
2029 }
2030
2031 pub fn bind_external_texture<S>(&mut self, slot: S, external_texture: &ExternalTexture)
2032 where
2033 S: Into<TextureSlot>,
2034 {
2035 self.bind_texture_impl(slot.into(), external_texture.id, external_texture.target, None);
2036 }
2037
2038 pub fn bind_read_target_impl(
2039 &mut self,
2040 fbo_id: FBOId,
2041 offset: DeviceIntPoint,
2042 ) {
2043 debug_assert!(self.inside_frame);
2044
2045 if self.bound_read_fbo != (fbo_id, offset) {
2046 fbo_id.bind(self.gl(), FBOTarget::Read);
2047 }
2048
2049 self.bound_read_fbo = (fbo_id, offset);
2050 }
2051
2052 pub fn bind_read_target(&mut self, target: ReadTarget) {
2053 let fbo_id = match target {
2054 ReadTarget::Default => self.default_read_fbo,
2055 ReadTarget::Texture { fbo_id } => fbo_id,
2056 ReadTarget::External { fbo } => fbo,
2057 ReadTarget::NativeSurface { fbo_id, .. } => fbo_id,
2058 };
2059
2060 self.bind_read_target_impl(fbo_id, target.offset())
2061 }
2062
2063 fn bind_draw_target_impl(&mut self, fbo_id: FBOId) {
2064 debug_assert!(self.inside_frame);
2065
2066 if self.bound_draw_fbo != fbo_id {
2067 self.bound_draw_fbo = fbo_id;
2068 fbo_id.bind(self.gl(), FBOTarget::Draw);
2069 }
2070 }
2071
2072 pub fn reset_read_target(&mut self) {
2073 let fbo = self.default_read_fbo;
2074 self.bind_read_target_impl(fbo, DeviceIntPoint::zero());
2075 }
2076
2077
2078 pub fn reset_draw_target(&mut self) {
2079 let fbo = self.default_draw_fbo;
2080 self.bind_draw_target_impl(fbo);
2081 self.depth_available = true;
2082 }
2083
2084 pub fn bind_draw_target(
2085 &mut self,
2086 target: DrawTarget,
2087 ) {
2088 let (fbo_id, rect, depth_available) = match target {
2089 DrawTarget::Default { rect, .. } => {
2090 (self.default_draw_fbo, rect, false)
2091 }
2092 DrawTarget::Texture { dimensions, fbo_id, with_depth, .. } => {
2093 let rect = FramebufferIntRect::from_size(
2094 device_size_as_framebuffer_size(dimensions),
2095 );
2096 (fbo_id, rect, with_depth)
2097 },
2098 DrawTarget::External { fbo, size } => {
2099 (fbo, size.into(), false)
2100 }
2101 DrawTarget::NativeSurface { external_fbo_id, offset, dimensions, .. } => {
2102 (
2103 FBOId(external_fbo_id),
2104 device_rect_as_framebuffer_rect(&DeviceIntRect::from_origin_and_size(offset, dimensions)),
2105 true
2106 )
2107 }
2108 };
2109
2110 self.depth_available = depth_available;
2111 self.bind_draw_target_impl(fbo_id);
2112 self.gl.viewport(
2113 rect.min.x,
2114 rect.min.y,
2115 rect.width(),
2116 rect.height(),
2117 );
2118 }
2119
2120 pub fn create_fbo(&mut self) -> FBOId {
2123 FBOId(self.gl.gen_framebuffers(1)[0])
2124 }
2125
2126 pub fn create_fbo_for_external_texture(&mut self, texture_id: u32) -> FBOId {
2128 let fbo = self.create_fbo();
2129 fbo.bind(self.gl(), FBOTarget::Draw);
2130 self.gl.framebuffer_texture_2d(
2131 gl::DRAW_FRAMEBUFFER,
2132 gl::COLOR_ATTACHMENT0,
2133 gl::TEXTURE_2D,
2134 texture_id,
2135 0,
2136 );
2137 debug_assert_eq!(
2138 self.gl.check_frame_buffer_status(gl::DRAW_FRAMEBUFFER),
2139 gl::FRAMEBUFFER_COMPLETE,
2140 "Incomplete framebuffer",
2141 );
2142 self.bound_draw_fbo.bind(self.gl(), FBOTarget::Draw);
2143 fbo
2144 }
2145
2146 pub fn delete_fbo(&mut self, fbo: FBOId) {
2147 self.gl.delete_framebuffers(&[fbo.0]);
2148 }
2149
2150 pub fn bind_external_draw_target(&mut self, fbo_id: FBOId) {
2151 debug_assert!(self.inside_frame);
2152
2153 if self.bound_draw_fbo != fbo_id {
2154 self.bound_draw_fbo = fbo_id;
2155 fbo_id.bind(self.gl(), FBOTarget::Draw);
2156 }
2157 }
2158
2159 pub fn link_program(
2171 &mut self,
2172 program: &mut Program,
2173 descriptor: &VertexDescriptor,
2174 ) -> Result<(), ShaderError> {
2175 let _guard = CrashAnnotatorGuard::new(
2176 &self.crash_annotator,
2177 CrashAnnotation::CompileShader,
2178 &program.source_info.full_name_cstr
2179 );
2180
2181 assert!(!program.is_initialized());
2182 let mut build_program = true;
2183 let info = &program.source_info;
2184
2185 if let Some(ref cached_programs) = self.cached_programs {
2187 if cached_programs.entries.borrow().get(&program.source_info.digest).is_none() {
2189 if let Some(ref handler) = cached_programs.program_cache_handler {
2190 handler.try_load_shader_from_disk(&program.source_info.digest, cached_programs);
2191 if let Some(entry) = cached_programs.entries.borrow().get(&program.source_info.digest) {
2192 self.gl.program_binary(program.id, entry.binary.format, &entry.binary.bytes);
2193 }
2194 }
2195 }
2196
2197 if let Some(entry) = cached_programs.entries.borrow_mut().get_mut(&info.digest) {
2198 let mut link_status = [0];
2199 unsafe {
2200 self.gl.get_program_iv(program.id, gl::LINK_STATUS, &mut link_status);
2201 }
2202 if link_status[0] == 0 {
2203 let error_log = self.gl.get_program_info_log(program.id);
2204 error!(
2205 "Failed to load a program object with a program binary: {} renderer {}\n{}",
2206 &info.base_filename,
2207 self.capabilities.renderer_name,
2208 error_log
2209 );
2210 if let Some(ref program_cache_handler) = cached_programs.program_cache_handler {
2211 program_cache_handler.notify_program_binary_failed(&entry.binary);
2212 }
2213 } else {
2214 entry.linked = true;
2215 build_program = false;
2216 }
2217 }
2218 }
2219
2220 if build_program {
2222 let vs_source = info.compute_source(self, ShaderKind::Vertex);
2224 let vs_id = match self.compile_shader(&info.full_name(), gl::VERTEX_SHADER, &vs_source) {
2225 Ok(vs_id) => vs_id,
2226 Err(err) => return Err(err),
2227 };
2228
2229 let fs_source = info.compute_source(self, ShaderKind::Fragment);
2231 let fs_id =
2232 match self.compile_shader(&info.full_name(), gl::FRAGMENT_SHADER, &fs_source) {
2233 Ok(fs_id) => fs_id,
2234 Err(err) => {
2235 self.gl.delete_shader(vs_id);
2236 return Err(err);
2237 }
2238 };
2239
2240 if Some(info.base_filename) == self.dump_shader_source.as_ref().map(String::as_ref) {
2242 let path = std::path::Path::new(info.base_filename);
2243 std::fs::write(path.with_extension("vert"), vs_source).unwrap();
2244 std::fs::write(path.with_extension("frag"), fs_source).unwrap();
2245 }
2246
2247 self.gl.attach_shader(program.id, vs_id);
2249 self.gl.attach_shader(program.id, fs_id);
2250
2251 for (i, attr) in descriptor
2253 .vertex_attributes
2254 .iter()
2255 .chain(descriptor.instance_attributes.iter())
2256 .enumerate()
2257 {
2258 self.gl
2259 .bind_attrib_location(program.id, i as GLuint, attr.name);
2260 }
2261
2262 if self.cached_programs.is_some() {
2263 self.gl.program_parameter_i(program.id, gl::PROGRAM_BINARY_RETRIEVABLE_HINT, gl::TRUE as GLint);
2264 }
2265
2266 self.gl.link_program(program.id);
2268
2269 if cfg!(debug_assertions) {
2270 for (i, attr) in descriptor
2272 .vertex_attributes
2273 .iter()
2274 .chain(descriptor.instance_attributes.iter())
2275 .enumerate()
2276 {
2277 let location = self.gl.get_attrib_location(program.id, attr.name);
2280 if location != i as GLint {
2281 warn!("Attribute {:?} is not found in the shader {}. Expected at {}, found at {}",
2282 attr, program.source_info.base_filename, i, location);
2283 }
2284 }
2285 }
2286
2287 self.gl.detach_shader(program.id, vs_id);
2291 self.gl.detach_shader(program.id, fs_id);
2292 self.gl.delete_shader(vs_id);
2293 self.gl.delete_shader(fs_id);
2294
2295 let mut link_status = [0];
2296 unsafe {
2297 self.gl.get_program_iv(program.id, gl::LINK_STATUS, &mut link_status);
2298 }
2299 if link_status[0] == 0 {
2300 let error_log = self.gl.get_program_info_log(program.id);
2301 error!(
2302 "Failed to link shader program: {}\n{}",
2303 &info.base_filename,
2304 error_log
2305 );
2306 self.gl.delete_program(program.id);
2307 return Err(ShaderError::Link(info.base_filename.to_owned(), error_log));
2308 }
2309
2310 if let Some(ref cached_programs) = self.cached_programs {
2311 if !cached_programs.entries.borrow().contains_key(&info.digest) {
2312 let (buffer, format) = self.gl.get_program_binary(program.id);
2313 if buffer.len() > 0 {
2314 let binary = Arc::new(ProgramBinary::new(buffer, format, info.digest.clone()));
2315 cached_programs.add_new_program_binary(binary);
2316 }
2317 }
2318 }
2319 }
2320
2321 program.is_initialized = true;
2323 program.u_transform = self.gl.get_uniform_location(program.id, "uTransform");
2324 program.u_mode = self.gl.get_uniform_location(program.id, "uMode");
2325 program.u_texture_size = self.gl.get_uniform_location(program.id, "uTextureSize");
2326
2327 Ok(())
2328 }
2329
2330 pub fn bind_program(&mut self, program: &Program) -> bool {
2331 debug_assert!(self.inside_frame);
2332 debug_assert!(program.is_initialized());
2333 if !program.is_initialized() {
2334 return false;
2335 }
2336 #[cfg(debug_assertions)]
2337 {
2338 self.shader_is_ready = true;
2339 }
2340
2341 if self.bound_program != program.id {
2342 self.gl.use_program(program.id);
2343 self.bound_program = program.id;
2344 self.bound_program_name = program.source_info.full_name_cstr.clone();
2345 self.program_mode_id = UniformLocation(program.u_mode);
2346 }
2347 true
2348 }
2349
    pub fn create_texture(
        &mut self,
        target: ImageBufferKind,
        format: ImageFormat,
        mut width: i32,
        mut height: i32,
        filter: TextureFilter,
        render_target: Option<RenderTargetInfo>,
    ) -> Texture {
        debug_assert!(self.inside_frame);

        if width > self.max_texture_size || height > self.max_texture_size {
            error!("Attempting to allocate a texture of size {}x{} above the limit, trimming", width, height);
            width = width.min(self.max_texture_size);
            height = height.min(self.max_texture_size);
        }

        let mut texture = Texture {
            id: self.gl.gen_textures(1)[0],
            target: get_gl_target(target),
            size: DeviceIntSize::new(width, height),
            format,
            filter,
            active_swizzle: Cell::default(),
            fbo: None,
            fbo_with_depth: None,
            last_frame_used: self.frame_id,
            flags: TextureFlags::default(),
        };
        self.bind_texture(DEFAULT_TEXTURE, &texture, Swizzle::default());
        self.set_texture_parameters(texture.target, filter);

        if self.capabilities.supports_texture_usage && render_target.is_some() {
            self.gl.tex_parameter_i(texture.target, gl::TEXTURE_USAGE_ANGLE, gl::FRAMEBUFFER_ATTACHMENT_ANGLE as GLint);
        }

        let desc = self.gl_describe_format(texture.format);

        let mipmap_levels = if texture.filter == TextureFilter::Trilinear {
            let max_dimension = cmp::max(width, height);
            (max_dimension as f64).log2() as GLint + 1
        } else {
            1
        };

        self.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, 0);

        let use_texture_storage = match self.texture_storage_usage {
            TexStorageUsage::Always => true,
            TexStorageUsage::NonBGRA8 => texture.format != ImageFormat::BGRA8,
            TexStorageUsage::Never => false,
        };
        if use_texture_storage {
            self.gl.tex_storage_2d(
                texture.target,
                mipmap_levels,
                desc.internal,
                texture.size.width as GLint,
                texture.size.height as GLint,
            );
        } else {
            self.gl.tex_image_2d(
                texture.target,
                0,
                desc.internal as GLint,
                texture.size.width as GLint,
                texture.size.height as GLint,
                0,
                desc.external,
                desc.pixel_type,
                None,
            );
        }

        if let Some(rt_info) = render_target {
            self.init_fbos(&mut texture, false);
            if rt_info.has_depth {
                self.init_fbos(&mut texture, true);
            }
        }

        texture
    }

    fn set_texture_parameters(&mut self, target: GLuint, filter: TextureFilter) {
        let mag_filter = match filter {
            TextureFilter::Nearest => gl::NEAREST,
            TextureFilter::Linear | TextureFilter::Trilinear => gl::LINEAR,
        };

        let min_filter = match filter {
            TextureFilter::Nearest => gl::NEAREST,
            TextureFilter::Linear => gl::LINEAR,
            TextureFilter::Trilinear => gl::LINEAR_MIPMAP_LINEAR,
        };

        self.gl
            .tex_parameter_i(target, gl::TEXTURE_MAG_FILTER, mag_filter as GLint);
        self.gl
            .tex_parameter_i(target, gl::TEXTURE_MIN_FILTER, min_filter as GLint);

        self.gl
            .tex_parameter_i(target, gl::TEXTURE_WRAP_S, gl::CLAMP_TO_EDGE as GLint);
        self.gl
            .tex_parameter_i(target, gl::TEXTURE_WRAP_T, gl::CLAMP_TO_EDGE as GLint);
    }

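    /// Copies the entire contents of `src` into `dst`, which must be at
    /// least as large in both dimensions.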
    pub fn copy_entire_texture(
        &mut self,
        dst: &mut Texture,
        src: &Texture,
    ) {
        debug_assert!(self.inside_frame);
        debug_assert!(dst.size.width >= src.size.width);
        debug_assert!(dst.size.height >= src.size.height);

        self.copy_texture_sub_region(
            src,
            0,
            0,
            dst,
            0,
            0,
            src.size.width as _,
            src.size.height as _,
        );
    }

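    /// Copies a rectangular region from one texture to another, using
    /// glCopyImageSubData when the driver supports it and falling back to a
    /// framebuffer blit otherwise.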
    pub fn copy_texture_sub_region(
        &mut self,
        src_texture: &Texture,
        src_x: usize,
        src_y: usize,
        dest_texture: &Texture,
        dest_x: usize,
        dest_y: usize,
        width: usize,
        height: usize,
    ) {
        if self.capabilities.supports_copy_image_sub_data {
            assert_ne!(
                src_texture.id, dest_texture.id,
                "glCopyImageSubData's behaviour is undefined if src and dst images are identical and the rectangles overlap."
            );
            unsafe {
                self.gl.copy_image_sub_data(
                    src_texture.id,
                    src_texture.target,
                    0,
                    src_x as _,
                    src_y as _,
                    0,
                    dest_texture.id,
                    dest_texture.target,
                    0,
                    dest_x as _,
                    dest_y as _,
                    0,
                    width as _,
                    height as _,
                    1,
                );
            }
        } else {
            let src_offset = FramebufferIntPoint::new(src_x as i32, src_y as i32);
            let dest_offset = FramebufferIntPoint::new(dest_x as i32, dest_y as i32);
            let size = FramebufferIntSize::new(width as i32, height as i32);

            self.blit_render_target(
                ReadTarget::from_texture(src_texture),
                FramebufferIntRect::from_origin_and_size(src_offset, size),
                DrawTarget::from_texture(dest_texture, false),
                FramebufferIntRect::from_origin_and_size(dest_offset, size),
                TextureFilter::Nearest,
            );
        }
    }

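    /// Notifies the driver that the contents of the texture's render target
    /// (color, plus depth if present) no longer need to be preserved.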
    pub fn invalidate_render_target(&mut self, texture: &Texture) {
        let (fbo, attachments) = if texture.supports_depth() {
            (&texture.fbo_with_depth,
             &[gl::COLOR_ATTACHMENT0, gl::DEPTH_ATTACHMENT] as &[GLenum])
        } else {
            (&texture.fbo, &[gl::COLOR_ATTACHMENT0] as &[GLenum])
        };

        if let Some(fbo_id) = fbo {
            let original_bound_fbo = self.bound_draw_fbo;
            self.bind_external_draw_target(*fbo_id);
            self.gl.invalidate_framebuffer(gl::FRAMEBUFFER, attachments);
            self.bind_external_draw_target(original_bound_fbo);
        }
    }

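    /// Notifies the driver that the depth attachment of the currently bound
    /// draw framebuffer no longer needs to be preserved.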
    pub fn invalidate_depth_target(&mut self) {
        assert!(self.depth_available);
        let attachments = if self.bound_draw_fbo == self.default_draw_fbo {
            &[gl::DEPTH] as &[GLenum]
        } else {
            &[gl::DEPTH_ATTACHMENT] as &[GLenum]
        };
        self.gl.invalidate_framebuffer(gl::DRAW_FRAMEBUFFER, attachments);
    }

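    /// Reuses an existing render-target texture for this frame, lazily
    /// adding a depth-enabled FBO if one is newly required.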
    pub fn reuse_render_target<T: Texel>(
        &mut self,
        texture: &mut Texture,
        rt_info: RenderTargetInfo,
    ) {
        texture.last_frame_used = self.frame_id;

        if rt_info.has_depth && !texture.supports_depth() {
            self.init_fbos(texture, true);
        }
    }

    fn init_fbos(&mut self, texture: &mut Texture, with_depth: bool) {
        let (fbo, depth_rb) = if with_depth {
            let depth_target = self.acquire_depth_target(texture.get_dimensions());
            (&mut texture.fbo_with_depth, Some(depth_target))
        } else {
            (&mut texture.fbo, None)
        };

        assert!(fbo.is_none());
        let fbo_id = FBOId(*self.gl.gen_framebuffers(1).first().unwrap());
        *fbo = Some(fbo_id);

        let original_bound_fbo = self.bound_draw_fbo;

        self.bind_external_draw_target(fbo_id);

        self.gl.framebuffer_texture_2d(
            gl::DRAW_FRAMEBUFFER,
            gl::COLOR_ATTACHMENT0,
            texture.target,
            texture.id,
            0,
        );

        if let Some(depth_rb) = depth_rb {
            self.gl.framebuffer_renderbuffer(
                gl::DRAW_FRAMEBUFFER,
                gl::DEPTH_ATTACHMENT,
                gl::RENDERBUFFER,
                depth_rb.0,
            );
        }

        debug_assert_eq!(
            self.gl.check_frame_buffer_status(gl::DRAW_FRAMEBUFFER),
            gl::FRAMEBUFFER_COMPLETE,
            "Incomplete framebuffer",
        );

        self.bind_external_draw_target(original_bound_fbo);
    }

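    /// Returns the shared depth renderbuffer for the given dimensions,
    /// allocating it on first use. Targets are refcounted and freed again in
    /// `release_depth_target`.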
    fn acquire_depth_target(&mut self, dimensions: DeviceIntSize) -> RBOId {
        let gl = &self.gl;
        let depth_format = self.depth_format;
        let target = self.depth_targets.entry(dimensions).or_insert_with(|| {
            let renderbuffer_ids = gl.gen_renderbuffers(1);
            let depth_rb = renderbuffer_ids[0];
            gl.bind_renderbuffer(gl::RENDERBUFFER, depth_rb);
            gl.renderbuffer_storage(
                gl::RENDERBUFFER,
                depth_format,
                dimensions.width as _,
                dimensions.height as _,
            );
            SharedDepthTarget {
                rbo_id: RBOId(depth_rb),
                refcount: 0,
            }
        });
        target.refcount += 1;
        target.rbo_id
    }

    fn release_depth_target(&mut self, dimensions: DeviceIntSize) {
        let mut entry = match self.depth_targets.entry(dimensions) {
            Entry::Occupied(x) => x,
            Entry::Vacant(..) => panic!("Releasing unknown depth target"),
        };
        debug_assert!(entry.get().refcount != 0);
        entry.get_mut().refcount -= 1;
        if entry.get().refcount == 0 {
            let (_, target) = entry.remove_entry();
            self.gl.delete_renderbuffers(&[target.rbo_id.0]);
        }
    }

    fn blit_render_target_impl(
        &mut self,
        src_rect: FramebufferIntRect,
        dest_rect: FramebufferIntRect,
        filter: TextureFilter,
    ) {
        debug_assert!(self.inside_frame);

        let filter = match filter {
            TextureFilter::Nearest => gl::NEAREST,
            TextureFilter::Linear | TextureFilter::Trilinear => gl::LINEAR,
        };

        let src_x0 = src_rect.min.x + self.bound_read_fbo.1.x;
        let src_y0 = src_rect.min.y + self.bound_read_fbo.1.y;

        self.gl.blit_framebuffer(
            src_x0,
            src_y0,
            src_x0 + src_rect.width(),
            src_y0 + src_rect.height(),
            dest_rect.min.x,
            dest_rect.min.y,
            dest_rect.max.x,
            dest_rect.max.y,
            gl::COLOR_BUFFER_BIT,
            filter,
        );
    }

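    /// Binds the given read and draw targets, then blits `src_rect` of the
    /// former into `dest_rect` of the latter.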
    pub fn blit_render_target(
        &mut self,
        src_target: ReadTarget,
        src_rect: FramebufferIntRect,
        dest_target: DrawTarget,
        dest_rect: FramebufferIntRect,
        filter: TextureFilter,
    ) {
        debug_assert!(self.inside_frame);

        self.bind_read_target(src_target);

        self.bind_draw_target(dest_target);

        self.blit_render_target_impl(src_rect, dest_rect, filter);
    }

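    /// Like `blit_render_target`, but flips the result vertically by
    /// swapping the top and bottom of the destination rectangle.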
    pub fn blit_render_target_invert_y(
        &mut self,
        src_target: ReadTarget,
        src_rect: FramebufferIntRect,
        dest_target: DrawTarget,
        dest_rect: FramebufferIntRect,
    ) {
        debug_assert!(self.inside_frame);

        let mut inverted_dest_rect = dest_rect;
        inverted_dest_rect.min.y = dest_rect.max.y;
        inverted_dest_rect.max.y = dest_rect.min.y;

        self.blit_render_target(
            src_target,
            src_rect,
            dest_target,
            inverted_dest_rect,
            TextureFilter::Linear,
        );
    }

    pub fn delete_texture(&mut self, mut texture: Texture) {
        debug_assert!(self.inside_frame);
        let had_depth = texture.supports_depth();
        if let Some(fbo) = texture.fbo {
            self.gl.delete_framebuffers(&[fbo.0]);
            texture.fbo = None;
        }
        if let Some(fbo) = texture.fbo_with_depth {
            self.gl.delete_framebuffers(&[fbo.0]);
            texture.fbo_with_depth = None;
        }

        if had_depth {
            self.release_depth_target(texture.get_dimensions());
        }

        self.gl.delete_textures(&[texture.id]);

        for bound_texture in &mut self.bound_textures {
            if *bound_texture == texture.id {
                *bound_texture = 0;
            }
        }

        texture.id = 0;
    }

    #[cfg(feature = "replay")]
    pub fn delete_external_texture(&mut self, mut external: ExternalTexture) {
        self.gl.delete_textures(&[external.id]);
        external.id = 0;
    }

    pub fn delete_program(&mut self, mut program: Program) {
        self.gl.delete_program(program.id);
        program.id = 0;
    }

    pub fn create_program_linked(
        &mut self,
        base_filename: &'static str,
        features: &[&'static str],
        descriptor: &VertexDescriptor,
    ) -> Result<Program, ShaderError> {
        let mut program = self.create_program(base_filename, features)?;
        self.link_program(&mut program, descriptor)?;
        Ok(program)
    }

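    /// Creates a program object and loads a cached binary for it if one is
    /// available. `link_program` must still be called before the program can
    /// be bound.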
    pub fn create_program(
        &mut self,
        base_filename: &'static str,
        features: &[&'static str],
    ) -> Result<Program, ShaderError> {
        debug_assert!(self.inside_frame);

        let source_info = ProgramSourceInfo::new(self, base_filename, features);

        let pid = self.gl.create_program();

        if let Some(ref cached_programs) = self.cached_programs {
            if let Some(entry) = cached_programs.entries.borrow().get(&source_info.digest) {
                self.gl.program_binary(pid, entry.binary.format, &entry.binary.bytes);
            }
        }

        let program = Program {
            id: pid,
            u_transform: 0,
            u_mode: 0,
            u_texture_size: 0,
            source_info,
            is_initialized: false,
        };

        Ok(program)
    }

    fn build_shader_string<F: FnMut(&str)>(
        &self,
        features: &[&'static str],
        kind: ShaderKind,
        base_filename: &str,
        output: F,
    ) {
        do_build_shader_string(
            get_shader_version(&*self.gl),
            features,
            kind,
            base_filename,
            &|f| get_unoptimized_shader_source(f, self.resource_override_path.as_ref()),
            output,
        )
    }

    pub fn bind_shader_samplers<S>(&mut self, program: &Program, bindings: &[(&'static str, S)])
    where
        S: Into<TextureSlot> + Copy,
    {
        assert_eq!(self.bound_program, program.id);

        for binding in bindings {
            let u_location = self.gl.get_uniform_location(program.id, binding.0);
            if u_location != -1 {
                self.bind_program(program);
                self.gl
                    .uniform_1i(u_location, binding.1.into().0 as GLint);
            }
        }
    }

    pub fn get_uniform_location(&self, program: &Program, name: &str) -> UniformLocation {
        UniformLocation(self.gl.get_uniform_location(program.id, name))
    }

    pub fn set_uniforms(
        &self,
        program: &Program,
        transform: &Transform3D<f32>,
    ) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        self.gl
            .uniform_matrix_4fv(program.u_transform, false, &transform.to_array());
    }

    pub fn switch_mode(&self, mode: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        self.gl.uniform_1i(self.program_mode_id.0, mode);
    }

    pub fn set_shader_texture_size(
        &self,
        program: &Program,
        texture_size: DeviceSize,
    ) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        if program.u_texture_size != -1 {
            self.gl.uniform_2f(program.u_texture_size, texture_size.width, texture_size.height);
        }
    }

    pub fn create_pbo(&mut self) -> PBO {
        let id = self.gl.gen_buffers(1)[0];
        PBO {
            id,
            reserved_size: 0,
        }
    }

    pub fn create_pbo_with_size(&mut self, size: usize) -> PBO {
        let mut pbo = self.create_pbo();

        self.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, pbo.id);
        self.gl.pixel_store_i(gl::PACK_ALIGNMENT, 1);
        self.gl.buffer_data_untyped(
            gl::PIXEL_PACK_BUFFER,
            size as _,
            ptr::null(),
            gl::STREAM_READ,
        );
        // Unbind the same target that was bound above; unbinding
        // PIXEL_UNPACK_BUFFER here would leave the pack buffer bound.
        self.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, 0);

        pbo.reserved_size = size;
        pbo
    }

    pub fn read_pixels_into_pbo(
        &mut self,
        read_target: ReadTarget,
        rect: DeviceIntRect,
        format: ImageFormat,
        pbo: &PBO,
    ) {
        let byte_size = rect.area() as usize * format.bytes_per_pixel() as usize;

        assert!(byte_size <= pbo.reserved_size);

        self.bind_read_target(read_target);

        self.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, pbo.id);
        self.gl.pixel_store_i(gl::PACK_ALIGNMENT, 1);

        let gl_format = self.gl_describe_format(format);

        unsafe {
            self.gl.read_pixels_into_pbo(
                rect.min.x as _,
                rect.min.y as _,
                rect.width() as _,
                rect.height() as _,
                gl_format.read,
                gl_format.pixel_type,
            );
        }

        self.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, 0);
    }

    pub fn map_pbo_for_readback<'a>(&'a mut self, pbo: &'a PBO) -> Option<BoundPBO<'a>> {
        self.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, pbo.id);

        let buf_ptr = match self.gl.get_type() {
            GlType::Gl => {
                self.gl.map_buffer(gl::PIXEL_PACK_BUFFER, gl::READ_ONLY)
            }

            GlType::GlEs => {
                self.gl.map_buffer_range(
                    gl::PIXEL_PACK_BUFFER,
                    0,
                    pbo.reserved_size as _,
                    gl::MAP_READ_BIT)
            }
        };

        if buf_ptr.is_null() {
            return None;
        }

        let buffer = unsafe { slice::from_raw_parts(buf_ptr as *const u8, pbo.reserved_size) };

        Some(BoundPBO {
            device: self,
            data: buffer,
        })
    }

    pub fn delete_pbo(&mut self, mut pbo: PBO) {
        self.gl.delete_buffers(&[pbo.id]);
        pbo.id = 0;
        pbo.reserved_size = 0;
    }

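    /// Returns the size and stride, in bytes, required to upload an image of
    /// the given size and format, with the stride rounded up to the device's
    /// required PBO stride.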
    pub fn required_upload_size_and_stride(&self, size: DeviceIntSize, format: ImageFormat) -> (usize, usize) {
        assert!(size.width >= 0);
        assert!(size.height >= 0);

        let bytes_pp = format.bytes_per_pixel() as usize;
        let width_bytes = size.width as usize * bytes_pp;

        let dst_stride = round_up_to_multiple(width_bytes, self.required_pbo_stride.num_bytes(format));

        let dst_size = dst_stride * size.height as usize;

        (dst_size, dst_stride)
    }

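    /// Begins a texture upload pass, returning an uploader that stages data
    /// through the supplied PBO pool. The uploader must be flushed before it
    /// is dropped.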
    pub fn upload_texture<'a>(
        &mut self,
        pbo_pool: &'a mut UploadPBOPool,
    ) -> TextureUploader<'a> {
        debug_assert!(self.inside_frame);

        pbo_pool.begin_frame(self);

        TextureUploader {
            buffers: Vec::new(),
            pbo_pool,
        }
    }

    pub fn upload_texture_immediate<T: Texel>(
        &mut self,
        texture: &Texture,
        pixels: &[T]
    ) {
        self.bind_texture(DEFAULT_TEXTURE, texture, Swizzle::default());
        let desc = self.gl_describe_format(texture.format);
        self.gl.tex_sub_image_2d(
            texture.target,
            0,
            0,
            0,
            texture.size.width as GLint,
            texture.size.height as GLint,
            desc.external,
            desc.pixel_type,
            texels_to_u8_slice(pixels),
        );
    }

    pub fn read_pixels(&mut self, img_desc: &ImageDescriptor) -> Vec<u8> {
        let desc = self.gl_describe_format(img_desc.format);
        self.gl.read_pixels(
            0, 0,
            img_desc.size.width as i32,
            img_desc.size.height as i32,
            desc.read,
            desc.pixel_type,
        )
    }

    pub fn read_pixels_into(
        &mut self,
        rect: FramebufferIntRect,
        format: ImageFormat,
        output: &mut [u8],
    ) {
        let bytes_per_pixel = format.bytes_per_pixel();
        let desc = self.gl_describe_format(format);
        let size_in_bytes = (bytes_per_pixel * rect.area()) as usize;
        assert_eq!(output.len(), size_in_bytes);

        self.gl.flush();
        self.gl.read_pixels_into_buffer(
            rect.min.x as _,
            rect.min.y as _,
            rect.width() as _,
            rect.height() as _,
            desc.read,
            desc.pixel_type,
            output,
        );
    }

    pub fn get_tex_image_into(
        &mut self,
        texture: &Texture,
        format: ImageFormat,
        output: &mut [u8],
    ) {
        self.bind_texture(DEFAULT_TEXTURE, texture, Swizzle::default());
        let desc = self.gl_describe_format(format);
        self.gl.get_tex_image_into_buffer(
            texture.target,
            0,
            desc.external,
            desc.pixel_type,
            output,
        );
    }

    fn attach_read_texture_raw(&mut self, texture_id: GLuint, target: GLuint) {
        self.gl.framebuffer_texture_2d(
            gl::READ_FRAMEBUFFER,
            gl::COLOR_ATTACHMENT0,
            target,
            texture_id,
            0,
        )
    }

    pub fn attach_read_texture_external(
        &mut self, texture_id: GLuint, target: ImageBufferKind
    ) {
        self.attach_read_texture_raw(texture_id, get_gl_target(target))
    }

    pub fn attach_read_texture(&mut self, texture: &Texture) {
        self.attach_read_texture_raw(texture.id, texture.target)
    }

    fn bind_vao_impl(&mut self, id: GLuint) {
        debug_assert!(self.inside_frame);

        if self.bound_vao != id {
            self.bound_vao = id;
            self.gl.bind_vertex_array(id);
        }
    }

    pub fn bind_vao(&mut self, vao: &VAO) {
        self.bind_vao_impl(vao.id)
    }

    pub fn bind_custom_vao(&mut self, vao: &CustomVAO) {
        self.bind_vao_impl(vao.id)
    }

    fn create_vao_with_vbos(
        &mut self,
        descriptor: &VertexDescriptor,
        main_vbo_id: VBOId,
        instance_vbo_id: VBOId,
        instance_divisor: u32,
        ibo_id: IBOId,
        owns_vertices_and_indices: bool,
    ) -> VAO {
        let instance_stride = descriptor.instance_stride() as usize;
        let vao_id = self.gl.gen_vertex_arrays(1)[0];

        self.bind_vao_impl(vao_id);

        descriptor.bind(self.gl(), main_vbo_id, instance_vbo_id, instance_divisor);
        ibo_id.bind(self.gl());

        VAO {
            id: vao_id,
            ibo_id,
            main_vbo_id,
            instance_vbo_id,
            instance_stride,
            instance_divisor,
            owns_vertices_and_indices,
        }
    }

    pub fn create_custom_vao(
        &mut self,
        streams: &[Stream],
    ) -> CustomVAO {
        debug_assert!(self.inside_frame);

        let vao_id = self.gl.gen_vertex_arrays(1)[0];
        self.bind_vao_impl(vao_id);

        let mut attrib_index = 0;
        for stream in streams {
            VertexDescriptor::bind_attributes(
                stream.attributes,
                attrib_index,
                0,
                self.gl(),
                stream.vbo,
            );
            attrib_index += stream.attributes.len();
        }

        CustomVAO {
            id: vao_id,
        }
    }

    pub fn delete_custom_vao(&mut self, mut vao: CustomVAO) {
        self.gl.delete_vertex_arrays(&[vao.id]);
        vao.id = 0;
    }

    pub fn create_vbo<T>(&mut self) -> VBO<T> {
        let ids = self.gl.gen_buffers(1);
        VBO {
            id: ids[0],
            target: gl::ARRAY_BUFFER,
            allocated_count: 0,
            marker: PhantomData,
        }
    }

    pub fn delete_vbo<T>(&mut self, mut vbo: VBO<T>) {
        self.gl.delete_buffers(&[vbo.id]);
        vbo.id = 0;
    }

    pub fn create_vao(&mut self, descriptor: &VertexDescriptor, instance_divisor: u32) -> VAO {
        debug_assert!(self.inside_frame);

        let buffer_ids = self.gl.gen_buffers(3);
        let ibo_id = IBOId(buffer_ids[0]);
        let main_vbo_id = VBOId(buffer_ids[1]);
        let instance_vbo_id = VBOId(buffer_ids[2]);

        self.create_vao_with_vbos(descriptor, main_vbo_id, instance_vbo_id, instance_divisor, ibo_id, true)
    }

    pub fn delete_vao(&mut self, mut vao: VAO) {
        self.gl.delete_vertex_arrays(&[vao.id]);
        vao.id = 0;

        if vao.owns_vertices_and_indices {
            self.gl.delete_buffers(&[vao.ibo_id.0]);
            self.gl.delete_buffers(&[vao.main_vbo_id.0]);
        }

        self.gl.delete_buffers(&[vao.instance_vbo_id.0])
    }

    pub fn allocate_vbo<V>(
        &mut self,
        vbo: &mut VBO<V>,
        count: usize,
        usage_hint: VertexUsageHint,
    ) {
        debug_assert!(self.inside_frame);
        vbo.allocated_count = count;

        self.gl.bind_buffer(vbo.target, vbo.id);
        self.gl.buffer_data_untyped(
            vbo.target,
            (count * mem::size_of::<V>()) as _,
            ptr::null(),
            usage_hint.to_gl(),
        );
    }

    pub fn fill_vbo<V>(
        &mut self,
        vbo: &VBO<V>,
        data: &[V],
        offset: usize,
    ) {
        debug_assert!(self.inside_frame);
        assert!(offset + data.len() <= vbo.allocated_count);
        let stride = mem::size_of::<V>();

        self.gl.bind_buffer(vbo.target, vbo.id);
        self.gl.buffer_sub_data_untyped(
            vbo.target,
            (offset * stride) as _,
            (data.len() * stride) as _,
            data.as_ptr() as _,
        );
    }

    fn update_vbo_data<V>(
        &mut self,
        vbo: VBOId,
        vertices: &[V],
        usage_hint: VertexUsageHint,
    ) {
        debug_assert!(self.inside_frame);

        vbo.bind(self.gl());
        self.gl.buffer_data_untyped(
            gl::ARRAY_BUFFER,
            (vertices.len() * mem::size_of::<V>()) as isize,
            vertices.as_ptr() as *const GLvoid,
            usage_hint.to_gl()
        );
    }

    pub fn create_vao_with_new_instances(
        &mut self,
        descriptor: &VertexDescriptor,
        base_vao: &VAO,
    ) -> VAO {
        debug_assert!(self.inside_frame);

        let buffer_ids = self.gl.gen_buffers(1);
        let instance_vbo_id = VBOId(buffer_ids[0]);

        self.create_vao_with_vbos(
            descriptor,
            base_vao.main_vbo_id,
            instance_vbo_id,
            base_vao.instance_divisor,
            base_vao.ibo_id,
            false,
        )
    }

    pub fn update_vao_main_vertices<V>(
        &mut self,
        vao: &VAO,
        vertices: &[V],
        usage_hint: VertexUsageHint,
    ) {
        debug_assert_eq!(self.bound_vao, vao.id);
        self.update_vbo_data(vao.main_vbo_id, vertices, usage_hint)
    }

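    /// Uploads instance data to the VAO's instance VBO. When `repeat` is
    /// `Some(count)`, each instance is written `count` times in a row, so
    /// callers can expand per-instance data for draws that do not use
    /// hardware instancing.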
    pub fn update_vao_instances<V: Clone>(
        &mut self,
        vao: &VAO,
        instances: &[V],
        usage_hint: VertexUsageHint,
        repeat: Option<NonZeroUsize>,
    ) {
        debug_assert_eq!(self.bound_vao, vao.id);
        debug_assert_eq!(vao.instance_stride as usize, mem::size_of::<V>());

        match repeat {
            Some(count) => {
                let target = gl::ARRAY_BUFFER;
                self.gl.bind_buffer(target, vao.instance_vbo_id.0);
                let size = instances.len() * count.get() * mem::size_of::<V>();
                self.gl.buffer_data_untyped(
                    target,
                    size as _,
                    ptr::null(),
                    usage_hint.to_gl(),
                );

                let ptr = match self.gl.get_type() {
                    GlType::Gl => {
                        self.gl.map_buffer(target, gl::WRITE_ONLY)
                    }
                    GlType::GlEs => {
                        self.gl.map_buffer_range(target, 0, size as _, gl::MAP_WRITE_BIT)
                    }
                };
                assert!(!ptr.is_null());

                let buffer_slice = unsafe {
                    slice::from_raw_parts_mut(ptr as *mut V, instances.len() * count.get())
                };
                // Write each instance `count` times, rather than a hardcoded
                // four, so the expansion matches the buffer size computed
                // from the repeat count above.
                for (chunk, instance) in buffer_slice.chunks_mut(count.get()).zip(instances) {
                    for slot in chunk {
                        *slot = instance.clone();
                    }
                }
                self.gl.unmap_buffer(target);
            }
            None => {
                self.update_vbo_data(vao.instance_vbo_id, instances, usage_hint);
            }
        }

        if self.capabilities.requires_vao_rebind_after_orphaning {
            self.bind_vao_impl(0);
            self.bind_vao_impl(vao.id);
        }
    }

    pub fn update_vao_indices<I>(&mut self, vao: &VAO, indices: &[I], usage_hint: VertexUsageHint) {
        debug_assert!(self.inside_frame);
        debug_assert_eq!(self.bound_vao, vao.id);

        vao.ibo_id.bind(self.gl());
        self.gl.buffer_data_untyped(
            gl::ELEMENT_ARRAY_BUFFER,
            (indices.len() * mem::size_of::<I>()) as isize,
            indices.as_ptr() as *const GLvoid,
            usage_hint.to_gl(),
        );
    }

    pub fn draw_triangles_u16(&mut self, first_vertex: i32, index_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        let _guard = if self.annotate_draw_call_crashes {
            Some(CrashAnnotatorGuard::new(
                &self.crash_annotator,
                CrashAnnotation::DrawShader,
                &self.bound_program_name,
            ))
        } else {
            None
        };

        self.gl.draw_elements(
            gl::TRIANGLES,
            index_count,
            gl::UNSIGNED_SHORT,
            first_vertex as u32 * 2,
        );
    }

    pub fn draw_triangles_u32(&mut self, first_vertex: i32, index_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        let _guard = if self.annotate_draw_call_crashes {
            Some(CrashAnnotatorGuard::new(
                &self.crash_annotator,
                CrashAnnotation::DrawShader,
                &self.bound_program_name,
            ))
        } else {
            None
        };

        self.gl.draw_elements(
            gl::TRIANGLES,
            index_count,
            gl::UNSIGNED_INT,
            first_vertex as u32 * 4,
        );
    }

    pub fn draw_nonindexed_points(&mut self, first_vertex: i32, vertex_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        let _guard = if self.annotate_draw_call_crashes {
            Some(CrashAnnotatorGuard::new(
                &self.crash_annotator,
                CrashAnnotation::DrawShader,
                &self.bound_program_name,
            ))
        } else {
            None
        };

        self.gl.draw_arrays(gl::POINTS, first_vertex, vertex_count);
    }

    pub fn draw_nonindexed_lines(&mut self, first_vertex: i32, vertex_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        let _guard = if self.annotate_draw_call_crashes {
            Some(CrashAnnotatorGuard::new(
                &self.crash_annotator,
                CrashAnnotation::DrawShader,
                &self.bound_program_name,
            ))
        } else {
            None
        };

        self.gl.draw_arrays(gl::LINES, first_vertex, vertex_count);
    }

    pub fn draw_indexed_triangles(&mut self, index_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        let _guard = if self.annotate_draw_call_crashes {
            Some(CrashAnnotatorGuard::new(
                &self.crash_annotator,
                CrashAnnotation::DrawShader,
                &self.bound_program_name,
            ))
        } else {
            None
        };

        self.gl.draw_elements(
            gl::TRIANGLES,
            index_count,
            gl::UNSIGNED_SHORT,
            0,
        );
    }

    pub fn draw_indexed_triangles_instanced_u16(&mut self, index_count: i32, instance_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        let _guard = if self.annotate_draw_call_crashes {
            Some(CrashAnnotatorGuard::new(
                &self.crash_annotator,
                CrashAnnotation::DrawShader,
                &self.bound_program_name,
            ))
        } else {
            None
        };

        self.gl.draw_elements_instanced(
            gl::TRIANGLES,
            index_count,
            gl::UNSIGNED_SHORT,
            0,
            instance_count,
        );
    }

    pub fn end_frame(&mut self) {
        self.reset_draw_target();
        self.reset_read_target();

        debug_assert!(self.inside_frame);
        self.inside_frame = false;

        self.gl.bind_texture(gl::TEXTURE_2D, 0);
        self.gl.use_program(0);

        for i in 0 .. self.bound_textures.len() {
            self.gl.active_texture(gl::TEXTURE0 + i as GLuint);
            self.gl.bind_texture(gl::TEXTURE_2D, 0);
        }

        self.gl.active_texture(gl::TEXTURE0);

        self.frame_id.0 += 1;

        // Save any new program binaries to the disk cache; the flag forces a
        // flush once the tenth frame has been rendered.
        if let Some(ref cache) = self.cached_programs {
            cache.update_disk_cache(self.frame_id.0 == 10);
        }
    }

    pub fn clear_target(
        &self,
        color: Option<[f32; 4]>,
        depth: Option<f32>,
        rect: Option<FramebufferIntRect>,
    ) {
        let mut clear_bits = 0;

        if let Some(color) = color {
            self.gl.clear_color(color[0], color[1], color[2], color[3]);
            clear_bits |= gl::COLOR_BUFFER_BIT;
        }

        if let Some(depth) = depth {
            if cfg!(debug_assertions) {
                let mut mask = [0];
                unsafe {
                    self.gl.get_boolean_v(gl::DEPTH_WRITEMASK, &mut mask);
                }
                assert_ne!(mask[0], 0);
            }
            self.gl.clear_depth(depth as f64);
            clear_bits |= gl::DEPTH_BUFFER_BIT;
        }

        if clear_bits != 0 {
            match rect {
                Some(rect) => {
                    self.gl.enable(gl::SCISSOR_TEST);
                    self.gl.scissor(
                        rect.min.x,
                        rect.min.y,
                        rect.width(),
                        rect.height(),
                    );
                    self.gl.clear(clear_bits);
                    self.gl.disable(gl::SCISSOR_TEST);
                }
                None => {
                    self.gl.clear(clear_bits);
                }
            }
        }
    }

    pub fn enable_depth(&self, depth_func: DepthFunction) {
        assert!(self.depth_available, "Enabling depth test without depth target");
        self.gl.enable(gl::DEPTH_TEST);
        self.gl.depth_func(depth_func as GLuint);
    }

    pub fn disable_depth(&self) {
        self.gl.disable(gl::DEPTH_TEST);
    }

    pub fn enable_depth_write(&self) {
        assert!(self.depth_available, "Enabling depth write without depth target");
        self.gl.depth_mask(true);
    }

    pub fn disable_depth_write(&self) {
        self.gl.depth_mask(false);
    }

    pub fn disable_stencil(&self) {
        self.gl.disable(gl::STENCIL_TEST);
    }

    pub fn set_scissor_rect(&self, rect: FramebufferIntRect) {
        self.gl.scissor(
            rect.min.x,
            rect.min.y,
            rect.width(),
            rect.height(),
        );
    }

    pub fn enable_scissor(&self) {
        self.gl.enable(gl::SCISSOR_TEST);
    }

    pub fn disable_scissor(&self) {
        self.gl.disable(gl::SCISSOR_TEST);
    }

    pub fn enable_color_write(&self) {
        self.gl.color_mask(true, true, true, true);
    }

    pub fn disable_color_write(&self) {
        self.gl.color_mask(false, false, false, false);
    }

    pub fn set_blend(&mut self, enable: bool) {
        if enable {
            self.gl.enable(gl::BLEND);
        } else {
            self.gl.disable(gl::BLEND);
        }
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }
    }

    fn set_blend_factors(
        &mut self,
        color: (GLenum, GLenum),
        alpha: (GLenum, GLenum),
    ) {
        self.gl.blend_equation(gl::FUNC_ADD);
        if color == alpha {
            self.gl.blend_func(color.0, color.1);
        } else {
            self.gl.blend_func_separate(color.0, color.1, alpha.0, alpha.1);
        }
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }
    }

    pub fn set_blend_mode_alpha(&mut self) {
        self.set_blend_factors(
            (gl::SRC_ALPHA, gl::ONE_MINUS_SRC_ALPHA),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }

    pub fn set_blend_mode_premultiplied_alpha(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }

    pub fn set_blend_mode_premultiplied_dest_out(&mut self) {
        self.set_blend_factors(
            (gl::ZERO, gl::ONE_MINUS_SRC_ALPHA),
            (gl::ZERO, gl::ONE_MINUS_SRC_ALPHA),
        );
    }

    pub fn set_blend_mode_multiply(&mut self) {
        self.set_blend_factors(
            (gl::ZERO, gl::SRC_COLOR),
            (gl::ZERO, gl::SRC_ALPHA),
        );
    }
    pub fn set_blend_mode_subpixel_pass0(&mut self) {
        self.set_blend_factors(
            (gl::ZERO, gl::ONE_MINUS_SRC_COLOR),
            (gl::ZERO, gl::ONE_MINUS_SRC_ALPHA),
        );
    }
    pub fn set_blend_mode_subpixel_pass1(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE),
            (gl::ONE, gl::ONE),
        );
    }
    pub fn set_blend_mode_subpixel_with_bg_color_pass0(&mut self) {
        self.set_blend_factors(
            (gl::ZERO, gl::ONE_MINUS_SRC_COLOR),
            (gl::ZERO, gl::ONE),
        );
    }
    pub fn set_blend_mode_subpixel_with_bg_color_pass1(&mut self) {
        self.set_blend_factors(
            (gl::ONE_MINUS_DST_ALPHA, gl::ONE),
            (gl::ZERO, gl::ONE),
        );
    }
    pub fn set_blend_mode_subpixel_with_bg_color_pass2(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }
    pub fn set_blend_mode_subpixel_constant_text_color(&mut self, color: ColorF) {
        self.gl.blend_color(color.r, color.g, color.b, 1.0);
        self.set_blend_factors(
            (gl::CONSTANT_COLOR, gl::ONE_MINUS_SRC_COLOR),
            (gl::CONSTANT_ALPHA, gl::ONE_MINUS_SRC_ALPHA),
        );
    }
    pub fn set_blend_mode_subpixel_dual_source(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE_MINUS_SRC1_COLOR),
            (gl::ONE, gl::ONE_MINUS_SRC1_ALPHA),
        );
    }
    pub fn set_blend_mode_multiply_dual_source(&mut self) {
        self.set_blend_factors(
            (gl::ONE_MINUS_DST_ALPHA, gl::ONE_MINUS_SRC1_COLOR),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }
    pub fn set_blend_mode_screen(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE_MINUS_SRC_COLOR),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }
    pub fn set_blend_mode_exclusion(&mut self) {
        self.set_blend_factors(
            (gl::ONE_MINUS_DST_COLOR, gl::ONE_MINUS_SRC_COLOR),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }
    pub fn set_blend_mode_show_overdraw(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }

    pub fn set_blend_mode_max(&mut self) {
        self.gl
            .blend_func_separate(gl::ONE, gl::ONE, gl::ONE, gl::ONE);
        self.gl.blend_equation_separate(gl::MAX, gl::FUNC_ADD);
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }
    }
    pub fn set_blend_mode_min(&mut self) {
        self.gl
            .blend_func_separate(gl::ONE, gl::ONE, gl::ONE, gl::ONE);
        self.gl.blend_equation_separate(gl::MIN, gl::FUNC_ADD);
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }
    }
    pub fn set_blend_mode_advanced(&mut self, mode: MixBlendMode) {
        self.gl.blend_equation(match mode {
            MixBlendMode::Normal => {
                self.gl.blend_func_separate(gl::ZERO, gl::SRC_COLOR, gl::ZERO, gl::SRC_ALPHA);
                gl::FUNC_ADD
            },
            MixBlendMode::Multiply => gl::MULTIPLY_KHR,
            MixBlendMode::Screen => gl::SCREEN_KHR,
            MixBlendMode::Overlay => gl::OVERLAY_KHR,
            MixBlendMode::Darken => gl::DARKEN_KHR,
            MixBlendMode::Lighten => gl::LIGHTEN_KHR,
            MixBlendMode::ColorDodge => gl::COLORDODGE_KHR,
            MixBlendMode::ColorBurn => gl::COLORBURN_KHR,
            MixBlendMode::HardLight => gl::HARDLIGHT_KHR,
            MixBlendMode::SoftLight => gl::SOFTLIGHT_KHR,
            MixBlendMode::Difference => gl::DIFFERENCE_KHR,
            MixBlendMode::Exclusion => gl::EXCLUSION_KHR,
            MixBlendMode::Hue => gl::HSL_HUE_KHR,
            MixBlendMode::Saturation => gl::HSL_SATURATION_KHR,
            MixBlendMode::Color => gl::HSL_COLOR_KHR,
            MixBlendMode::Luminosity => gl::HSL_LUMINOSITY_KHR,
        });
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }
    }

    pub fn supports_extension(&self, extension: &str) -> bool {
        supports_extension(&self.extensions, extension)
    }

    pub fn echo_driver_messages(&self) {
        if self.capabilities.supports_khr_debug {
            Device::log_driver_messages(self.gl());
        }
    }

    fn log_driver_messages(gl: &GenericGlContext) {
        for msg in gl.get_debug_messages() {
            let level = match msg.severity {
                gl::DEBUG_SEVERITY_HIGH => Level::Error,
                gl::DEBUG_SEVERITY_MEDIUM => Level::Warn,
                gl::DEBUG_SEVERITY_LOW => Level::Info,
                gl::DEBUG_SEVERITY_NOTIFICATION => Level::Debug,
                _ => Level::Trace,
            };
            let ty = match msg.ty {
                gl::DEBUG_TYPE_ERROR => "error",
                gl::DEBUG_TYPE_DEPRECATED_BEHAVIOR => "deprecated",
                gl::DEBUG_TYPE_UNDEFINED_BEHAVIOR => "undefined",
                gl::DEBUG_TYPE_PORTABILITY => "portability",
                gl::DEBUG_TYPE_PERFORMANCE => "perf",
                gl::DEBUG_TYPE_MARKER => "marker",
                gl::DEBUG_TYPE_PUSH_GROUP => "group push",
                gl::DEBUG_TYPE_POP_GROUP => "group pop",
                gl::DEBUG_TYPE_OTHER => "other",
                _ => "?",
            };
            log!(level, "({}) {}", ty, msg.message);
        }
    }

    pub fn gl_describe_format(&self, format: ImageFormat) -> FormatDesc {
        match format {
            ImageFormat::R8 => FormatDesc {
                internal: gl::R8,
                external: gl::RED,
                read: gl::RED,
                pixel_type: gl::UNSIGNED_BYTE,
            },
            ImageFormat::R16 => FormatDesc {
                internal: gl::R16,
                external: gl::RED,
                read: gl::RED,
                pixel_type: gl::UNSIGNED_SHORT,
            },
            ImageFormat::BGRA8 => FormatDesc {
                internal: self.bgra_formats.internal,
                external: self.bgra_formats.external,
                read: gl::BGRA,
                pixel_type: self.bgra_pixel_type,
            },
            ImageFormat::RGBA8 => FormatDesc {
                internal: gl::RGBA8,
                external: gl::RGBA,
                read: gl::RGBA,
                pixel_type: gl::UNSIGNED_BYTE,
            },
            ImageFormat::RGBAF32 => FormatDesc {
                internal: gl::RGBA32F,
                external: gl::RGBA,
                read: gl::RGBA,
                pixel_type: gl::FLOAT,
            },
            ImageFormat::RGBAI32 => FormatDesc {
                internal: gl::RGBA32I,
                external: gl::RGBA_INTEGER,
                read: gl::RGBA_INTEGER,
                pixel_type: gl::INT,
            },
            ImageFormat::RG8 => FormatDesc {
                internal: gl::RG8,
                external: gl::RG,
                read: gl::RG,
                pixel_type: gl::UNSIGNED_BYTE,
            },
            ImageFormat::RG16 => FormatDesc {
                internal: gl::RG16,
                external: gl::RG,
                read: gl::RG,
                pixel_type: gl::UNSIGNED_SHORT,
            },
        }
    }

    pub fn report_memory(&self, size_op_funs: &MallocSizeOfOps, swgl: *mut c_void) -> MemoryReport {
        let mut report = MemoryReport::default();
        report.depth_target_textures += self.depth_targets_memory();

        #[cfg(feature = "sw_compositor")]
        if !swgl.is_null() {
            report.swgl += swgl::Context::from(swgl).report_memory(size_op_funs.size_of_op);
        }
        // Silence unused-variable warnings when the sw_compositor feature is
        // disabled.
        let _ = size_op_funs;
        let _ = swgl;
        report
    }

    pub fn depth_targets_memory(&self) -> usize {
        let mut total = 0;
        for dim in self.depth_targets.keys() {
            total += depth_target_size_in_bytes(dim);
        }

        total
    }
}

pub struct FormatDesc {
    /// Format the texture's storage is allocated with.
    pub internal: GLenum,
    /// Format of the pixel data we upload to the texture.
    pub external: GLuint,
    /// Format used when reading the texture's pixels back.
    pub read: GLuint,
    /// Type of a single texel component.
    pub pixel_type: GLuint,
}

#[derive(Debug)]
struct UploadChunk<'a> {
    rect: DeviceIntRect,
    stride: Option<i32>,
    offset: usize,
    format_override: Option<ImageFormat>,
    texture: &'a Texture,
}

#[derive(Debug)]
struct PixelBuffer<'a> {
    size_used: usize,
    chunks: SmallVec<[UploadChunk<'a>; 1]>,
    inner: UploadPBO,
    mapping: &'a mut [mem::MaybeUninit<u8>],
}

impl<'a> PixelBuffer<'a> {
    fn new(
        pbo: UploadPBO,
    ) -> Self {
        let mapping = unsafe {
            slice::from_raw_parts_mut(pbo.mapping.get_ptr().as_ptr(), pbo.pbo.reserved_size)
        };
        Self {
            size_used: 0,
            chunks: SmallVec::new(),
            inner: pbo,
            mapping,
        }
    }

    fn flush_chunks(&mut self, device: &mut Device) {
        for chunk in self.chunks.drain(..) {
            TextureUploader::update_impl(device, chunk);
        }
    }
}

impl<'a> Drop for PixelBuffer<'a> {
    fn drop(&mut self) {
        assert_eq!(self.chunks.len(), 0, "PixelBuffer must be flushed before dropping.");
    }
}

#[derive(Debug)]
enum PBOMapping {
    Unmapped,
    Transient(ptr::NonNull<mem::MaybeUninit<u8>>),
    Persistent(ptr::NonNull<mem::MaybeUninit<u8>>),
}

impl PBOMapping {
    fn get_ptr(&self) -> ptr::NonNull<mem::MaybeUninit<u8>> {
        match self {
            PBOMapping::Unmapped => unreachable!("Cannot get pointer to unmapped PBO."),
            PBOMapping::Transient(ptr) => *ptr,
            PBOMapping::Persistent(ptr) => *ptr,
        }
    }
}

#[derive(Debug)]
struct UploadPBO {
    pbo: PBO,
    mapping: PBOMapping,
    can_recycle: bool,
}

impl UploadPBO {
    fn empty() -> Self {
        Self {
            pbo: PBO {
                id: 0,
                reserved_size: 0,
            },
            mapping: PBOMapping::Unmapped,
            can_recycle: false,
        }
    }
}

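/// A pool of PBOs used to stage texture uploads. Recyclable buffers are
/// tracked with GL sync objects so they are only reused once the driver has
/// finished reading from them.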
pub struct UploadPBOPool {
    /// Usage hint for buffer allocations, derived from the upload method.
    usage_hint: VertexUsageHint,
    /// Default size, in bytes, of the buffers kept in the pool.
    default_size: usize,
    /// Buffers that are ready to be handed out.
    available_buffers: Vec<UploadPBO>,
    /// Buffers returned during the current frame, awaiting a fence.
    returned_buffers: Vec<UploadPBO>,
    /// Buffers waiting on a sync object before they can be reused.
    waiting_buffers: Vec<(GLsync, Vec<UploadPBO>)>,
    /// Buffer objects with zero reserved size that can be re-initialized.
    orphaned_buffers: Vec<PBO>,
}

impl UploadPBOPool {
    pub fn new(device: &mut Device, default_size: usize) -> Self {
        let usage_hint = match device.upload_method {
            UploadMethod::Immediate => VertexUsageHint::Stream,
            UploadMethod::PixelBuffer(usage_hint) => usage_hint,
        };
        Self {
            usage_hint,
            default_size,
            available_buffers: Vec::new(),
            returned_buffers: Vec::new(),
            waiting_buffers: Vec::new(),
            orphaned_buffers: Vec::new(),
        }
    }

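    /// Called at the start of a frame: moves the buffers whose sync objects
    /// have been signalled back to the available list.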
    pub fn begin_frame(&mut self, device: &mut Device) {
        let mut first_not_signalled = self.waiting_buffers.len();
        for (i, (sync, buffers)) in self.waiting_buffers.iter_mut().enumerate() {
            match device.gl.client_wait_sync(*sync, 0, 0) {
                gl::TIMEOUT_EXPIRED => {
                    first_not_signalled = i;
                    break;
                },
                gl::ALREADY_SIGNALED | gl::CONDITION_SATISFIED => {
                    self.available_buffers.extend(buffers.drain(..));
                }
                gl::WAIT_FAILED | _ => {
                    warn!("glClientWaitSync error in UploadPBOPool::begin_frame()");
                    for buffer in buffers.drain(..) {
                        device.delete_pbo(buffer.pbo);
                    }
                }
            }
        }

        for (sync, _) in self.waiting_buffers.drain(0..first_not_signalled) {
            device.gl.delete_sync(sync);
        }
    }

    pub fn end_frame(&mut self, device: &mut Device) {
        if !self.returned_buffers.is_empty() {
            let sync = device.gl.fence_sync(gl::SYNC_GPU_COMMANDS_COMPLETE, 0);
            if !sync.is_null() {
                self.waiting_buffers.push((sync, mem::replace(&mut self.returned_buffers, Vec::new())))
            } else {
                warn!("glFenceSync error in UploadPBOPool::end_frame()");

                for buffer in self.returned_buffers.drain(..) {
                    device.delete_pbo(buffer.pbo);
                }
            }
        }
    }

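    /// Returns a PBO of at least `min_size` bytes, mapped for writing.
    /// Requests up to the default size are served from the recycling pool
    /// where possible; larger requests get a dedicated buffer that is
    /// orphaned rather than recycled after use.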
    fn get_pbo(&mut self, device: &mut Device, min_size: usize) -> Result<UploadPBO, ()> {
        let (can_recycle, size) = if min_size <= self.default_size && device.capabilities.supports_nonzero_pbo_offsets {
            (true, self.default_size)
        } else {
            (false, min_size)
        };

        if can_recycle {
            if let Some(mut buffer) = self.available_buffers.pop() {
                assert_eq!(buffer.pbo.reserved_size, size);
                assert!(buffer.can_recycle);

                device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, buffer.pbo.id);

                match buffer.mapping {
                    PBOMapping::Unmapped => {
                        let ptr = device.gl.map_buffer_range(
                            gl::PIXEL_UNPACK_BUFFER,
                            0,
                            buffer.pbo.reserved_size as _,
                            gl::MAP_WRITE_BIT | gl::MAP_UNSYNCHRONIZED_BIT,
                        ) as *mut _;

                        let ptr = ptr::NonNull::new(ptr).ok_or_else(|| {
                            error!("Failed to transiently map PBO of size {} bytes", buffer.pbo.reserved_size);
                        })?;

                        buffer.mapping = PBOMapping::Transient(ptr);
                    }
                    PBOMapping::Transient(_) => {
                        unreachable!("Transiently mapped UploadPBO must be unmapped before returning to pool.");
                    }
                    PBOMapping::Persistent(_) => {
                    }
                }

                return Ok(buffer);
            }
        }

        let mut pbo = match self.orphaned_buffers.pop() {
            Some(pbo) => pbo,
            None => device.create_pbo(),
        };

        assert_eq!(pbo.reserved_size, 0);
        pbo.reserved_size = size;

        device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, pbo.id);
        let mapping = if device.capabilities.supports_buffer_storage && can_recycle {
            device.gl.buffer_storage(
                gl::PIXEL_UNPACK_BUFFER,
                pbo.reserved_size as _,
                ptr::null(),
                gl::MAP_WRITE_BIT | gl::MAP_PERSISTENT_BIT,
            );
            let ptr = device.gl.map_buffer_range(
                gl::PIXEL_UNPACK_BUFFER,
                0,
                pbo.reserved_size as _,
                gl::MAP_WRITE_BIT | gl::MAP_PERSISTENT_BIT | gl::MAP_FLUSH_EXPLICIT_BIT,
            ) as *mut _;

            let ptr = ptr::NonNull::new(ptr).ok_or_else(|| {
                error!("Failed to persistently map PBO of size {} bytes", pbo.reserved_size);
            })?;

            PBOMapping::Persistent(ptr)
        } else {
            device.gl.buffer_data_untyped(
                gl::PIXEL_UNPACK_BUFFER,
                pbo.reserved_size as _,
                ptr::null(),
                self.usage_hint.to_gl(),
            );
            let ptr = device.gl.map_buffer_range(
                gl::PIXEL_UNPACK_BUFFER,
                0,
                pbo.reserved_size as _,
                gl::MAP_WRITE_BIT,
            ) as *mut _;

            let ptr = ptr::NonNull::new(ptr).ok_or_else(|| {
                error!("Failed to transiently map PBO of size {} bytes", pbo.reserved_size);
            })?;

            PBOMapping::Transient(ptr)
        };

        Ok(UploadPBO { pbo, mapping, can_recycle })
    }

    fn return_pbo(&mut self, device: &mut Device, mut buffer: UploadPBO) {
        assert!(
            !matches!(buffer.mapping, PBOMapping::Transient(_)),
            "Transiently mapped UploadPBO must be unmapped before returning to pool.",
        );

        if buffer.can_recycle {
            self.returned_buffers.push(buffer);
        } else {
            device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, buffer.pbo.id);
            device.gl.buffer_data_untyped(
                gl::PIXEL_UNPACK_BUFFER,
                0,
                ptr::null(),
                gl::STREAM_DRAW,
            );
            buffer.pbo.reserved_size = 0;
            self.orphaned_buffers.push(buffer.pbo);
        }

        device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, 0);
    }

    pub fn on_memory_pressure(&mut self, device: &mut Device) {
        for buffer in self.available_buffers.drain(..) {
            device.delete_pbo(buffer.pbo);
        }
        for buffer in self.returned_buffers.drain(..) {
            device.delete_pbo(buffer.pbo)
        }
        for (sync, buffers) in self.waiting_buffers.drain(..) {
            device.gl.delete_sync(sync);
            for buffer in buffers {
                device.delete_pbo(buffer.pbo)
            }
        }
        // Orphaned buffers already have zero reserved size, so there is
        // nothing to reclaim from them here.
    }

    pub fn report_memory(&self) -> MemoryReport {
        let mut report = MemoryReport::default();
        for buffer in &self.available_buffers {
            report.texture_upload_pbos += buffer.pbo.reserved_size;
        }
        for buffer in &self.returned_buffers {
            report.texture_upload_pbos += buffer.pbo.reserved_size;
        }
        for (_, buffers) in &self.waiting_buffers {
            for buffer in buffers {
                report.texture_upload_pbos += buffer.pbo.reserved_size;
            }
        }
        report
    }

    pub fn deinit(&mut self, device: &mut Device) {
        for buffer in self.available_buffers.drain(..) {
            device.delete_pbo(buffer.pbo);
        }
        for buffer in self.returned_buffers.drain(..) {
            device.delete_pbo(buffer.pbo)
        }
        for (sync, buffers) in self.waiting_buffers.drain(..) {
            device.gl.delete_sync(sync);
            for buffer in buffers {
                device.delete_pbo(buffer.pbo)
            }
        }
        for pbo in self.orphaned_buffers.drain(..) {
            device.delete_pbo(pbo);
        }
    }
}

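/// Batches texture uploads, staging the pixel data through PBOs obtained
/// from an UploadPBOPool (or uploading immediately, depending on the
/// device's upload method).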
pub struct TextureUploader<'a> {
    buffers: Vec<PixelBuffer<'a>>,
    pub pbo_pool: &'a mut UploadPBOPool,
}

impl<'a> Drop for TextureUploader<'a> {
    fn drop(&mut self) {
        assert!(
            thread::panicking() || self.buffers.is_empty(),
            "TextureUploader must be flushed before it is dropped."
        );
    }
}

#[derive(Debug)]
pub struct UploadStagingBuffer<'a> {
    buffer: PixelBuffer<'a>,
    offset: usize,
    size: usize,
    stride: usize,
}

impl<'a> UploadStagingBuffer<'a> {
    pub fn get_stride(&self) -> usize {
        self.stride
    }

    pub fn get_mapping(&mut self) -> &mut [mem::MaybeUninit<u8>] {
        &mut self.buffer.mapping[self.offset..self.offset + self.size]
    }
}

impl<'a> TextureUploader<'a> {
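    /// Reserves a staging region of the required size in one of the
    /// uploader's pixel buffers, mapping a fresh PBO from the pool if none
    /// has enough space left.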
    pub fn stage(
        &mut self,
        device: &mut Device,
        format: ImageFormat,
        size: DeviceIntSize,
    ) -> Result<UploadStagingBuffer<'a>, ()> {
        assert!(matches!(device.upload_method, UploadMethod::PixelBuffer(_)), "Texture uploads should only be staged when using pixel buffers.");

        let (dst_size, dst_stride) = device.required_upload_size_and_stride(
            size,
            format,
        );

        let buffer_index = self.buffers.iter().position(|buffer| {
            buffer.size_used + dst_size <= buffer.inner.pbo.reserved_size
        });
        let buffer = match buffer_index {
            Some(i) => self.buffers.swap_remove(i),
            None => PixelBuffer::new(self.pbo_pool.get_pbo(device, dst_size)?),
        };

        if !device.capabilities.supports_nonzero_pbo_offsets {
            assert_eq!(buffer.size_used, 0, "PBO uploads from non-zero offset are not supported.");
        }
        assert!(buffer.size_used + dst_size <= buffer.inner.pbo.reserved_size, "PixelBuffer is too small");

        let offset = buffer.size_used;

        Ok(UploadStagingBuffer {
            buffer,
            offset,
            size: dst_size,
            stride: dst_stride,
        })
    }

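    /// Records an upload from a staged buffer to the given texture rect. The
    /// actual copy is deferred until the owning pixel buffer is flushed.
    /// Returns the number of bytes consumed from the staging buffer.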
    pub fn upload_staged(
        &mut self,
        device: &mut Device,
        texture: &'a Texture,
        rect: DeviceIntRect,
        format_override: Option<ImageFormat>,
        mut staging_buffer: UploadStagingBuffer<'a>,
    ) -> usize {
        let size = staging_buffer.size;

        staging_buffer.buffer.chunks.push(UploadChunk {
            rect,
            stride: Some(staging_buffer.stride as i32),
            offset: staging_buffer.offset,
            format_override,
            texture,
        });
        staging_buffer.buffer.size_used += staging_buffer.size;

        if staging_buffer.buffer.size_used < staging_buffer.buffer.inner.pbo.reserved_size {
            self.buffers.push(staging_buffer.buffer);
        } else {
            Self::flush_buffer(device, self.pbo_pool, staging_buffer.buffer);
        }

        size
    }

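    /// Uploads pixel data to a rect of the texture, cropping the rect to the
    /// texture's bounds. Depending on the device's upload method the data is
    /// either written immediately or staged through a PBO. Returns the
    /// number of bytes uploaded.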
    pub fn upload<T>(
        &mut self,
        device: &mut Device,
        texture: &'a Texture,
        mut rect: DeviceIntRect,
        stride: Option<i32>,
        format_override: Option<ImageFormat>,
        data: *const T,
        len: usize,
    ) -> usize {
        let cropped = rect.intersection(
            &DeviceIntRect::from_size(texture.get_dimensions())
        );
        if cfg!(debug_assertions) && cropped.map_or(true, |r| r != rect) {
            warn!("Cropping texture upload {:?} to {:?}", rect, cropped);
        }
        rect = match cropped {
            None => return 0,
            Some(r) => r,
        };

        let bytes_pp = texture.format.bytes_per_pixel() as usize;
        let width_bytes = rect.width() as usize * bytes_pp;

        let src_stride = stride.map_or(width_bytes, |stride| {
            assert!(stride >= 0);
            stride as usize
        });
        let src_size = (rect.height() as usize - 1) * src_stride + width_bytes;
        assert!(src_size <= len * mem::size_of::<T>());

        match device.upload_method {
            UploadMethod::Immediate => {
                if cfg!(debug_assertions) {
                    let mut bound_buffer = [0];
                    unsafe {
                        device.gl.get_integer_v(gl::PIXEL_UNPACK_BUFFER_BINDING, &mut bound_buffer);
                    }
                    assert_eq!(bound_buffer[0], 0, "GL_PIXEL_UNPACK_BUFFER must not be bound for immediate uploads.");
                }

                Self::update_impl(device, UploadChunk {
                    rect,
                    stride: Some(src_stride as i32),
                    offset: data as _,
                    format_override,
                    texture,
                });

                width_bytes * rect.height() as usize
            }
            UploadMethod::PixelBuffer(_) => {
                let mut staging_buffer = match self.stage(device, texture.format, rect.size()) {
                    Ok(staging_buffer) => staging_buffer,
                    Err(_) => return 0,
                };
                let dst_stride = staging_buffer.get_stride();

                unsafe {
                    let src: &[mem::MaybeUninit<u8>] = slice::from_raw_parts(data as *const _, src_size);

                    if src_stride == dst_stride {
                        staging_buffer.get_mapping()[..src_size].copy_from_slice(src);
                    } else {
                        for y in 0..rect.height() as usize {
                            let src_start = y * src_stride;
                            let src_end = src_start + width_bytes;
                            let dst_start = y * staging_buffer.get_stride();
                            let dst_end = dst_start + width_bytes;

                            staging_buffer.get_mapping()[dst_start..dst_end].copy_from_slice(&src[src_start..src_end])
                        }
                    }
                }

                self.upload_staged(device, texture, rect, format_override, staging_buffer)
            }
        }
    }

    fn flush_buffer(device: &mut Device, pbo_pool: &mut UploadPBOPool, mut buffer: PixelBuffer) {
        device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, buffer.inner.pbo.id);
        match buffer.inner.mapping {
            PBOMapping::Unmapped => unreachable!("UploadPBO should be mapped at this stage."),
            PBOMapping::Transient(_) => {
                device.gl.unmap_buffer(gl::PIXEL_UNPACK_BUFFER);
                buffer.inner.mapping = PBOMapping::Unmapped;
            }
            PBOMapping::Persistent(_) => {
                device.gl.flush_mapped_buffer_range(gl::PIXEL_UNPACK_BUFFER, 0, buffer.size_used as _);
            }
        }
        buffer.flush_chunks(device);
        let pbo = mem::replace(&mut buffer.inner, UploadPBO::empty());
        pbo_pool.return_pbo(device, pbo);
    }

    pub fn flush(mut self, device: &mut Device) {
        for buffer in self.buffers.drain(..) {
            Self::flush_buffer(device, self.pbo_pool, buffer);
        }

        device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, 0);
    }

    fn update_impl(device: &mut Device, chunk: UploadChunk) {
        device.bind_texture(DEFAULT_TEXTURE, chunk.texture, Swizzle::default());

        let format = chunk.format_override.unwrap_or(chunk.texture.format);
        let (gl_format, bpp, data_type) = match format {
            ImageFormat::R8 => (gl::RED, 1, gl::UNSIGNED_BYTE),
            ImageFormat::R16 => (gl::RED, 2, gl::UNSIGNED_SHORT),
            ImageFormat::BGRA8 => (device.bgra_formats.external, 4, device.bgra_pixel_type),
            ImageFormat::RGBA8 => (gl::RGBA, 4, gl::UNSIGNED_BYTE),
            ImageFormat::RG8 => (gl::RG, 2, gl::UNSIGNED_BYTE),
            ImageFormat::RG16 => (gl::RG, 4, gl::UNSIGNED_SHORT),
            ImageFormat::RGBAF32 => (gl::RGBA, 16, gl::FLOAT),
            ImageFormat::RGBAI32 => (gl::RGBA_INTEGER, 16, gl::INT),
        };

        let row_length = match chunk.stride {
            Some(value) => value / bpp,
            None => chunk.texture.size.width,
        };

        if chunk.stride.is_some() {
            device.gl.pixel_store_i(
                gl::UNPACK_ROW_LENGTH,
                row_length as _,
            );
        }

        let pos = chunk.rect.min;
        let size = chunk.rect.size();

        match chunk.texture.target {
            gl::TEXTURE_2D | gl::TEXTURE_RECTANGLE | gl::TEXTURE_EXTERNAL_OES => {
                device.gl.tex_sub_image_2d_pbo(
                    chunk.texture.target,
                    0,
                    pos.x as _,
                    pos.y as _,
                    size.width as _,
                    size.height as _,
                    gl_format,
                    data_type,
                    chunk.offset,
                );
            }
            _ => panic!("BUG: Unexpected texture target!"),
        }

        if chunk.texture.filter == TextureFilter::Trilinear {
            device.gl.generate_mipmap(chunk.texture.target);
        }

        if chunk.stride.is_some() {
            device.gl.pixel_store_i(gl::UNPACK_ROW_LENGTH, 0 as _);
        }
    }
}

fn texels_to_u8_slice<T: Texel>(texels: &[T]) -> &[u8] {
    unsafe {
        slice::from_raw_parts(texels.as_ptr() as *const u8, texels.len() * mem::size_of::<T>())
    }
}