1use super::super::shader_source::SHADERS;
6use api::{ColorF, ImageDescriptor, ImageFormat, MemoryReport};
7use api::{MixBlendMode, TextureTarget, VoidPtrToSizeFn};
8use api::units::*;
9use euclid::default::Transform3D;
10use gleam::gl;
11use crate::internal_types::{FastHashMap, LayerIndex, RenderTargetInfo, Swizzle, SwizzleSettings};
12use crate::util::round_up_to_multiple;
13use crate::profiler;
14use log::Level;
15use sha2::{Digest, Sha256};
16use smallvec::SmallVec;
17use std::{
18 borrow::Cow,
19 cell::{Cell, RefCell},
20 cmp,
21 collections::hash_map::Entry,
22 marker::PhantomData,
23 mem,
24 num::NonZeroUsize,
25 os::raw::c_void,
26 ops::Add,
27 path::PathBuf,
28 ptr,
29 rc::Rc,
30 slice,
31 sync::Arc,
32 sync::atomic::{AtomicUsize, Ordering},
33 thread,
34 time::Duration,
35};
36use webrender_build::shader::ProgramSourceDigest;
37use webrender_build::shader::{ShaderSourceParser, shader_source_from_file};
38
/// Sequential identifier for a frame submitted to the GPU. Used to track
/// resource usage over time (see `Texture::last_frame_used`).
#[derive(Debug, Copy, Clone, PartialEq, Ord, Eq, PartialOrd)]
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
pub struct GpuFrameId(usize);
44
/// Process-wide running total of bytes we believe are allocated on the GPU.
static GPU_BYTES_ALLOCATED: AtomicUsize = AtomicUsize::new(0);

/// Returns the current estimate of GPU memory allocated by this process.
pub fn total_gpu_bytes_allocated() -> usize {
    GPU_BYTES_ALLOCATED.load(Ordering::Relaxed)
}

/// Records that `num_bytes` of GPU memory were allocated.
fn record_gpu_alloc(num_bytes: usize) {
    GPU_BYTES_ALLOCATED.fetch_add(num_bytes, Ordering::Relaxed);
}

/// Records that `num_bytes` of GPU memory were freed. Asserts that we are
/// not freeing more than we ever recorded as allocated.
fn record_gpu_free(num_bytes: usize) {
    let previous = GPU_BYTES_ALLOCATED.fetch_sub(num_bytes, Ordering::Relaxed);
    assert!(previous >= num_bytes, "Freeing {} bytes but only {} allocated", num_bytes, previous);
}
68
69impl GpuFrameId {
70 pub fn new(value: usize) -> Self {
71 GpuFrameId(value)
72 }
73}
74
75impl Add<usize> for GpuFrameId {
76 type Output = GpuFrameId;
77
78 fn add(self, other: usize) -> GpuFrameId {
79 GpuFrameId(self.0 + other)
80 }
81}
82
// GLSL version pragmas; one of these must be the first line of every shader.
const SHADER_VERSION_GL: &str = "#version 150\n";
const SHADER_VERSION_GLES: &str = "#version 300 es\n";

// Defines selecting which stage of a unified shader source gets compiled.
const SHADER_KIND_VERTEX: &str = "#define WR_VERTEX_SHADER\n";
const SHADER_KIND_FRAGMENT: &str = "#define WR_FRAGMENT_SHADER\n";

/// Newtype for a texture sampler slot (0-based texture unit index).
pub struct TextureSlot(pub usize);

// Default slot used when a texture needs to be bound to "any" unit.
const DEFAULT_TEXTURE: TextureSlot = TextureSlot(0);
93
/// Depth comparison functions; discriminants are the GL enum values so the
/// variant can be passed to the driver directly.
#[repr(u32)]
pub enum DepthFunction {
    Less = gl::LESS,
    LessEqual = gl::LEQUAL,
}

/// Sampling filter applied when a texture is read.
#[repr(u32)]
#[derive(Copy, Clone, Debug, PartialEq)]
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
pub enum TextureFilter {
    Nearest,
    Linear,
    // NOTE(review): presumably implies mipmapped (linear-between-mips)
    // sampling — confirm where the GL min filter is chosen.
    Trilinear,
}

/// A pair of formats: how texel data is stored internally on the GPU vs the
/// format used when transferring data in/out (they may differ, e.g. when
/// BGRA data is stored in RGBA8 storage with a swizzle).
#[derive(Clone, Debug)]
#[cfg_attr(feature = "capture", derive(Serialize))]
#[cfg_attr(feature = "replay", derive(Deserialize))]
pub struct TextureFormatPair<T> {
    /// Format of the texture's internal storage.
    pub internal: T,
    /// Format used for external transfers to/from the texture.
    pub external: T,
}
120
121impl<T: Copy> From<T> for TextureFormatPair<T> {
122 fn from(value: T) -> Self {
123 TextureFormatPair {
124 internal: value,
125 external: value,
126 }
127 }
128}
129
/// Per-component data type of a vertex attribute.
#[derive(Debug)]
pub enum VertexAttributeKind {
    F32,
    /// u8 normalized to [0, 1] when sampled (bound with `normalized = true`).
    U8Norm,
    /// u16 normalized to [0, 1] when sampled.
    U16Norm,
    I32,
    U16,
}

/// A single named vertex attribute: `count` components of type `kind`.
#[derive(Debug)]
pub struct VertexAttribute {
    pub name: &'static str,
    pub count: u32,
    pub kind: VertexAttributeKind,
}

/// Layout of a vertex format: per-vertex attributes plus (possibly empty)
/// per-instance attributes.
#[derive(Debug)]
pub struct VertexDescriptor {
    pub vertex_attributes: &'static [VertexAttribute],
    pub instance_attributes: &'static [VertexAttribute],
}

/// Which framebuffer binding point (read vs draw) to bind to.
enum FBOTarget {
    Read,
    Draw,
}

/// Method of uploading texel data from CPU to GPU.
#[derive(Debug, Clone)]
pub enum UploadMethod {
    /// Upload texel data directly from CPU memory.
    Immediate,
    /// Stage the data through a pixel buffer object, allocated with the
    /// given usage hint.
    PixelBuffer(VertexUsageHint),
}

/// Marker for plain-old-data types whose raw bytes can be handed to the
/// driver as texel data.
///
/// Safety: implementors must be `Copy` with no invalid bit patterns.
pub unsafe trait Texel: Copy {}
// SAFETY: u8 and f32 are plain bytes with no invalid representations.
unsafe impl Texel for u8 {}
unsafe impl Texel for f32 {}
170
171fn depth_target_size_in_bytes(dimensions: &DeviceIntSize) -> usize {
173 let pixels = dimensions.width * dimensions.height;
176 (pixels as usize) * 4
177}
178
/// Maps an API-level texture target to the corresponding GL target enum.
pub fn get_gl_target(target: TextureTarget) -> gl::GLuint {
    match target {
        TextureTarget::Default => gl::TEXTURE_2D,
        TextureTarget::Array => gl::TEXTURE_2D_ARRAY,
        TextureTarget::Rect => gl::TEXTURE_RECTANGLE,
        TextureTarget::External => gl::TEXTURE_EXTERNAL_OES,
    }
}
187
/// Returns true if `extension` appears in the driver-reported extension list.
fn supports_extension(extensions: &[String], extension: &str) -> bool {
    extensions.iter().any(|candidate| candidate == extension)
}
191
192fn get_shader_version(gl: &dyn gl::Gl) -> &'static str {
193 match gl.get_type() {
194 gl::GlType::Gl => SHADER_VERSION_GL,
195 gl::GlType::Gles => SHADER_VERSION_GLES,
196 }
197}
198
199fn get_shader_source(shader_name: &str, base_path: Option<&PathBuf>) -> Cow<'static, str> {
202 if let Some(ref base) = base_path {
203 let shader_path = base.join(&format!("{}.glsl", shader_name));
204 Cow::Owned(shader_source_from_file(&shader_path))
205 } else {
206 Cow::Borrowed(
207 SHADERS
208 .get(shader_name)
209 .expect("Shader not found")
210 .source
211 )
212 }
213}
214
215pub fn build_shader_strings(
218 gl_version_string: &str,
219 features: &str,
220 base_filename: &str,
221 override_path: Option<&PathBuf>,
222) -> (String, String) {
223 let mut vs_source = String::new();
224 do_build_shader_string(
225 gl_version_string,
226 features,
227 SHADER_KIND_VERTEX,
228 base_filename,
229 override_path,
230 |s| vs_source.push_str(s),
231 );
232
233 let mut fs_source = String::new();
234 do_build_shader_string(
235 gl_version_string,
236 features,
237 SHADER_KIND_FRAGMENT,
238 base_filename,
239 override_path,
240 |s| fs_source.push_str(s),
241 );
242
243 (vs_source, fs_source)
244}
245
/// Streams the complete source of one shader stage — prefix (version,
/// defines) followed by the preprocessed main source — into `output`.
fn do_build_shader_string<F: FnMut(&str)>(
    gl_version_string: &str,
    features: &str,
    kind: &str,
    base_filename: &str,
    override_path: Option<&PathBuf>,
    mut output: F,
) {
    build_shader_prefix_string(gl_version_string, features, kind, base_filename, &mut output);
    build_shader_main_string(base_filename, override_path, &mut output);
}
260
/// Streams the shader prefix into `output`: the GLSL version pragma, a name
/// comment, the stage define and the feature defines — in that order.
fn build_shader_prefix_string<F: FnMut(&str)>(
    gl_version_string: &str,
    features: &str,
    kind: &str,
    base_filename: &str,
    output: &mut F,
) {
    // GLSL requires the version pragma to be the very first line.
    output(gl_version_string);

    // Insert the shader name so dumped/driver-reported sources are
    // identifiable.
    output(&format!("// {}\n", base_filename));

    // WR_VERTEX_SHADER / WR_FRAGMENT_SHADER stage define.
    output(kind);

    // Feature defines for this program variant.
    output(features);
}
283
284fn build_shader_main_string<F: FnMut(&str)>(
286 base_filename: &str,
287 override_path: Option<&PathBuf>,
288 output: &mut F,
289) {
290 let shared_source = get_shader_source(base_filename, override_path);
291 ShaderSourceParser::new().parse(
292 shared_source,
293 &|f| get_shader_source(f, override_path),
294 output
295 );
296}
297
/// Callback invoked when a watched shader source file changes on disk.
/// `Send` because the notification may arrive from a watcher thread.
pub trait FileWatcherHandler: Send {
    fn file_changed(&self, path: PathBuf);
}
301
302impl VertexAttributeKind {
303 fn size_in_bytes(&self) -> u32 {
304 match *self {
305 VertexAttributeKind::F32 => 4,
306 VertexAttributeKind::U8Norm => 1,
307 VertexAttributeKind::U16Norm => 2,
308 VertexAttributeKind::I32 => 4,
309 VertexAttributeKind::U16 => 2,
310 }
311 }
312}
313
314impl VertexAttribute {
315 fn size_in_bytes(&self) -> u32 {
316 self.count * self.kind.size_in_bytes()
317 }
318
319 fn bind_to_vao(
320 &self,
321 attr_index: gl::GLuint,
322 divisor: gl::GLuint,
323 stride: gl::GLint,
324 offset: gl::GLuint,
325 gl: &dyn gl::Gl,
326 ) {
327 gl.enable_vertex_attrib_array(attr_index);
328 gl.vertex_attrib_divisor(attr_index, divisor);
329
330 match self.kind {
331 VertexAttributeKind::F32 => {
332 gl.vertex_attrib_pointer(
333 attr_index,
334 self.count as gl::GLint,
335 gl::FLOAT,
336 false,
337 stride,
338 offset,
339 );
340 }
341 VertexAttributeKind::U8Norm => {
342 gl.vertex_attrib_pointer(
343 attr_index,
344 self.count as gl::GLint,
345 gl::UNSIGNED_BYTE,
346 true,
347 stride,
348 offset,
349 );
350 }
351 VertexAttributeKind::U16Norm => {
352 gl.vertex_attrib_pointer(
353 attr_index,
354 self.count as gl::GLint,
355 gl::UNSIGNED_SHORT,
356 true,
357 stride,
358 offset,
359 );
360 }
361 VertexAttributeKind::I32 => {
362 gl.vertex_attrib_i_pointer(
363 attr_index,
364 self.count as gl::GLint,
365 gl::INT,
366 stride,
367 offset,
368 );
369 }
370 VertexAttributeKind::U16 => {
371 gl.vertex_attrib_i_pointer(
372 attr_index,
373 self.count as gl::GLint,
374 gl::UNSIGNED_SHORT,
375 stride,
376 offset,
377 );
378 }
379 }
380 }
381}
382
383impl VertexDescriptor {
384 fn instance_stride(&self) -> u32 {
385 self.instance_attributes
386 .iter()
387 .map(|attr| attr.size_in_bytes())
388 .sum()
389 }
390
391 fn bind_attributes(
392 attributes: &[VertexAttribute],
393 start_index: usize,
394 divisor: u32,
395 gl: &dyn gl::Gl,
396 vbo: VBOId,
397 ) {
398 vbo.bind(gl);
399
400 let stride: u32 = attributes
401 .iter()
402 .map(|attr| attr.size_in_bytes())
403 .sum();
404
405 let mut offset = 0;
406 for (i, attr) in attributes.iter().enumerate() {
407 let attr_index = (start_index + i) as gl::GLuint;
408 attr.bind_to_vao(attr_index, divisor, stride as _, offset, gl);
409 offset += attr.size_in_bytes();
410 }
411 }
412
413 fn bind(&self, gl: &dyn gl::Gl, main: VBOId, instance: VBOId) {
414 Self::bind_attributes(self.vertex_attributes, 0, 0, gl, main);
415
416 if !self.instance_attributes.is_empty() {
417 Self::bind_attributes(
418 self.instance_attributes,
419 self.vertex_attributes.len(),
420 1, gl, instance,
421 );
422 }
423 }
424}
425
impl VBOId {
    /// Binds this vertex buffer to the ARRAY_BUFFER target.
    fn bind(&self, gl: &dyn gl::Gl) {
        gl.bind_buffer(gl::ARRAY_BUFFER, self.0);
    }
}

impl IBOId {
    /// Binds this index buffer to the ELEMENT_ARRAY_BUFFER target.
    fn bind(&self, gl: &dyn gl::Gl) {
        gl.bind_buffer(gl::ELEMENT_ARRAY_BUFFER, self.0);
    }
}
437
438impl FBOId {
439 fn bind(&self, gl: &dyn gl::Gl, target: FBOTarget) {
440 let target = match target {
441 FBOTarget::Read => gl::READ_FRAMEBUFFER,
442 FBOTarget::Draw => gl::DRAW_FRAMEBUFFER,
443 };
444 gl.bind_framebuffer(target, self.0);
445 }
446}
447
/// A vertex buffer paired with the attribute layout used to interpret it.
pub struct Stream<'a> {
    attributes: &'a [VertexAttribute],
    vbo: VBOId,
}

/// Typed wrapper over a GL buffer object holding elements of type `V`.
pub struct VBO<V> {
    id: gl::GLuint,
    target: gl::GLenum,
    // Number of `V` elements the buffer currently has storage for.
    allocated_count: usize,
    // Ties the unconstrained `V` parameter to the struct without storing one.
    marker: PhantomData<V>,
}
459
460impl<V> VBO<V> {
461 pub fn allocated_count(&self) -> usize {
462 self.allocated_count
463 }
464
465 pub fn stream_with<'a>(&self, attributes: &'a [VertexAttribute]) -> Stream<'a> {
466 debug_assert_eq!(
467 mem::size_of::<V>(),
468 attributes.iter().map(|a| a.size_in_bytes() as usize).sum::<usize>()
469 );
470 Stream {
471 attributes,
472 vbo: VBOId(self.id),
473 }
474 }
475}
476
impl<T> Drop for VBO<T> {
    fn drop(&mut self) {
        // The GL buffer must have been explicitly deleted (id reset to 0)
        // before the wrapper is dropped, unless we're already panicking.
        debug_assert!(thread::panicking() || self.id == 0);
    }
}
482
/// A texture whose GL id is owned by external code (or detached from a
/// `Texture`); dropping it does not delete the underlying GL texture.
#[cfg_attr(feature = "replay", derive(Clone))]
#[derive(Debug)]
pub struct ExternalTexture {
    id: gl::GLuint,
    target: gl::GLuint,
    swizzle: Swizzle,
}
490
491impl ExternalTexture {
492 pub fn new(id: u32, target: TextureTarget, swizzle: Swizzle) -> Self {
493 ExternalTexture {
494 id,
495 target: get_gl_target(target),
496 swizzle,
497 }
498 }
499
500 #[cfg(feature = "replay")]
501 pub fn internal_id(&self) -> gl::GLuint {
502 self.id
503 }
504}
505
bitflags! {
    #[derive(Default)]
    pub struct TextureFlags: u32 {
        /// This texture is one of the shared texture caches.
        const IS_SHARED_TEXTURE_CACHE = 1 << 0;
    }
}

/// A GPU texture plus the FBO/RBO objects hanging off it.
///
/// The `Drop` impl asserts the GL resources were already released
/// (`id == 0`), so textures must be freed via the device, not just dropped.
#[derive(Debug)]
pub struct Texture {
    id: gl::GLuint,
    target: gl::GLuint,
    layer_count: i32,
    format: ImageFormat,
    size: DeviceIntSize,
    filter: TextureFilter,
    flags: TextureFlags,
    // The swizzle most recently configured on this texture.
    active_swizzle: Cell<Swizzle>,
    // Per-layer FBOs for drawing to this texture without depth
    // (indexed by layer, see `DrawTarget::from_texture`).
    fbos: Vec<FBOId>,
    // Per-layer FBOs with a depth attachment.
    fbos_with_depth: Vec<FBOId>,
    // Scratch RBO/FBO pair used to work around broken blits to texture
    // arrays on some drivers — confirm exact driver set where allocated.
    blit_workaround_buffer: Option<(RBOId, FBOId)>,
    // Frame in which this texture was last used for rendering.
    last_frame_used: GpuFrameId,
}
557
558impl Texture {
559 pub fn get_dimensions(&self) -> DeviceIntSize {
560 self.size
561 }
562
563 pub fn get_layer_count(&self) -> i32 {
564 self.layer_count
565 }
566
567 pub fn get_format(&self) -> ImageFormat {
568 self.format
569 }
570
571 pub fn get_filter(&self) -> TextureFilter {
572 self.filter
573 }
574
575 pub fn supports_depth(&self) -> bool {
576 !self.fbos_with_depth.is_empty()
577 }
578
579 pub fn used_in_frame(&self, frame_id: GpuFrameId) -> bool {
580 self.last_frame_used == frame_id
581 }
582
583 pub fn used_recently(&self, current_frame_id: GpuFrameId, threshold: usize) -> bool {
586 self.last_frame_used + threshold >= current_frame_id
587 }
588
589 pub fn flags(&self) -> &TextureFlags {
591 &self.flags
592 }
593
594 pub fn flags_mut(&mut self) -> &mut TextureFlags {
596 &mut self.flags
597 }
598
599 pub fn layer_size_in_bytes(&self) -> usize {
602 assert!(self.layer_count > 0 || self.size.width + self.size.height == 0);
603 let bpp = self.format.bytes_per_pixel() as usize;
604 let w = self.size.width as usize;
605 let h = self.size.height as usize;
606 bpp * w * h
607 }
608
609 pub fn size_in_bytes(&self) -> usize {
612 self.layer_size_in_bytes() * (self.layer_count as usize)
613 }
614
615 #[cfg(feature = "replay")]
616 pub fn into_external(mut self) -> ExternalTexture {
617 let ext = ExternalTexture {
618 id: self.id,
619 target: self.target,
620 swizzle: Swizzle::default(),
621 };
622 self.id = 0; ext
624 }
625}
626
impl Drop for Texture {
    fn drop(&mut self) {
        // Textures must be freed through the device (which zeroes `id`)
        // before being dropped, unless we're unwinding from a panic.
        debug_assert!(thread::panicking() || self.id == 0);
    }
}
632
/// A compiled-and-linked GL shader program plus its cached uniform locations.
pub struct Program {
    id: gl::GLuint,
    // Location of the transform uniform.
    u_transform: gl::GLint,
    // Location of the mode uniform.
    u_mode: gl::GLint,
    // Source name/features/digest used to build or re-hash this program.
    source_info: ProgramSourceInfo,
    // True once one-time post-link setup has completed; see `is_initialized`.
    is_initialized: bool,
}

impl Program {
    pub fn is_initialized(&self) -> bool {
        self.is_initialized
    }
}

impl Drop for Program {
    fn drop(&mut self) {
        // Programs must be deleted through the device before shutdown.
        debug_assert!(
            thread::panicking() || self.id == 0,
            "renderer::deinit not called"
        );
    }
}
655
/// A vertex array object whose attribute bindings are managed by the caller.
pub struct CustomVAO {
    id: gl::GLuint,
}

impl Drop for CustomVAO {
    fn drop(&mut self) {
        // Must be deleted through the device before shutdown.
        debug_assert!(
            thread::panicking() || self.id == 0,
            "renderer::deinit not called"
        );
    }
}

/// A vertex array object together with the buffers it sources vertex,
/// instance and index data from.
pub struct VAO {
    id: gl::GLuint,
    ibo_id: IBOId,
    main_vbo_id: VBOId,
    instance_vbo_id: VBOId,
    // Byte stride of one instance record.
    instance_stride: usize,
    // When false, the vertex/index buffers are presumably shared with other
    // VAOs and owned elsewhere — confirm in the VAO deletion path.
    owns_vertices_and_indices: bool,
}

impl Drop for VAO {
    fn drop(&mut self) {
        // Must be deleted through the device before shutdown.
        debug_assert!(
            thread::panicking() || self.id == 0,
            "renderer::deinit not called"
        );
    }
}
686
/// A pixel buffer object used to stage texture transfers.
pub struct PBO {
    id: gl::GLuint,
    // Bytes of storage reserved for this buffer.
    reserved_size: usize,
}

impl PBO {
    pub fn get_reserved_size(&self) -> usize {
        self.reserved_size
    }
}

impl Drop for PBO {
    fn drop(&mut self) {
        // Must be deleted through the device before shutdown.
        debug_assert!(
            thread::panicking() || self.id == 0,
            "renderer::deinit not called"
        );
    }
}

/// A PBO currently mapped for reading; unmaps and unbinds on drop.
pub struct BoundPBO<'a> {
    device: &'a mut Device,
    // The mapped buffer contents.
    pub data: &'a [u8]
}

impl<'a> Drop for BoundPBO<'a> {
    fn drop(&mut self) {
        // Unmap first, while the buffer is still bound to PIXEL_PACK_BUFFER,
        // then clear the binding.
        self.device.gl.unmap_buffer(gl::PIXEL_PACK_BUFFER);
        self.device.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, 0);
    }
}
718
// Newtype ids for the various GL object kinds, so they cannot be mixed up
// at call sites.
#[derive(PartialEq, Eq, Hash, Debug, Copy, Clone)]
pub struct FBOId(gl::GLuint);

#[derive(PartialEq, Eq, Hash, Debug, Copy, Clone)]
pub struct RBOId(gl::GLuint);

#[derive(PartialEq, Eq, Hash, Debug, Copy, Clone)]
pub struct VBOId(gl::GLuint);

#[derive(PartialEq, Eq, Hash, Debug, Copy, Clone)]
struct IBOId(gl::GLuint);

/// Identifies a program's source: shader name, feature string, and a digest
/// covering everything that affects the generated source.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct ProgramSourceInfo {
    base_filename: &'static str,
    features: String,
    digest: ProgramSourceDigest,
}
737
impl ProgramSourceInfo {
    /// Computes the identity of a program by hashing, in order: the renderer
    /// name, the shader prefix (version/defines), and the digest of the main
    /// source. The input order is part of the digest's identity — do not
    /// reorder these hashing steps.
    fn new(
        device: &Device,
        name: &'static str,
        features: String,
    ) -> Self {
        let mut hasher = Sha256::new();
        let version_str = get_shader_version(&*device.gl());
        let override_path = device.resource_override_path.as_ref();
        let source_and_digest = SHADERS.get(&name).expect("Shader not found");

        // Hash the renderer name: program binaries are driver-specific.
        hasher.input(device.renderer_name.as_bytes());

        // Hash the prefix. "DUMMY" stands in for the stage define so both
        // stages of a program share one digest.
        build_shader_prefix_string(
            version_str,
            &features,
            &"DUMMY",
            &name,
            &mut |s| hasher.input(s.as_bytes()),
        );

        // Hash the main source. When an override path is set the on-disk
        // source may differ from the built-in one, so it must be re-hashed;
        // in debug builds we also re-hash to validate the precomputed digest.
        if override_path.is_some() || cfg!(debug_assertions) {
            let mut h = Sha256::new();
            build_shader_main_string(&name, override_path, &mut |s| h.input(s.as_bytes()));
            let d: ProgramSourceDigest = h.into();
            let digest = format!("{}", d);
            // The precomputed digest must match unless sources are overridden.
            debug_assert!(override_path.is_some() || digest == source_and_digest.digest);
            hasher.input(digest.as_bytes());
        } else {
            // Release builds reuse the build-time digest of the built-in source.
            hasher.input(source_and_digest.digest.as_bytes());
        };

        ProgramSourceInfo {
            base_filename: name,
            features,
            digest: hasher.into(),
        }
    }

    /// Rebuilds the full source text of one stage (`kind`) of this program.
    fn compute_source(&self, device: &Device, kind: &str) -> String {
        let mut src = String::new();
        device.build_shader_string(
            &self.features,
            kind,
            self.base_filename,
            |s| src.push_str(s),
        );
        src
    }
}
806
/// A driver-produced program binary blob, tagged with its binary format and
/// the digest of the source it was compiled from.
#[cfg_attr(feature = "serialize_program", derive(Deserialize, Serialize))]
pub struct ProgramBinary {
    bytes: Vec<u8>,
    format: gl::GLenum,
    source_digest: ProgramSourceDigest,
}
813
814impl ProgramBinary {
815 fn new(bytes: Vec<u8>,
816 format: gl::GLenum,
817 source_digest: ProgramSourceDigest) -> Self {
818 ProgramBinary {
819 bytes,
820 format,
821 source_digest,
822 }
823 }
824
825 pub fn source_digest(&self) -> &ProgramSourceDigest {
827 &self.source_digest
828 }
829}
830
/// Hooks through which the embedder persists and restores program binaries.
pub trait ProgramCacheObserver {
    fn update_disk_cache(&self, entries: Vec<Arc<ProgramBinary>>);
    fn try_load_shader_from_disk(&self, digest: &ProgramSourceDigest, program_cache: &Rc<ProgramCache>);
    fn notify_program_binary_failed(&self, program_binary: &Arc<ProgramBinary>);
}

struct ProgramCacheEntry {
    // The program binary itself.
    binary: Arc<ProgramBinary>,
    // True once this binary has been linked this session
    // (see `startup_complete`, which only persists linked entries).
    linked: bool,
}

/// In-memory cache of program binaries, optionally mirrored to disk through
/// a `ProgramCacheObserver`.
pub struct ProgramCache {
    entries: RefCell<FastHashMap<ProgramSourceDigest, ProgramCacheEntry>>,

    // Set once the linked programs have been handed to the disk-cache
    // observer, so `startup_complete` only pushes them once.
    updated_disk_cache: Cell<bool>,

    // Optional embedder-provided disk-cache handler.
    program_cache_handler: Option<Box<dyn ProgramCacheObserver>>,
}
855
856impl ProgramCache {
857 pub fn new(program_cache_observer: Option<Box<dyn ProgramCacheObserver>>) -> Rc<Self> {
858 Rc::new(
859 ProgramCache {
860 entries: RefCell::new(FastHashMap::default()),
861 updated_disk_cache: Cell::new(false),
862 program_cache_handler: program_cache_observer,
863 }
864 )
865 }
866
867 fn startup_complete(&self) {
871 if self.updated_disk_cache.get() {
872 return;
873 }
874
875 if let Some(ref handler) = self.program_cache_handler {
876 let active_shaders = self.entries.borrow().values()
877 .filter(|e| e.linked).map(|e| e.binary.clone())
878 .collect::<Vec<_>>();
879 handler.update_disk_cache(active_shaders);
880 self.updated_disk_cache.set(true);
881 }
882 }
883
884 #[cfg(feature = "serialize_program")]
887 pub fn load_program_binary(&self, program_binary: Arc<ProgramBinary>) {
888 let digest = program_binary.source_digest.clone();
889 let entry = ProgramCacheEntry {
890 binary: program_binary,
891 linked: false,
892 };
893 self.entries.borrow_mut().insert(digest, entry);
894 }
895
896 pub fn report_memory(&self, op: VoidPtrToSizeFn) -> usize {
898 self.entries.borrow().values()
899 .map(|e| unsafe { op(e.binary.bytes.as_ptr() as *const c_void ) })
900 .sum()
901 }
902}
903
/// Buffer usage hints handed to the driver at allocation time.
#[derive(Debug, Copy, Clone)]
pub enum VertexUsageHint {
    Static,
    Dynamic,
    Stream,
}

impl VertexUsageHint {
    /// Converts to the corresponding GL usage enum.
    fn to_gl(&self) -> gl::GLuint {
        match *self {
            VertexUsageHint::Static => gl::STATIC_DRAW,
            VertexUsageHint::Dynamic => gl::DYNAMIC_DRAW,
            VertexUsageHint::Stream => gl::STREAM_DRAW,
        }
    }
}

/// Location of a uniform within a linked program.
#[derive(Copy, Clone, Debug)]
pub struct UniformLocation(gl::GLint);

impl UniformLocation {
    // GL reports -1 for uniforms that are absent or optimized out.
    pub const INVALID: Self = UniformLocation(-1);
}
927
/// Feature support detected from the GL context at device creation.
#[derive(Debug)]
pub struct Capabilities {
    /// Whether multisampled render targets are supported.
    pub supports_multisampling: bool,
    /// Whether glCopyImageSubData is available (EXT/ARB_copy_image).
    pub supports_copy_image_sub_data: bool,
    /// Whether blitting to texture arrays works (disabled on some drivers).
    pub supports_blit_to_texture_array: bool,
    /// Whether pixel local storage (with framebuffer fetch) is usable.
    pub supports_pixel_local_storage: bool,
    /// Whether KHR_blend_equation_advanced is usable.
    pub supports_advanced_blend_equation: bool,
    /// Whether the KHR_debug extension is present.
    pub supports_khr_debug: bool,
    /// Whether textures can be configured with a sampling swizzle.
    pub supports_texture_swizzle: bool,
    /// Whether PBO transfers may start at a nonzero offset.
    pub supports_nonzero_pbo_offsets: bool,
}
953
/// Errors surfaced when building a shader program fails.
#[derive(Clone, Debug)]
pub enum ShaderError {
    /// The shader failed to compile: (name, driver error log).
    Compilation(String, String),
    /// The program failed to link: (name, driver error log).
    Link(String, String),
}
959
/// A depth renderbuffer shared between render targets of the same size,
/// kept alive while `refcount > 0`.
struct SharedDepthTarget {
    // The renderbuffer holding the depth storage.
    rbo_id: RBOId,
    // Number of current users of this depth target.
    refcount: usize,
}

#[cfg(debug_assertions)]
impl Drop for SharedDepthTarget {
    fn drop(&mut self) {
        // All users must release the target before it is destroyed.
        debug_assert!(thread::panicking() || self.refcount == 0);
    }
}

/// Policy for when to allocate textures with glTexStorage (immutable
/// storage) rather than glTexImage.
#[derive(PartialEq, Debug)]
enum TexStorageUsage {
    Never,
    /// Use glTexStorage for every format except BGRA8.
    NonBGRA8,
    Always,
}
984
985
/// The low-level GL device wrapper: owns the GL handle, shadows the pieces
/// of GL state we care about, and records detected capabilities/formats.
pub struct Device {
    gl: Rc<dyn gl::Gl>,

    // The unwrapped GL handle when `gl` has been wrapped (e.g. by the
    // error-reacting wrapper installed in `new`) — confirm at the swap site.
    base_gl: Option<Rc<dyn gl::Gl>>,

    // Shadow copies of GL binding state, used to skip redundant binds.
    bound_textures: [gl::GLuint; 16],
    bound_program: gl::GLuint,
    bound_vao: gl::GLuint,
    bound_read_fbo: FBOId,
    bound_draw_fbo: FBOId,
    program_mode_id: UniformLocation,
    default_read_fbo: FBOId,
    default_draw_fbo: FBOId,

    depth_available: bool,

    // How texel data is uploaded (directly or staged through PBOs).
    upload_method: UploadMethod,

    // Capabilities detected at startup.
    capabilities: Capabilities,

    // Preferred color formats and the BGRA format pair selected for this GL.
    color_formats: TextureFormatPair<ImageFormat>,
    bgra_formats: TextureFormatPair<gl::GLuint>,
    swizzle_settings: SwizzleSettings,

    // Shared depth renderbuffers, keyed by target size.
    depth_targets: FastHashMap<DeviceIntSize, SharedDepthTarget>,

    // True between begin_frame/end_frame — hedged; confirm at those sites.
    inside_frame: bool,

    // When set, shader sources are loaded from this directory instead of
    // the built-in SHADERS table.
    resource_override_path: Option<PathBuf>,

    max_texture_size: i32,
    max_texture_layers: u32,
    renderer_name: String,
    cached_programs: Option<Rc<ProgramCache>>,

    // The current GPU frame id.
    frame_id: GpuFrameId,

    // Policy for glTexStorage vs glTexImage allocation.
    texture_storage_usage: TexStorageUsage,

    // Required row alignment for PBO uploads on this driver.
    optimal_pbo_stride: NonZeroUsize,

    // Some drivers (Android emulator) need NUL-terminated shader source.
    requires_null_terminated_shader_source: bool,

    // Extension strings reported by the driver.
    extensions: Vec<String>,

    // When set, shaders whose name contains this string are dumped.
    dump_shader_source: Option<String>,

    surface_origin_is_top_left: bool,

    // Debug-only flag guarding against drawing with an unbound shader.
    #[cfg(debug_assertions)]
    shader_is_ready: bool,
}

/// Something we can render into.
#[derive(Clone, Copy, Debug)]
pub enum DrawTarget {
    /// The (window) default framebuffer.
    Default {
        /// Portion of the framebuffer being rendered to.
        rect: FramebufferIntRect,
        /// Total size of the framebuffer.
        total_size: FramebufferIntSize,
        surface_origin_is_top_left: bool,
    },
    /// A layer of one of our own textures.
    Texture {
        dimensions: DeviceIntSize,
        layer: LayerIndex,
        with_depth: bool,
        blit_workaround_buffer: Option<(RBOId, FBOId)>,
        fbo_id: FBOId,
        id: gl::GLuint,
        target: gl::GLuint,
    },
    /// An FBO supplied by external code.
    External {
        fbo: FBOId,
        size: FramebufferIntSize,
    },
    /// A native OS compositor surface, addressed by an external FBO id.
    NativeSurface {
        offset: DeviceIntPoint,
        external_fbo_id: u32,
        dimensions: DeviceIntSize,
    },
}
1115
1116impl DrawTarget {
1117 pub fn new_default(size: DeviceIntSize, surface_origin_is_top_left: bool) -> Self {
1118 let total_size = FramebufferIntSize::from_untyped(size.to_untyped());
1119 DrawTarget::Default {
1120 rect: total_size.into(),
1121 total_size,
1122 surface_origin_is_top_left,
1123 }
1124 }
1125
1126 pub fn is_default(&self) -> bool {
1128 match *self {
1129 DrawTarget::Default {..} => true,
1130 _ => false,
1131 }
1132 }
1133
1134 pub fn from_texture(
1135 texture: &Texture,
1136 layer: usize,
1137 with_depth: bool,
1138 ) -> Self {
1139 let fbo_id = if with_depth {
1140 texture.fbos_with_depth[layer]
1141 } else {
1142 texture.fbos[layer]
1143 };
1144
1145 DrawTarget::Texture {
1146 dimensions: texture.get_dimensions(),
1147 fbo_id,
1148 with_depth,
1149 layer,
1150 blit_workaround_buffer: texture.blit_workaround_buffer,
1151 id: texture.id,
1152 target: texture.target,
1153 }
1154 }
1155
1156 pub fn dimensions(&self) -> DeviceIntSize {
1158 match *self {
1159 DrawTarget::Default { total_size, .. } => DeviceIntSize::from_untyped(total_size.to_untyped()),
1160 DrawTarget::Texture { dimensions, .. } => dimensions,
1161 DrawTarget::External { size, .. } => DeviceIntSize::from_untyped(size.to_untyped()),
1162 DrawTarget::NativeSurface { dimensions, .. } => dimensions,
1163 }
1164 }
1165
1166 pub fn to_framebuffer_rect(&self, device_rect: DeviceIntRect) -> FramebufferIntRect {
1167 let mut fb_rect = FramebufferIntRect::from_untyped(&device_rect.to_untyped());
1168 match *self {
1169 DrawTarget::Default { ref rect, surface_origin_is_top_left, .. } => {
1170 if !surface_origin_is_top_left {
1172 fb_rect.origin.y = rect.origin.y + rect.size.height - fb_rect.origin.y - fb_rect.size.height;
1173 fb_rect.origin.x += rect.origin.x;
1174 }
1175 }
1176 DrawTarget::Texture { .. } | DrawTarget::External { .. } => (),
1177 DrawTarget::NativeSurface { .. } => {
1178 panic!("bug: is this ever used for native surfaces?");
1179 }
1180 }
1181 fb_rect
1182 }
1183
1184 pub fn build_scissor_rect(
1188 &self,
1189 scissor_rect: Option<DeviceIntRect>,
1190 content_origin: DeviceIntPoint,
1191 ) -> FramebufferIntRect {
1192 let dimensions = self.dimensions();
1193
1194 match scissor_rect {
1195 Some(scissor_rect) => match *self {
1196 DrawTarget::Default { ref rect, .. } => {
1197 self.to_framebuffer_rect(scissor_rect.translate(-content_origin.to_vector()))
1198 .intersection(rect)
1199 .unwrap_or_else(FramebufferIntRect::zero)
1200 }
1201 DrawTarget::NativeSurface { offset, .. } => {
1202 FramebufferIntRect::from_untyped(&scissor_rect.translate(offset.to_vector()).to_untyped())
1203 }
1204 DrawTarget::Texture { .. } | DrawTarget::External { .. } => {
1205 FramebufferIntRect::from_untyped(&scissor_rect.to_untyped())
1206 }
1207 }
1208 None => {
1209 FramebufferIntRect::new(
1210 FramebufferIntPoint::zero(),
1211 FramebufferIntSize::from_untyped(dimensions.to_untyped()),
1212 )
1213 }
1214 }
1215 }
1216}
1217
/// Something we can read pixels back from.
#[derive(Clone, Copy, Debug)]
pub enum ReadTarget {
    /// The default framebuffer.
    Default,
    /// One of our own textures, via its FBO.
    Texture {
        fbo_id: FBOId,
    },
    /// An FBO supplied by external code.
    External {
        fbo: FBOId,
    },
}
1233
1234impl ReadTarget {
1235 pub fn from_texture(
1236 texture: &Texture,
1237 layer: usize,
1238 ) -> Self {
1239 ReadTarget::Texture {
1240 fbo_id: texture.fbos[layer],
1241 }
1242 }
1243}
1244
1245impl From<DrawTarget> for ReadTarget {
1246 fn from(t: DrawTarget) -> Self {
1247 match t {
1248 DrawTarget::Default { .. } => ReadTarget::Default,
1249 DrawTarget::NativeSurface { .. } => {
1250 unreachable!("bug: native surfaces cannot be read targets");
1251 }
1252 DrawTarget::Texture { fbo_id, .. } =>
1253 ReadTarget::Texture { fbo_id },
1254 DrawTarget::External { fbo, .. } =>
1255 ReadTarget::External { fbo },
1256 }
1257 }
1258}
1259
1260impl Device {
1261 pub fn new(
1262 mut gl: Rc<dyn gl::Gl>,
1263 resource_override_path: Option<PathBuf>,
1264 upload_method: UploadMethod,
1265 cached_programs: Option<Rc<ProgramCache>>,
1266 allow_pixel_local_storage_support: bool,
1267 allow_texture_storage_support: bool,
1268 allow_texture_swizzling: bool,
1269 dump_shader_source: Option<String>,
1270 surface_origin_is_top_left: bool,
1271 panic_on_gl_error: bool,
1272 ) -> Device {
1273 let mut max_texture_size = [0];
1274 let mut max_texture_layers = [0];
1275 unsafe {
1276 gl.get_integer_v(gl::MAX_TEXTURE_SIZE, &mut max_texture_size);
1277 gl.get_integer_v(gl::MAX_ARRAY_TEXTURE_LAYERS, &mut max_texture_layers);
1278 }
1279
1280 let max_texture_size = max_texture_size[0];
1281 let max_texture_layers = max_texture_layers[0] as u32;
1282 let renderer_name = gl.get_string(gl::RENDERER);
1283
1284 let mut extension_count = [0];
1285 unsafe {
1286 gl.get_integer_v(gl::NUM_EXTENSIONS, &mut extension_count);
1287 }
1288 let extension_count = extension_count[0] as gl::GLuint;
1289 let mut extensions = Vec::new();
1290 for i in 0 .. extension_count {
1291 extensions.push(gl.get_string_i(gl::EXTENSIONS, i));
1292 }
1293
1294 let supports_khr_debug = supports_extension(&extensions, "GL_KHR_debug");
1298 if panic_on_gl_error || cfg!(debug_assertions) {
1299 gl = gl::ErrorReactingGl::wrap(gl, move |gl, name, code| {
1300 if supports_khr_debug {
1301 Self::log_driver_messages(gl);
1302 }
1303 println!("Caught GL error {:x} at {}", code, name);
1304 panic!("Caught GL error {:x} at {}", code, name);
1305 });
1306 }
1307
1308 if supports_extension(&extensions, "GL_ANGLE_provoking_vertex") {
1309 gl.provoking_vertex_angle(gl::FIRST_VERTEX_CONVENTION);
1310 }
1311
1312 let supports_gles_bgra = supports_extension(&extensions, "GL_EXT_texture_format_BGRA8888");
1355
1356 let is_emulator = renderer_name.starts_with("Android Emulator");
1359 let avoid_tex_image = is_emulator;
1360
1361 let (color_formats, bgra_formats, bgra8_sampling_swizzle, texture_storage_usage) = match gl.get_type() {
1362 gl::GlType::Gl if
1364 allow_texture_storage_support &&
1365 allow_texture_swizzling &&
1366 supports_extension(&extensions, "GL_ARB_texture_storage")
1367 => (
1368 TextureFormatPair::from(ImageFormat::RGBA8),
1369 TextureFormatPair { internal: gl::RGBA8, external: gl::RGBA },
1370 Swizzle::Bgra, TexStorageUsage::Always
1372 ),
1373 gl::GlType::Gl => (
1375 TextureFormatPair { internal: ImageFormat::RGBA8, external: ImageFormat::BGRA8 },
1376 TextureFormatPair { internal: gl::RGBA, external: gl::BGRA },
1377 Swizzle::Rgba, TexStorageUsage::Never
1379 ),
1380 gl::GlType::Gles if supports_gles_bgra && allow_texture_storage_support && supports_extension(&extensions, "GL_EXT_texture_storage") => (
1382 TextureFormatPair::from(ImageFormat::BGRA8),
1383 TextureFormatPair { internal: gl::BGRA8_EXT, external: gl::BGRA_EXT },
1384 Swizzle::Rgba, TexStorageUsage::Always,
1386 ),
1387 gl::GlType::Gles if supports_gles_bgra && !avoid_tex_image => (
1392 TextureFormatPair::from(ImageFormat::RGBA8),
1393 TextureFormatPair::from(gl::BGRA_EXT),
1394 Swizzle::Rgba, TexStorageUsage::NonBGRA8,
1396 ),
1397 gl::GlType::Gles if allow_texture_swizzling => (
1400 TextureFormatPair::from(ImageFormat::RGBA8),
1401 TextureFormatPair { internal: gl::RGBA8, external: gl::RGBA },
1402 Swizzle::Bgra, TexStorageUsage::Always,
1404 ),
1405 gl::GlType::Gles => (
1407 TextureFormatPair::from(ImageFormat::RGBA8),
1408 TextureFormatPair { internal: gl::RGBA8, external: gl::BGRA },
1409 Swizzle::Rgba,
1410 TexStorageUsage::Always,
1411 ),
1412 };
1413
1414 info!("GL texture cache {:?}, bgra {:?} swizzle {:?}, texture storage {:?}",
1415 color_formats, bgra_formats, bgra8_sampling_swizzle, texture_storage_usage);
1416 let supports_copy_image_sub_data = supports_extension(&extensions, "GL_EXT_copy_image") ||
1417 supports_extension(&extensions, "GL_ARB_copy_image");
1418
1419 let supports_blit_to_texture_array = !renderer_name.starts_with("Adreno");
1422
1423 let ext_pixel_local_storage = supports_extension(&extensions, "GL_EXT_shader_pixel_local_storage");
1428 let ext_framebuffer_fetch = supports_extension(&extensions, "GL_ARM_shader_framebuffer_fetch");
1429 let supports_pixel_local_storage =
1430 allow_pixel_local_storage_support &&
1431 ext_framebuffer_fetch &&
1432 ext_pixel_local_storage;
1433
1434 let is_adreno = renderer_name.starts_with("Adreno");
1435
1436 let supports_advanced_blend_equation =
1440 supports_extension(&extensions, "GL_KHR_blend_equation_advanced") &&
1441 !is_adreno;
1442
1443 let supports_texture_swizzle = allow_texture_swizzling &&
1444 (gl.get_type() == gl::GlType::Gles || supports_extension(&extensions, "GL_ARB_texture_storage"));
1445
1446
1447 let requires_null_terminated_shader_source = is_emulator;
1450
1451 let is_amd_macos = cfg!(target_os = "macos") && renderer_name.starts_with("AMD");
1457 let optimal_pbo_stride = if is_adreno || is_amd_macos {
1458 NonZeroUsize::new(256).unwrap()
1459 } else {
1460 NonZeroUsize::new(4).unwrap()
1461 };
1462
1463 let supports_nonzero_pbo_offsets = !is_amd_macos;
1466
1467 Device {
1468 gl,
1469 base_gl: None,
1470 resource_override_path,
1471 upload_method,
1472 inside_frame: false,
1473
1474 capabilities: Capabilities {
1475 supports_multisampling: false, supports_copy_image_sub_data,
1477 supports_blit_to_texture_array,
1478 supports_pixel_local_storage,
1479 supports_advanced_blend_equation,
1480 supports_khr_debug,
1481 supports_texture_swizzle,
1482 supports_nonzero_pbo_offsets,
1483 },
1484
1485 color_formats,
1486 bgra_formats,
1487 swizzle_settings: SwizzleSettings {
1488 bgra8_sampling_swizzle,
1489 },
1490
1491 depth_targets: FastHashMap::default(),
1492
1493 bound_textures: [0; 16],
1494 bound_program: 0,
1495 bound_vao: 0,
1496 bound_read_fbo: FBOId(0),
1497 bound_draw_fbo: FBOId(0),
1498 program_mode_id: UniformLocation::INVALID,
1499 default_read_fbo: FBOId(0),
1500 default_draw_fbo: FBOId(0),
1501
1502 depth_available: true,
1503
1504 max_texture_size,
1505 max_texture_layers,
1506 renderer_name,
1507 cached_programs,
1508 frame_id: GpuFrameId(0),
1509 extensions,
1510 texture_storage_usage,
1511 requires_null_terminated_shader_source,
1512 optimal_pbo_stride,
1513 dump_shader_source,
1514 surface_origin_is_top_left,
1515
1516 #[cfg(debug_assertions)]
1517 shader_is_ready: false,
1518 }
1519 }
1520
1521 pub fn gl(&self) -> &dyn gl::Gl {
1522 &*self.gl
1523 }
1524
    /// Borrows the reference-counted handle to the active GL implementation,
    /// for callers that need to clone and retain it.
    pub fn rc_gl(&self) -> &Rc<dyn gl::Gl> {
        &self.gl
    }
1528
1529 pub fn update_program_cache(&mut self, cached_programs: Rc<ProgramCache>) {
1530 self.cached_programs = Some(cached_programs);
1531 }
1532
1533 pub fn clamp_max_texture_size(&mut self, size: i32) {
1537 self.max_texture_size = self.max_texture_size.min(size);
1538 }
1539
    /// Returns the effective maximum texture dimension (driver limit,
    /// possibly reduced by `clamp_max_texture_size`).
    pub fn max_texture_size(&self) -> i32 {
        self.max_texture_size
    }
1544
    /// Whether the presented surface's coordinate origin is top-left
    /// (as opposed to GL's default bottom-left convention).
    pub fn surface_origin_is_top_left(&self) -> bool {
        self.surface_origin_is_top_left
    }
1548
    /// Returns the maximum number of layers allowed in a texture array.
    // NOTE(review): `as usize` would wrap if the stored value were negative;
    // presumably the driver query always yields a non-negative count — confirm.
    pub fn max_texture_layers(&self) -> usize {
        self.max_texture_layers as usize
    }
1553
    /// Borrows the feature-detection results gathered at device creation.
    pub fn get_capabilities(&self) -> &Capabilities {
        &self.capabilities
    }
1557
    /// Returns the preferred internal/external color format pair for this device.
    pub fn preferred_color_formats(&self) -> TextureFormatPair<ImageFormat> {
        self.color_formats.clone()
    }
1561
1562 pub fn swizzle_settings(&self) -> Option<SwizzleSettings> {
1563 if self.capabilities.supports_texture_swizzle {
1564 Some(self.swizzle_settings)
1565 } else {
1566 None
1567 }
1568 }
1569
    /// Returns the row-stride alignment that gives best PBO upload performance
    /// on this device (e.g. 256 bytes on Adreno / AMD-macOS, 4 elsewhere).
    pub fn optimal_pbo_stride(&self) -> NonZeroUsize {
        self.optimal_pbo_stride
    }
1573
    /// Forgets the cached GL binding state so the next bind calls re-issue
    /// real GL commands. Call after external code may have touched GL state.
    // NOTE(review): `bound_program` is not cleared here even though it is part
    // of the cached state — confirm whether that omission is intentional.
    pub fn reset_state(&mut self) {
        self.bound_textures = [0; 16];
        self.bound_vao = 0;
        self.bound_read_fbo = FBOId(0);
        self.bound_draw_fbo = FBOId(0);
    }
1580
1581 #[cfg(debug_assertions)]
1582 fn print_shader_errors(source: &str, log: &str) {
1583 if !log.starts_with("0:") && !log.starts_with("0(") {
1585 return;
1586 }
1587 let end_pos = match log[2..].chars().position(|c| !c.is_digit(10)) {
1588 Some(pos) => 2 + pos,
1589 None => return,
1590 };
1591 let base_line_number = match log[2 .. end_pos].parse::<usize>() {
1592 Ok(number) if number >= 2 => number - 2,
1593 _ => return,
1594 };
1595 for (line, prefix) in source.lines().skip(base_line_number).zip(&["|",">","|"]) {
1596 error!("{}\t{}", prefix, line);
1597 }
1598 }
1599
1600 pub fn compile_shader(
1601 gl: &dyn gl::Gl,
1602 name: &str,
1603 shader_type: gl::GLenum,
1604 source: &String,
1605 requires_null_terminated_shader_source: bool,
1606 ) -> Result<gl::GLuint, ShaderError> {
1607 debug!("compile {}", name);
1608 let id = gl.create_shader(shader_type);
1609 if requires_null_terminated_shader_source {
1610 use std::ffi::CString;
1613 let terminated_source = CString::new(source.as_bytes()).unwrap();
1614 gl.shader_source(id, &[terminated_source.as_bytes_with_nul()]);
1615 } else {
1616 gl.shader_source(id, &[source.as_bytes()]);
1617 }
1618 gl.compile_shader(id);
1619 let log = gl.get_shader_info_log(id);
1620 let mut status = [0];
1621 unsafe {
1622 gl.get_shader_iv(id, gl::COMPILE_STATUS, &mut status);
1623 }
1624 if status[0] == 0 {
1625 error!("Failed to compile shader: {}\n{}", name, log);
1626 #[cfg(debug_assertions)]
1627 Self::print_shader_errors(source, &log);
1628 Err(ShaderError::Compilation(name.to_string(), log))
1629 } else {
1630 if !log.is_empty() {
1631 warn!("Warnings detected on shader: {}\n{}", name, log);
1632 }
1633 Ok(id)
1634 }
1635 }
1636
    /// Starts a new frame: optionally swaps in/out the GL profiling wrapper,
    /// snapshots the externally-set default FBOs, and resets all cached GL
    /// state (textures, program, VAO, FBO bindings, unpack state) to a known
    /// baseline. Returns the current frame id.
    // NOTE(review): the frame id is returned but not incremented here —
    // presumably `end_frame` (outside this view) advances it; confirm.
    pub fn begin_frame(&mut self) -> GpuFrameId {
        debug_assert!(!self.inside_frame);
        self.inside_frame = true;
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }

        // Retrieve the currently set FBO.
        let being_profiled = profiler::thread_is_being_profiled();
        let using_wrapper = self.base_gl.is_some();
        // Lazily wrap (or unwrap) the GL context in the call-profiling shim,
        // depending on whether this thread is currently being profiled.
        if being_profiled && !using_wrapper {
            fn note(name: &str, duration: Duration) {
                profiler::add_text_marker(cstr!("OpenGL Calls"), name, duration);
            }
            let threshold = Duration::from_millis(1);
            let wrapped = gl::ProfilingGl::wrap(self.gl.clone(), threshold, note);
            let base = mem::replace(&mut self.gl, wrapped);
            self.base_gl = Some(base);
        } else if !being_profiled && using_wrapper {
            self.gl = self.base_gl.take().unwrap();
        }

        // Snapshot the FBOs the embedder left bound: these become the
        // "default" read/draw targets for the rest of the frame.
        let mut default_read_fbo = [0];
        unsafe {
            self.gl.get_integer_v(gl::READ_FRAMEBUFFER_BINDING, &mut default_read_fbo);
        }
        self.default_read_fbo = FBOId(default_read_fbo[0] as gl::GLuint);
        let mut default_draw_fbo = [0];
        unsafe {
            self.gl.get_integer_v(gl::DRAW_FRAMEBUFFER_BINDING, &mut default_draw_fbo);
        }
        self.default_draw_fbo = FBOId(default_draw_fbo[0] as gl::GLuint);

        // Texture state: unbind every unit and clear the cache so the next
        // bind_texture call always hits GL.
        for i in 0 .. self.bound_textures.len() {
            self.bound_textures[i] = 0;
            self.gl.active_texture(gl::TEXTURE0 + i as gl::GLuint);
            self.gl.bind_texture(gl::TEXTURE_2D, 0);
        }

        // Shader state.
        self.bound_program = 0;
        self.program_mode_id = UniformLocation::INVALID;
        self.gl.use_program(0);

        // Vertex state.
        self.bound_vao = 0;
        self.gl.bind_vertex_array(0);

        // FBO state mirrors the snapshotted defaults.
        self.bound_read_fbo = self.default_read_fbo;
        self.bound_draw_fbo = self.default_draw_fbo;

        // Pixel op state: tightly packed uploads, no PBO bound.
        self.gl.pixel_store_i(gl::UNPACK_ALIGNMENT, 1);
        self.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, 0);

        // Default texture unit.
        self.gl.active_texture(gl::TEXTURE0);

        self.frame_id
    }
1702
    /// Binds `id` to the given texture `slot`/`target`, skipping the GL call
    /// when the cached binding already matches. When `set_swizzle` is `Some`,
    /// the bind is forced so the swizzle parameters can be (re)applied.
    fn bind_texture_impl(
        &mut self, slot: TextureSlot, id: gl::GLuint, target: gl::GLenum, set_swizzle: Option<Swizzle>
    ) {
        debug_assert!(self.inside_frame);

        if self.bound_textures[slot.0] != id || set_swizzle.is_some() {
            self.gl.active_texture(gl::TEXTURE0 + slot.0 as gl::GLuint);
            self.gl.bind_texture(target, id);
            if let Some(swizzle) = set_swizzle {
                if self.capabilities.supports_texture_swizzle {
                    let components = match swizzle {
                        Swizzle::Rgba => [gl::RED, gl::GREEN, gl::BLUE, gl::ALPHA],
                        Swizzle::Bgra => [gl::BLUE, gl::GREEN, gl::RED, gl::ALPHA],
                    };
                    self.gl.tex_parameter_i(target, gl::TEXTURE_SWIZZLE_R, components[0] as i32);
                    self.gl.tex_parameter_i(target, gl::TEXTURE_SWIZZLE_G, components[1] as i32);
                    self.gl.tex_parameter_i(target, gl::TEXTURE_SWIZZLE_B, components[2] as i32);
                    self.gl.tex_parameter_i(target, gl::TEXTURE_SWIZZLE_A, components[3] as i32);
                } else {
                    // Swizzling is unsupported: only the identity swizzle is legal.
                    debug_assert_eq!(swizzle, Swizzle::default());
                }
            }
            // Leave the active unit at 0, the convention everywhere else here.
            self.gl.active_texture(gl::TEXTURE0);
            self.bound_textures[slot.0] = id;
        }
    }
1729
1730 pub fn bind_texture<S>(&mut self, slot: S, texture: &Texture, swizzle: Swizzle)
1731 where
1732 S: Into<TextureSlot>,
1733 {
1734 let old_swizzle = texture.active_swizzle.replace(swizzle);
1735 let set_swizzle = if old_swizzle != swizzle {
1736 Some(swizzle)
1737 } else {
1738 None
1739 };
1740 self.bind_texture_impl(slot.into(), texture.id, texture.target, set_swizzle);
1741 }
1742
1743 pub fn bind_external_texture<S>(&mut self, slot: S, external_texture: &ExternalTexture)
1744 where
1745 S: Into<TextureSlot>,
1746 {
1747 self.bind_texture_impl(slot.into(), external_texture.id, external_texture.target, None);
1748 }
1749
1750 pub fn bind_read_target_impl(&mut self, fbo_id: FBOId) {
1751 debug_assert!(self.inside_frame);
1752
1753 if self.bound_read_fbo != fbo_id {
1754 self.bound_read_fbo = fbo_id;
1755 fbo_id.bind(self.gl(), FBOTarget::Read);
1756 }
1757 }
1758
1759 pub fn bind_read_target(&mut self, target: ReadTarget) {
1760 let fbo_id = match target {
1761 ReadTarget::Default => self.default_read_fbo,
1762 ReadTarget::Texture { fbo_id } => fbo_id,
1763 ReadTarget::External { fbo } => fbo,
1764 };
1765
1766 self.bind_read_target_impl(fbo_id)
1767 }
1768
1769 fn bind_draw_target_impl(&mut self, fbo_id: FBOId) {
1770 debug_assert!(self.inside_frame);
1771
1772 if self.bound_draw_fbo != fbo_id {
1773 self.bound_draw_fbo = fbo_id;
1774 fbo_id.bind(self.gl(), FBOTarget::Draw);
1775 }
1776 }
1777
1778 pub fn reset_read_target(&mut self) {
1779 let fbo = self.default_read_fbo;
1780 self.bind_read_target_impl(fbo);
1781 }
1782
1783
1784 pub fn reset_draw_target(&mut self) {
1785 let fbo = self.default_draw_fbo;
1786 self.bind_draw_target_impl(fbo);
1787 self.depth_available = true;
1788 }
1789
    /// Resolves a logical draw target to (FBO, viewport rect, depth
    /// availability), binds the FBO, and sets the viewport accordingly.
    pub fn bind_draw_target(
        &mut self,
        target: DrawTarget,
    ) {
        let (fbo_id, rect, depth_available) = match target {
            // The default target supplies its own viewport rect; depth exists.
            DrawTarget::Default { rect, .. } => {
                (self.default_draw_fbo, rect, true)
            }
            // Render-to-texture covers the full texture dimensions.
            DrawTarget::Texture { dimensions, fbo_id, with_depth, .. } => {
                let rect = FramebufferIntRect::new(
                    FramebufferIntPoint::zero(),
                    FramebufferIntSize::from_untyped(dimensions.to_untyped()),
                );
                (fbo_id, rect, with_depth)
            },
            // Externally-owned FBOs are assumed to have no depth attachment.
            DrawTarget::External { fbo, size } => {
                (fbo, size.into(), false)
            }
            // Native compositor surfaces draw at an offset within the surface.
            DrawTarget::NativeSurface { external_fbo_id, offset, dimensions, .. } => {
                (
                    FBOId(external_fbo_id),
                    FramebufferIntRect::new(
                        FramebufferIntPoint::from_untyped(offset.to_untyped()),
                        FramebufferIntSize::from_untyped(dimensions.to_untyped()),
                    ),
                    true
                )
            }
        };

        self.depth_available = depth_available;
        self.bind_draw_target_impl(fbo_id);
        self.gl.viewport(
            rect.origin.x,
            rect.origin.y,
            rect.size.width,
            rect.size.height,
        );
    }
1829
1830 pub fn create_fbo(&mut self) -> FBOId {
1833 FBOId(self.gl.gen_framebuffers(1)[0])
1834 }
1835
    /// Creates an FBO whose color attachment is an externally-owned 2D
    /// texture, verifies completeness, and restores the previously bound
    /// draw FBO before returning.
    pub fn create_fbo_for_external_texture(&mut self, texture_id: u32) -> FBOId {
        let fbo = self.create_fbo();
        fbo.bind(self.gl(), FBOTarget::Draw);
        self.gl.framebuffer_texture_2d(
            gl::DRAW_FRAMEBUFFER,
            gl::COLOR_ATTACHMENT0,
            gl::TEXTURE_2D,
            texture_id,
            0,
        );
        debug_assert_eq!(
            self.gl.check_frame_buffer_status(gl::DRAW_FRAMEBUFFER),
            gl::FRAMEBUFFER_COMPLETE,
            "Incomplete framebuffer",
        );
        // Re-bind whatever draw FBO the cache says was active, so this
        // helper is side-effect free from the caller's perspective.
        self.bound_draw_fbo.bind(self.gl(), FBOTarget::Draw);
        fbo
    }
1855
1856 pub fn delete_fbo(&mut self, fbo: FBOId) {
1857 self.gl.delete_framebuffers(&[fbo.0]);
1858 }
1859
1860 pub fn bind_external_draw_target(&mut self, fbo_id: FBOId) {
1861 debug_assert!(self.inside_frame);
1862
1863 if self.bound_draw_fbo != fbo_id {
1864 self.bound_draw_fbo = fbo_id;
1865 fbo_id.bind(self.gl(), FBOTarget::Draw);
1866 }
1867 }
1868
    /// Links `program`, preferring a cached program binary when one is
    /// available and valid; otherwise compiles both shader stages, binds the
    /// vertex attribute locations from `descriptor`, links, and (if caching is
    /// enabled) stores the resulting binary. Finally resolves the standard
    /// `uTransform`/`uMode` uniform locations and marks the program initialized.
    pub fn link_program(
        &mut self,
        program: &mut Program,
        descriptor: &VertexDescriptor,
    ) -> Result<(), ShaderError> {
        assert!(!program.is_initialized());
        let mut build_program = true;
        let info = &program.source_info;

        // Attempt to load a cached binary if possible.
        if let Some(ref cached_programs) = self.cached_programs {
            // Not in the in-memory cache: give the embedder's handler a chance
            // to load it from disk, then upload the binary if it appeared.
            if cached_programs.entries.borrow().get(&program.source_info.digest).is_none() {
                if let Some(ref handler) = cached_programs.program_cache_handler {
                    handler.try_load_shader_from_disk(&program.source_info.digest, cached_programs);
                    if let Some(entry) = cached_programs.entries.borrow().get(&program.source_info.digest) {
                        self.gl.program_binary(program.id, entry.binary.format, &entry.binary.bytes);
                    }
                }
            }

            // A binary was uploaded (here or in create_program): check whether
            // the driver accepted it.
            if let Some(entry) = cached_programs.entries.borrow_mut().get_mut(&info.digest) {
                let mut link_status = [0];
                unsafe {
                    self.gl.get_program_iv(program.id, gl::LINK_STATUS, &mut link_status);
                }
                if link_status[0] == 0 {
                    let error_log = self.gl.get_program_info_log(program.id);
                    error!(
                        "Failed to load a program object with a program binary: {} renderer {}\n{}",
                        &info.base_filename,
                        self.renderer_name,
                        error_log
                    );
                    // Tell the embedder the cached binary is stale/invalid.
                    if let Some(ref program_cache_handler) = cached_programs.program_cache_handler {
                        program_cache_handler.notify_program_binary_failed(&entry.binary);
                    }
                } else {
                    entry.linked = true;
                    // Binary accepted: skip the source compile path below.
                    build_program = false;
                }
            }
        }

        // If not, we need to do a normal compile + link pass.
        if build_program {
            // Compile the vertex shader
            let vs_source = info.compute_source(self, SHADER_KIND_VERTEX);
            let vs_id = match Device::compile_shader(&*self.gl, &info.base_filename, gl::VERTEX_SHADER, &vs_source, self.requires_null_terminated_shader_source) {
                Ok(vs_id) => vs_id,
                Err(err) => return Err(err),
            };

            // Compile the fragment shader
            let fs_source = info.compute_source(self, SHADER_KIND_FRAGMENT);
            let fs_id =
                match Device::compile_shader(&*self.gl, &info.base_filename, gl::FRAGMENT_SHADER, &fs_source, self.requires_null_terminated_shader_source) {
                    Ok(fs_id) => fs_id,
                    Err(err) => {
                        // Avoid leaking the already-compiled vertex shader.
                        self.gl.delete_shader(vs_id);
                        return Err(err);
                    }
                };

            // Debug aid: dump the preprocessed sources of the requested shader.
            if Some(info.base_filename) == self.dump_shader_source.as_ref().map(String::as_ref) {
                let path = std::path::Path::new(info.base_filename);
                std::fs::write(path.with_extension("vert"), vs_source).unwrap();
                std::fs::write(path.with_extension("frag"), fs_source).unwrap();
            }

            // Attach shaders
            self.gl.attach_shader(program.id, vs_id);
            self.gl.attach_shader(program.id, fs_id);

            // Bind vertex attributes: attribute index == declaration order
            // across vertex attrs followed by instance attrs.
            for (i, attr) in descriptor
                .vertex_attributes
                .iter()
                .chain(descriptor.instance_attributes.iter())
                .enumerate()
            {
                self.gl
                    .bind_attrib_location(program.id, i as gl::GLuint, attr.name);
            }

            // Ask the driver to keep a retrievable binary around for caching.
            if self.cached_programs.is_some() {
                self.gl.program_parameter_i(program.id, gl::PROGRAM_BINARY_RETRIEVABLE_HINT, gl::TRUE as gl::GLint);
            }

            // Link!
            self.gl.link_program(program.id);

            // Sanity-check (debug only) that the driver honored our locations.
            if cfg!(debug_assertions) {
                for (i, attr) in descriptor
                    .vertex_attributes
                    .iter()
                    .chain(descriptor.instance_attributes.iter())
                    .enumerate()
                {
                    let location = self.gl.get_attrib_location(program.id, attr.name);
                    if location != i as gl::GLint {
                        warn!("Attribute {:?} is not found in the shader {}. Expected at {}, found at {}",
                            attr, program.source_info.base_filename, i, location);
                    }
                }
            }

            // The shader objects are no longer needed once the program links.
            self.gl.detach_shader(program.id, vs_id);
            self.gl.detach_shader(program.id, fs_id);
            self.gl.delete_shader(vs_id);
            self.gl.delete_shader(fs_id);

            let mut link_status = [0];
            unsafe {
                self.gl.get_program_iv(program.id, gl::LINK_STATUS, &mut link_status);
            }
            if link_status[0] == 0 {
                let error_log = self.gl.get_program_info_log(program.id);
                error!(
                    "Failed to link shader program: {}\n{}",
                    &info.base_filename,
                    error_log
                );
                self.gl.delete_program(program.id);
                return Err(ShaderError::Link(info.base_filename.to_owned(), error_log));
            }

            // Cache the freshly-linked binary for future runs.
            if let Some(ref cached_programs) = self.cached_programs {
                if !cached_programs.entries.borrow().contains_key(&info.digest) {
                    let (buffer, format) = self.gl.get_program_binary(program.id);
                    if buffer.len() > 0 {
                        let entry = ProgramCacheEntry {
                            binary: Arc::new(ProgramBinary::new(buffer, format, info.digest.clone())),
                            linked: true,
                        };
                        cached_programs.entries.borrow_mut().insert(info.digest.clone(), entry);
                    }
                }
            }
        }

        program.is_initialized = true;
        program.u_transform = self.gl.get_uniform_location(program.id, "uTransform");
        program.u_mode = self.gl.get_uniform_location(program.id, "uMode");

        Ok(())
    }
2035
2036 pub fn bind_program(&mut self, program: &Program) {
2037 debug_assert!(self.inside_frame);
2038 debug_assert!(program.is_initialized());
2039 #[cfg(debug_assertions)]
2040 {
2041 self.shader_is_ready = true;
2042 }
2043
2044 if self.bound_program != program.id {
2045 self.gl.use_program(program.id);
2046 self.bound_program = program.id;
2047 self.program_mode_id = UniformLocation(program.u_mode);
2048 }
2049 }
2050
    /// Allocates a GL texture (2D or 2D-array) with the given format, filter
    /// and optional render-target FBOs. Dimensions above the device limit are
    /// clamped with an error log. Also allocates the renderbuffer-based blit
    /// workaround when array blits are unsupported, and records the GPU
    /// memory allocation for reporting.
    pub fn create_texture(
        &mut self,
        target: TextureTarget,
        format: ImageFormat,
        mut width: i32,
        mut height: i32,
        filter: TextureFilter,
        render_target: Option<RenderTargetInfo>,
        layer_count: i32,
    ) -> Texture {
        debug_assert!(self.inside_frame);

        // Never ask the driver for more than it advertised; trim instead.
        if width > self.max_texture_size || height > self.max_texture_size {
            error!("Attempting to allocate a texture of size {}x{} above the limit, trimming", width, height);
            width = width.min(self.max_texture_size);
            height = height.min(self.max_texture_size);
        }

        // Set up the texture book-keeping.
        let mut texture = Texture {
            id: self.gl.gen_textures(1)[0],
            target: get_gl_target(target),
            size: DeviceIntSize::new(width, height),
            layer_count,
            format,
            filter,
            active_swizzle: Cell::default(),
            fbos: vec![],
            fbos_with_depth: vec![],
            blit_workaround_buffer: None,
            last_frame_used: self.frame_id,
            flags: TextureFlags::default(),
        };
        self.bind_texture(DEFAULT_TEXTURE, &texture, Swizzle::default());
        self.set_texture_parameters(texture.target, filter);

        let desc = self.gl_describe_format(texture.format);
        // Only array textures may carry more than one layer.
        let is_array = match texture.target {
            gl::TEXTURE_2D_ARRAY => true,
            gl::TEXTURE_2D | gl::TEXTURE_RECTANGLE | gl::TEXTURE_EXTERNAL_OES => false,
            _ => panic!("BUG: Unexpected texture target!"),
        };
        assert!(is_array || texture.layer_count == 1);

        // Full mip chain for trilinear filtering, single level otherwise.
        let mipmap_levels = if texture.filter == TextureFilter::Trilinear {
            let max_dimension = cmp::max(width, height);
            ((max_dimension) as f64).log2() as gl::GLint + 1
        } else {
            1
        };

        // Use glTexStorage where available (immutable storage), falling back
        // to glTexImage; the policy comes from device initialization.
        let use_texture_storage = match self.texture_storage_usage {
            TexStorageUsage::Always => true,
            TexStorageUsage::NonBGRA8 => texture.format != ImageFormat::BGRA8,
            TexStorageUsage::Never => false,
        };
        match (use_texture_storage, is_array) {
            (true, true) =>
                self.gl.tex_storage_3d(
                    gl::TEXTURE_2D_ARRAY,
                    mipmap_levels,
                    desc.internal,
                    texture.size.width as gl::GLint,
                    texture.size.height as gl::GLint,
                    texture.layer_count,
                ),
            (true, false) =>
                self.gl.tex_storage_2d(
                    texture.target,
                    mipmap_levels,
                    desc.internal,
                    texture.size.width as gl::GLint,
                    texture.size.height as gl::GLint,
                ),
            (false, true) =>
                self.gl.tex_image_3d(
                    gl::TEXTURE_2D_ARRAY,
                    0,
                    desc.internal as gl::GLint,
                    texture.size.width as gl::GLint,
                    texture.size.height as gl::GLint,
                    texture.layer_count,
                    0,
                    desc.external,
                    desc.pixel_type,
                    None,
                ),
            (false, false) =>
                self.gl.tex_image_2d(
                    texture.target,
                    0,
                    desc.internal as gl::GLint,
                    texture.size.width as gl::GLint,
                    texture.size.height as gl::GLint,
                    0,
                    desc.external,
                    desc.pixel_type,
                    None,
                ),
        }

        // Set up FBOs, if required; depth-enabled FBOs are a separate set.
        if let Some(rt_info) = render_target {
            self.init_fbos(&mut texture, false);
            if rt_info.has_depth {
                self.init_fbos(&mut texture, true);
            }
        }

        // Allocate a single-layer renderbuffer+FBO pair used to emulate blits
        // into array layers on drivers that cannot blit to texture arrays.
        if texture.layer_count > 1 && !self.capabilities.supports_blit_to_texture_array {
            let rbo = RBOId(self.gl.gen_renderbuffers(1)[0]);
            let fbo = FBOId(self.gl.gen_framebuffers(1)[0]);
            self.gl.bind_renderbuffer(gl::RENDERBUFFER, rbo.0);
            self.gl.renderbuffer_storage(
                gl::RENDERBUFFER,
                self.matching_renderbuffer_format(texture.format),
                texture.size.width as _,
                texture.size.height as _
            );

            self.bind_draw_target_impl(fbo);
            self.gl.framebuffer_renderbuffer(
                gl::DRAW_FRAMEBUFFER,
                gl::COLOR_ATTACHMENT0,
                gl::RENDERBUFFER,
                rbo.0
            );
            texture.blit_workaround_buffer = Some((rbo, fbo));
        }

        record_gpu_alloc(texture.size_in_bytes());

        texture
    }
2193
2194 fn set_texture_parameters(&mut self, target: gl::GLuint, filter: TextureFilter) {
2195 let mag_filter = match filter {
2196 TextureFilter::Nearest => gl::NEAREST,
2197 TextureFilter::Linear | TextureFilter::Trilinear => gl::LINEAR,
2198 };
2199
2200 let min_filter = match filter {
2201 TextureFilter::Nearest => gl::NEAREST,
2202 TextureFilter::Linear => gl::LINEAR,
2203 TextureFilter::Trilinear => gl::LINEAR_MIPMAP_LINEAR,
2204 };
2205
2206 self.gl
2207 .tex_parameter_i(target, gl::TEXTURE_MAG_FILTER, mag_filter as gl::GLint);
2208 self.gl
2209 .tex_parameter_i(target, gl::TEXTURE_MIN_FILTER, min_filter as gl::GLint);
2210
2211 self.gl
2212 .tex_parameter_i(target, gl::TEXTURE_WRAP_S, gl::CLAMP_TO_EDGE as gl::GLint);
2213 self.gl
2214 .tex_parameter_i(target, gl::TEXTURE_WRAP_T, gl::CLAMP_TO_EDGE as gl::GLint);
2215 }
2216
    /// Copies the contents of `src` into `dst` (which must be at least as
    /// large in every dimension), using glCopyImageSubData when supported and
    /// a per-layer framebuffer blit otherwise.
    pub fn blit_renderable_texture(
        &mut self,
        dst: &mut Texture,
        src: &Texture,
    ) {
        debug_assert!(self.inside_frame);
        debug_assert!(dst.size.width >= src.size.width);
        debug_assert!(dst.size.height >= src.size.height);
        debug_assert!(dst.layer_count >= src.layer_count);

        if self.capabilities.supports_copy_image_sub_data {
            assert_ne!(src.id, dst.id,
                    "glCopyImageSubData's behaviour is undefined if src and dst images are identical and the rectangles overlap.");
            unsafe {
                self.gl.copy_image_sub_data(src.id, src.target, 0,
                                            0, 0, 0,
                                            dst.id, dst.target, 0,
                                            0, 0, 0,
                                            src.size.width as _, src.size.height as _, src.layer_count);
            }
        } else {
            // Fallback: blit the full source rect layer by layer, then
            // restore the default read/draw targets.
            let rect = FramebufferIntRect::new(
                FramebufferIntPoint::zero(),
                FramebufferIntSize::from_untyped(src.get_dimensions().to_untyped()),
            );
            for layer in 0..src.layer_count.min(dst.layer_count) as LayerIndex {
                self.blit_render_target(
                    ReadTarget::from_texture(src, layer),
                    rect,
                    DrawTarget::from_texture(dst, layer, false),
                    rect,
                    TextureFilter::Linear
                );
            }
            self.reset_draw_target();
            self.reset_read_target();
        }
    }
2256
2257 pub fn invalidate_render_target(&mut self, texture: &Texture) {
2263 let (fbos, attachments) = if texture.supports_depth() {
2264 (&texture.fbos_with_depth,
2265 &[gl::COLOR_ATTACHMENT0, gl::DEPTH_ATTACHMENT] as &[gl::GLenum])
2266 } else {
2267 (&texture.fbos, &[gl::COLOR_ATTACHMENT0] as &[gl::GLenum])
2268 };
2269
2270 let original_bound_fbo = self.bound_draw_fbo;
2271 for fbo_id in fbos.iter() {
2272 self.bind_external_draw_target(*fbo_id);
2276 self.gl.invalidate_framebuffer(gl::FRAMEBUFFER, attachments);
2277 }
2278 self.bind_external_draw_target(original_bound_fbo);
2279 }
2280
    /// Marks a cached render-target texture as used this frame, and lazily
    /// adds depth-enabled FBOs if this reuse newly requires depth.
    // NOTE(review): the `T: Texel` parameter is unused in this body —
    // presumably retained for API symmetry with other target helpers; confirm.
    pub fn reuse_render_target<T: Texel>(
        &mut self,
        texture: &mut Texture,
        rt_info: RenderTargetInfo,
    ) {
        texture.last_frame_used = self.frame_id;

        // Add depth support if needed.
        if rt_info.has_depth && !texture.supports_depth() {
            self.init_fbos(texture, true);
        }
    }
2296
    /// Creates one FBO per texture layer (attaching the matching layer, or
    /// the whole texture for non-array targets), optionally attaching a
    /// shared depth renderbuffer sized to the texture. Restores the
    /// previously bound draw FBO when done.
    fn init_fbos(&mut self, texture: &mut Texture, with_depth: bool) {
        let (fbos, depth_rb) = if with_depth {
            // Depth renderbuffers are shared per-dimension and refcounted.
            let depth_target = self.acquire_depth_target(texture.get_dimensions());
            (&mut texture.fbos_with_depth, Some(depth_target))
        } else {
            (&mut texture.fbos, None)
        };

        // Generate the FBOs.
        assert!(fbos.is_empty());
        fbos.extend(self.gl.gen_framebuffers(texture.layer_count).into_iter().map(FBOId));

        // Bind the FBOs.
        let original_bound_fbo = self.bound_draw_fbo;
        for (fbo_index, &fbo_id) in fbos.iter().enumerate() {
            self.bind_external_draw_target(fbo_id);
            match texture.target {
                gl::TEXTURE_2D_ARRAY => {
                    // Each FBO targets exactly one layer of the array.
                    self.gl.framebuffer_texture_layer(
                        gl::DRAW_FRAMEBUFFER,
                        gl::COLOR_ATTACHMENT0,
                        texture.id,
                        0,
                        fbo_index as _,
                    )
                }
                _ => {
                    assert_eq!(fbo_index, 0);
                    self.gl.framebuffer_texture_2d(
                        gl::DRAW_FRAMEBUFFER,
                        gl::COLOR_ATTACHMENT0,
                        texture.target,
                        texture.id,
                        0,
                    )
                }
            }

            if let Some(depth_rb) = depth_rb {
                self.gl.framebuffer_renderbuffer(
                    gl::DRAW_FRAMEBUFFER,
                    gl::DEPTH_ATTACHMENT,
                    gl::RENDERBUFFER,
                    depth_rb.0,
                );
            }

            debug_assert_eq!(
                self.gl.check_frame_buffer_status(gl::DRAW_FRAMEBUFFER),
                gl::FRAMEBUFFER_COMPLETE,
                "Incomplete framebuffer",
            );
        }
        self.bind_external_draw_target(original_bound_fbo);
    }
2352
2353 fn deinit_fbos(&mut self, fbos: &mut Vec<FBOId>) {
2354 if !fbos.is_empty() {
2355 let fbo_ids: SmallVec<[gl::GLuint; 8]> = fbos
2356 .drain(..)
2357 .map(|FBOId(fbo_id)| fbo_id)
2358 .collect();
2359 self.gl.delete_framebuffers(&fbo_ids[..]);
2360 }
2361 }
2362
    /// Returns a shared depth renderbuffer for the given dimensions,
    /// creating it on first use. Depth targets are refcounted per-dimension;
    /// the GPU allocation is only recorded when the refcount rises from zero.
    fn acquire_depth_target(&mut self, dimensions: DeviceIntSize) -> RBOId {
        let gl = &self.gl;
        let target = self.depth_targets.entry(dimensions).or_insert_with(|| {
            let renderbuffer_ids = gl.gen_renderbuffers(1);
            let depth_rb = renderbuffer_ids[0];
            gl.bind_renderbuffer(gl::RENDERBUFFER, depth_rb);
            gl.renderbuffer_storage(
                gl::RENDERBUFFER,
                gl::DEPTH_COMPONENT24,
                dimensions.width as _,
                dimensions.height as _,
            );
            SharedDepthTarget {
                rbo_id: RBOId(depth_rb),
                refcount: 0,
            }
        });
        if target.refcount == 0 {
            record_gpu_alloc(depth_target_size_in_bytes(&dimensions));
        }
        target.refcount += 1;
        target.rbo_id
    }
2386
    /// Decrements the refcount of the shared depth target for `dimensions`,
    /// deleting the renderbuffer and recording the GPU free once the count
    /// reaches zero. Panics if no such depth target exists.
    fn release_depth_target(&mut self, dimensions: DeviceIntSize) {
        let mut entry = match self.depth_targets.entry(dimensions) {
            Entry::Occupied(x) => x,
            Entry::Vacant(..) => panic!("Releasing unknown depth target"),
        };
        debug_assert!(entry.get().refcount != 0);
        entry.get_mut().refcount -= 1;
        if entry.get().refcount == 0 {
            let (dimensions, target) = entry.remove_entry();
            self.gl.delete_renderbuffers(&[target.rbo_id.0]);
            record_gpu_free(depth_target_size_in_bytes(&dimensions));
        }
    }
2400
2401 fn blit_render_target_impl(
2403 &mut self,
2404 src_rect: FramebufferIntRect,
2405 dest_rect: FramebufferIntRect,
2406 filter: TextureFilter,
2407 ) {
2408 debug_assert!(self.inside_frame);
2409
2410 let filter = match filter {
2411 TextureFilter::Nearest => gl::NEAREST,
2412 TextureFilter::Linear | TextureFilter::Trilinear => gl::LINEAR,
2413 };
2414
2415 self.gl.blit_framebuffer(
2416 src_rect.origin.x,
2417 src_rect.origin.y,
2418 src_rect.origin.x + src_rect.size.width,
2419 src_rect.origin.y + src_rect.size.height,
2420 dest_rect.origin.x,
2421 dest_rect.origin.y,
2422 dest_rect.origin.x + dest_rect.size.width,
2423 dest_rect.origin.y + dest_rect.size.height,
2424 gl::COLOR_BUFFER_BIT,
2425 filter,
2426 );
2427 }
2428
    /// Blits `src_rect` of `src_target` into `dest_rect` of `dest_target`.
    /// When the destination is a non-zero layer of a texture array on a
    /// driver that cannot blit into arrays, the blit goes through the
    /// texture's workaround renderbuffer and is copied into the layer with
    /// glCopyTexSubImage3D instead.
    pub fn blit_render_target(
        &mut self,
        src_target: ReadTarget,
        src_rect: FramebufferIntRect,
        dest_target: DrawTarget,
        dest_rect: FramebufferIntRect,
        filter: TextureFilter,
    ) {
        debug_assert!(self.inside_frame);

        self.bind_read_target(src_target);

        match dest_target {
            DrawTarget::Texture { layer, blit_workaround_buffer, dimensions, id, target, .. } if layer != 0 &&
                !self.capabilities.supports_blit_to_texture_array =>
            {
                // This should have been initialized in create_texture().
                let (_rbo, fbo) = blit_workaround_buffer.expect("Blit workaround buffer has not been initialized.");

                // Blit from read target to intermediate buffer.
                self.bind_draw_target_impl(fbo);
                self.blit_render_target_impl(
                    src_rect,
                    dest_rect,
                    filter
                );

                // glCopyTexSubImage3D cannot express a flipped copy, so
                // normalize a possibly-negative-size dest rect into an
                // axis-aligned region clipped to the texture bounds.
                let dest_bounds = DeviceIntRect::new(
                    DeviceIntPoint::new(
                        dest_rect.min_x().min(dest_rect.max_x()),
                        dest_rect.min_y().min(dest_rect.max_y()),
                    ),
                    DeviceIntSize::new(
                        dest_rect.size.width.abs(),
                        dest_rect.size.height.abs(),
                    ),
                ).intersection(&dimensions.into()).unwrap_or_else(DeviceIntRect::zero);

                self.bind_read_target_impl(fbo);
                self.bind_texture_impl(
                    DEFAULT_TEXTURE,
                    id,
                    target,
                    None, // not depending on swizzle
                );

                // Copy from intermediate buffer to the texture layer.
                self.gl.copy_tex_sub_image_3d(
                    target, 0,
                    dest_bounds.origin.x, dest_bounds.origin.y,
                    layer as _,
                    dest_bounds.origin.x, dest_bounds.origin.y,
                    dest_bounds.size.width, dest_bounds.size.height,
                );

            }
            _ => {
                // Fast path: the driver can blit straight into the target.
                self.bind_draw_target(dest_target);

                self.blit_render_target_impl(src_rect, dest_rect, filter);
            }
        }
    }
2498
2499 pub fn blit_render_target_invert_y(
2503 &mut self,
2504 src_target: ReadTarget,
2505 src_rect: FramebufferIntRect,
2506 dest_target: DrawTarget,
2507 dest_rect: FramebufferIntRect,
2508 ) {
2509 debug_assert!(self.inside_frame);
2510
2511 let mut inverted_dest_rect = dest_rect;
2512 inverted_dest_rect.origin.y = dest_rect.max_y();
2513 inverted_dest_rect.size.height *= -1;
2514
2515 self.blit_render_target(
2516 src_target,
2517 src_rect,
2518 dest_target,
2519 inverted_dest_rect,
2520 TextureFilter::Linear,
2521 );
2522 }
2523
    /// Destroys a texture: records the GPU free, tears down its FBOs, drops
    /// its shared depth target reference, frees the blit workaround buffer if
    /// present, deletes the GL texture, and purges it from the binding cache.
    pub fn delete_texture(&mut self, mut texture: Texture) {
        debug_assert!(self.inside_frame);
        record_gpu_free(texture.size_in_bytes());
        // supports_depth() is answered by the FBO lists, so capture it before
        // deinit_fbos clears them.
        let had_depth = texture.supports_depth();
        self.deinit_fbos(&mut texture.fbos);
        self.deinit_fbos(&mut texture.fbos_with_depth);
        if had_depth {
            self.release_depth_target(texture.get_dimensions());
        }
        if let Some((rbo, fbo)) = texture.blit_workaround_buffer {
            self.gl.delete_framebuffers(&[fbo.0]);
            self.gl.delete_renderbuffers(&[rbo.0]);
        }

        self.gl.delete_textures(&[texture.id]);

        // Drop stale entries from the binding cache so a future texture that
        // reuses this GL name is not mistaken for still being bound.
        for bound_texture in &mut self.bound_textures {
            if *bound_texture == texture.id {
                *bound_texture = 0;
            }
        }

        // Zero the id so Drop-time sanity checks see it as released.
        texture.id = 0;
    }
2549
2550 #[cfg(feature = "replay")]
2551 pub fn delete_external_texture(&mut self, mut external: ExternalTexture) {
2552 self.gl.delete_textures(&[external.id]);
2553 external.id = 0;
2554 }
2555
2556 pub fn delete_program(&mut self, mut program: Program) {
2557 self.gl.delete_program(program.id);
2558 program.id = 0;
2559 }
2560
2561 pub fn create_program_linked(
2563 &mut self,
2564 base_filename: &'static str,
2565 features: String,
2566 descriptor: &VertexDescriptor,
2567 ) -> Result<Program, ShaderError> {
2568 let mut program = self.create_program(base_filename, features)?;
2569 self.link_program(&mut program, descriptor)?;
2570 Ok(program)
2571 }
2572
    /// Creates a program object from the named shader sources and `features`.
    /// If the program-binary cache already holds a matching binary, it is
    /// uploaded now; actual linking (or compiling, if the binary is rejected)
    /// happens later in `link_program`.
    pub fn create_program(
        &mut self,
        base_filename: &'static str,
        features: String,
    ) -> Result<Program, ShaderError> {
        debug_assert!(self.inside_frame);

        let source_info = ProgramSourceInfo::new(self, base_filename, features);

        // Create program
        let pid = self.gl.create_program();

        // Attempt to load a cached binary if possible.
        if let Some(ref cached_programs) = self.cached_programs {
            if let Some(entry) = cached_programs.entries.borrow().get(&source_info.digest) {
                self.gl.program_binary(pid, entry.binary.format, &entry.binary.bytes);
            }
        }

        // Use 0 for the uniforms as they are initialized by link_program.
        let program = Program {
            id: pid,
            u_transform: 0,
            u_mode: 0,
            source_info,
            is_initialized: false,
        };

        Ok(program)
    }
2608
2609 fn build_shader_string<F: FnMut(&str)>(
2610 &self,
2611 features: &str,
2612 kind: &str,
2613 base_filename: &str,
2614 output: F,
2615 ) {
2616 do_build_shader_string(
2617 get_shader_version(&*self.gl),
2618 features,
2619 kind,
2620 base_filename,
2621 self.resource_override_path.as_ref(),
2622 output,
2623 )
2624 }
2625
2626 pub fn bind_shader_samplers<S>(&mut self, program: &Program, bindings: &[(&'static str, S)])
2627 where
2628 S: Into<TextureSlot> + Copy,
2629 {
2630 assert_eq!(self.bound_program, program.id);
2632
2633 for binding in bindings {
2634 let u_location = self.gl.get_uniform_location(program.id, binding.0);
2635 if u_location != -1 {
2636 self.bind_program(program);
2637 self.gl
2638 .uniform_1i(u_location, binding.1.into().0 as gl::GLint);
2639 }
2640 }
2641 }
2642
    /// Looks up a uniform's location in `program`; the wrapped value is -1
    /// when the uniform does not exist or was optimized away.
    pub fn get_uniform_location(&self, program: &Program, name: &str) -> UniformLocation {
        UniformLocation(self.gl.get_uniform_location(program.id, name))
    }
2646
2647 pub fn set_uniforms(
2648 &self,
2649 program: &Program,
2650 transform: &Transform3D<f32>,
2651 ) {
2652 debug_assert!(self.inside_frame);
2653 #[cfg(debug_assertions)]
2654 debug_assert!(self.shader_is_ready);
2655
2656 self.gl
2657 .uniform_matrix_4fv(program.u_transform, false, &transform.to_row_major_array());
2658 }
2659
2660 pub fn switch_mode(&self, mode: i32) {
2661 debug_assert!(self.inside_frame);
2662 #[cfg(debug_assertions)]
2663 debug_assert!(self.shader_is_ready);
2664
2665 self.gl.uniform_1i(self.program_mode_id.0, mode);
2666 }
2667
2668 pub fn create_pbo(&mut self) -> PBO {
2669 let id = self.gl.gen_buffers(1)[0];
2670 PBO {
2671 id,
2672 reserved_size: 0,
2673 }
2674 }
2675
2676 pub fn create_pbo_with_size(&mut self, size: usize) -> PBO {
2677 let mut pbo = self.create_pbo();
2678
2679 self.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, pbo.id);
2680 self.gl.pixel_store_i(gl::PACK_ALIGNMENT, 1);
2681 self.gl.buffer_data_untyped(
2682 gl::PIXEL_PACK_BUFFER,
2683 size as _,
2684 ptr::null(),
2685 gl::STREAM_READ,
2686 );
2687 self.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, 0);
2688
2689 pbo.reserved_size = size;
2690 pbo
2691 }
2692
    /// Kicks off an asynchronous read of `rect` from `read_target` into
    /// `pbo`. The PBO must have at least `rect` bytes of storage reserved.
    pub fn read_pixels_into_pbo(
        &mut self,
        read_target: ReadTarget,
        rect: DeviceIntRect,
        format: ImageFormat,
        pbo: &PBO,
    ) {
        let byte_size = rect.size.area() as usize * format.bytes_per_pixel() as usize;

        // The destination buffer must be able to hold the whole rect.
        assert!(byte_size <= pbo.reserved_size);

        self.bind_read_target(read_target);

        self.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, pbo.id);
        // Rows are packed tightly, matching the byte_size computation above.
        self.gl.pixel_store_i(gl::PACK_ALIGNMENT, 1);

        let gl_format = self.gl_describe_format(format);

        unsafe {
            // With a PIXEL_PACK_BUFFER bound, this writes into the PBO
            // rather than client memory.
            self.gl.read_pixels_into_pbo(
                rect.origin.x as _,
                rect.origin.y as _,
                rect.size.width as _,
                rect.size.height as _,
                gl_format.read,
                gl_format.pixel_type,
            );
        }

        self.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, 0);
    }
2724
    /// Maps `pbo` for CPU read access and returns a guard exposing its
    /// contents as a byte slice, or `None` if mapping fails.
    ///
    /// NOTE(review): unmapping/unbinding is presumably handled by
    /// `BoundPBO`'s Drop impl (defined elsewhere) — confirm.
    pub fn map_pbo_for_readback<'a>(&'a mut self, pbo: &'a PBO) -> Option<BoundPBO<'a>> {
        self.gl.bind_buffer(gl::PIXEL_PACK_BUFFER, pbo.id);

        let buf_ptr = match self.gl.get_type() {
            // Desktop GL has glMapBuffer; GLES only has glMapBufferRange.
            gl::GlType::Gl => {
                self.gl.map_buffer(gl::PIXEL_PACK_BUFFER, gl::READ_ONLY)
            }

            gl::GlType::Gles => {
                self.gl.map_buffer_range(
                    gl::PIXEL_PACK_BUFFER,
                    0,
                    pbo.reserved_size as _,
                    gl::MAP_READ_BIT)
            }
        };

        if buf_ptr.is_null() {
            return None;
        }

        // SAFETY: the driver mapping is valid for `reserved_size` bytes
        // for as long as the buffer stays mapped.
        let buffer = unsafe { slice::from_raw_parts(buf_ptr as *const u8, pbo.reserved_size) };

        Some(BoundPBO {
            device: self,
            data: buffer,
        })
    }
2753
    /// Deletes the GL buffer backing `pbo`, consuming the handle.
    pub fn delete_pbo(&mut self, mut pbo: PBO) {
        self.gl.delete_buffers(&[pbo.id]);
        // Zero the fields so a Drop-based sanity check (if any) sees a
        // cleanly released handle.
        pbo.id = 0;
        pbo.reserved_size = 0
    }
2759
2760 pub fn required_upload_size_and_stride(&self, size: DeviceIntSize, format: ImageFormat) -> (usize, usize) {
2763 assert!(size.width >= 0);
2764 assert!(size.height >= 0);
2765
2766 let bytes_pp = format.bytes_per_pixel() as usize;
2767 let width_bytes = size.width as usize * bytes_pp;
2768
2769 let dst_stride = round_up_to_multiple(width_bytes, self.optimal_pbo_stride);
2770
2771 let dst_size = dst_stride * size.height as usize;
2777
2778 (dst_size, dst_stride)
2779 }
2780
2781 fn create_upload_buffer<'a>(&mut self, hint: VertexUsageHint, size: usize) -> Result<PixelBuffer<'a>, ()> {
2786 self.gl.buffer_data_untyped(
2787 gl::PIXEL_UNPACK_BUFFER,
2788 size as _,
2789 ptr::null(),
2790 hint.to_gl(),
2791 );
2792 let ptr = self.gl.map_buffer_range(
2793 gl::PIXEL_UNPACK_BUFFER,
2794 0,
2795 size as _,
2796 gl::MAP_WRITE_BIT | gl::MAP_INVALIDATE_BUFFER_BIT,
2797 );
2798
2799 if ptr != ptr::null_mut() {
2800 let mapping = unsafe {
2801 slice::from_raw_parts_mut(ptr as *mut _, size)
2802 };
2803 Ok(PixelBuffer::new(size, mapping))
2804 } else {
2805 error!("Failed to map PBO of size {} bytes", size);
2806 Err(())
2807 }
2808 }
2809
    /// Begins an upload session for `texture`, returning a `TextureUploader`
    /// whose `upload` method stages pixel data.
    ///
    /// Chooses a strategy based on `self.upload_method`: direct
    /// `tex_sub_image` calls, a single persistent PBO mapping, or one PBO
    /// mapping per upload (for drivers that only support zero offsets).
    pub fn upload_texture<'a, T>(
        &'a mut self,
        texture: &'a Texture,
        pbo: &PBO,
        upload_size: usize,
    ) -> TextureUploader<'a, T> {
        debug_assert!(self.inside_frame);
        assert_ne!(upload_size, 0, "Must specify valid upload size");

        self.bind_texture(DEFAULT_TEXTURE, texture, Swizzle::default());

        let uploader_type = match self.upload_method {
            UploadMethod::Immediate => TextureUploaderType::Immediate,
            UploadMethod::PixelBuffer(hint) => {
                self.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, pbo.id);
                if self.capabilities.supports_nonzero_pbo_offsets {
                    match self.create_upload_buffer(hint, upload_size) {
                        Ok(buffer) => TextureUploaderType::MutliUseBuffer(buffer),
                        Err(_) => {
                            // Mapping failed: unbind and fall back to
                            // immediate (client-memory) uploads.
                            self.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, 0);
                            TextureUploaderType::Immediate
                        }
                    }
                } else {
                    // Each upload call will map its own buffer at offset 0.
                    TextureUploaderType::SingleUseBuffers(hint)
                }
            },
        };

        TextureUploader {
            target: UploadTarget {
                device: self,
                texture,
            },
            uploader_type,
            marker: PhantomData,
        }
    }
2855
    /// Uploads `pixels` covering the entire texture synchronously from
    /// client memory (no PBO), for 2D and 2D-array targets.
    ///
    /// Panics on any other texture target.
    pub fn upload_texture_immediate<T: Texel>(
        &mut self,
        texture: &Texture,
        pixels: &[T]
    ) {
        self.bind_texture(DEFAULT_TEXTURE, texture, Swizzle::default());
        let desc = self.gl_describe_format(texture.format);
        match texture.target {
            gl::TEXTURE_2D | gl::TEXTURE_RECTANGLE | gl::TEXTURE_EXTERNAL_OES =>
                self.gl.tex_sub_image_2d(
                    texture.target,
                    0,
                    0,
                    0,
                    texture.size.width as gl::GLint,
                    texture.size.height as gl::GLint,
                    desc.external,
                    desc.pixel_type,
                    texels_to_u8_slice(pixels),
                ),
            gl::TEXTURE_2D_ARRAY =>
                self.gl.tex_sub_image_3d(
                    texture.target,
                    0,
                    0,
                    0,
                    0,
                    texture.size.width as gl::GLint,
                    texture.size.height as gl::GLint,
                    texture.layer_count as gl::GLint,
                    desc.external,
                    desc.pixel_type,
                    texels_to_u8_slice(pixels),
                ),
            _ => panic!("BUG: Unexpected texture target!"),
        }
    }
2894
    /// Synchronously reads the current read framebuffer's origin-anchored
    /// region described by `img_desc` into a freshly allocated byte vector.
    pub fn read_pixels(&mut self, img_desc: &ImageDescriptor) -> Vec<u8> {
        let desc = self.gl_describe_format(img_desc.format);
        self.gl.read_pixels(
            0, 0,
            img_desc.size.width as i32,
            img_desc.size.height as i32,
            desc.read,
            desc.pixel_type,
        )
    }
2905
    /// Synchronously reads `rect` from the current read framebuffer into
    /// `output`, which must be exactly the right size.
    pub fn read_pixels_into(
        &mut self,
        rect: FramebufferIntRect,
        format: ImageFormat,
        output: &mut [u8],
    ) {
        let bytes_per_pixel = format.bytes_per_pixel();
        let desc = self.gl_describe_format(format);
        let size_in_bytes = (bytes_per_pixel * rect.size.width * rect.size.height) as usize;
        assert_eq!(output.len(), size_in_bytes);

        self.gl.flush();
        self.gl.read_pixels_into_buffer(
            rect.origin.x as _,
            rect.origin.y as _,
            rect.size.width as _,
            rect.size.height as _,
            desc.read,
            desc.pixel_type,
            output,
        );
    }
2929
    /// Reads back the full contents of `texture` (mip level 0) into
    /// `output` via glGetTexImage, reinterpreted as `format`.
    pub fn get_tex_image_into(
        &mut self,
        texture: &Texture,
        format: ImageFormat,
        output: &mut [u8],
    ) {
        self.bind_texture(DEFAULT_TEXTURE, texture, Swizzle::default());
        let desc = self.gl_describe_format(format);
        self.gl.get_tex_image_into_buffer(
            texture.target,
            0,
            desc.external,
            desc.pixel_type,
            output,
        );
    }
2947
2948 fn attach_read_texture_raw(
2950 &mut self, texture_id: gl::GLuint, target: gl::GLuint, layer_id: i32
2951 ) {
2952 match target {
2953 gl::TEXTURE_2D_ARRAY => {
2954 self.gl.framebuffer_texture_layer(
2955 gl::READ_FRAMEBUFFER,
2956 gl::COLOR_ATTACHMENT0,
2957 texture_id,
2958 0,
2959 layer_id,
2960 )
2961 }
2962 _ => {
2963 assert_eq!(layer_id, 0);
2964 self.gl.framebuffer_texture_2d(
2965 gl::READ_FRAMEBUFFER,
2966 gl::COLOR_ATTACHMENT0,
2967 target,
2968 texture_id,
2969 0,
2970 )
2971 }
2972 }
2973 }
2974
    /// Like `attach_read_texture`, but for an externally owned texture id
    /// whose target is given as an API-level `TextureTarget`.
    pub fn attach_read_texture_external(
        &mut self, texture_id: gl::GLuint, target: TextureTarget, layer_id: i32
    ) {
        self.attach_read_texture_raw(texture_id, get_gl_target(target), layer_id)
    }
2980
    /// Attaches a device-owned `Texture` to the read framebuffer.
    pub fn attach_read_texture(&mut self, texture: &Texture, layer_id: i32) {
        self.attach_read_texture_raw(texture.id, texture.target, layer_id)
    }
2984
    /// Binds vertex array `id`, skipping the GL call if it is already the
    /// cached bound VAO.
    fn bind_vao_impl(&mut self, id: gl::GLuint) {
        debug_assert!(self.inside_frame);

        if self.bound_vao != id {
            self.bound_vao = id;
            self.gl.bind_vertex_array(id);
        }
    }
2993
    /// Binds a standard VAO (cached).
    pub fn bind_vao(&mut self, vao: &VAO) {
        self.bind_vao_impl(vao.id)
    }
2997
    /// Binds a custom (caller-described) VAO (cached).
    pub fn bind_custom_vao(&mut self, vao: &CustomVAO) {
        self.bind_vao_impl(vao.id)
    }
3001
3002 fn create_vao_with_vbos(
3003 &mut self,
3004 descriptor: &VertexDescriptor,
3005 main_vbo_id: VBOId,
3006 instance_vbo_id: VBOId,
3007 ibo_id: IBOId,
3008 owns_vertices_and_indices: bool,
3009 ) -> VAO {
3010 let instance_stride = descriptor.instance_stride() as usize;
3011 let vao_id = self.gl.gen_vertex_arrays(1)[0];
3012
3013 self.bind_vao_impl(vao_id);
3014
3015 descriptor.bind(self.gl(), main_vbo_id, instance_vbo_id);
3016 ibo_id.bind(self.gl()); VAO {
3019 id: vao_id,
3020 ibo_id,
3021 main_vbo_id,
3022 instance_vbo_id,
3023 instance_stride,
3024 owns_vertices_and_indices,
3025 }
3026 }
3027
3028 pub fn create_custom_vao(
3029 &mut self,
3030 streams: &[Stream],
3031 ) -> CustomVAO {
3032 debug_assert!(self.inside_frame);
3033
3034 let vao_id = self.gl.gen_vertex_arrays(1)[0];
3035 self.bind_vao_impl(vao_id);
3036
3037 let mut attrib_index = 0;
3038 for stream in streams {
3039 VertexDescriptor::bind_attributes(
3040 stream.attributes,
3041 attrib_index,
3042 0,
3043 self.gl(),
3044 stream.vbo,
3045 );
3046 attrib_index += stream.attributes.len();
3047 }
3048
3049 CustomVAO {
3050 id: vao_id,
3051 }
3052 }
3053
    /// Deletes a custom VAO (the caller retains ownership of its VBOs).
    pub fn delete_custom_vao(&mut self, mut vao: CustomVAO) {
        self.gl.delete_vertex_arrays(&[vao.id]);
        vao.id = 0;
    }
3058
    /// Creates an empty, typed vertex buffer (no storage allocated yet).
    pub fn create_vbo<T>(&mut self) -> VBO<T> {
        let ids = self.gl.gen_buffers(1);
        VBO {
            id: ids[0],
            target: gl::ARRAY_BUFFER,
            allocated_count: 0,
            marker: PhantomData,
        }
    }
3068
    /// Deletes a typed vertex buffer, consuming the handle.
    pub fn delete_vbo<T>(&mut self, mut vbo: VBO<T>) {
        self.gl.delete_buffers(&[vbo.id]);
        vbo.id = 0;
    }
3073
3074 pub fn create_vao(&mut self, descriptor: &VertexDescriptor) -> VAO {
3075 debug_assert!(self.inside_frame);
3076
3077 let buffer_ids = self.gl.gen_buffers(3);
3078 let ibo_id = IBOId(buffer_ids[0]);
3079 let main_vbo_id = VBOId(buffer_ids[1]);
3080 let intance_vbo_id = VBOId(buffer_ids[2]);
3081
3082 self.create_vao_with_vbos(descriptor, main_vbo_id, intance_vbo_id, ibo_id, true)
3083 }
3084
    /// Deletes a VAO and its instance buffer; the main VBO and IBO are
    /// only freed if this VAO owns them (see `create_vao_with_vbos`).
    pub fn delete_vao(&mut self, mut vao: VAO) {
        self.gl.delete_vertex_arrays(&[vao.id]);
        vao.id = 0;

        if vao.owns_vertices_and_indices {
            self.gl.delete_buffers(&[vao.ibo_id.0]);
            self.gl.delete_buffers(&[vao.main_vbo_id.0]);
        }

        // The instance VBO is always per-VAO, so always delete it.
        self.gl.delete_buffers(&[vao.instance_vbo_id.0])
    }
3096
    /// (Re)allocates uninitialized storage for `count` elements of `V`
    /// in `vbo`.
    pub fn allocate_vbo<V>(
        &mut self,
        vbo: &mut VBO<V>,
        count: usize,
        usage_hint: VertexUsageHint,
    ) {
        debug_assert!(self.inside_frame);
        vbo.allocated_count = count;

        self.gl.bind_buffer(vbo.target, vbo.id);
        self.gl.buffer_data_untyped(
            vbo.target,
            (count * mem::size_of::<V>()) as _,
            // null data pointer: allocate without initializing.
            ptr::null(),
            usage_hint.to_gl(),
        );
    }
3114
    /// Copies `data` into `vbo` starting at element `offset`. The range
    /// must fit within the storage reserved by `allocate_vbo`.
    pub fn fill_vbo<V>(
        &mut self,
        vbo: &VBO<V>,
        data: &[V],
        offset: usize,
    ) {
        debug_assert!(self.inside_frame);
        assert!(offset + data.len() <= vbo.allocated_count);
        let stride = mem::size_of::<V>();

        self.gl.bind_buffer(vbo.target, vbo.id);
        self.gl.buffer_sub_data_untyped(
            vbo.target,
            (offset * stride) as _,
            (data.len() * stride) as _,
            data.as_ptr() as _,
        );
    }
3133
    /// Replaces the entire contents of `vbo` with `vertices`
    /// (reallocating its storage).
    fn update_vbo_data<V>(
        &mut self,
        vbo: VBOId,
        vertices: &[V],
        usage_hint: VertexUsageHint,
    ) {
        debug_assert!(self.inside_frame);

        vbo.bind(self.gl());
        gl::buffer_data(self.gl(), gl::ARRAY_BUFFER, vertices, usage_hint.to_gl());
    }
3145
3146 pub fn create_vao_with_new_instances(
3147 &mut self,
3148 descriptor: &VertexDescriptor,
3149 base_vao: &VAO,
3150 ) -> VAO {
3151 debug_assert!(self.inside_frame);
3152
3153 let buffer_ids = self.gl.gen_buffers(1);
3154 let intance_vbo_id = VBOId(buffer_ids[0]);
3155
3156 self.create_vao_with_vbos(
3157 descriptor,
3158 base_vao.main_vbo_id,
3159 intance_vbo_id,
3160 base_vao.ibo_id,
3161 false,
3162 )
3163 }
3164
    /// Replaces the main vertex data of the currently bound `vao`.
    pub fn update_vao_main_vertices<V>(
        &mut self,
        vao: &VAO,
        vertices: &[V],
        usage_hint: VertexUsageHint,
    ) {
        debug_assert_eq!(self.bound_vao, vao.id);
        self.update_vbo_data(vao.main_vbo_id, vertices, usage_hint)
    }
3174
    /// Replaces the instance data of the currently bound `vao`. `V` must
    /// match the instance stride recorded when the VAO was created.
    pub fn update_vao_instances<V>(
        &mut self,
        vao: &VAO,
        instances: &[V],
        usage_hint: VertexUsageHint,
    ) {
        debug_assert_eq!(self.bound_vao, vao.id);
        debug_assert_eq!(vao.instance_stride as usize, mem::size_of::<V>());

        self.update_vbo_data(vao.instance_vbo_id, instances, usage_hint)
    }
3186
    /// Replaces the index data of the currently bound `vao`.
    pub fn update_vao_indices<I>(&mut self, vao: &VAO, indices: &[I], usage_hint: VertexUsageHint) {
        debug_assert!(self.inside_frame);
        debug_assert_eq!(self.bound_vao, vao.id);

        vao.ibo_id.bind(self.gl());
        gl::buffer_data(
            self.gl(),
            gl::ELEMENT_ARRAY_BUFFER,
            indices,
            usage_hint.to_gl(),
        );
    }
3199
    /// Draws indexed triangles using 16-bit indices from the bound IBO.
    pub fn draw_triangles_u16(&mut self, first_vertex: i32, index_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        self.gl.draw_elements(
            gl::TRIANGLES,
            index_count,
            gl::UNSIGNED_SHORT,
            // Byte offset into the IBO: 2 bytes per u16 index.
            first_vertex as u32 * 2,
        );
    }
3212
    /// Draws indexed triangles using 32-bit indices from the bound IBO.
    pub fn draw_triangles_u32(&mut self, first_vertex: i32, index_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        self.gl.draw_elements(
            gl::TRIANGLES,
            index_count,
            gl::UNSIGNED_INT,
            // Byte offset into the IBO: 4 bytes per u32 index.
            first_vertex as u32 * 4,
        );
    }
3225
    /// Draws unindexed points from the bound vertex buffer.
    pub fn draw_nonindexed_points(&mut self, first_vertex: i32, vertex_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        self.gl.draw_arrays(gl::POINTS, first_vertex, vertex_count);
    }
3233
    /// Draws unindexed line segments from the bound vertex buffer.
    pub fn draw_nonindexed_lines(&mut self, first_vertex: i32, vertex_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        self.gl.draw_arrays(gl::LINES, first_vertex, vertex_count);
    }
3241
    /// Draws `instance_count` instances of an indexed triangle mesh
    /// (16-bit indices, starting at IBO offset 0).
    pub fn draw_indexed_triangles_instanced_u16(&mut self, index_count: i32, instance_count: i32) {
        debug_assert!(self.inside_frame);
        #[cfg(debug_assertions)]
        debug_assert!(self.shader_is_ready);

        self.gl.draw_elements_instanced(
            gl::TRIANGLES,
            index_count,
            gl::UNSIGNED_SHORT,
            0,
            instance_count,
        );
    }
3255
    /// Ends the current frame: resets draw/read targets, unbinds the
    /// program and all texture units, and bumps the frame counter.
    pub fn end_frame(&mut self) {
        self.reset_draw_target();
        self.reset_read_target();

        debug_assert!(self.inside_frame);
        self.inside_frame = false;

        self.gl.bind_texture(gl::TEXTURE_2D, 0);
        self.gl.use_program(0);

        // Unbind every texture unit we may have touched this frame.
        for i in 0 .. self.bound_textures.len() {
            self.gl.active_texture(gl::TEXTURE0 + i as gl::GLuint);
            self.gl.bind_texture(gl::TEXTURE_2D, 0);
        }

        // Leave unit 0 active, matching the assumed default state.
        self.gl.active_texture(gl::TEXTURE0);

        self.frame_id.0 += 1;

        // After the 10th frame, tell the program cache that startup is
        // over (it may e.g. begin persisting binaries).
        if self.frame_id.0 == 10 {
            if let Some(ref cache) = self.cached_programs {
                cache.startup_complete();
            }
        }
    }
3284
    /// Clears the color and/or depth buffers of the current draw target,
    /// optionally restricted to `rect` via a temporary scissor.
    ///
    /// * `color` - RGBA clear color; `None` skips the color buffer.
    /// * `depth` - clear depth; `None` skips the depth buffer.
    /// * `rect` - optional sub-rectangle to clear.
    pub fn clear_target(
        &self,
        color: Option<[f32; 4]>,
        depth: Option<f32>,
        rect: Option<FramebufferIntRect>,
    ) {
        let mut clear_bits = 0;

        if let Some(color) = color {
            self.gl.clear_color(color[0], color[1], color[2], color[3]);
            clear_bits |= gl::COLOR_BUFFER_BIT;
        }

        if let Some(depth) = depth {
            // A disabled depth-write mask would make the clear a no-op;
            // catch that in debug builds.
            if cfg!(debug_assertions) {
                let mut mask = [0];
                unsafe {
                    self.gl.get_boolean_v(gl::DEPTH_WRITEMASK, &mut mask);
                }
                assert_ne!(mask[0], 0);
            }
            self.gl.clear_depth(depth as f64);
            clear_bits |= gl::DEPTH_BUFFER_BIT;
        }

        if clear_bits != 0 {
            match rect {
                Some(rect) => {
                    // Scope the clear to `rect` with a temporary scissor.
                    self.gl.enable(gl::SCISSOR_TEST);
                    self.gl.scissor(
                        rect.origin.x,
                        rect.origin.y,
                        rect.size.width,
                        rect.size.height,
                    );
                    self.gl.clear(clear_bits);
                    self.gl.disable(gl::SCISSOR_TEST);
                }
                None => {
                    self.gl.clear(clear_bits);
                }
            }
        }
    }
3329
    /// Enables depth testing; the current draw target must have depth.
    pub fn enable_depth(&self) {
        assert!(self.depth_available, "Enabling depth test without depth target");
        self.gl.enable(gl::DEPTH_TEST);
    }
3334
    /// Disables depth testing.
    pub fn disable_depth(&self) {
        self.gl.disable(gl::DEPTH_TEST);
    }
3338
    /// Sets the depth comparison function.
    pub fn set_depth_func(&self, depth_func: DepthFunction) {
        self.gl.depth_func(depth_func as gl::GLuint);
    }
3342
    /// Enables depth writes; the current draw target must have depth.
    pub fn enable_depth_write(&self) {
        assert!(self.depth_available, "Enabling depth write without depth target");
        self.gl.depth_mask(true);
    }
3347
    /// Disables depth writes.
    pub fn disable_depth_write(&self) {
        self.gl.depth_mask(false);
    }
3351
    /// Disables stencil testing.
    pub fn disable_stencil(&self) {
        self.gl.disable(gl::STENCIL_TEST);
    }
3355
    /// Sets the scissor rectangle (does not enable the scissor test).
    pub fn set_scissor_rect(&self, rect: FramebufferIntRect) {
        self.gl.scissor(
            rect.origin.x,
            rect.origin.y,
            rect.size.width,
            rect.size.height,
        );
    }
3364
    /// Enables the scissor test.
    pub fn enable_scissor(&self) {
        self.gl.enable(gl::SCISSOR_TEST);
    }
3368
    /// Disables the scissor test.
    pub fn disable_scissor(&self) {
        self.gl.disable(gl::SCISSOR_TEST);
    }
3372
    /// Enables or disables blending. Invalidates the shader-ready flag
    /// so debug builds verify the shader is re-validated before drawing.
    pub fn set_blend(&mut self, enable: bool) {
        if enable {
            self.gl.enable(gl::BLEND);
        } else {
            self.gl.disable(gl::BLEND);
        }
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }
    }
3384
    /// Sets additive blend equation with the given (src, dst) factor pairs
    /// for color and alpha, using the non-separate call when they match.
    fn set_blend_factors(
        &mut self,
        color: (gl::GLenum, gl::GLenum),
        alpha: (gl::GLenum, gl::GLenum),
    ) {
        self.gl.blend_equation(gl::FUNC_ADD);
        if color == alpha {
            self.gl.blend_func(color.0, color.1);
        } else {
            self.gl.blend_func_separate(color.0, color.1, alpha.0, alpha.1);
        }
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }
    }
3401
    /// Classic (non-premultiplied) alpha blending; additive in alpha.
    pub fn set_blend_mode_alpha(&mut self) {
        self.set_blend_factors(
            (gl::SRC_ALPHA, gl::ONE_MINUS_SRC_ALPHA),
            (gl::ONE, gl::ONE),
        );
    }
3408
    /// Premultiplied-alpha over blending.
    pub fn set_blend_mode_premultiplied_alpha(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }
3415
    /// Destination-out: dst scaled by (1 - src alpha), source discarded.
    pub fn set_blend_mode_premultiplied_dest_out(&mut self) {
        self.set_blend_factors(
            (gl::ZERO, gl::ONE_MINUS_SRC_ALPHA),
            (gl::ZERO, gl::ONE_MINUS_SRC_ALPHA),
        );
    }
3422
    /// Multiplicative blending: dst * src per channel.
    pub fn set_blend_mode_multiply(&mut self) {
        self.set_blend_factors(
            (gl::ZERO, gl::SRC_COLOR),
            (gl::ZERO, gl::SRC_ALPHA),
        );
    }
    /// First pass of two-pass subpixel text blending (darken dst by src).
    pub fn set_blend_mode_subpixel_pass0(&mut self) {
        self.set_blend_factors(
            (gl::ZERO, gl::ONE_MINUS_SRC_COLOR),
            (gl::ZERO, gl::ONE_MINUS_SRC_ALPHA),
        );
    }
    /// Second pass of two-pass subpixel text blending (additive).
    pub fn set_blend_mode_subpixel_pass1(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE),
            (gl::ONE, gl::ONE),
        );
    }
    /// Pass 0 of three-pass subpixel-with-background-color text blending.
    pub fn set_blend_mode_subpixel_with_bg_color_pass0(&mut self) {
        self.set_blend_factors(
            (gl::ZERO, gl::ONE_MINUS_SRC_COLOR),
            (gl::ZERO, gl::ONE),
        );
    }
    /// Pass 1 of three-pass subpixel-with-background-color text blending.
    pub fn set_blend_mode_subpixel_with_bg_color_pass1(&mut self) {
        self.set_blend_factors(
            (gl::ONE_MINUS_DST_ALPHA, gl::ONE),
            (gl::ZERO, gl::ONE),
        );
    }
    /// Pass 2 of three-pass subpixel-with-background-color text blending.
    pub fn set_blend_mode_subpixel_with_bg_color_pass2(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }
    /// Single-pass subpixel text blending with a constant text color,
    /// supplied through the GL blend-color state.
    pub fn set_blend_mode_subpixel_constant_text_color(&mut self, color: ColorF) {
        // The text color's own alpha is ignored; only RGB participate.
        self.gl.blend_color(color.r, color.g, color.b, 1.0);
        self.set_blend_factors(
            (gl::CONSTANT_COLOR, gl::ONE_MINUS_SRC_COLOR),
            (gl::CONSTANT_ALPHA, gl::ONE_MINUS_SRC_ALPHA),
        );
    }
    /// Single-pass subpixel text blending using dual-source blending
    /// (requires a shader with a second color output).
    pub fn set_blend_mode_subpixel_dual_source(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE_MINUS_SRC1_COLOR),
            (gl::ONE, gl::ONE_MINUS_SRC1_ALPHA),
        );
    }
    /// Blend mode used by the overdraw debug visualization.
    pub fn set_blend_mode_show_overdraw(&mut self) {
        self.set_blend_factors(
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
            (gl::ONE, gl::ONE_MINUS_SRC_ALPHA),
        );
    }
3479
    /// Component-wise max for color (additive for alpha).
    pub fn set_blend_mode_max(&mut self) {
        self.gl
            .blend_func_separate(gl::ONE, gl::ONE, gl::ONE, gl::ONE);
        self.gl.blend_equation_separate(gl::MAX, gl::FUNC_ADD);
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }
    }
    /// Component-wise min for color (additive for alpha).
    pub fn set_blend_mode_min(&mut self) {
        self.gl
            .blend_func_separate(gl::ONE, gl::ONE, gl::ONE, gl::ONE);
        self.gl.blend_equation_separate(gl::MIN, gl::FUNC_ADD);
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }
    }
    /// Maps a CSS mix-blend-mode to a KHR_blend_equation_advanced blend
    /// equation. `Normal` is special-cased as a plain multiply via factors.
    pub fn set_blend_mode_advanced(&mut self, mode: MixBlendMode) {
        self.gl.blend_equation(match mode {
            MixBlendMode::Normal => {
                // Side effect inside the match: set factors for the plain
                // FUNC_ADD path before returning the equation.
                self.gl.blend_func_separate(gl::ZERO, gl::SRC_COLOR, gl::ZERO, gl::SRC_ALPHA);
                gl::FUNC_ADD
            },
            MixBlendMode::Multiply => gl::MULTIPLY_KHR,
            MixBlendMode::Screen => gl::SCREEN_KHR,
            MixBlendMode::Overlay => gl::OVERLAY_KHR,
            MixBlendMode::Darken => gl::DARKEN_KHR,
            MixBlendMode::Lighten => gl::LIGHTEN_KHR,
            MixBlendMode::ColorDodge => gl::COLORDODGE_KHR,
            MixBlendMode::ColorBurn => gl::COLORBURN_KHR,
            MixBlendMode::HardLight => gl::HARDLIGHT_KHR,
            MixBlendMode::SoftLight => gl::SOFTLIGHT_KHR,
            MixBlendMode::Difference => gl::DIFFERENCE_KHR,
            MixBlendMode::Exclusion => gl::EXCLUSION_KHR,
            MixBlendMode::Hue => gl::HSL_HUE_KHR,
            MixBlendMode::Saturation => gl::HSL_SATURATION_KHR,
            MixBlendMode::Color => gl::HSL_COLOR_KHR,
            MixBlendMode::Luminosity => gl::HSL_LUMINOSITY_KHR,
        });
        #[cfg(debug_assertions)]
        {
            self.shader_is_ready = false;
        }
    }
3526
    /// Returns true if the GL context advertises `extension`.
    pub fn supports_extension(&self, extension: &str) -> bool {
        supports_extension(&self.extensions, extension)
    }
3530
    /// Toggles EXT_shader_pixel_local_storage; the capability must have
    /// been detected at device init.
    pub fn enable_pixel_local_storage(&mut self, enable: bool) {
        debug_assert!(self.capabilities.supports_pixel_local_storage);

        if enable {
            self.gl.enable(gl::SHADER_PIXEL_LOCAL_STORAGE_EXT);
        } else {
            self.gl.disable(gl::SHADER_PIXEL_LOCAL_STORAGE_EXT);
        }
    }
3542
    /// Forwards pending KHR_debug driver messages to the log, if supported.
    pub fn echo_driver_messages(&self) {
        if self.capabilities.supports_khr_debug {
            Device::log_driver_messages(self.gl());
        }
    }
3548
    /// Drains the GL debug-message queue, translating driver severity and
    /// type codes into log levels and human-readable tags.
    fn log_driver_messages(gl: &dyn gl::Gl) {
        for msg in gl.get_debug_messages() {
            let level = match msg.severity {
                gl::DEBUG_SEVERITY_HIGH => Level::Error,
                gl::DEBUG_SEVERITY_MEDIUM => Level::Warn,
                gl::DEBUG_SEVERITY_LOW => Level::Info,
                gl::DEBUG_SEVERITY_NOTIFICATION => Level::Debug,
                _ => Level::Trace,
            };
            let ty = match msg.ty {
                gl::DEBUG_TYPE_ERROR => "error",
                gl::DEBUG_TYPE_DEPRECATED_BEHAVIOR => "deprecated",
                gl::DEBUG_TYPE_UNDEFINED_BEHAVIOR => "undefined",
                gl::DEBUG_TYPE_PORTABILITY => "portability",
                gl::DEBUG_TYPE_PERFORMANCE => "perf",
                gl::DEBUG_TYPE_MARKER => "marker",
                gl::DEBUG_TYPE_PUSH_GROUP => "group push",
                gl::DEBUG_TYPE_POP_GROUP => "group pop",
                gl::DEBUG_TYPE_OTHER => "other",
                _ => "?",
            };
            log!(level, "({}) {}", ty, msg.message);
        }
    }
3573
    /// Returns the GL internal format, external (upload) format, readback
    /// format, and pixel type corresponding to an `ImageFormat`. BGRA uses
    /// device-detected formats since they vary by platform/driver.
    pub fn gl_describe_format(&self, format: ImageFormat) -> FormatDesc {
        match format {
            ImageFormat::R8 => FormatDesc {
                internal: gl::R8,
                external: gl::RED,
                read: gl::RED,
                pixel_type: gl::UNSIGNED_BYTE,
            },
            ImageFormat::R16 => FormatDesc {
                internal: gl::R16,
                external: gl::RED,
                read: gl::RED,
                pixel_type: gl::UNSIGNED_SHORT,
            },
            ImageFormat::BGRA8 => {
                FormatDesc {
                    internal: self.bgra_formats.internal,
                    external: self.bgra_formats.external,
                    read: gl::BGRA,
                    pixel_type: gl::UNSIGNED_BYTE,
                }
            },
            ImageFormat::RGBA8 => {
                FormatDesc {
                    internal: gl::RGBA8,
                    external: gl::RGBA,
                    read: gl::RGBA,
                    pixel_type: gl::UNSIGNED_BYTE,
                }
            },
            ImageFormat::RGBAF32 => FormatDesc {
                internal: gl::RGBA32F,
                external: gl::RGBA,
                read: gl::RGBA,
                pixel_type: gl::FLOAT,
            },
            ImageFormat::RGBAI32 => FormatDesc {
                internal: gl::RGBA32I,
                external: gl::RGBA_INTEGER,
                read: gl::RGBA_INTEGER,
                pixel_type: gl::INT,
            },
            ImageFormat::RG8 => FormatDesc {
                internal: gl::RG8,
                external: gl::RG,
                read: gl::RG,
                pixel_type: gl::UNSIGNED_BYTE,
            },
            ImageFormat::RG16 => FormatDesc {
                internal: gl::RG16,
                external: gl::RG,
                read: gl::RG,
                pixel_type: gl::UNSIGNED_SHORT,
            },
        }
    }
3630
3631 fn matching_renderbuffer_format(&self, format: ImageFormat) -> gl::GLenum {
3633 match format {
3634 ImageFormat::R8 => gl::R8,
3635 ImageFormat::R16 => gl::R16UI,
3636 ImageFormat::BGRA8 => panic!("Unable to render to BGRA format!"),
3637 ImageFormat::RGBAF32 => gl::RGBA32F,
3638 ImageFormat::RG8 => gl::RG8,
3639 ImageFormat::RG16 => gl::RG16,
3640 ImageFormat::RGBAI32 => gl::RGBA32I,
3641 ImageFormat::RGBA8 => gl::RGBA8,
3642 }
3643 }
3644
3645 pub fn report_memory(&self) -> MemoryReport {
3647 let mut report = MemoryReport::default();
3648 for dim in self.depth_targets.keys() {
3649 report.depth_target_textures += depth_target_size_in_bytes(dim);
3650 }
3651 report
3652 }
3653}
3654
/// GL format triple (plus pixel type) describing how an `ImageFormat`
/// is allocated, uploaded, and read back.
pub struct FormatDesc {
    /// Sized internal format used for texture storage allocation.
    pub internal: gl::GLenum,
    /// Client-data format passed to `tex(Sub)Image` uploads.
    pub external: gl::GLuint,
    /// Format passed to `glReadPixels`-style readbacks.
    pub read: gl::GLuint,
    /// Per-channel data type (e.g. UNSIGNED_BYTE, FLOAT).
    pub pixel_type: gl::GLuint,
}
3666
/// One staged texture update: a destination rect/layer plus the location
/// of its pixel data (a PBO offset, or a raw pointer for immediate mode).
struct UploadChunk {
    rect: DeviceIntRect,
    layer_index: i32,
    // Source row stride in bytes; `None` means tightly packed at the
    // texture's full width.
    stride: Option<i32>,
    // PBO byte offset, or the data pointer cast to usize in immediate mode.
    offset: usize,
    format_override: Option<ImageFormat>,
}
3674
/// A mapped PBO used as a staging area: pixel data is copied into
/// `mapping` and the pending regions are recorded in `chunks` until
/// `flush_chunks` issues the GL uploads.
struct PixelBuffer<'a> {
    size_allocated: usize,
    // Bytes written so far; also the offset for the next chunk.
    size_used: usize,
    chunks: SmallVec<[UploadChunk; 1]>,
    // The driver-provided write mapping of the PBO.
    mapping: &'a mut [mem::MaybeUninit<u8>],
}
3682
impl<'a> PixelBuffer<'a> {
    /// Wraps a freshly mapped PBO of `size_allocated` bytes.
    fn new(
        size_allocated: usize,
        mapping: &'a mut [mem::MaybeUninit<u8>],
    ) -> Self {
        PixelBuffer {
            size_allocated,
            size_used: 0,
            chunks: SmallVec::new(),
            mapping,
        }
    }

    /// Issues the GL upload for every staged chunk and resets the write
    /// cursor. Must be called after the PBO has been unmapped.
    fn flush_chunks(&mut self, target: &mut UploadTarget) {
        for chunk in self.chunks.drain() {
            target.update_impl(chunk);
        }
        self.size_used = 0;
    }
}
3703
impl<'a> Drop for PixelBuffer<'a> {
    // Dropping with pending chunks would silently lose uploads; enforce
    // that flush_chunks ran first.
    fn drop(&mut self) {
        assert_eq!(self.chunks.len(), 0, "PixelBuffer must be flushed before dropping.");
    }
}
3709
/// Pairs the device with the destination texture for upload operations.
struct UploadTarget<'a> {
    device: &'a mut Device,
    texture: &'a Texture,
}
3714
/// Upload strategy selected by `Device::upload_texture`.
// NOTE(review): "Mutli" is a long-standing typo for "Multi"; renaming it
// would touch several sites, so it is kept as-is here.
enum TextureUploaderType<'a> {
    // Direct tex_sub_image from client memory.
    Immediate,
    // One mapped PBO per upload call (drivers without nonzero PBO offsets).
    SingleUseBuffers(VertexUsageHint),
    // A single mapped PBO shared by all uploads in the session.
    MutliUseBuffer(PixelBuffer<'a>)
}
3720
/// Session object returned by `Device::upload_texture`; stages uploads
/// and flushes any buffered work on drop. `T` is the texel type.
pub struct TextureUploader<'a, T> {
    target: UploadTarget<'a>,
    uploader_type: TextureUploaderType<'a>,
    marker: PhantomData<T>,
}
3726
impl<'a, T> Drop for TextureUploader<'a, T> {
    /// Finalizes the session: unmaps and flushes a shared buffer if one is
    /// active, then restores the PIXEL_UNPACK_BUFFER binding to 0.
    fn drop(&mut self) {
        match self.uploader_type {
            TextureUploaderType::MutliUseBuffer(ref mut buffer) => {
                // Must unmap before the GL upload calls in flush_chunks.
                self.target.device.gl.unmap_buffer(gl::PIXEL_UNPACK_BUFFER);
                buffer.flush_chunks(&mut self.target);
                self.target.device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, 0);
            }
            TextureUploaderType::SingleUseBuffers(_) => {
                self.target.device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, 0);
            }
            TextureUploaderType::Immediate => {}
        }
    }
}
3742
impl<'a, T> TextureUploader<'a, T> {
    /// Stages one rectangular upload into the target texture.
    ///
    /// The rect is cropped to the texture bounds (warning in debug builds
    /// if cropping occurred). Data is either copied into the mapped PBO
    /// (re-striding rows to the device's optimal stride) or uploaded
    /// immediately from client memory, depending on the session strategy.
    ///
    /// Returns the number of staging bytes consumed (0 if fully cropped).
    pub fn upload(
        &mut self,
        mut rect: DeviceIntRect,
        layer_index: i32,
        stride: Option<i32>,
        format_override: Option<ImageFormat>,
        data: *const T,
        len: usize,
    ) -> usize {
        // Clamp the destination rect to the texture; uploads entirely
        // outside the texture are dropped.
        let cropped = rect.intersection(
            &DeviceIntRect::new(DeviceIntPoint::zero(), self.target.texture.get_dimensions())
        );
        if cfg!(debug_assertions) && cropped.map_or(true, |r| r != rect) {
            warn!("Cropping texture upload {:?} to {:?}", rect, cropped);
        }
        rect = match cropped {
            None => return 0,
            Some(r) => r,
        };

        let bytes_pp = self.target.texture.format.bytes_per_pixel() as usize;
        let width_bytes = rect.size.width as usize * bytes_pp;

        // Source stride defaults to tightly packed rows.
        let src_stride = stride.map_or(width_bytes, |stride| {
            assert!(stride >= 0);
            stride as usize
        });
        // Last row only needs width_bytes, not a full stride.
        let src_size = (rect.size.height as usize - 1) * src_stride + width_bytes;
        assert!(src_size <= len * mem::size_of::<T>());

        let (dst_size, dst_stride) = self.target.device.required_upload_size_and_stride(
            rect.size,
            self.target.texture.format,
        );

        // Keeps a per-call PBO mapping alive until the flush at the end.
        let mut single_use_buffer = None;
        let mut buffer = match self.uploader_type {
            TextureUploaderType::MutliUseBuffer(ref mut buffer) => Some(buffer),
            TextureUploaderType::SingleUseBuffers(hint) => {
                match self.target.device.create_upload_buffer(hint, dst_size) {
                    Ok(buffer) => {
                        single_use_buffer = Some(buffer);
                        single_use_buffer.as_mut()
                    }
                    Err(_) => {
                        // Mapping failed: fall back to immediate uploads
                        // for the remainder of this session.
                        self.target.device.gl.bind_buffer(gl::PIXEL_UNPACK_BUFFER, 0);
                        self.uploader_type = TextureUploaderType::Immediate;
                        None
                    }
                }
            }
            TextureUploaderType::Immediate => None,
        };

        match buffer {
            Some(ref mut buffer) => {
                if !self.target.device.capabilities.supports_nonzero_pbo_offsets {
                    assert_eq!(buffer.size_used, 0, "PBO uploads from non-zero offset are not supported.");
                }
                assert!(buffer.size_used + dst_size <= buffer.size_allocated, "PixelBuffer is too small");

                unsafe {
                    // SAFETY: src_size was validated against `len` above.
                    let src: &[mem::MaybeUninit<u8>] = slice::from_raw_parts(data as *const _, src_size);

                    if src_stride == dst_stride {
                        // Strides match: one bulk copy.
                        let dst_start = buffer.size_used;
                        let dst_end = dst_start + src_size;

                        buffer.mapping[dst_start..dst_end].copy_from_slice(src);
                    } else {
                        // Strides differ: copy row by row, re-striding.
                        for y in 0..rect.size.height as usize {
                            let src_start = y * src_stride;
                            let src_end = src_start + width_bytes;
                            let dst_start = buffer.size_used + y * dst_stride;
                            let dst_end = dst_start + width_bytes;

                            buffer.mapping[dst_start..dst_end].copy_from_slice(&src[src_start..src_end])
                        }
                    }
                }

                buffer.chunks.push(UploadChunk {
                    rect,
                    layer_index,
                    stride: Some(dst_stride as i32),
                    offset: buffer.size_used,
                    format_override,
                });
                buffer.size_used += dst_size;
            }
            None => {
                // Immediate path: the unpack buffer must be unbound so the
                // "offset" below is interpreted as a client pointer.
                if cfg!(debug_assertions) {
                    let mut bound_buffer = [0];
                    unsafe {
                        self.target.device.gl.get_integer_v(gl::PIXEL_UNPACK_BUFFER_BINDING, &mut bound_buffer);
                    }
                    assert_eq!(bound_buffer[0], 0, "GL_PIXEL_UNPACK_BUFFER must not be bound for immediate uploads.");
                }

                self.target.update_impl(UploadChunk {
                    rect,
                    layer_index,
                    stride,
                    offset: data as _,
                    format_override,
                });
            }
        }

        // Single-use buffers are flushed immediately after each upload.
        if let Some(ref mut buffer) = single_use_buffer {
            self.target.device.gl.unmap_buffer(gl::PIXEL_UNPACK_BUFFER);
            buffer.flush_chunks(&mut self.target);
        }

        dst_size
    }
}
3872
impl<'a> UploadTarget<'a> {
    /// Issues the actual GL tex_sub_image call for one staged chunk.
    /// `chunk.offset` is a PBO offset (or a client pointer cast to usize
    /// when no unpack buffer is bound).
    fn update_impl(&mut self, chunk: UploadChunk) {
        let format = chunk.format_override.unwrap_or(self.texture.format);
        // (upload format, bytes per pixel, component type) per ImageFormat.
        let (gl_format, bpp, data_type) = match format {
            ImageFormat::R8 => (gl::RED, 1, gl::UNSIGNED_BYTE),
            ImageFormat::R16 => (gl::RED, 2, gl::UNSIGNED_SHORT),
            ImageFormat::BGRA8 => (self.device.bgra_formats.external, 4, gl::UNSIGNED_BYTE),
            ImageFormat::RGBA8 => (gl::RGBA, 4, gl::UNSIGNED_BYTE),
            ImageFormat::RG8 => (gl::RG, 2, gl::UNSIGNED_BYTE),
            ImageFormat::RG16 => (gl::RG, 4, gl::UNSIGNED_SHORT),
            ImageFormat::RGBAF32 => (gl::RGBA, 16, gl::FLOAT),
            ImageFormat::RGBAI32 => (gl::RGBA_INTEGER, 16, gl::INT),
        };

        // UNPACK_ROW_LENGTH is measured in pixels, not bytes.
        let row_length = match chunk.stride {
            Some(value) => value / bpp,
            None => self.texture.size.width,
        };

        if chunk.stride.is_some() {
            self.device.gl.pixel_store_i(
                gl::UNPACK_ROW_LENGTH,
                row_length as _,
            );
        }

        let pos = chunk.rect.origin;
        let size = chunk.rect.size;

        match self.texture.target {
            gl::TEXTURE_2D_ARRAY => {
                self.device.gl.tex_sub_image_3d_pbo(
                    self.texture.target,
                    0,
                    pos.x as _,
                    pos.y as _,
                    chunk.layer_index,
                    size.width as _,
                    size.height as _,
                    1,
                    gl_format,
                    data_type,
                    chunk.offset,
                );
            }
            gl::TEXTURE_2D | gl::TEXTURE_RECTANGLE | gl::TEXTURE_EXTERNAL_OES => {
                self.device.gl.tex_sub_image_2d_pbo(
                    self.texture.target,
                    0,
                    pos.x as _,
                    pos.y as _,
                    size.width as _,
                    size.height as _,
                    gl_format,
                    data_type,
                    chunk.offset,
                );
            }
            _ => panic!("BUG: Unexpected texture target!"),
        }

        // Trilinear filtering needs up-to-date mips after any data change.
        if self.texture.filter == TextureFilter::Trilinear {
            self.device.gl.generate_mipmap(self.texture.target);
        }

        // Restore default row length so later uploads are unaffected.
        if chunk.stride.is_some() {
            self.device.gl.pixel_store_i(gl::UNPACK_ROW_LENGTH, 0 as _);
        }
    }
}
3945
/// Reinterprets a texel slice as raw bytes for GL upload calls.
fn texels_to_u8_slice<T: Texel>(texels: &[T]) -> &[u8] {
    // SAFETY: any initialized T may be viewed as bytes; length is scaled
    // by size_of::<T>() so the byte slice covers exactly the same memory.
    unsafe {
        slice::from_raw_parts(texels.as_ptr() as *const u8, texels.len() * mem::size_of::<T>())
    }
}