1#![cfg(target_os = "linux")]
5#![cfg(feature = "opengl")]
6
7use edgefirst_decoder::DetectBox;
8#[cfg(feature = "decoder")]
9use edgefirst_decoder::Segmentation;
10use edgefirst_tensor::{TensorMemory, TensorTrait};
11use four_char_code::FourCharCode;
12use gbm::{
13 drm::{buffer::DrmFourcc, control::Device as DrmControlDevice, Device as DrmDevice},
14 AsRaw, Device,
15};
16use khronos_egl::{self as egl, Attrib, Display, Dynamic, Instance, EGL1_4};
17use log::{debug, error};
18use std::{
19 collections::BTreeSet,
20 ffi::{c_char, c_void, CStr, CString},
21 os::fd::AsRawFd,
22 ptr::{null, null_mut, NonNull},
23 rc::Rc,
24 str::FromStr,
25 sync::OnceLock,
26 thread::JoinHandle,
27 time::Instant,
28};
29use tokio::sync::mpsc::Sender;
30
/// Expands to the name of the enclosing function as a `&'static str`.
///
/// Works by declaring a zero-sized local `fn f()` and asking the compiler for
/// its type name ("path::to::enclosing::f"), then trimming the trailing
/// "::f" and everything up to the last path separator.
macro_rules! function {
    () => {{
        fn f() {}
        fn type_name_of<T>(_: T) -> &'static str {
            std::any::type_name::<T>()
        }
        // Strip the trailing "::f" (3 bytes), then keep only the last path
        // segment — the enclosing function's own name.
        let full = type_name_of(f);
        let trimmed = &full[..full.len() - 3];
        match trimmed.rfind(':') {
            Some(pos) => &trimmed[pos + 1..],
            None => trimmed,
        }
    }};
}
46
47#[cfg(feature = "decoder")]
48use crate::DEFAULT_COLORS;
49use crate::{
50 CPUProcessor, Crop, Error, Flip, ImageProcessorTrait, Rect, Rotation, TensorImage,
51 TensorImageRef, GREY, NV12, PLANAR_RGB, PLANAR_RGBA, RGB, RGBA, YUYV,
52};
53
54static EGL_LIB: OnceLock<&'static libloading::Library> = OnceLock::new();
58
59fn get_egl_lib() -> Result<&'static libloading::Library, crate::Error> {
60 if let Some(egl) = EGL_LIB.get() {
61 Ok(egl)
62 } else {
63 let egl = unsafe { libloading::Library::new("libEGL.so.1")? };
64 let egl: &'static libloading::Library = Box::leak(Box::new(egl));
66 Ok(EGL_LIB.get_or_init(|| egl))
67 }
68}
69
/// EGL instance type backed by the dynamically loaded `libEGL.so.1`.
type Egl = Instance<Dynamic<&'static libloading::Library, EGL1_4>>;

/// Owns the headless EGL display, context, and pbuffer surface used for
/// GPU image conversion. Dropped in reverse order of creation (see `Drop`).
pub(crate) struct GlContext {
    /// Whether dma-buf import (zero-copy EGLImage textures) is available.
    pub(crate) support_dma: bool,
    /// Small pbuffer surface kept alive for the context; taken on drop.
    pub(crate) surface: Option<egl::Surface>,
    pub(crate) display: EglDisplayType,
    pub(crate) ctx: egl::Context,
    pub(crate) egl: Rc<Egl>,
}
78
/// Records how the EGL display was obtained. The `Gbm` variant additionally
/// keeps the GBM/DRM device alive for as long as the display is in use.
pub(crate) enum EglDisplayType {
    Default(egl::Display),
    Gbm(egl::Display, #[allow(dead_code)] Device<Card>),
    PlatformDisplay(egl::Display),
}
84
85impl EglDisplayType {
86 fn as_display(&self) -> egl::Display {
87 match self {
88 EglDisplayType::Default(disp) => *disp,
89 EglDisplayType::Gbm(disp, _) => *disp,
90 EglDisplayType::PlatformDisplay(disp) => *disp,
91 }
92 }
93}
94
impl GlContext {
    /// Builds a headless EGL + OpenGL ES 3 context, trying display
    /// acquisition strategies in order of preference: the default display,
    /// a GBM device node, then EGL device enumeration.
    pub(crate) fn new() -> Result<GlContext, crate::Error> {
        let egl: Rc<Egl> =
            Rc::new(unsafe { Instance::<Dynamic<_, EGL1_4>>::load_required_from(get_egl_lib()?)? });

        if let Ok(headless) = Self::try_initialize_egl(egl.clone(), Self::egl_get_default_display) {
            return Ok(headless);
        } else {
            log::debug!("Didn't initialize EGL with Default Display");
        }

        if let Ok(headless) = Self::try_initialize_egl(egl.clone(), Self::egl_get_gbm_display) {
            return Ok(headless);
        } else {
            log::debug!("Didn't initialize EGL with GBM Display");
        }

        if let Ok(headless) =
            Self::try_initialize_egl(egl.clone(), Self::egl_get_platform_display_from_device)
        {
            return Ok(headless);
        } else {
            log::debug!("Didn't initialize EGL with platform display from device enumeration");
        }

        Err(Error::OpenGl(
            "Could not initialize EGL with any known method".to_string(),
        ))
    }

    /// Initializes EGL on the display produced by `display_fn`, chooses an
    /// RGBA8888 ES3 pbuffer-capable config, creates a 64x64 pbuffer surface,
    /// creates an ES3 context, and makes it current. Also probes whether
    /// dma-buf image import is usable on this display.
    fn try_initialize_egl(
        egl: Rc<Egl>,
        display_fn: impl Fn(&Egl) -> Result<EglDisplayType, crate::Error>,
    ) -> Result<GlContext, crate::Error> {
        let display = display_fn(&egl)?;
        log::debug!("egl initialize with display: {:x?}", display.as_display());
        egl.initialize(display.as_display())?;
        let attributes = [
            egl::SURFACE_TYPE,
            egl::PBUFFER_BIT,
            egl::RENDERABLE_TYPE,
            egl::OPENGL_ES3_BIT,
            egl::RED_SIZE,
            8,
            egl::GREEN_SIZE,
            8,
            egl::BLUE_SIZE,
            8,
            egl::ALPHA_SIZE,
            8,
            egl::NONE,
        ];

        let config =
            if let Some(config) = egl.choose_first_config(display.as_display(), &attributes)? {
                config
            } else {
                return Err(crate::Error::NotImplemented(
                    "Did not find valid OpenGL ES config".to_string(),
                ));
            };

        debug!("config: {config:?}");

        // A tiny pbuffer keeps the context valid even with no real window;
        // actual rendering goes to framebuffer-attached textures.
        let surface = Some(egl.create_pbuffer_surface(
            display.as_display(),
            config,
            &[egl::WIDTH, 64, egl::HEIGHT, 64, egl::NONE],
        )?);

        egl.bind_api(egl::OPENGL_ES_API)?;
        let context_attributes = [egl::CONTEXT_MAJOR_VERSION, 3, egl::NONE, egl::NONE];

        let ctx = egl.create_context(display.as_display(), config, None, &context_attributes)?;
        debug!("ctx: {ctx:?}");

        egl.make_current(display.as_display(), surface, surface, Some(ctx))?;

        // dma support is optional: the rest of the module falls back to CPU
        // upload/readback paths when this is false.
        let support_dma = Self::egl_check_support_dma(&egl).is_ok();
        let headless = GlContext {
            display,
            ctx,
            egl,
            surface,
            support_dma,
        };
        Ok(headless)
    }

    /// Obtains the default EGL display (usually available under a running
    /// window system).
    fn egl_get_default_display(egl: &Egl) -> Result<EglDisplayType, crate::Error> {
        if let Some(display) = unsafe { egl.get_display(egl::DEFAULT_DISPLAY) } {
            debug!("default display: {display:?}");
            return Ok(EglDisplayType::Default(display));
        }

        Err(Error::OpenGl(
            "Could not obtain EGL Default Display".to_string(),
        ))
    }

    /// Creates a GBM device from a DRM node and obtains an EGL display for
    /// it. The GBM device is stored in the returned variant so it stays
    /// alive for the lifetime of the display.
    fn egl_get_gbm_display(egl: &Egl) -> Result<EglDisplayType, crate::Error> {
        let gbm = Device::new(Card::open_global()?)?;

        debug!("gbm: {gbm:?}");
        let display = Self::egl_get_platform_display_with_fallback(
            egl,
            egl_ext::PLATFORM_GBM_KHR,
            gbm.as_raw() as *mut c_void,
            &[egl::ATTRIB_NONE],
        )?;

        Ok(EglDisplayType::Gbm(display, gbm))
    }

    /// Enumerates EGL devices via `EGL_EXT_device_enumeration` and opens a
    /// platform display on the first device reported.
    fn egl_get_platform_display_from_device(egl: &Egl) -> Result<EglDisplayType, crate::Error> {
        let extensions = egl.query_string(None, egl::EXTENSIONS)?;
        let extensions = extensions.to_string_lossy();
        log::debug!("EGL Extensions: {}", extensions);

        if !extensions.contains("EGL_EXT_device_enumeration") {
            return Err(Error::GLVersion(
                "EGL doesn't supported EGL_EXT_device_enumeration extension".to_string(),
            ));
        }

        type EGLDeviceEXT = *mut c_void;
        let devices = if let Some(ext) = egl.get_proc_address("eglQueryDevicesEXT") {
            // NOTE(review): per the EGL registry, eglQueryDevicesEXT returns
            // EGLBoolean; this transmute declares `*const c_char`. The return
            // value is ignored, but the signature looks worth confirming.
            let func: unsafe extern "system" fn(
                max_devices: egl::Int,
                devices: *mut EGLDeviceEXT,
                num_devices: *mut egl::Int,
            ) -> *const c_char = unsafe { std::mem::transmute(ext) };
            // Query up to 10 devices; the driver writes the actual count.
            let mut devices = [std::ptr::null_mut(); 10];
            let mut num_devices = 0;
            unsafe { func(devices.len() as i32, devices.as_mut_ptr(), &mut num_devices) };
            for i in 0..num_devices {
                log::debug!("EGL device: {:?}", devices[i as usize]);
            }
            devices[0..num_devices as usize].to_vec()
        } else {
            return Err(Error::GLVersion(
                "EGL doesn't supported eglQueryDevicesEXT function".to_string(),
            ));
        };

        if !extensions.contains("EGL_EXT_platform_device") {
            return Err(Error::GLVersion(
                "EGL doesn't supported EGL_EXT_platform_device extension".to_string(),
            ));
        }

        // NOTE(review): `devices[0]` panics if the driver reported zero
        // devices — consider returning an error for the empty case.
        let disp = Self::egl_get_platform_display_with_fallback(
            egl,
            egl_ext::PLATFORM_DEVICE_EXT,
            devices[0],
            &[egl::ATTRIB_NONE],
        )?;
        Ok(EglDisplayType::PlatformDisplay(disp))
    }

    /// Checks whether dma-buf image import is usable: either EGL 1.5 core
    /// (which includes eglCreateImage) or the
    /// `EGL_EXT_image_dma_buf_import` extension plus the KHR image entry
    /// points.
    fn egl_check_support_dma(egl: &Egl) -> Result<(), crate::Error> {
        let extensions = egl.query_string(None, egl::EXTENSIONS)?;
        let extensions = extensions.to_string_lossy();
        log::debug!("EGL Extensions: {}", extensions);

        // EGL 1.5 provides image creation in core; nothing more to check.
        if egl.upcast::<egl::EGL1_5>().is_some() {
            return Ok(());
        }

        if !extensions.contains("EGL_EXT_image_dma_buf_import") {
            return Err(crate::Error::GLVersion(
                "EGL does not support EGL_EXT_image_dma_buf_import extension".to_string(),
            ));
        }

        if egl.get_proc_address("eglCreateImageKHR").is_none() {
            return Err(crate::Error::GLVersion(
                "EGL does not support eglCreateImageKHR function".to_string(),
            ));
        }

        if egl.get_proc_address("eglDestroyImageKHR").is_none() {
            return Err(crate::Error::GLVersion(
                "EGL does not support eglDestroyImageKHR function".to_string(),
            ));
        }
        Ok(())
    }

    /// `eglGetPlatformDisplay` on EGL 1.5, otherwise falls back to the
    /// `eglGetPlatformDisplayEXT` extension entry point; errors if neither
    /// is available.
    fn egl_get_platform_display_with_fallback(
        egl: &Egl,
        platform: egl::Enum,
        native_display: *mut c_void,
        attrib_list: &[Attrib],
    ) -> Result<Display, Error> {
        if let Some(egl) = egl.upcast::<egl::EGL1_5>() {
            unsafe { egl.get_platform_display(platform, native_display, attrib_list) }
                .map_err(|e| e.into())
        } else if let Some(ext) = egl.get_proc_address("eglGetPlatformDisplayEXT") {
            let func: unsafe extern "system" fn(
                platform: egl::Enum,
                native_display: *mut c_void,
                attrib_list: *const Attrib,
            ) -> egl::EGLDisplay = unsafe { std::mem::transmute(ext) };
            let disp = unsafe { func(platform, native_display, attrib_list.as_ptr()) };
            if disp != egl::NO_DISPLAY {
                Ok(unsafe { Display::from_ptr(disp) })
            } else {
                Err(egl.get_error().map(|e| e.into()).unwrap_or(Error::Internal(
                    "EGL failed but no error was reported".to_owned(),
                )))
            }
        } else {
            Err(Error::EGLLoad(egl::LoadError::InvalidVersion {
                provided: egl.version(),
                required: khronos_egl::Version::EGL1_5,
            }))
        }
    }

    /// `eglCreateImage` on EGL 1.5, otherwise `eglCreateImageKHR`. Note the
    /// KHR variant takes an `EGLint` attribute list, so the `Attrib` values
    /// are narrowed before the call.
    fn egl_create_image_with_fallback(
        egl: &Egl,
        display: Display,
        ctx: egl::Context,
        target: egl::Enum,
        buffer: egl::ClientBuffer,
        attrib_list: &[Attrib],
    ) -> Result<egl::Image, Error> {
        if let Some(egl) = egl.upcast::<egl::EGL1_5>() {
            egl.create_image(display, ctx, target, buffer, attrib_list)
                .map_err(|e| e.into())
        } else if let Some(ext) = egl.get_proc_address("eglCreateImageKHR") {
            log::trace!("eglCreateImageKHR addr: {:?}", ext);
            let func: unsafe extern "system" fn(
                display: egl::EGLDisplay,
                ctx: egl::EGLContext,
                target: egl::Enum,
                buffer: egl::EGLClientBuffer,
                attrib_list: *const egl::Int,
            ) -> egl::EGLImage = unsafe { std::mem::transmute(ext) };
            // KHR expects EGLint attributes rather than EGLAttrib.
            let new_attrib_list = attrib_list
                .iter()
                .map(|x| *x as egl::Int)
                .collect::<Vec<_>>();

            let image = unsafe {
                func(
                    display.as_ptr(),
                    ctx.as_ptr(),
                    target,
                    buffer.as_ptr(),
                    new_attrib_list.as_ptr(),
                )
            };
            if image != egl::NO_IMAGE {
                Ok(unsafe { egl::Image::from_ptr(image) })
            } else {
                Err(egl.get_error().map(|e| e.into()).unwrap_or(Error::Internal(
                    "EGL failed but no error was reported".to_owned(),
                )))
            }
        } else {
            Err(Error::EGLLoad(egl::LoadError::InvalidVersion {
                provided: egl.version(),
                required: khronos_egl::Version::EGL1_5,
            }))
        }
    }

    /// `eglDestroyImage` on EGL 1.5, otherwise `eglDestroyImageKHR`.
    /// (Name keeps the historical "destory" typo; renaming would break
    /// callers outside this view.)
    fn egl_destory_image_with_fallback(
        egl: &Egl,
        display: Display,
        image: egl::Image,
    ) -> Result<(), Error> {
        if let Some(egl) = egl.upcast::<egl::EGL1_5>() {
            egl.destroy_image(display, image).map_err(|e| e.into())
        } else if let Some(ext) = egl.get_proc_address("eglDestroyImageKHR") {
            let func: unsafe extern "system" fn(
                display: egl::EGLDisplay,
                image: egl::EGLImage,
            ) -> egl::Boolean = unsafe { std::mem::transmute(ext) };
            let res = unsafe { func(display.as_ptr(), image.as_ptr()) };
            if res == egl::TRUE {
                Ok(())
            } else {
                Err(egl.get_error().map(|e| e.into()).unwrap_or(Error::Internal(
                    "EGL failed but no error was reported".to_owned(),
                )))
            }
        } else {
            Err(Error::EGLLoad(egl::LoadError::InvalidVersion {
                provided: egl.version(),
                required: khronos_egl::Version::EGL1_5,
            }))
        }
    }
}
397
impl Drop for GlContext {
    fn drop(&mut self) {
        // Unbind first so the context/surface are no longer current; all
        // errors are ignored because nothing useful can be done in drop.
        let _ = self
            .egl
            .make_current(self.display.as_display(), None, None, None);

        let _ = self
            .egl
            .destroy_context(self.display.as_display(), self.ctx);

        if let Some(surface) = self.surface.take() {
            let _ = self.egl.destroy_surface(self.display.as_display(), surface);
        }

        // Terminate the display last, after its context and surface are gone.
        let _ = self.egl.terminate(self.display.as_display());
    }
}
415
/// Thin wrapper around an opened DRM device node (e.g. `/dev/dri/card0`),
/// providing the file descriptor the `drm`/`gbm` crates need.
#[derive(Debug)]
pub(crate) struct Card(std::fs::File);
419
impl std::os::unix::io::AsFd for Card {
    fn as_fd(&self) -> std::os::unix::io::BorrowedFd<'_> {
        // Borrow the underlying DRM node's file descriptor.
        self.0.as_fd()
    }
}
427
// Marker impls: the drm crate's device traits only require `AsFd`.
impl DrmDevice for Card {}
impl DrmControlDevice for Card {}
431
432impl Card {
434 pub fn open(path: &str) -> Result<Self, crate::Error> {
435 let mut options = std::fs::OpenOptions::new();
436 options.read(true);
437 options.write(true);
438 let c = options.open(path);
439 match c {
440 Ok(c) => Ok(Card(c)),
441 Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
442 Err(Error::NotFound(format!("File not found: {path}")))
443 }
444 Err(e) => Err(e.into()),
445 }
446 }
447
448 pub fn open_global() -> Result<Self, crate::Error> {
449 let targets = ["/dev/dri/render128", "/dev/dri/card0", "/dev/dri/card1"];
450 let e = Self::open(targets[0]);
451 if let Ok(t) = e {
452 return Ok(t);
453 }
454 for t in &targets[1..] {
455 if let Ok(t) = Self::open(t) {
456 return Ok(t);
457 }
458 }
459 e
460 }
461}
462
/// Rectangular region of interest. Units depend on use: normalized texture
/// coordinates for sources, clip-space coordinates for destinations (see
/// `convert_to`).
#[derive(Debug, Clone, Copy)]
struct RegionOfInterest {
    left: f32,
    top: f32,
    right: f32,
    bottom: f32,
}
470
/// Work items sent to the dedicated GL worker thread. Raw pointers are
/// passed because the requesting thread blocks on the oneshot response,
/// which guarantees the pointees outlive the worker's use of them.
enum GLProcessorMessage {
    /// Convert the first image into the second with rotation/flip/crop.
    ImageConvert(
        SendablePtr<TensorImage>,
        SendablePtr<TensorImage>,
        Rotation,
        Flip,
        Crop,
        tokio::sync::oneshot::Sender<Result<(), Error>>,
    ),
    /// Replace the class color palette used by the overlay shaders.
    SetColors(
        Vec<[u8; 4]>,
        tokio::sync::oneshot::Sender<Result<(), Error>>,
    ),
    /// Draw detection boxes and segmentation masks onto the image.
    ImageRender(
        SendablePtr<TensorImage>,
        SendablePtr<DetectBox>,
        SendablePtr<Segmentation>,
        tokio::sync::oneshot::Sender<Result<(), Error>>,
    ),
}
491
/// Thread-safe front end for [`GLProcessorST`]: because EGL/GL contexts are
/// thread-affine, all GL work is forwarded over a channel to one dedicated
/// worker thread and the caller blocks on a oneshot reply.
#[derive(Debug)]
pub struct GLProcessorThreaded {
    // Worker thread; joined on drop after the channel is closed.
    handle: Option<JoinHandle<()>>,

    // Channel to the worker; `None` only during drop.
    sender: Option<Sender<GLProcessorMessage>>,
    support_dma: bool,
}

// SAFETY: the fields are a channel sender, a join handle, and a bool; GL
// state itself never leaves the worker thread.
// NOTE(review): these impls look auto-derivable from the field types —
// confirm they are actually required.
unsafe impl Send for GLProcessorThreaded {}
unsafe impl Sync for GLProcessorThreaded {}
508
/// Raw pointer plus element count that can cross the channel to the GL
/// worker thread. `len` is the element count when the pointer refers to a
/// slice; 1 when it refers to a single value.
struct SendablePtr<T: Send> {
    ptr: NonNull<T>,
    len: usize,
}

// SAFETY: senders block on the worker's oneshot response for the message
// carrying this pointer, so the pointee outlives every dereference the
// worker performs.
unsafe impl<T> Send for SendablePtr<T> where T: Send {}
515
impl GLProcessorThreaded {
    /// Spawns the GL worker thread, waits for it to create its
    /// [`GLProcessorST`], and returns the front end.
    ///
    /// # Errors
    /// Propagates the worker's context-creation error, or reports an
    /// internal error if the worker died before reporting back.
    pub fn new() -> Result<Self, Error> {
        // Capacity 1: callers block on the reply anyway, so there is never
        // more than one in-flight message.
        let (send, mut recv) = tokio::sync::mpsc::channel::<GLProcessorMessage>(1);

        // Used once by the worker to report whether context creation
        // succeeded and whether dma-buf import is supported.
        let (create_ctx_send, create_ctx_recv) = tokio::sync::oneshot::channel();

        let func = move || {
            // The GL context must be created on the thread that will use it.
            let mut gl_converter = match GLProcessorST::new() {
                Ok(gl) => gl,
                Err(e) => {
                    let _ = create_ctx_send.send(Err(e));
                    return;
                }
            };
            let _ = create_ctx_send.send(Ok(gl_converter.gl_context.support_dma));
            // Loop ends when the sender side is dropped (see Drop impl).
            while let Some(msg) = recv.blocking_recv() {
                match msg {
                    GLProcessorMessage::ImageConvert(src, mut dst, rotation, flip, crop, resp) => {
                        // SAFETY: the requesting thread blocks on `resp`
                        // until we reply, so these pointers are live and
                        // exclusively borrowed for the duration of the call.
                        let src = unsafe { src.ptr.as_ref() };
                        let dst = unsafe { dst.ptr.as_mut() };
                        let res = gl_converter.convert(src, dst, rotation, flip, crop);
                        let _ = resp.send(res);
                    }
                    GLProcessorMessage::ImageRender(mut dst, det, seg, resp) => {
                        // SAFETY: same blocking-reply argument as above; the
                        // slices were built from live slices on the caller.
                        let dst = unsafe { dst.ptr.as_mut() };
                        let det = unsafe { std::slice::from_raw_parts(det.ptr.as_ptr(), det.len) };
                        let seg = unsafe { std::slice::from_raw_parts(seg.ptr.as_ptr(), seg.len) };
                        let res = gl_converter.render_to_image(dst, det, seg);
                        let _ = resp.send(res);
                    }
                    GLProcessorMessage::SetColors(colors, resp) => {
                        let res = gl_converter.set_class_colors(&colors);
                        let _ = resp.send(res);
                    }
                }
            }
        };

        let handle = std::thread::spawn(func);

        // Block until the worker reports success (with its dma capability)
        // or failure; a closed channel means the worker panicked or exited.
        let support_dma = match create_ctx_recv.blocking_recv() {
            Ok(Err(e)) => return Err(e),
            Err(_) => {
                return Err(Error::Internal(
                    "GL converter error messaging closed without update".to_string(),
                ));
            }
            Ok(Ok(supports_dma)) => supports_dma,
        };

        Ok(Self {
            handle: Some(handle),
            sender: Some(send),
            support_dma,
        })
    }
}
580
impl ImageProcessorTrait for GLProcessorThreaded {
    /// Converts `src` into `dst` on the GL worker thread, blocking until
    /// the worker replies. Format support is validated up front so
    /// unsupported requests never cross the channel.
    fn convert(
        &mut self,
        src: &TensorImage,
        dst: &mut TensorImage,
        rotation: crate::Rotation,
        flip: Flip,
        crop: Crop,
    ) -> crate::Result<()> {
        crop.check_crop(src, dst)?;
        if !GLProcessorST::check_src_format_supported(self.support_dma, src) {
            return Err(crate::Error::NotSupported(format!(
                "Opengl doesn't support {} source texture",
                src.fourcc().display()
            )));
        }

        if !GLProcessorST::check_dst_format_supported(self.support_dma, dst) {
            return Err(crate::Error::NotSupported(format!(
                "Opengl doesn't support {} destination texture",
                dst.fourcc().display()
            )));
        }

        // Blocking on err_recv below is what makes sending raw pointers
        // sound: src/dst stay borrowed until the worker has replied.
        let (err_send, err_recv) = tokio::sync::oneshot::channel();
        self.sender
            .as_ref()
            .unwrap()
            .blocking_send(GLProcessorMessage::ImageConvert(
                SendablePtr {
                    ptr: src.into(),
                    len: 1,
                },
                SendablePtr {
                    ptr: dst.into(),
                    len: 1,
                },
                rotation,
                flip,
                crop,
                err_send,
            ))
            .map_err(|_| Error::Internal("GL converter thread exited".to_string()))?;
        err_recv.blocking_recv().map_err(|_| {
            Error::Internal("GL converter error messaging closed without update".to_string())
        })?
    }

    /// Borrowed-destination conversion is delegated to the CPU path; the GL
    /// pipeline here only handles owned `TensorImage` destinations.
    fn convert_ref(
        &mut self,
        src: &TensorImage,
        dst: &mut TensorImageRef<'_>,
        rotation: Rotation,
        flip: Flip,
        crop: Crop,
    ) -> crate::Result<()> {
        let mut cpu = CPUProcessor::new();
        cpu.convert_ref(src, dst, rotation, flip, crop)
    }

    /// Renders detection boxes and segmentation masks onto `dst` on the GL
    /// worker thread, blocking until it replies.
    #[cfg(feature = "decoder")]
    fn render_to_image(
        &mut self,
        dst: &mut TensorImage,
        detect: &[crate::DetectBox],
        segmentation: &[crate::Segmentation],
    ) -> crate::Result<()> {
        let (err_send, err_recv) = tokio::sync::oneshot::channel();
        self.sender
            .as_ref()
            .unwrap()
            .blocking_send(GLProcessorMessage::ImageRender(
                SendablePtr {
                    ptr: dst.into(),
                    len: 1,
                },
                // The casts drop const-ness only to satisfy NonNull; the
                // worker reconstructs read-only slices from these.
                SendablePtr {
                    ptr: NonNull::new(detect.as_ptr() as *mut DetectBox).unwrap(),
                    len: detect.len(),
                },
                SendablePtr {
                    ptr: NonNull::new(segmentation.as_ptr() as *mut Segmentation).unwrap(),
                    len: segmentation.len(),
                },
                err_send,
            ))
            .map_err(|_| Error::Internal("GL converter thread exited".to_string()))?;
        err_recv.blocking_recv().map_err(|_| {
            Error::Internal("GL converter error messaging closed without update".to_string())
        })?
    }

    /// Forwards the color palette to the worker thread (colors are copied,
    /// so no pointer-lifetime concerns here).
    #[cfg(feature = "decoder")]
    fn set_class_colors(&mut self, colors: &[[u8; 4]]) -> Result<(), crate::Error> {
        let (err_send, err_recv) = tokio::sync::oneshot::channel();
        self.sender
            .as_ref()
            .unwrap()
            .blocking_send(GLProcessorMessage::SetColors(colors.to_vec(), err_send))
            .map_err(|_| Error::Internal("GL converter thread exited".to_string()))?;
        err_recv.blocking_recv().map_err(|_| {
            Error::Internal("GL converter error messaging closed without update".to_string())
        })?
    }
}
687
688impl Drop for GLProcessorThreaded {
689 fn drop(&mut self) {
690 drop(self.sender.take());
691 let _ = self.handle.take().and_then(|h| h.join().ok());
692 }
693}
694
/// Single-threaded OpenGL ES image processor. Must be created and used on
/// one thread only (the EGL context is thread-affine); see
/// [`GLProcessorThreaded`] for the cross-thread wrapper.
pub struct GLProcessorST {
    // Texture bound to EGLImages imported from dma-buf sources.
    camera_eglimage_texture: Texture,
    // Texture used when source pixels are uploaded from CPU memory.
    camera_normal_texture: Texture,
    // Render-target texture attached to the framebuffer's color attachment.
    render_texture: Texture,
    #[cfg(feature = "decoder")]
    segmentation_texture: Texture,
    #[cfg(feature = "decoder")]
    segmentation_program: GlProgram,
    #[cfg(feature = "decoder")]
    instanced_segmentation_program: GlProgram,
    #[cfg(feature = "decoder")]
    color_program: GlProgram,
    // Vertex-position and texture-coordinate attribute buffers.
    vertex_buffer: Buffer,
    texture_buffer: Buffer,
    // Shader programs for RGB(A), YUV, and planar-RGB conversion paths.
    texture_program: GlProgram,
    texture_program_yuv: GlProgram,
    texture_program_planar: GlProgram,
    gl_context: GlContext,
}
715
impl ImageProcessorTrait for GLProcessorST {
    /// Converts `src` into `dst`, choosing the zero-copy dma-buf render
    /// path when the destination is DMA memory (and not packed RGB),
    /// otherwise rendering to a texture and reading the pixels back.
    fn convert(
        &mut self,
        src: &TensorImage,
        dst: &mut TensorImage,
        rotation: crate::Rotation,
        flip: Flip,
        crop: Crop,
    ) -> crate::Result<()> {
        crop.check_crop(src, dst)?;
        if !Self::check_src_format_supported(self.gl_context.support_dma, src) {
            return Err(crate::Error::NotSupported(format!(
                "Opengl doesn't support {} source texture",
                src.fourcc().display()
            )));
        }

        if !Self::check_dst_format_supported(self.gl_context.support_dma, dst) {
            return Err(crate::Error::NotSupported(format!(
                "Opengl doesn't support {} destination texture",
                dst.fourcc().display()
            )));
        }
        log::debug!(
            "dst tensor: {:?} src tensor :{:?}",
            dst.tensor().memory(),
            src.tensor().memory()
        );
        check_gl_error(function!(), line!())?;
        // Packed RGB cannot be rendered to directly, so RGB destinations
        // always take the readback path even when they are DMA buffers.
        if self.gl_context.support_dma
            && dst.tensor().memory() == TensorMemory::Dma
            && dst.fourcc() != RGB
        {
            let res = self.convert_dest_dma(dst, src, rotation, flip, crop);
            return res;
        }
        let start = Instant::now();
        let res = self.convert_dest_non_dma(dst, src, rotation, flip, crop);
        log::debug!("convert_dest_non_dma takes {:?}", start.elapsed());
        res
    }

    /// Borrowed-destination conversion falls back to the CPU processor; the
    /// GL paths require an owned `TensorImage` destination.
    fn convert_ref(
        &mut self,
        src: &TensorImage,
        dst: &mut TensorImageRef<'_>,
        rotation: Rotation,
        flip: Flip,
        crop: Crop,
    ) -> crate::Result<()> {
        let mut cpu = CPUProcessor::new();
        cpu.convert_ref(src, dst, rotation, flip, crop)
    }

    /// Draws detection boxes and segmentation masks over the existing image
    /// contents of `dst` (RGBA or RGB only), blending with source alpha.
    #[cfg(feature = "decoder")]
    fn render_to_image(
        &mut self,
        dst: &mut TensorImage,
        detect: &[DetectBox],
        segmentation: &[Segmentation],
    ) -> Result<(), crate::Error> {
        use crate::FunctionTimer;

        let _timer = FunctionTimer::new("GLProcessorST::render_to_image");
        if !matches!(dst.fourcc(), RGBA | RGB) {
            return Err(crate::Error::NotSupported(
                "Opengl image rendering only supports RGBA or RGB images".to_string(),
            ));
        }

        // Prefer rendering straight into the DMA buffer; fall back to a
        // regular texture (preloaded with dst's pixels via the non-zero
        // dst_rect) followed by a readback.
        let (_render_buffer, is_dma) = match dst.tensor.memory() {
            edgefirst_tensor::TensorMemory::Dma => {
                if let Ok(render_buffer) = self.setup_renderbuffer_dma(dst) {
                    (render_buffer, true)
                } else {
                    (
                        self.setup_renderbuffer_non_dma(
                            dst,
                            Crop::new().with_dst_rect(Some(Rect::new(0, 0, 0, 0))),
                        )?,
                        false,
                    )
                }
            }
            _ => (
                self.setup_renderbuffer_non_dma(
                    dst,
                    Crop::new().with_dst_rect(Some(Rect::new(0, 0, 0, 0))),
                )?,
                false,
            ),
        };

        // Overlay blending: color uses source alpha, destination alpha is
        // left unchanged (ZERO/ONE on the alpha factors).
        gls::enable(gls::gl::BLEND);
        gls::blend_func_separate(
            gls::gl::SRC_ALPHA,
            gls::gl::ONE_MINUS_SRC_ALPHA,
            gls::gl::ZERO,
            gls::gl::ONE,
        );

        self.render_box(dst, detect)?;
        self.render_segmentation(detect, segmentation)?;

        // Wait for the GPU before reading back or returning the DMA buffer.
        gls::finish();
        if !is_dma {
            let mut dst_map = dst.tensor().map()?;
            let format = match dst.fourcc() {
                RGB => gls::gl::RGB,
                RGBA => gls::gl::RGBA,
                // Guarded by the fourcc check at function entry.
                _ => unreachable!(),
            };
            unsafe {
                gls::gl::ReadBuffer(gls::gl::COLOR_ATTACHMENT0);
                gls::gl::ReadnPixels(
                    0,
                    0,
                    dst.width() as i32,
                    dst.height() as i32,
                    format,
                    gls::gl::UNSIGNED_BYTE,
                    dst.tensor.len() as i32,
                    dst_map.as_mut_ptr() as *mut c_void,
                );
            }
        }

        Ok(())
    }

    /// Uploads a normalized color palette to the overlay shader programs.
    /// At most 20 colors are used (the shaders' uniform array size —
    /// presumably; confirm against the shader source). The box-outline
    /// program gets the same colors forced to full opacity.
    #[cfg(feature = "decoder")]
    fn set_class_colors(&mut self, colors: &[[u8; 4]]) -> crate::Result<()> {
        if colors.is_empty() {
            return Ok(());
        }
        let mut colors_f32 = colors
            .iter()
            .map(|c| {
                [
                    c[0] as f32 / 255.0,
                    c[1] as f32 / 255.0,
                    c[2] as f32 / 255.0,
                    c[3] as f32 / 255.0,
                ]
            })
            .take(20)
            .collect::<Vec<[f32; 4]>>();

        self.segmentation_program
            .load_uniform_4fv(c"colors", &colors_f32)?;
        self.instanced_segmentation_program
            .load_uniform_4fv(c"colors", &colors_f32)?;

        // Box outlines are always drawn fully opaque.
        colors_f32.iter_mut().for_each(|c| {
            c[3] = 1.0;
        });
        self.color_program
            .load_uniform_4fv(c"colors", &colors_f32)?;

        Ok(())
    }
}
880
881impl GLProcessorST {
    /// Creates the GL processor: builds the EGL context, resolves GL entry
    /// points, verifies required GL extensions, compiles all shader
    /// programs, and allocates the textures and attribute buffers.
    pub fn new() -> Result<GLProcessorST, crate::Error> {
        let gl_context = GlContext::new()?;
        // Resolve GL function pointers through EGL's loader.
        gls::load_with(|s| {
            gl_context
                .egl
                .get_proc_address(s)
                .map_or(std::ptr::null(), |p| p as *const _)
        });

        Self::gl_check_support()?;

        unsafe {
            // Byte-tight row packing so arbitrary image widths upload and
            // read back without row padding.
            gls::gl::PixelStorei(gls::gl::PACK_ALIGNMENT, 1);
            gls::gl::PixelStorei(gls::gl::UNPACK_ALIGNMENT, 1);
        }

        let texture_program_planar =
            GlProgram::new(generate_vertex_shader(), generate_planar_rgb_shader())?;

        let texture_program =
            GlProgram::new(generate_vertex_shader(), generate_texture_fragment_shader())?;

        let texture_program_yuv = GlProgram::new(
            generate_vertex_shader(),
            generate_texture_fragment_shader_yuv(),
        )?;

        // Overlay programs start with the default palette; callers can
        // replace it via set_class_colors.
        #[cfg(feature = "decoder")]
        let segmentation_program =
            GlProgram::new(generate_vertex_shader(), generate_segmentation_shader())?;
        #[cfg(feature = "decoder")]
        segmentation_program.load_uniform_4fv(c"colors", &DEFAULT_COLORS)?;
        #[cfg(feature = "decoder")]
        let instanced_segmentation_program = GlProgram::new(
            generate_vertex_shader(),
            generate_instanced_segmentation_shader(),
        )?;
        #[cfg(feature = "decoder")]
        instanced_segmentation_program.load_uniform_4fv(c"colors", &DEFAULT_COLORS)?;

        #[cfg(feature = "decoder")]
        let color_program = GlProgram::new(generate_vertex_shader(), generate_color_shader())?;
        #[cfg(feature = "decoder")]
        color_program.load_uniform_4fv(c"colors", &DEFAULT_COLORS)?;

        let camera_eglimage_texture = Texture::new();
        let camera_normal_texture = Texture::new();
        let render_texture = Texture::new();
        // NOTE(review): created unconditionally but only stored when the
        // "decoder" feature is enabled — confirm this is intended.
        let segmentation_texture = Texture::new();
        let vertex_buffer = Buffer::new(0, 3, 100);
        let texture_buffer = Buffer::new(1, 2, 100);

        let converter = GLProcessorST {
            gl_context,
            texture_program,
            texture_program_yuv,
            texture_program_planar,
            camera_eglimage_texture,
            camera_normal_texture,
            #[cfg(feature = "decoder")]
            segmentation_texture,
            vertex_buffer,
            texture_buffer,
            render_texture,
            #[cfg(feature = "decoder")]
            segmentation_program,
            #[cfg(feature = "decoder")]
            instanced_segmentation_program,
            #[cfg(feature = "decoder")]
            color_program,
        };
        check_gl_error(function!(), line!())?;

        log::debug!("GLConverter created");
        Ok(converter)
    }
959
960 fn check_src_format_supported(support_dma: bool, img: &TensorImage) -> bool {
961 if support_dma && img.tensor().memory() == TensorMemory::Dma {
962 matches!(img.fourcc(), RGBA | GREY | YUYV | NV12)
964 } else {
965 matches!(img.fourcc(), RGB | RGBA | GREY)
966 }
967 }
968
969 fn check_dst_format_supported(support_dma: bool, img: &TensorImage) -> bool {
970 if support_dma && img.tensor().memory() == TensorMemory::Dma {
971 matches!(img.fourcc(), RGBA | GREY | PLANAR_RGB)
973 } else {
974 matches!(img.fourcc(), RGB | RGBA | GREY)
975 }
976 }
977
    /// Verifies the GL implementation exposes the extensions this module
    /// relies on (external EGLImage textures in ESSL3 and surfaceless
    /// contexts); errors if any are missing.
    fn gl_check_support() -> Result<(), crate::Error> {
        if let Ok(version) = gls::get_string(gls::gl::SHADING_LANGUAGE_VERSION) {
            log::debug!("GL Shading Language Version: {version:?}");
        } else {
            log::warn!("Could not get GL Shading Language Version");
        }

        let extensions = unsafe {
            let str = gls::gl::GetString(gls::gl::EXTENSIONS);
            if str.is_null() {
                return Err(crate::Error::GLVersion(
                    "GL returned no supported extensions".to_string(),
                ));
            }
            // SAFETY: GetString returned a non-null NUL-terminated string
            // owned by the GL implementation; it is copied out immediately.
            CStr::from_ptr(str as *const c_char)
                .to_string_lossy()
                .to_string()
        };
        log::debug!("GL Extensions: {extensions}");
        let required_ext = [
            "GL_OES_EGL_image_external_essl3",
            "GL_OES_surfaceless_context",
        ];
        // Split the space-separated extension list for exact-name lookups.
        let extensions = extensions.split_ascii_whitespace().collect::<BTreeSet<_>>();
        for required in required_ext {
            if !extensions.contains(required) {
                return Err(crate::Error::GLVersion(format!(
                    "GL does not support {required} extension",
                )));
            }
        }

        Ok(())
    }
1012
    /// Binds a framebuffer whose color attachment is an EGLImage imported
    /// from `dst`'s dma-buf, so rendering writes straight into the
    /// destination buffer (zero copy). Returns the framebuffer, which must
    /// stay alive while drawing.
    fn setup_renderbuffer_dma(&mut self, dst: &TensorImage) -> crate::Result<FrameBuffer> {
        let frame_buffer = FrameBuffer::new();
        frame_buffer.bind();

        // Planar RGB is rendered as a single-channel image three planes
        // tall, one plane per color channel.
        let (width, height) = if matches!(dst.fourcc(), PLANAR_RGB) {
            let width = dst.width();
            let height = dst.height() * 3;
            (width as i32, height as i32)
        } else {
            (dst.width() as i32, dst.height() as i32)
        };
        let dest_img = self.create_image_from_dma2(dst)?;
        unsafe {
            // NOTE(review): the YUV program is bound here for all DMA
            // destinations — presumably the draw call rebinds the proper
            // program later; confirm.
            gls::gl::UseProgram(self.texture_program_yuv.id);
            gls::gl::ActiveTexture(gls::gl::TEXTURE0);
            gls::gl::BindTexture(gls::gl::TEXTURE_2D, self.render_texture.id);
            gls::gl::TexParameteri(
                gls::gl::TEXTURE_2D,
                gls::gl::TEXTURE_MIN_FILTER,
                gls::gl::LINEAR as i32,
            );
            gls::gl::TexParameteri(
                gls::gl::TEXTURE_2D,
                gls::gl::TEXTURE_MAG_FILTER,
                gls::gl::LINEAR as i32,
            );
            // Back the render texture with the imported dma-buf image.
            gls::gl::EGLImageTargetTexture2DOES(gls::gl::TEXTURE_2D, dest_img.egl_image.as_ptr());
            gls::gl::FramebufferTexture2D(
                gls::gl::FRAMEBUFFER,
                gls::gl::COLOR_ATTACHMENT0,
                gls::gl::TEXTURE_2D,
                self.render_texture.id,
                0,
            );
            check_gl_error(function!(), line!())?;
            gls::gl::Viewport(0, 0, width, height);
        }
        Ok(frame_buffer)
    }
1052
1053 fn convert_dest_dma(
1054 &mut self,
1055 dst: &mut TensorImage,
1056 src: &TensorImage,
1057 rotation: crate::Rotation,
1058 flip: Flip,
1059 crop: Crop,
1060 ) -> crate::Result<()> {
1061 assert!(self.gl_context.support_dma);
1062 let _framebuffer = self.setup_renderbuffer_dma(dst)?;
1063 if dst.is_planar() {
1064 self.convert_to_planar(src, dst, rotation, flip, crop)
1065 } else {
1066 self.convert_to(src, dst, rotation, flip, crop)
1067 }
1068 }
1069
    /// Binds a framebuffer whose color attachment is an ordinary texture
    /// sized for `dst`; pixels are later read back with `ReadnPixels`.
    ///
    /// When the crop's destination rectangle does not cover the whole
    /// image, the texture is pre-loaded with `dst`'s current contents so
    /// the cropped render composites over them; otherwise the texture is
    /// allocated uninitialized (null pixels pointer).
    fn setup_renderbuffer_non_dma(
        &mut self,
        dst: &TensorImage,
        crop: Crop,
    ) -> crate::Result<FrameBuffer> {
        debug_assert!(matches!(dst.fourcc(), RGB | RGBA | GREY | PLANAR_RGB));
        // Planar destinations render as a single-channel (RED) image with
        // one plane stacked per channel.
        // NOTE(review): width/4 with height*4 (RGBA) / *3 (RGB) suggests a
        // packed four-pixels-per-texel planar layout — confirm against the
        // planar shader.
        let (width, height) = if dst.is_planar() {
            let width = dst.width() / 4;
            let height = match dst.fourcc() {
                RGBA => dst.height() * 4,
                RGB => dst.height() * 3,
                GREY => dst.height(),
                _ => unreachable!(),
            };
            (width as i32, height as i32)
        } else {
            (dst.width() as i32, dst.height() as i32)
        };

        let format = if dst.is_planar() {
            gls::gl::RED
        } else {
            match dst.fourcc() {
                RGB => gls::gl::RGB,
                RGBA => gls::gl::RGBA,
                GREY => gls::gl::RED,
                _ => unreachable!(),
            }
        };

        let start = Instant::now();
        let frame_buffer = FrameBuffer::new();
        frame_buffer.bind();

        // Declared before `pixels` so the mapping outlives the TexImage2D
        // call that reads from it.
        let map;

        let pixels = if crop.dst_rect.is_none_or(|crop| {
            crop.top == 0
                && crop.left == 0
                && crop.height == dst.height()
                && crop.width == dst.width()
        }) {
            // Full-frame render: no need to upload existing contents.
            std::ptr::null()
        } else {
            map = dst.tensor().map()?;
            map.as_ptr() as *const c_void
        };
        unsafe {
            gls::gl::UseProgram(self.texture_program.id);
            gls::gl::BindTexture(gls::gl::TEXTURE_2D, self.render_texture.id);
            gls::gl::ActiveTexture(gls::gl::TEXTURE0);
            gls::gl::TexParameteri(
                gls::gl::TEXTURE_2D,
                gls::gl::TEXTURE_MIN_FILTER,
                gls::gl::LINEAR as i32,
            );
            gls::gl::TexParameteri(
                gls::gl::TEXTURE_2D,
                gls::gl::TEXTURE_MAG_FILTER,
                gls::gl::LINEAR as i32,
            );

            gls::gl::TexImage2D(
                gls::gl::TEXTURE_2D,
                0,
                format as i32,
                width,
                height,
                0,
                format,
                gls::gl::UNSIGNED_BYTE,
                pixels,
            );
            check_gl_error(function!(), line!())?;
            gls::gl::FramebufferTexture2D(
                gls::gl::FRAMEBUFFER,
                gls::gl::COLOR_ATTACHMENT0,
                gls::gl::TEXTURE_2D,
                self.render_texture.id,
                0,
            );
            check_gl_error(function!(), line!())?;
            gls::gl::Viewport(0, 0, width, height);
        }
        log::debug!("Set up framebuffer takes {:?}", start.elapsed());
        Ok(frame_buffer)
    }
1157
    /// Converts `src` into `dst` via an intermediate texture: renders the
    /// conversion into a framebuffer-attached texture, then reads the
    /// pixels back into `dst`'s mapped memory.
    fn convert_dest_non_dma(
        &mut self,
        dst: &mut TensorImage,
        src: &TensorImage,
        rotation: crate::Rotation,
        flip: Flip,
        crop: Crop,
    ) -> crate::Result<()> {
        let _framebuffer = self.setup_renderbuffer_non_dma(dst, crop)?;
        let start = Instant::now();
        if dst.is_planar() {
            self.convert_to_planar(src, dst, rotation, flip, crop)?;
        } else {
            self.convert_to(src, dst, rotation, flip, crop)?;
        }
        log::debug!("Draw to framebuffer takes {:?}", start.elapsed());
        let start = Instant::now();
        let dest_format = match dst.fourcc() {
            RGB => gls::gl::RGB,
            RGBA => gls::gl::RGBA,
            GREY => gls::gl::RED,
            // Guarded by check_dst_format_supported in convert().
            _ => unreachable!(),
        };

        unsafe {
            let mut dst_map = dst.tensor().map()?;
            gls::gl::ReadBuffer(gls::gl::COLOR_ATTACHMENT0);
            // ReadnPixels bounds the write to the tensor's byte length.
            gls::gl::ReadnPixels(
                0,
                0,
                dst.width() as i32,
                dst.height() as i32,
                dest_format,
                gls::gl::UNSIGNED_BYTE,
                dst.tensor.len() as i32,
                dst_map.as_mut_ptr() as *mut c_void,
            );
        }
        log::debug!("Read from framebuffer takes {:?}", start.elapsed());
        Ok(())
    }
1199
    /// Draws `src` into the currently bound framebuffer as an interleaved
    /// (non-planar) image, applying the requested crop, rotation and flip.
    ///
    /// When the context supports DMA and the source tensor is DMA-backed, the
    /// frame is imported zero-copy as an EGLImage; on import failure (or for
    /// non-DMA sources) it falls back to a plain CPU texture upload.
    fn convert_to(
        &mut self,
        src: &TensorImage,
        dst: &TensorImage,
        rotation: crate::Rotation,
        flip: Flip,
        crop: Crop,
    ) -> Result<(), crate::Error> {
        check_gl_error(function!(), line!())?;

        // A destination rect smaller than the full image leaves borders that
        // must be filled with the requested padding color before drawing.
        let has_crop = crop.dst_rect.is_some_and(|x| {
            x.left != 0 || x.top != 0 || x.width != dst.width() || x.height != dst.height()
        });
        if has_crop {
            if let Some(dst_color) = crop.dst_color {
                unsafe {
                    gls::gl::ClearColor(
                        dst_color[0] as f32 / 255.0,
                        dst_color[1] as f32 / 255.0,
                        dst_color[2] as f32 / 255.0,
                        dst_color[3] as f32 / 255.0,
                    );
                    gls::gl::Clear(gls::gl::COLOR_BUFFER_BIT);
                };
            }
        }

        // Source ROI in normalized texture coordinates; top/bottom are swapped
        // relative to the pixel rect to match GL's bottom-up row order.
        let src_roi = if let Some(crop) = crop.src_rect {
            RegionOfInterest {
                left: crop.left as f32 / src.width() as f32,
                top: (crop.top + crop.height) as f32 / src.height() as f32,
                right: (crop.left + crop.width) as f32 / src.width() as f32,
                bottom: crop.top as f32 / src.height() as f32,
            }
        } else {
            RegionOfInterest {
                left: 0.,
                top: 1.,
                right: 1.,
                bottom: 0.,
            }
        };

        // Destination ROI in clip-space coordinates ([-1, 1]).
        let cvt_screen_coord = |normalized| normalized * 2.0 - 1.0;
        let dst_roi = if let Some(crop) = crop.dst_rect {
            RegionOfInterest {
                left: cvt_screen_coord(crop.left as f32 / dst.width() as f32),
                top: cvt_screen_coord((crop.top + crop.height) as f32 / dst.height() as f32),
                right: cvt_screen_coord((crop.left + crop.width) as f32 / dst.width() as f32),
                bottom: cvt_screen_coord(crop.top as f32 / dst.height() as f32),
            }
        } else {
            RegionOfInterest {
                left: -1.,
                top: 1.,
                right: 1.,
                bottom: -1.,
            }
        };
        // Rotation is applied by offsetting into the duplicated
        // texture-coordinate list built by the draw helpers.
        let rotation_offset = match rotation {
            crate::Rotation::None => 0,
            crate::Rotation::Clockwise90 => 1,
            crate::Rotation::Rotate180 => 2,
            crate::Rotation::CounterClockwise90 => 3,
        };
        if self.gl_context.support_dma && src.tensor().memory() == TensorMemory::Dma {
            match self.create_image_from_dma2(src) {
                Ok(new_egl_image) => self.draw_camera_texture_eglimage(
                    src,
                    &new_egl_image,
                    src_roi,
                    dst_roi,
                    rotation_offset,
                    flip,
                )?,
                Err(e) => {
                    // EGL import can fail for unsupported formats or strides;
                    // fall back to uploading the pixels as a normal texture.
                    log::warn!("EGL image creation failed for {:?}: {:?}", src.fourcc(), e);
                    let start = Instant::now();
                    self.draw_src_texture(src, src_roi, dst_roi, rotation_offset, flip)?;
                    log::debug!("draw_src_texture takes {:?}", start.elapsed());
                }
            }
        } else {
            let start = Instant::now();
            self.draw_src_texture(src, src_roi, dst_roi, rotation_offset, flip)?;
            log::debug!("draw_src_texture takes {:?}", start.elapsed());
        }

        let start = Instant::now();
        // Block until the GPU is done so the caller may safely read the result.
        unsafe { gls::gl::Finish() };
        log::debug!("gl_Finish takes {:?}", start.elapsed());
        check_gl_error(function!(), line!())?;
        Ok(())
    }
1296
    /// Draws `src` into a planar destination (PLANAR_RGB or PLANAR_RGBA),
    /// applying crop, rotation and flip. Unlike [`Self::convert_to`] there is
    /// no CPU-upload fallback here: the source must be importable as a
    /// DMA-buf EGLImage.
    fn convert_to_planar(
        &self,
        src: &TensorImage,
        dst: &TensorImage,
        rotation: crate::Rotation,
        flip: Flip,
        crop: Crop,
    ) -> Result<(), crate::Error> {
        // `alpha` selects between 3-plane (RGB) and 4-plane (RGBA) output.
        let alpha = match dst.fourcc() {
            PLANAR_RGB => false,
            PLANAR_RGBA => true,
            _ => {
                return Err(crate::Error::NotSupported(
                    "Destination format must be PLANAR_RGB or PLANAR_RGBA".to_string(),
                ));
            }
        };

        // Source ROI in normalized texture coordinates; top/bottom swapped to
        // match GL's bottom-up row order.
        let src_roi = if let Some(crop) = crop.src_rect {
            RegionOfInterest {
                left: crop.left as f32 / src.width() as f32,
                top: (crop.top + crop.height) as f32 / src.height() as f32,
                right: (crop.left + crop.width) as f32 / src.width() as f32,
                bottom: crop.top as f32 / src.height() as f32,
            }
        } else {
            RegionOfInterest {
                left: 0.,
                top: 1.,
                right: 1.,
                bottom: 0.,
            }
        };

        // Destination ROI in clip space ([-1, 1]).
        let cvt_screen_coord = |normalized| normalized * 2.0 - 1.0;
        let dst_roi = if let Some(crop) = crop.dst_rect {
            RegionOfInterest {
                left: cvt_screen_coord(crop.left as f32 / dst.width() as f32),
                top: cvt_screen_coord((crop.top + crop.height) as f32 / dst.height() as f32),
                right: cvt_screen_coord((crop.left + crop.width) as f32 / dst.width() as f32),
                bottom: cvt_screen_coord(crop.top as f32 / dst.height() as f32),
            }
        } else {
            RegionOfInterest {
                left: -1.,
                top: 1.,
                right: 1.,
                bottom: -1.,
            }
        };
        // Rotation selects an offset into the duplicated texture-coordinate
        // list used by the draw helper.
        let rotation_offset = match rotation {
            crate::Rotation::None => 0,
            crate::Rotation::Clockwise90 => 1,
            crate::Rotation::Rotate180 => 2,
            crate::Rotation::CounterClockwise90 => 3,
        };

        // Fill the letterbox border with the padding color before drawing.
        let has_crop = crop.dst_rect.is_some_and(|x| {
            x.left != 0 || x.top != 0 || x.width != dst.width() || x.height != dst.height()
        });
        if has_crop {
            if let Some(dst_color) = crop.dst_color {
                self.clear_rect_planar(
                    dst.width(),
                    dst.height(),
                    dst_roi,
                    [
                        dst_color[0] as f32 / 255.0,
                        dst_color[1] as f32 / 255.0,
                        dst_color[2] as f32 / 255.0,
                        dst_color[3] as f32 / 255.0,
                    ],
                    alpha,
                )?;
            }
        }

        let new_egl_image = self.create_image_from_dma2(src)?;

        self.draw_camera_texture_to_rgb_planar(
            &new_egl_image,
            src_roi,
            dst_roi,
            rotation_offset,
            flip,
            alpha,
        )?;
        // Ensure the GPU has finished before the caller reads the result.
        unsafe { gls::gl::Finish() };

        Ok(())
    }
1412
    /// Fills the letterbox area of a planar destination with `color`.
    ///
    /// The planar target stacks one grayscale plane per channel vertically,
    /// so each channel value is cleared into its own horizontal band using
    /// scissored clears. `dst_roi` is in clip-space ([-1, 1]) coordinates.
    fn clear_rect_planar(
        &self,
        width: usize,
        height: usize,
        dst_roi: RegionOfInterest,
        color: [f32; 4],
        alpha: bool,
    ) -> Result<(), Error> {
        // Fast path for a uniform gray fill: one full-surface clear writes the
        // same value into every plane.
        // NOTE(review): this clears the whole framebuffer, not just dst_roi,
        // and the scissored per-plane clears below still run afterwards —
        // confirm the unconditional full clear is intentional.
        if !alpha && color[0] == color[1] && color[1] == color[2] {
            unsafe {
                gls::gl::ClearColor(color[0], color[0], color[0], 1.0);
                gls::gl::Clear(gls::gl::COLOR_BUFFER_BIT);
            };
        }

        // Number of planes stacked in the destination.
        let split = if alpha { 4 } else { 3 };

        unsafe {
            gls::gl::Enable(gls::gl::SCISSOR_TEST);
            // Map the clip-space ROI back to pixel coordinates; each plane's
            // band is 1/split of the ROI height.
            let x = (((dst_roi.left + 1.0) / 2.0) * width as f32).round() as i32;
            let y = (((dst_roi.bottom + 1.0) / 2.0) * height as f32).round() as i32;
            let width = (((dst_roi.right - dst_roi.left) / 2.0) * width as f32).round() as i32;
            let height = (((dst_roi.top - dst_roi.bottom) / 2.0) * height as f32 / split as f32)
                .round() as i32;
            // One scissored clear per plane, each with that channel's value.
            for (i, c) in color.iter().enumerate().take(split) {
                gls::gl::Scissor(x, y + i as i32 * height, width, height);
                gls::gl::ClearColor(*c, *c, *c, 1.0);
                gls::gl::Clear(gls::gl::COLOR_BUFFER_BIT);
            }
            gls::gl::Disable(gls::gl::SCISSOR_TEST);
        }
        Ok(())
    }
1446
    /// Draws the camera EGLImage into a planar destination: one pass per
    /// output channel, each rendered into its own horizontal band of the
    /// vertically stacked planar framebuffer, with the texture swizzle
    /// routing the desired channel into red for the shader to sample.
    #[allow(clippy::too_many_arguments)]
    fn draw_camera_texture_to_rgb_planar(
        &self,
        egl_img: &EglImage,
        src_roi: RegionOfInterest,
        mut dst_roi: RegionOfInterest,
        rotation_offset: usize,
        flip: Flip,
        alpha: bool,
    ) -> Result<(), Error> {
        let texture_target = gls::gl::TEXTURE_EXTERNAL_OES;
        // Flips are implemented by mirroring the destination rectangle.
        match flip {
            Flip::None => {}
            Flip::Vertical => {
                std::mem::swap(&mut dst_roi.top, &mut dst_roi.bottom);
            }
            Flip::Horizontal => {
                std::mem::swap(&mut dst_roi.left, &mut dst_roi.right);
            }
        }
        unsafe {
            gls::gl::UseProgram(self.texture_program_planar.id);
            gls::gl::BindTexture(texture_target, self.camera_eglimage_texture.id);
            gls::gl::ActiveTexture(gls::gl::TEXTURE0);
            gls::gl::TexParameteri(
                texture_target,
                gls::gl::TEXTURE_MIN_FILTER,
                gls::gl::LINEAR as i32,
            );
            gls::gl::TexParameteri(
                texture_target,
                gls::gl::TEXTURE_MAG_FILTER,
                gls::gl::LINEAR as i32,
            );
            gls::gl::TexParameteri(
                texture_target,
                gls::gl::TEXTURE_WRAP_S,
                gls::gl::CLAMP_TO_EDGE as i32,
            );

            gls::gl::TexParameteri(
                texture_target,
                gls::gl::TEXTURE_WRAP_T,
                gls::gl::CLAMP_TO_EDGE as i32,
            );

            // Attach the EGLImage as the external texture's storage.
            gls::egl_image_target_texture_2d_oes(texture_target, egl_img.egl_image.as_ptr());
            check_gl_error(function!(), line!())?;
            // Vertical center of each plane's band in clip space: quarters
            // for 4 planes (RGBA), thirds for 3 planes (RGB).
            let y_centers = if alpha {
                vec![-3.0 / 4.0, -1.0 / 4.0, 1.0 / 4.0, 3.0 / 4.0]
            } else {
                vec![-2.0 / 3.0, 0.0, 2.0 / 3.0]
            };
            // Channel routed into red on pass i.
            let swizzles = [gls::gl::RED, gls::gl::GREEN, gls::gl::BLUE, gls::gl::ALPHA];
            for (i, y_center) in y_centers.iter().enumerate() {
                gls::gl::BindBuffer(gls::gl::ARRAY_BUFFER, self.vertex_buffer.id);
                gls::gl::EnableVertexAttribArray(self.vertex_buffer.buffer_index);
                // Quad corners (x, y, z): the ROI squeezed into this band.
                // NOTE(review): the band extent divides by 3.0 even when
                // `alpha` selects four planes — confirm this is intended for
                // PLANAR_RGBA.
                let camera_vertices: [f32; 12] = [
                    dst_roi.left,
                    dst_roi.top / 3.0 + y_center,
                    0.,
                    dst_roi.right,
                    dst_roi.top / 3.0 + y_center,
                    0.,
                    dst_roi.right,
                    dst_roi.bottom / 3.0 + y_center,
                    0.,
                    dst_roi.left,
                    dst_roi.bottom / 3.0 + y_center,
                    0.,
                ];
                gls::gl::BufferData(
                    gls::gl::ARRAY_BUFFER,
                    (size_of::<f32>() * camera_vertices.len()) as isize,
                    camera_vertices.as_ptr() as *const c_void,
                    gls::gl::DYNAMIC_DRAW,
                );

                gls::gl::BindBuffer(gls::gl::ARRAY_BUFFER, self.texture_buffer.id);
                gls::gl::EnableVertexAttribArray(self.texture_buffer.buffer_index);
                // Corner list written twice so a rotation can be applied by
                // starting `rotation_offset` corners into the array.
                let texture_vertices: [f32; 16] = [
                    src_roi.left,
                    src_roi.top,
                    src_roi.right,
                    src_roi.top,
                    src_roi.right,
                    src_roi.bottom,
                    src_roi.left,
                    src_roi.bottom,
                    src_roi.left,
                    src_roi.top,
                    src_roi.right,
                    src_roi.top,
                    src_roi.right,
                    src_roi.bottom,
                    src_roi.left,
                    src_roi.bottom,
                ];

                gls::gl::BufferData(
                    gls::gl::ARRAY_BUFFER,
                    (size_of::<f32>() * 8) as isize,
                    (texture_vertices[(rotation_offset * 2)..]).as_ptr() as *const c_void,
                    gls::gl::DYNAMIC_DRAW,
                );
                let vertices_index: [u32; 4] = [0, 1, 2, 3];
                // Route this pass's channel into red for the shader.
                gls::gl::TexParameteri(
                    texture_target,
                    gls::gl::TEXTURE_SWIZZLE_R,
                    swizzles[i] as i32,
                );

                gls::gl::DrawElements(
                    gls::gl::TRIANGLE_FAN,
                    vertices_index.len() as i32,
                    gls::gl::UNSIGNED_INT,
                    vertices_index.as_ptr() as *const c_void,
                );
            }
            check_gl_error(function!(), line!())?;
        }
        Ok(())
    }
1575
    /// Uploads `src` into a regular 2D texture and draws it into the current
    /// framebuffer. This is the CPU-upload fallback for sources that cannot
    /// be imported as an EGLImage.
    fn draw_src_texture(
        &mut self,
        src: &TensorImage,
        src_roi: RegionOfInterest,
        mut dst_roi: RegionOfInterest,
        rotation_offset: usize,
        flip: Flip,
    ) -> Result<(), Error> {
        let texture_target = gls::gl::TEXTURE_2D;
        // Only interleaved 8-bit formats reach this path; anything else is a
        // bug upstream.
        let texture_format = match src.fourcc() {
            RGB => gls::gl::RGB,
            RGBA => gls::gl::RGBA,
            GREY => gls::gl::RED,
            _ => unreachable!(),
        };
        unsafe {
            gls::gl::UseProgram(self.texture_program.id);
            gls::gl::BindTexture(texture_target, self.camera_normal_texture.id);
            gls::gl::ActiveTexture(gls::gl::TEXTURE0);
            gls::gl::TexParameteri(
                texture_target,
                gls::gl::TEXTURE_MIN_FILTER,
                gls::gl::LINEAR as i32,
            );
            gls::gl::TexParameteri(
                texture_target,
                gls::gl::TEXTURE_MAG_FILTER,
                gls::gl::LINEAR as i32,
            );
            if src.fourcc() == GREY {
                // Replicate the single red channel into G and B so grayscale
                // renders as gray rather than red.
                for swizzle in [
                    gls::gl::TEXTURE_SWIZZLE_R,
                    gls::gl::TEXTURE_SWIZZLE_G,
                    gls::gl::TEXTURE_SWIZZLE_B,
                ] {
                    gls::gl::TexParameteri(gls::gl::TEXTURE_2D, swizzle, gls::gl::RED as i32);
                }
            } else {
                // Restore the identity swizzle in case a GREY frame set it.
                for (swizzle, src) in [
                    (gls::gl::TEXTURE_SWIZZLE_R, gls::gl::RED),
                    (gls::gl::TEXTURE_SWIZZLE_G, gls::gl::GREEN),
                    (gls::gl::TEXTURE_SWIZZLE_B, gls::gl::BLUE),
                ] {
                    gls::gl::TexParameteri(gls::gl::TEXTURE_2D, swizzle, src as i32);
                }
            }
            // Upload the mapped tensor pixels (reallocates only if the
            // target/size/format changed since the last upload).
            self.camera_normal_texture.update_texture(
                texture_target,
                src.width(),
                src.height(),
                texture_format,
                &src.tensor().map()?,
            );

            gls::gl::BindBuffer(gls::gl::ARRAY_BUFFER, self.vertex_buffer.id);
            gls::gl::EnableVertexAttribArray(self.vertex_buffer.buffer_index);

            // Flips are implemented by mirroring the destination rectangle.
            match flip {
                Flip::None => {}
                Flip::Vertical => {
                    std::mem::swap(&mut dst_roi.top, &mut dst_roi.bottom);
                }
                Flip::Horizontal => {
                    std::mem::swap(&mut dst_roi.left, &mut dst_roi.right);
                }
            }

            // Quad corners (x, y, z) in clip space.
            let camera_vertices: [f32; 12] = [
                dst_roi.left,
                dst_roi.top,
                0.,
                dst_roi.right,
                dst_roi.top,
                0.,
                dst_roi.right,
                dst_roi.bottom,
                0.,
                dst_roi.left,
                dst_roi.bottom,
                0.,
            ];
            gls::gl::BufferData(
                gls::gl::ARRAY_BUFFER,
                (size_of::<f32>() * camera_vertices.len()) as isize,
                camera_vertices.as_ptr() as *const c_void,
                gls::gl::DYNAMIC_DRAW,
            );
            gls::gl::BindBuffer(gls::gl::ARRAY_BUFFER, self.texture_buffer.id);
            gls::gl::EnableVertexAttribArray(self.texture_buffer.buffer_index);
            // Corner list written twice so rotation can be applied by starting
            // `rotation_offset` corners into the array.
            let texture_vertices: [f32; 16] = [
                src_roi.left,
                src_roi.top,
                src_roi.right,
                src_roi.top,
                src_roi.right,
                src_roi.bottom,
                src_roi.left,
                src_roi.bottom,
                src_roi.left,
                src_roi.top,
                src_roi.right,
                src_roi.top,
                src_roi.right,
                src_roi.bottom,
                src_roi.left,
                src_roi.bottom,
            ];

            gls::gl::BufferData(
                gls::gl::ARRAY_BUFFER,
                (size_of::<f32>() * 8) as isize,
                (texture_vertices[(rotation_offset * 2)..]).as_ptr() as *const c_void,
                gls::gl::DYNAMIC_DRAW,
            );
            let vertices_index: [u32; 4] = [0, 1, 2, 3];
            gls::gl::DrawElements(
                gls::gl::TRIANGLE_FAN,
                vertices_index.len() as i32,
                gls::gl::UNSIGNED_INT,
                vertices_index.as_ptr() as *const c_void,
            );
            check_gl_error(function!(), line!())?;

            Ok(())
        }
    }
1702
    /// Draws a DMA-imported camera frame — bound as a `TEXTURE_EXTERNAL_OES`
    /// EGLImage — into the current framebuffer using the YUV texture program.
    fn draw_camera_texture_eglimage(
        &self,
        src: &TensorImage,
        egl_img: &EglImage,
        src_roi: RegionOfInterest,
        mut dst_roi: RegionOfInterest,
        rotation_offset: usize,
        flip: Flip,
    ) -> Result<(), Error> {
        let texture_target = gls::gl::TEXTURE_EXTERNAL_OES;
        unsafe {
            gls::gl::UseProgram(self.texture_program_yuv.id);
            gls::gl::BindTexture(texture_target, self.camera_eglimage_texture.id);
            gls::gl::ActiveTexture(gls::gl::TEXTURE0);
            gls::gl::TexParameteri(
                texture_target,
                gls::gl::TEXTURE_MIN_FILTER,
                gls::gl::LINEAR as i32,
            );
            gls::gl::TexParameteri(
                texture_target,
                gls::gl::TEXTURE_MAG_FILTER,
                gls::gl::LINEAR as i32,
            );

            // NOTE(review): the swizzles below are set on TEXTURE_2D while the
            // texture bound above uses TEXTURE_EXTERNAL_OES — confirm which
            // target is intended here.
            if src.fourcc() == GREY {
                // Replicate red into G/B so grayscale renders as gray.
                for swizzle in [
                    gls::gl::TEXTURE_SWIZZLE_R,
                    gls::gl::TEXTURE_SWIZZLE_G,
                    gls::gl::TEXTURE_SWIZZLE_B,
                ] {
                    gls::gl::TexParameteri(gls::gl::TEXTURE_2D, swizzle, gls::gl::RED as i32);
                }
            } else {
                // Restore the identity swizzle in case a GREY frame set it.
                for (swizzle, src) in [
                    (gls::gl::TEXTURE_SWIZZLE_R, gls::gl::RED),
                    (gls::gl::TEXTURE_SWIZZLE_G, gls::gl::GREEN),
                    (gls::gl::TEXTURE_SWIZZLE_B, gls::gl::BLUE),
                ] {
                    gls::gl::TexParameteri(gls::gl::TEXTURE_2D, swizzle, src as i32);
                }
            }

            // Attach the EGLImage as the external texture's storage.
            gls::egl_image_target_texture_2d_oes(texture_target, egl_img.egl_image.as_ptr());
            check_gl_error(function!(), line!())?;
            gls::gl::BindBuffer(gls::gl::ARRAY_BUFFER, self.vertex_buffer.id);
            gls::gl::EnableVertexAttribArray(self.vertex_buffer.buffer_index);

            // Flips are implemented by mirroring the destination rectangle.
            match flip {
                Flip::None => {}
                Flip::Vertical => {
                    std::mem::swap(&mut dst_roi.top, &mut dst_roi.bottom);
                }
                Flip::Horizontal => {
                    std::mem::swap(&mut dst_roi.left, &mut dst_roi.right);
                }
            }

            // Quad corners (x, y, z) in clip space; the buffer was
            // preallocated, so only the contents are updated here.
            let camera_vertices: [f32; 12] = [
                dst_roi.left,
                dst_roi.top,
                0.,
                dst_roi.right,
                dst_roi.top,
                0.,
                dst_roi.right,
                dst_roi.bottom,
                0.,
                dst_roi.left,
                dst_roi.bottom,
                0.,
            ];
            gls::gl::BufferSubData(
                gls::gl::ARRAY_BUFFER,
                0,
                (size_of::<f32>() * camera_vertices.len()) as isize,
                camera_vertices.as_ptr() as *const c_void,
            );

            gls::gl::BindBuffer(gls::gl::ARRAY_BUFFER, self.texture_buffer.id);
            gls::gl::EnableVertexAttribArray(self.texture_buffer.buffer_index);

            // Corner list written twice so rotation can be applied by starting
            // `rotation_offset` corners into the array.
            let texture_vertices: [f32; 16] = [
                src_roi.left,
                src_roi.top,
                src_roi.right,
                src_roi.top,
                src_roi.right,
                src_roi.bottom,
                src_roi.left,
                src_roi.bottom,
                src_roi.left,
                src_roi.top,
                src_roi.right,
                src_roi.top,
                src_roi.right,
                src_roi.bottom,
                src_roi.left,
                src_roi.bottom,
            ];
            gls::gl::BufferSubData(
                gls::gl::ARRAY_BUFFER,
                0,
                (size_of::<f32>() * 8) as isize,
                (texture_vertices[(rotation_offset * 2)..]).as_ptr() as *const c_void,
            );

            let vertices_index: [u32; 4] = [0, 1, 2, 3];
            gls::gl::DrawElements(
                gls::gl::TRIANGLE_FAN,
                vertices_index.len() as i32,
                gls::gl::UNSIGNED_INT,
                vertices_index.as_ptr() as *const c_void,
            );
        }
        check_gl_error(function!(), line!())?;
        Ok(())
    }
1822
1823 fn create_image_from_dma2(&self, src: &TensorImage) -> Result<EglImage, crate::Error> {
1824 let width;
1825 let height;
1826 let format;
1827 let channels;
1828
1829 if src.fourcc() == NV12 {
1831 if !src.width().is_multiple_of(4) {
1832 return Err(Error::NotSupported(
1833 "OpenGL EGLImage doesn't support image widths which are not multiples of 4"
1834 .to_string(),
1835 ));
1836 }
1837 width = src.width();
1838 height = src.height();
1839 format = fourcc_to_drm(NV12);
1840 channels = 1; } else if src.is_planar() {
1842 if !src.width().is_multiple_of(16) {
1843 return Err(Error::NotSupported(
1844 "OpenGL Planar RGB EGLImage doesn't support image widths which are not multiples of 16"
1845 .to_string(),
1846 ));
1847 }
1848 match src.fourcc() {
1849 PLANAR_RGB => {
1850 format = DrmFourcc::R8;
1851 width = src.width();
1852 height = src.height() * 3;
1853 channels = 1;
1854 }
1855 fourcc => {
1856 return Err(crate::Error::NotSupported(format!(
1857 "Unsupported Planar FourCC {fourcc:?}"
1858 )));
1859 }
1860 };
1861 } else {
1862 if !src.width().is_multiple_of(4) {
1863 return Err(Error::NotSupported(
1864 "OpenGL EGLImage doesn't support image widths which are not multiples of 4"
1865 .to_string(),
1866 ));
1867 }
1868 width = src.width();
1869 height = src.height();
1870 format = fourcc_to_drm(src.fourcc());
1871 channels = src.channels();
1872 }
1873
1874 let fd = match &src.tensor {
1875 edgefirst_tensor::Tensor::Dma(dma_tensor) => dma_tensor.fd.as_raw_fd(),
1876 edgefirst_tensor::Tensor::Shm(_) => {
1877 return Err(Error::NotImplemented(
1878 "OpenGL EGLImage doesn't support SHM".to_string(),
1879 ));
1880 }
1881 edgefirst_tensor::Tensor::Mem(_) => {
1882 return Err(Error::NotImplemented(
1883 "OpenGL EGLImage doesn't support MEM".to_string(),
1884 ));
1885 }
1886 };
1887
1888 let plane0_pitch = if src.fourcc() == NV12 {
1891 width
1892 } else {
1893 width * channels
1894 };
1895
1896 let mut egl_img_attr = vec![
1897 egl_ext::LINUX_DRM_FOURCC as Attrib,
1898 format as Attrib,
1899 khronos_egl::WIDTH as Attrib,
1900 width as Attrib,
1901 khronos_egl::HEIGHT as Attrib,
1902 height as Attrib,
1903 egl_ext::DMA_BUF_PLANE0_PITCH as Attrib,
1904 plane0_pitch as Attrib,
1905 egl_ext::DMA_BUF_PLANE0_OFFSET as Attrib,
1906 0 as Attrib,
1907 egl_ext::DMA_BUF_PLANE0_FD as Attrib,
1908 fd as Attrib,
1909 egl::IMAGE_PRESERVED as Attrib,
1910 egl::TRUE as Attrib,
1911 ];
1912
1913 if src.fourcc() == NV12 {
1915 let uv_offset = width * height; egl_img_attr.append(&mut vec![
1917 egl_ext::DMA_BUF_PLANE1_FD as Attrib,
1918 fd as Attrib,
1919 egl_ext::DMA_BUF_PLANE1_OFFSET as Attrib,
1920 uv_offset as Attrib,
1921 egl_ext::DMA_BUF_PLANE1_PITCH as Attrib,
1922 width as Attrib, ]);
1924 }
1925
1926 if matches!(src.fourcc(), YUYV | NV12) {
1927 egl_img_attr.append(&mut vec![
1928 egl_ext::YUV_COLOR_SPACE_HINT as Attrib,
1929 egl_ext::ITU_REC709 as Attrib,
1930 egl_ext::SAMPLE_RANGE_HINT as Attrib,
1931 egl_ext::YUV_NARROW_RANGE as Attrib,
1932 ]);
1933 }
1934
1935 egl_img_attr.push(khronos_egl::NONE as Attrib);
1936
1937 match self.new_egl_image_owned(egl_ext::LINUX_DMA_BUF, &egl_img_attr) {
1938 Ok(v) => Ok(v),
1939 Err(e) => Err(e),
1940 }
1941 }
1942
1943 fn new_egl_image_owned(
1944 &'_ self,
1945 target: egl::Enum,
1946 attrib_list: &[Attrib],
1947 ) -> Result<EglImage, Error> {
1948 let image = GlContext::egl_create_image_with_fallback(
1949 &self.gl_context.egl,
1950 self.gl_context.display.as_display(),
1951 unsafe { egl::Context::from_ptr(egl::NO_CONTEXT) },
1952 target,
1953 unsafe { egl::ClientBuffer::from_ptr(null_mut()) },
1954 attrib_list,
1955 )?;
1956 Ok(EglImage {
1957 egl_image: image,
1958 display: self.gl_context.display.as_display(),
1959 egl: self.gl_context.egl.clone(),
1960 })
1961 }
1962
1963 fn reshape_segmentation_to_rgba(&self, segmentation: &[u8], shape: [usize; 3]) -> Vec<u8> {
1965 let [height, width, classes] = shape;
1966
1967 let n_layer_stride = height * width * 4;
1968 let n_row_stride = width * 4;
1969 let n_col_stride = 4;
1970 let row_stride = width * classes;
1971 let col_stride = classes;
1972
1973 let mut new_segmentation = vec![0u8; n_layer_stride * classes.div_ceil(4)];
1974
1975 for i in 0..height {
1976 for j in 0..width {
1977 for k in 0..classes.div_ceil(4) * 4 {
1978 if k >= classes {
1979 new_segmentation[n_layer_stride * (k / 4)
1980 + i * n_row_stride
1981 + j * n_col_stride
1982 + k % 4] = 0;
1983 } else {
1984 new_segmentation[n_layer_stride * (k / 4)
1985 + i * n_row_stride
1986 + j * n_col_stride
1987 + k % 4] = segmentation[i * row_stride + j * col_stride + k];
1988 }
1989 }
1990 }
1991 }
1992
1993 new_segmentation
1994 }
1995
    /// Renders a ModelPack whole-frame segmentation map over `dst_roi`
    /// (clip-space coordinates).
    ///
    /// The HWC class map is repacked into RGBA layers (4 classes per layer)
    /// and uploaded as a `TEXTURE_2D_ARRAY`; the segmentation shader receives
    /// the last class index via the `background_index` uniform.
    #[cfg(feature = "decoder")]
    fn render_modelpack_segmentation(
        &mut self,
        dst_roi: RegionOfInterest,
        segmentation: &[u8],
        shape: [usize; 3],
    ) -> Result<(), crate::Error> {
        log::debug!("start render_segmentation_to_image");

        let new_segmentation = self.reshape_segmentation_to_rgba(segmentation, shape);

        let [height, width, classes] = shape;

        let format = gls::gl::RGBA;
        let texture_target = gls::gl::TEXTURE_2D_ARRAY;
        // The last channel index is treated as the background class.
        self.segmentation_program
            .load_uniform_1i(c"background_index", shape[2] as i32 - 1)?;

        gls::use_program(self.segmentation_program.id);

        gls::bind_texture(texture_target, self.segmentation_texture.id);
        gls::active_texture(gls::gl::TEXTURE0);
        gls::tex_parameteri(
            texture_target,
            gls::gl::TEXTURE_MIN_FILTER,
            gls::gl::LINEAR as i32,
        );
        gls::tex_parameteri(
            texture_target,
            gls::gl::TEXTURE_MAG_FILTER,
            gls::gl::LINEAR as i32,
        );
        gls::tex_parameteri(
            texture_target,
            gls::gl::TEXTURE_WRAP_S,
            gls::gl::CLAMP_TO_EDGE as i32,
        );

        gls::tex_parameteri(
            texture_target,
            gls::gl::TEXTURE_WRAP_T,
            gls::gl::CLAMP_TO_EDGE as i32,
        );

        // One texture-array layer per group of 4 classes.
        gls::tex_image3d(
            texture_target,
            0,
            format as i32,
            width as i32,
            height as i32,
            classes.div_ceil(4) as i32,
            0,
            format,
            gls::gl::UNSIGNED_BYTE,
            Some(&new_segmentation),
        );

        // Sample the whole mask; top/bottom swapped for GL's bottom-up rows.
        let src_roi = RegionOfInterest {
            left: 0.,
            top: 1.,
            right: 1.,
            bottom: 0.,
        };

        unsafe {
            gls::gl::BindBuffer(gls::gl::ARRAY_BUFFER, self.vertex_buffer.id);
            gls::gl::EnableVertexAttribArray(self.vertex_buffer.buffer_index);

            // Quad corners (x, y, z) in clip space.
            let camera_vertices: [f32; 12] = [
                dst_roi.left,
                dst_roi.top,
                0.,
                dst_roi.right,
                dst_roi.top,
                0.,
                dst_roi.right,
                dst_roi.bottom,
                0.,
                dst_roi.left,
                dst_roi.bottom,
                0.,
            ];
            gls::gl::BufferSubData(
                gls::gl::ARRAY_BUFFER,
                0,
                (size_of::<f32>() * camera_vertices.len()) as isize,
                camera_vertices.as_ptr() as *const c_void,
            );

            gls::gl::BindBuffer(gls::gl::ARRAY_BUFFER, self.texture_buffer.id);
            gls::gl::EnableVertexAttribArray(self.texture_buffer.buffer_index);

            let texture_vertices: [f32; 8] = [
                src_roi.left,
                src_roi.top,
                src_roi.right,
                src_roi.top,
                src_roi.right,
                src_roi.bottom,
                src_roi.left,
                src_roi.bottom,
            ];
            gls::gl::BufferSubData(
                gls::gl::ARRAY_BUFFER,
                0,
                (size_of::<f32>() * 8) as isize,
                (texture_vertices[0..]).as_ptr() as *const c_void,
            );

            let vertices_index: [u32; 4] = [0, 1, 2, 3];
            gls::gl::DrawElements(
                gls::gl::TRIANGLE_FAN,
                vertices_index.len() as i32,
                gls::gl::UNSIGNED_INT,
                vertices_index.as_ptr() as *const c_void,
            );
        }

        Ok(())
    }
2118
    /// Renders one YOLO-style instance mask (single-channel, `shape` =
    /// `[height, width]`) over `dst_roi`, with the instanced-segmentation
    /// shader receiving `class` via the `class_index` uniform.
    #[cfg(feature = "decoder")]
    fn render_yolo_segmentation(
        &mut self,
        dst_roi: RegionOfInterest,
        segmentation: &[u8],
        shape: [usize; 2],
        class: usize,
    ) -> Result<(), crate::Error> {
        log::debug!("start render_yolo_segmentation");

        let [height, width] = shape;

        // The mask has a single channel, uploaded as RED.
        let format = gls::gl::RED;
        let texture_target = gls::gl::TEXTURE_2D;
        gls::use_program(self.instanced_segmentation_program.id);
        self.instanced_segmentation_program
            .load_uniform_1i(c"class_index", class as i32)?;
        gls::bind_texture(texture_target, self.segmentation_texture.id);
        gls::active_texture(gls::gl::TEXTURE0);
        gls::tex_parameteri(
            texture_target,
            gls::gl::TEXTURE_MIN_FILTER,
            gls::gl::LINEAR as i32,
        );
        gls::tex_parameteri(
            texture_target,
            gls::gl::TEXTURE_MAG_FILTER,
            gls::gl::LINEAR as i32,
        );
        gls::tex_parameteri(
            texture_target,
            gls::gl::TEXTURE_WRAP_S,
            gls::gl::CLAMP_TO_EDGE as i32,
        );

        gls::tex_parameteri(
            texture_target,
            gls::gl::TEXTURE_WRAP_T,
            gls::gl::CLAMP_TO_EDGE as i32,
        );

        gls::tex_image2d(
            texture_target,
            0,
            format as i32,
            width as i32,
            height as i32,
            0,
            format,
            gls::gl::UNSIGNED_BYTE,
            Some(segmentation),
        );

        // Sample the whole mask; top/bottom swapped for GL's bottom-up rows.
        let src_roi = RegionOfInterest {
            left: 0.,
            top: 1.,
            right: 1.,
            bottom: 0.,
        };

        unsafe {
            gls::gl::BindBuffer(gls::gl::ARRAY_BUFFER, self.vertex_buffer.id);
            gls::gl::EnableVertexAttribArray(self.vertex_buffer.buffer_index);

            // Quad corners (x, y, z) in clip space.
            let camera_vertices: [f32; 12] = [
                dst_roi.left,
                dst_roi.top,
                0.,
                dst_roi.right,
                dst_roi.top,
                0.,
                dst_roi.right,
                dst_roi.bottom,
                0.,
                dst_roi.left,
                dst_roi.bottom,
                0.,
            ];
            gls::gl::BufferSubData(
                gls::gl::ARRAY_BUFFER,
                0,
                (size_of::<f32>() * camera_vertices.len()) as isize,
                camera_vertices.as_ptr() as *const c_void,
            );

            gls::gl::BindBuffer(gls::gl::ARRAY_BUFFER, self.texture_buffer.id);
            gls::gl::EnableVertexAttribArray(self.texture_buffer.buffer_index);

            let texture_vertices: [f32; 8] = [
                src_roi.left,
                src_roi.top,
                src_roi.right,
                src_roi.top,
                src_roi.right,
                src_roi.bottom,
                src_roi.left,
                src_roi.bottom,
            ];
            gls::gl::BufferSubData(
                gls::gl::ARRAY_BUFFER,
                0,
                (size_of::<f32>() * 8) as isize,
                (texture_vertices).as_ptr() as *const c_void,
            );

            let vertices_index: [u32; 4] = [0, 1, 2, 3];
            gls::gl::DrawElements(
                gls::gl::TRIANGLE_FAN,
                vertices_index.len() as i32,
                gls::gl::UNSIGNED_INT,
                vertices_index.as_ptr() as *const c_void,
            );
            // Wait for the draw so the caller can issue the next mask safely.
            gls::gl::Finish();
        }

        Ok(())
    }
2236
2237 fn render_segmentation(
2238 &mut self,
2239 detect: &[DetectBox],
2240 segmentation: &[Segmentation],
2241 ) -> crate::Result<()> {
2242 if segmentation.is_empty() {
2243 return Ok(());
2244 }
2245
2246 let is_modelpack = segmentation[0].segmentation.shape()[2] > 1;
2247 let cvt_screen_coord = |normalized| normalized * 2.0 - 1.0;
2249 if is_modelpack {
2250 let seg = &segmentation[0];
2251 let dst_roi = RegionOfInterest {
2252 left: cvt_screen_coord(seg.xmin),
2253 top: cvt_screen_coord(seg.ymax),
2254 right: cvt_screen_coord(seg.xmax),
2255 bottom: cvt_screen_coord(seg.ymin),
2256 };
2257 let segment = seg.segmentation.as_standard_layout();
2258 let slice = segment.as_slice().ok_or(Error::Internal(
2259 "Cannot get slice of segmentation".to_owned(),
2260 ))?;
2261
2262 self.render_modelpack_segmentation(
2263 dst_roi,
2264 slice,
2265 [
2266 seg.segmentation.shape()[0],
2267 seg.segmentation.shape()[1],
2268 seg.segmentation.shape()[2],
2269 ],
2270 )?;
2271 } else {
2272 for (seg, det) in segmentation.iter().zip(detect) {
2273 let dst_roi = RegionOfInterest {
2274 left: cvt_screen_coord(seg.xmin),
2275 top: cvt_screen_coord(seg.ymax),
2276 right: cvt_screen_coord(seg.xmax),
2277 bottom: cvt_screen_coord(seg.ymin),
2278 };
2279
2280 let segment = seg.segmentation.as_standard_layout();
2281 let slice = segment.as_slice().ok_or(Error::Internal(
2282 "Cannot get slice of segmentation".to_owned(),
2283 ))?;
2284
2285 self.render_yolo_segmentation(
2286 dst_roi,
2287 slice,
2288 [seg.segmentation.shape()[0], seg.segmentation.shape()[1]],
2289 det.label,
2290 )?;
2291 }
2292 }
2293
2294 gls::disable(gls::gl::BLEND);
2295 Ok(())
2296 }
2297
    /// Draws a hollow rectangle (3-pixel border) for each detection box,
    /// using the color program with the box's class index as uniform.
    fn render_box(&mut self, dst: &TensorImage, detect: &[DetectBox]) -> Result<(), Error> {
        unsafe {
            gls::gl::UseProgram(self.color_program.id);
            // Normalized [0, 1] coordinates -> clip space [-1, 1].
            let rescale = |x: f32| x * 2.0 - 1.0;
            // Border width in destination pixels.
            let thickness = 3.0;
            for d in detect {
                self.color_program
                    .load_uniform_1i(c"class_index", d.label as i32)?;
                gls::gl::BindBuffer(gls::gl::ARRAY_BUFFER, self.vertex_buffer.id);
                gls::gl::EnableVertexAttribArray(self.vertex_buffer.buffer_index);
                let bbox: [f32; 4] = d.bbox.into();
                // The box expanded outward by the border thickness.
                let outer_box = [
                    bbox[0] - thickness / dst.width() as f32,
                    bbox[1] - thickness / dst.height() as f32,
                    bbox[2] + thickness / dst.width() as f32,
                    bbox[3] + thickness / dst.height() as f32,
                ];
                // Vertices 0-3: inner rectangle; 4-7: outer rectangle (x, y, z).
                let camera_vertices: [f32; 24] = [
                    rescale(bbox[0]),
                    rescale(bbox[3]),
                    0.,
                    rescale(bbox[2]),
                    rescale(bbox[3]),
                    0.,
                    rescale(bbox[2]),
                    rescale(bbox[1]),
                    0.,
                    rescale(bbox[0]),
                    rescale(bbox[1]),
                    0.,
                    rescale(outer_box[0]),
                    rescale(outer_box[3]),
                    0.,
                    rescale(outer_box[2]),
                    rescale(outer_box[3]),
                    0.,
                    rescale(outer_box[2]),
                    rescale(outer_box[1]),
                    0.,
                    rescale(outer_box[0]),
                    rescale(outer_box[1]),
                    0.,
                ];
                gls::gl::BufferData(
                    gls::gl::ARRAY_BUFFER,
                    (size_of::<f32>() * camera_vertices.len()) as isize,
                    camera_vertices.as_ptr() as *const c_void,
                    gls::gl::DYNAMIC_DRAW,
                );

                // Triangle strip alternating inner (0-3) and outer (4-7)
                // vertices so only the border ring is filled.
                let vertices_index: [u32; 10] = [0, 1, 5, 2, 6, 3, 7, 0, 4, 5];
                gls::gl::DrawElements(
                    gls::gl::TRIANGLE_STRIP,
                    vertices_index.len() as i32,
                    gls::gl::UNSIGNED_INT,
                    vertices_index.as_ptr() as *const c_void,
                );
            }
        }
        check_gl_error(function!(), line!())?;
        Ok(())
    }
2360}
/// RAII wrapper around an `EGLImage`; the image is destroyed on drop.
struct EglImage {
    /// Handle returned by EGL image creation (possibly the KHR fallback).
    egl_image: egl::Image,
    /// EGL instance used to create — and later destroy — the image.
    egl: Rc<Egl>,
    /// Display the image was created on.
    display: egl::Display,
}
2366
impl Drop for EglImage {
    fn drop(&mut self) {
        // NO_IMAGE is the null handle; nothing to release.
        if self.egl_image.as_ptr() == egl::NO_IMAGE {
            return;
        }

        let e = GlContext::egl_destory_image_with_fallback(&self.egl, self.display, self.egl_image);
        if let Err(e) = e {
            // Drop cannot propagate errors; log and continue.
            error!("Could not destroy EGL image: {e:?}");
        }
    }
}
2379
/// GL texture handle together with the parameters of its last storage
/// allocation, used by `update_texture` to choose between a full
/// `TexImage2D` reallocation and a cheaper `TexSubImage2D` update.
struct Texture {
    /// GL texture object name.
    id: u32,
    /// Target the storage was last allocated for (0 until first upload).
    target: gls::gl::types::GLenum,
    /// Width of the last allocation in pixels.
    width: usize,
    /// Height of the last allocation in pixels.
    height: usize,
    /// Pixel format of the last allocation (0 until first upload).
    format: gls::gl::types::GLenum,
}
2387
2388impl Default for Texture {
2389 fn default() -> Self {
2390 Self::new()
2391 }
2392}
2393
impl Texture {
    /// Generates a new GL texture name; the target/size/format metadata is
    /// filled in lazily by [`Self::update_texture`].
    fn new() -> Self {
        let mut id = 0;
        unsafe { gls::gl::GenTextures(1, &raw mut id) };
        Self {
            id,
            target: 0,
            width: 0,
            height: 0,
            format: 0,
        }
    }

    /// Uploads `data` into the texture, which the caller must already have
    /// bound to `target`. Reallocates storage with `TexImage2D` only when
    /// the target, dimensions or format changed since the last upload;
    /// otherwise performs an in-place `TexSubImage2D` update.
    fn update_texture(
        &mut self,
        target: gls::gl::types::GLenum,
        width: usize,
        height: usize,
        format: gls::gl::types::GLenum,
        data: &[u8],
    ) {
        if target != self.target
            || width != self.width
            || height != self.height
            || format != self.format
        {
            unsafe {
                gls::gl::TexImage2D(
                    target,
                    0,
                    format as i32,
                    width as i32,
                    height as i32,
                    0,
                    format,
                    gls::gl::UNSIGNED_BYTE,
                    data.as_ptr() as *const c_void,
                );
            }
            // Remember the storage parameters so the next matching upload can
            // take the cheaper TexSubImage2D path.
            self.target = target;
            self.format = format;
            self.width = width;
            self.height = height;
        } else {
            unsafe {
                gls::gl::TexSubImage2D(
                    target,
                    0,
                    0,
                    0,
                    width as i32,
                    height as i32,
                    format,
                    gls::gl::UNSIGNED_BYTE,
                    data.as_ptr() as *const c_void,
                );
            }
        }
    }
}
2454
impl Drop for Texture {
    fn drop(&mut self) {
        // Release the GL texture name allocated in `Texture::new`.
        unsafe { gls::gl::DeleteTextures(1, &raw mut self.id) };
    }
}
2460
/// A GL vertex buffer object bound to a fixed vertex-attribute slot.
struct Buffer {
    id: u32,
    // Attribute location this buffer feeds; configured once in `new`.
    buffer_index: u32,
}
2465
impl Buffer {
    /// Creates a vertex buffer wired to attribute slot `buffer_index`,
    /// preallocating room for `max_points` points of `size_per_point` f32
    /// components each (GL_DYNAMIC_DRAW, contents uploaded later).
    fn new(buffer_index: u32, size_per_point: usize, max_points: usize) -> Buffer {
        let mut id = 0;
        unsafe {
            gls::gl::EnableVertexAttribArray(buffer_index);
            gls::gl::GenBuffers(1, &raw mut id);
            gls::gl::BindBuffer(gls::gl::ARRAY_BUFFER, id);
            // Attribute layout: tightly packed floats (stride 0), not
            // normalized, starting at offset 0 of the buffer bound above.
            gls::gl::VertexAttribPointer(
                buffer_index,
                size_per_point as i32,
                gls::gl::FLOAT,
                gls::gl::FALSE,
                0,
                null(),
            );
            // Reserve storage only (null data pointer); render code fills
            // it in per frame.
            gls::gl::BufferData(
                gls::gl::ARRAY_BUFFER,
                (size_of::<f32>() * size_per_point * max_points) as isize,
                null(),
                gls::gl::DYNAMIC_DRAW,
            );
        }

        Buffer { id, buffer_index }
    }
}
2492
impl Drop for Buffer {
    fn drop(&mut self) {
        // NOTE(review): the attribute array enabled in `Buffer::new` is
        // not disabled here — confirm that is intentional.
        unsafe { gls::gl::DeleteBuffers(1, &raw mut self.id) };
    }
}
2498
/// RAII handle for a GL framebuffer object (FBO).
struct FrameBuffer {
    id: u32,
}
2502
2503impl FrameBuffer {
2504 fn new() -> FrameBuffer {
2505 let mut id = 0;
2506 unsafe {
2507 gls::gl::GenFramebuffers(1, &raw mut id);
2508 }
2509
2510 FrameBuffer { id }
2511 }
2512
2513 fn bind(&self) {
2514 unsafe { gls::gl::BindFramebuffer(gls::gl::FRAMEBUFFER, self.id) };
2515 }
2516
2517 fn unbind(&self) {
2518 unsafe { gls::gl::BindFramebuffer(gls::gl::FRAMEBUFFER, 0) };
2519 }
2520}
2521
impl Drop for FrameBuffer {
    fn drop(&mut self) {
        // Unbind first so the FBO being deleted is never left current.
        self.unbind();
        unsafe {
            gls::gl::DeleteFramebuffers(1, &raw mut self.id);
        }
    }
}
2530
/// A linked GL shader program together with its two shader objects; all
/// three are deleted together on drop.
pub struct GlProgram {
    id: u32,
    vertex_id: u32,
    fragment_id: u32,
}
2536
2537impl GlProgram {
2538 fn new(vertex_shader: &str, fragment_shader: &str) -> Result<Self, crate::Error> {
2539 let id = unsafe { gls::gl::CreateProgram() };
2540 let vertex_id = unsafe { gls::gl::CreateShader(gls::gl::VERTEX_SHADER) };
2541 if compile_shader_from_str(vertex_id, vertex_shader, "shader_vert").is_err() {
2542 log::debug!("Vertex shader source:\n{}", vertex_shader);
2543 return Err(crate::Error::OpenGl(format!(
2544 "Shader compile error: {vertex_shader}"
2545 )));
2546 }
2547 unsafe {
2548 gls::gl::AttachShader(id, vertex_id);
2549 }
2550
2551 let fragment_id = unsafe { gls::gl::CreateShader(gls::gl::FRAGMENT_SHADER) };
2552 if compile_shader_from_str(fragment_id, fragment_shader, "shader_frag").is_err() {
2553 log::debug!("Fragment shader source:\n{}", fragment_shader);
2554 return Err(crate::Error::OpenGl(format!(
2555 "Shader compile error: {fragment_shader}"
2556 )));
2557 }
2558
2559 unsafe {
2560 gls::gl::AttachShader(id, fragment_id);
2561 gls::gl::LinkProgram(id);
2562 gls::gl::UseProgram(id);
2563 }
2564
2565 Ok(Self {
2566 id,
2567 vertex_id,
2568 fragment_id,
2569 })
2570 }
2571
2572 #[allow(dead_code)]
2573 fn load_uniform_1f(&self, name: &CStr, value: f32) -> Result<(), crate::Error> {
2574 unsafe {
2575 gls::gl::UseProgram(self.id);
2576 let location = gls::gl::GetUniformLocation(self.id, name.as_ptr());
2577 gls::gl::Uniform1f(location, value);
2578 }
2579 Ok(())
2580 }
2581
2582 #[allow(dead_code)]
2583 fn load_uniform_1i(&self, name: &CStr, value: i32) -> Result<(), crate::Error> {
2584 unsafe {
2585 gls::gl::UseProgram(self.id);
2586 let location = gls::gl::GetUniformLocation(self.id, name.as_ptr());
2587 gls::gl::Uniform1i(location, value);
2588 }
2589 Ok(())
2590 }
2591
2592 fn load_uniform_4fv(&self, name: &CStr, value: &[[f32; 4]]) -> Result<(), crate::Error> {
2593 unsafe {
2594 gls::gl::UseProgram(self.id);
2595 let location = gls::gl::GetUniformLocation(self.id, name.as_ptr());
2596 if location == -1 {
2597 return Err(crate::Error::OpenGl(format!(
2598 "Could not find uniform location for '{}'",
2599 name.to_string_lossy().into_owned()
2600 )));
2601 }
2602 gls::gl::Uniform4fv(location, value.len() as i32, value.as_flattened().as_ptr());
2603 }
2604 check_gl_error(function!(), line!())?;
2605 Ok(())
2606 }
2607}
2608
impl Drop for GlProgram {
    fn drop(&mut self) {
        unsafe {
            // Deleting the program detaches its shaders; the DeleteShader
            // calls then let GL free them.
            gls::gl::DeleteProgram(self.id);
            gls::gl::DeleteShader(self.fragment_id);
            gls::gl::DeleteShader(self.vertex_id);
        }
    }
}
2618
2619fn compile_shader_from_str(shader: u32, shader_source: &str, shader_name: &str) -> Result<(), ()> {
2620 let src = match CString::from_str(shader_source) {
2621 Ok(v) => v,
2622 Err(_) => return Err(()),
2623 };
2624 let src_ptr = src.as_ptr();
2625 unsafe {
2626 gls::gl::ShaderSource(shader, 1, &raw const src_ptr, null());
2627 gls::gl::CompileShader(shader);
2628 let mut is_compiled = 0;
2629 gls::gl::GetShaderiv(shader, gls::gl::COMPILE_STATUS, &raw mut is_compiled);
2630 if is_compiled == 0 {
2631 let mut max_length = 0;
2632 gls::gl::GetShaderiv(shader, gls::gl::INFO_LOG_LENGTH, &raw mut max_length);
2633 let mut error_log: Vec<u8> = vec![0; max_length as usize];
2634 gls::gl::GetShaderInfoLog(
2635 shader,
2636 max_length,
2637 &raw mut max_length,
2638 error_log.as_mut_ptr() as *mut c_char,
2639 );
2640 error!(
2641 "Shader '{}' failed: {:?}\n",
2642 shader_name,
2643 CString::from_vec_with_nul(error_log)
2644 .unwrap()
2645 .into_string()
2646 .unwrap()
2647 );
2648 gls::gl::DeleteShader(shader);
2649 return Err(());
2650 }
2651 Ok(())
2652 }
2653}
2654
2655fn check_gl_error(name: &str, line: u32) -> Result<(), Error> {
2656 unsafe {
2657 let err = gls::gl::GetError();
2658 if err != gls::gl::NO_ERROR {
2659 error!("GL Error: {name}:{line}: {err:#X}");
2660 return Err(Error::OpenGl(format!("{err:#X}")));
2662 }
2663 }
2664 Ok(())
2665}
2666
2667fn fourcc_to_drm(fourcc: FourCharCode) -> DrmFourcc {
2668 match fourcc {
2669 RGBA => DrmFourcc::Abgr8888,
2670 YUYV => DrmFourcc::Yuyv,
2671 RGB => DrmFourcc::Bgr888,
2672 GREY => DrmFourcc::R8,
2673 NV12 => DrmFourcc::Nv12,
2674 _ => todo!(),
2675 }
2676}
2677
/// EGL extension token values not exposed by the `khronos_egl` crate.
///
/// The `0x327x` attribute tokens and the ITU/range/siting hint values are
/// defined by the `EGL_EXT_image_dma_buf_import` extension; the platform
/// tokens come from `EGL_KHR_platform_gbm` and `EGL_EXT_platform_device`.
mod egl_ext {
    #![allow(dead_code)]
    pub(crate) const LINUX_DMA_BUF: u32 = 0x3270;
    pub(crate) const LINUX_DRM_FOURCC: u32 = 0x3271;
    pub(crate) const DMA_BUF_PLANE0_FD: u32 = 0x3272;
    pub(crate) const DMA_BUF_PLANE0_OFFSET: u32 = 0x3273;
    pub(crate) const DMA_BUF_PLANE0_PITCH: u32 = 0x3274;
    pub(crate) const DMA_BUF_PLANE1_FD: u32 = 0x3275;
    pub(crate) const DMA_BUF_PLANE1_OFFSET: u32 = 0x3276;
    pub(crate) const DMA_BUF_PLANE1_PITCH: u32 = 0x3277;
    pub(crate) const DMA_BUF_PLANE2_FD: u32 = 0x3278;
    pub(crate) const DMA_BUF_PLANE2_OFFSET: u32 = 0x3279;
    pub(crate) const DMA_BUF_PLANE2_PITCH: u32 = 0x327A;
    pub(crate) const YUV_COLOR_SPACE_HINT: u32 = 0x327B;
    pub(crate) const SAMPLE_RANGE_HINT: u32 = 0x327C;
    pub(crate) const YUV_CHROMA_HORIZONTAL_SITING_HINT: u32 = 0x327D;
    pub(crate) const YUV_CHROMA_VERTICAL_SITING_HINT: u32 = 0x327E;

    // Values for YUV_COLOR_SPACE_HINT.
    pub(crate) const ITU_REC601: u32 = 0x327F;
    pub(crate) const ITU_REC709: u32 = 0x3280;
    pub(crate) const ITU_REC2020: u32 = 0x3281;

    // Values for SAMPLE_RANGE_HINT.
    pub(crate) const YUV_FULL_RANGE: u32 = 0x3282;
    pub(crate) const YUV_NARROW_RANGE: u32 = 0x3283;

    // Values for the chroma siting hints.
    pub(crate) const YUV_CHROMA_SITING_0: u32 = 0x3284;
    pub(crate) const YUV_CHROMA_SITING_0_5: u32 = 0x3285;

    pub(crate) const PLATFORM_GBM_KHR: u32 = 0x31D7;

    pub(crate) const PLATFORM_DEVICE_EXT: u32 = 0x313F;
}
2710
/// GLSL ES 3.00 vertex shader shared by all pipelines: passes position and
/// texture coordinates through unchanged.
fn generate_vertex_shader() -> &'static str {
    "\
#version 300 es
precision mediump float;
layout(location = 0) in vec3 pos;
layout(location = 1) in vec2 texCoord;

out vec3 fragPos;
out vec2 tc;

void main() {
    fragPos = pos;
    tc = texCoord;

    gl_Position = vec4(pos, 1.0);
}
"
}
2729
/// Fragment shader that samples a plain `sampler2D` texture straight
/// through to the output color.
fn generate_texture_fragment_shader() -> &'static str {
    "\
#version 300 es

precision mediump float;
uniform sampler2D tex;
in vec3 fragPos;
in vec2 tc;

out vec4 color;

void main(){
    color = texture(tex, tc);
}
"
}
2746
/// Fragment shader for YUV sources imported as EGLImages: samples a
/// `samplerExternalOES` texture (the driver performs the YUV→RGB
/// conversion behind the external sampler).
fn generate_texture_fragment_shader_yuv() -> &'static str {
    "\
#version 300 es
#extension GL_OES_EGL_image_external_essl3 : require
precision mediump float;
uniform samplerExternalOES tex;
in vec3 fragPos;
in vec2 tc;

out vec4 color;

void main(){
    color = texture(tex, tc);
}
"
}
2763
/// Fragment shader used for planar-RGB sources.
///
/// NOTE(review): the GLSL body is currently identical to
/// `generate_texture_fragment_shader_yuv` (external-OES passthrough);
/// confirm whether a planar-specific shader was intended here.
fn generate_planar_rgb_shader() -> &'static str {
    "\
#version 300 es
#extension GL_OES_EGL_image_external_essl3 : require
precision mediump float;
uniform samplerExternalOES tex;
in vec3 fragPos;
in vec2 tc;

out vec4 color;

void main(){
    color = texture(tex, tc);
}
"
}
2780
/// Fragment shader for dense segmentation overlays.
///
/// Class scores arrive packed four-per-texel in a `sampler2DArray` (layer
/// `i` holds classes `4i..4i+3`). The shader computes the argmax class per
/// pixel, discards the fragment when it equals `background_index`, and
/// otherwise emits the class color from the 20-entry `colors` palette
/// (class index taken modulo 20).
fn generate_segmentation_shader() -> &'static str {
    "\
#version 300 es
precision mediump float;
precision mediump sampler2DArray;

uniform sampler2DArray tex;
uniform vec4 colors[20];
uniform int background_index;

in vec3 fragPos;
in vec2 tc;
in vec4 fragColor;

out vec4 color;

float max_arg(const in vec4 args, out int argmax) {
    if (args[0] >= args[1] && args[0] >= args[2] && args[0] >= args[3]) {
        argmax = 0;
        return args[0];
    }
    if (args[1] >= args[0] && args[1] >= args[2] && args[1] >= args[3]) {
        argmax = 1;
        return args[1];
    }
    if (args[2] >= args[0] && args[2] >= args[1] && args[2] >= args[3]) {
        argmax = 2;
        return args[2];
    }
    argmax = 3;
    return args[3];
}

void main() {
    mediump int layers = textureSize(tex, 0).z;
    float max_all = -4.0;
    int max_ind = 0;
    for (int i = 0; i < layers; i++) {
        vec4 d = texture(tex, vec3(tc, i));
        int max_ind_ = 0;
        float max_ = max_arg(d, max_ind_);
        if (max_ <= max_all) { continue; }
        max_all = max_;
        max_ind = i*4 + max_ind_;
    }
    if (max_ind == background_index) {
        discard;
    }
    max_ind = max_ind % 20;
    color = colors[max_ind];
}
"
}
2836
/// Fragment shader for instance-segmentation masks: thresholds the red
/// channel of `mask0` at 0.5 and paints surviving fragments with the color
/// of `class_index` (modulo the 20-entry palette).
fn generate_instanced_segmentation_shader() -> &'static str {
    "\
#version 300 es
precision mediump float;
uniform sampler2D mask0;
uniform vec4 colors[20];
uniform int class_index;
in vec3 fragPos;
in vec2 tc;
in vec4 fragColor;

out vec4 color;
void main() {
    float r0 = texture(mask0, tc).r;
    int arg = int(r0>=0.5);
    if (arg == 0) {
        discard;
    }
    color = colors[class_index % 20];
}
"
}
2859
/// Fragment shader that fills with a single solid class color (used for
/// drawing bounding boxes and similar flat geometry).
fn generate_color_shader() -> &'static str {
    "\
#version 300 es
precision mediump float;
uniform vec4 colors[20];
uniform int class_index;

out vec4 color;
void main() {
    int index = class_index % 20;
    color = colors[index];
}
"
}
2874
#[cfg(test)]
#[cfg(feature = "opengl")]
/// Integration tests for the GL render/convert pipeline. Tests that need a
/// live GL context skip themselves (rather than fail) when no context can
/// be created, so they remain runnable on headless CI.
mod gl_tests {
    use super::*;
    use crate::{TensorImage, RGBA};
    #[cfg(feature = "dma_test_formats")]
    use crate::{NV12, YUYV};
    use edgefirst_tensor::TensorTrait;
    #[cfg(feature = "dma_test_formats")]
    use edgefirst_tensor::{TensorMapTrait, TensorMemory};
    use image::buffer::ConvertBuffer;
    use ndarray::Array3;

    /// Renders a ModelPack-style dense segmentation over a test image
    /// (default tensor memory).
    #[test]
    #[cfg(feature = "decoder")]
    fn test_segmentation() {
        use edgefirst_decoder::Segmentation;

        if !is_opengl_available() {
            eprintln!("SKIPPED: {} - OpenGL not available", function!());
            return;
        }

        let mut image = TensorImage::load(
            include_bytes!("../../../testdata/giraffe.jpg"),
            Some(RGBA),
            None,
        )
        .unwrap();

        // Fixture is stored CHW (2x160x160); swap axes to HWC layout.
        let mut segmentation = Array3::from_shape_vec(
            (2, 160, 160),
            include_bytes!("../../../testdata/modelpack_seg_2x160x160.bin").to_vec(),
        )
        .unwrap();
        segmentation.swap_axes(0, 1);
        segmentation.swap_axes(1, 2);
        let segmentation = segmentation.as_standard_layout().to_owned();

        // Full-frame segmentation region.
        let seg = Segmentation {
            segmentation,
            xmin: 0.0,
            ymin: 0.0,
            xmax: 1.0,
            ymax: 1.0,
        };

        let mut renderer = GLProcessorThreaded::new().unwrap();
        renderer.render_to_image(&mut image, &[], &[seg]).unwrap();
    }

    /// Same as `test_segmentation` but forces plain `Mem` tensor memory,
    /// exercising the non-DMA upload path.
    #[test]
    #[cfg(feature = "decoder")]
    fn test_segmentation_mem() {
        use edgefirst_decoder::Segmentation;

        if !is_opengl_available() {
            eprintln!("SKIPPED: {} - OpenGL not available", function!());
            return;
        }

        let mut image = TensorImage::load(
            include_bytes!("../../../testdata/giraffe.jpg"),
            Some(RGBA),
            Some(edgefirst_tensor::TensorMemory::Mem),
        )
        .unwrap();

        // Fixture is stored CHW (2x160x160); swap axes to HWC layout.
        let mut segmentation = Array3::from_shape_vec(
            (2, 160, 160),
            include_bytes!("../../../testdata/modelpack_seg_2x160x160.bin").to_vec(),
        )
        .unwrap();
        segmentation.swap_axes(0, 1);
        segmentation.swap_axes(1, 2);
        let segmentation = segmentation.as_standard_layout().to_owned();

        let seg = Segmentation {
            segmentation,
            xmin: 0.0,
            ymin: 0.0,
            xmax: 1.0,
            ymax: 1.0,
        };

        let mut renderer = GLProcessorThreaded::new().unwrap();
        renderer.render_to_image(&mut image, &[], &[seg]).unwrap();
    }

    /// Renders a YOLOv8-style cropped instance mask plus its detection box
    /// and compares the result against a stored reference rendering.
    #[test]
    #[cfg(feature = "decoder")]
    fn test_segmentation_yolo() {
        use edgefirst_decoder::Segmentation;
        use ndarray::Array3;

        if !is_opengl_available() {
            eprintln!("SKIPPED: {} - OpenGL not available", function!());
            return;
        }

        let mut image = TensorImage::load(
            include_bytes!("../../../testdata/giraffe.jpg"),
            Some(RGBA),
            None,
        )
        .unwrap();

        let segmentation = Array3::from_shape_vec(
            (76, 55, 1),
            include_bytes!("../../../testdata/yolov8_seg_crop_76x55.bin").to_vec(),
        )
        .unwrap();

        let detect = DetectBox {
            bbox: [0.59375, 0.25, 0.9375, 0.725].into(),
            score: 0.99,
            label: 1,
        };

        // The mask region matches the detection box (normalized coords).
        let seg = Segmentation {
            segmentation,
            xmin: 0.59375,
            ymin: 0.25,
            xmax: 0.9375,
            ymax: 0.725,
        };

        let mut renderer = GLProcessorThreaded::new().unwrap();
        renderer
            .set_class_colors(&[[255, 255, 0, 233], [128, 128, 255, 100]])
            .unwrap();
        renderer
            .render_to_image(&mut image, &[detect], &[seg])
            .unwrap();

        let expected = TensorImage::load(
            include_bytes!("../../../testdata/output_render_gl.jpg"),
            Some(RGBA),
            None,
        )
        .unwrap();

        compare_images(&image, &expected, 0.99, function!());
    }

    /// Smoke test: drawing a single detection box must not error.
    #[test]
    #[cfg(feature = "decoder")]
    fn test_boxes() {
        use edgefirst_decoder::DetectBox;

        if !is_opengl_available() {
            eprintln!("SKIPPED: {} - OpenGL not available", function!());
            return;
        }

        let mut image = TensorImage::load(
            include_bytes!("../../../testdata/giraffe.jpg"),
            Some(RGBA),
            None,
        )
        .unwrap();

        let detect = DetectBox {
            bbox: [0.59375, 0.25, 0.9375, 0.725].into(),
            score: 0.99,
            label: 0,
        };
        let mut renderer = GLProcessorThreaded::new().unwrap();
        renderer
            .set_class_colors(&[[255, 255, 0, 233], [128, 128, 255, 100]])
            .unwrap();
        renderer
            .render_to_image(&mut image, &[detect], &[])
            .unwrap();
    }

    // Probe once whether a GL context can be created; cached for all tests.
    static GL_AVAILABLE: std::sync::OnceLock<bool> = std::sync::OnceLock::new();
    fn is_opengl_available() -> bool {
        #[cfg(all(target_os = "linux", feature = "opengl"))]
        {
            *GL_AVAILABLE.get_or_init(|| GLProcessorThreaded::new().is_ok())
        }

        #[cfg(not(all(target_os = "linux", feature = "opengl")))]
        {
            false
        }
    }

    /// Asserts `img1` and `img2` are structurally similar (RMS similarity
    /// >= `threshold`); on failure saves a diff color map to `{name}.png`
    /// before panicking.
    fn compare_images(img1: &TensorImage, img2: &TensorImage, threshold: f64, name: &str) {
        assert_eq!(img1.height(), img2.height(), "Heights differ");
        assert_eq!(img1.width(), img2.width(), "Widths differ");
        assert_eq!(img1.fourcc(), img2.fourcc(), "FourCC differ");
        assert!(
            matches!(img1.fourcc(), RGB | RGBA | GREY | PLANAR_RGB),
            "FourCC must be RGB or RGBA for comparison"
        );

        // Normalize both inputs to RGB for the comparison library.
        let image1 = match img1.fourcc() {
            RGB => image::RgbImage::from_vec(
                img1.width() as u32,
                img1.height() as u32,
                img1.tensor().map().unwrap().to_vec(),
            )
            .unwrap(),
            RGBA => image::RgbaImage::from_vec(
                img1.width() as u32,
                img1.height() as u32,
                img1.tensor().map().unwrap().to_vec(),
            )
            .unwrap()
            .convert(),
            GREY => image::GrayImage::from_vec(
                img1.width() as u32,
                img1.height() as u32,
                img1.tensor().map().unwrap().to_vec(),
            )
            .unwrap()
            .convert(),
            // Planar RGB is viewed as a single grey image 3x the height.
            PLANAR_RGB => image::GrayImage::from_vec(
                img1.width() as u32,
                (img1.height() * 3) as u32,
                img1.tensor().map().unwrap().to_vec(),
            )
            .unwrap()
            .convert(),
            _ => return,
        };

        let image2 = match img2.fourcc() {
            RGB => image::RgbImage::from_vec(
                img2.width() as u32,
                img2.height() as u32,
                img2.tensor().map().unwrap().to_vec(),
            )
            .unwrap(),
            RGBA => image::RgbaImage::from_vec(
                img2.width() as u32,
                img2.height() as u32,
                img2.tensor().map().unwrap().to_vec(),
            )
            .unwrap()
            .convert(),
            GREY => image::GrayImage::from_vec(
                img2.width() as u32,
                img2.height() as u32,
                img2.tensor().map().unwrap().to_vec(),
            )
            .unwrap()
            .convert(),
            PLANAR_RGB => image::GrayImage::from_vec(
                img2.width() as u32,
                (img2.height() * 3) as u32,
                img2.tensor().map().unwrap().to_vec(),
            )
            .unwrap()
            .convert(),
            _ => return,
        };

        let similarity = image_compare::rgb_similarity_structure(
            &image_compare::Algorithm::RootMeanSquared,
            &image1,
            &image2,
        )
        .expect("Image Comparison failed");
        if similarity.score < threshold {
            // Persist the diff map so a failing run can be inspected.
            similarity
                .image
                .to_color_map()
                .save(format!("{name}.png"))
                .unwrap();
            panic!(
                "{name}: converted image and target image have similarity score too low: {} < {}",
                similarity.score, threshold
            )
        }
    }

    // Builds a TensorImage directly from raw pixel bytes (no decode step).
    #[cfg(feature = "dma_test_formats")]
    fn load_raw_image(
        width: usize,
        height: usize,
        fourcc: FourCharCode,
        memory: Option<TensorMemory>,
        bytes: &[u8],
    ) -> Result<TensorImage, crate::Error> {
        let img = TensorImage::new(width, height, fourcc, memory)?;
        let mut map = img.tensor().map()?;
        map.as_mut_slice()[..bytes.len()].copy_from_slice(bytes);
        Ok(img)
    }

    /// NV12 → RGBA conversion through the GL path, checked against a CPU
    /// reference frame.
    #[test]
    #[cfg(all(target_os = "linux", feature = "dma_test_formats"))]
    fn test_opengl_nv12_to_rgba_reference() {
        let src = load_raw_image(
            1280,
            720,
            NV12,
            Some(TensorMemory::Dma),
            include_bytes!("../../../testdata/camera720p.nv12"),
        )
        .unwrap();

        let reference = load_raw_image(
            1280,
            720,
            RGBA,
            None,
            include_bytes!("../../../testdata/camera720p.rgba"),
        )
        .unwrap();

        let mut dst = TensorImage::new(1280, 720, RGBA, Some(TensorMemory::Dma)).unwrap();
        let mut gl = GLProcessorThreaded::new().unwrap();
        gl.convert(&src, &mut dst, Rotation::None, Flip::None, Crop::no_crop())
            .unwrap();

        // Copy the DMA result into plain memory for comparison.
        let cpu_dst = TensorImage::new(1280, 720, RGBA, None).unwrap();
        cpu_dst
            .tensor()
            .map()
            .unwrap()
            .as_mut_slice()
            .copy_from_slice(dst.tensor().map().unwrap().as_slice());

        compare_images(&reference, &cpu_dst, 0.98, "opengl_nv12_to_rgba_reference");
    }

    /// YUYV → RGBA conversion through the GL path, checked against a CPU
    /// reference frame.
    #[test]
    #[cfg(all(target_os = "linux", feature = "dma_test_formats"))]
    fn test_opengl_yuyv_to_rgba_reference() {
        let src = load_raw_image(
            1280,
            720,
            YUYV,
            Some(TensorMemory::Dma),
            include_bytes!("../../../testdata/camera720p.yuyv"),
        )
        .unwrap();

        let reference = load_raw_image(
            1280,
            720,
            RGBA,
            None,
            include_bytes!("../../../testdata/camera720p.rgba"),
        )
        .unwrap();

        let mut dst = TensorImage::new(1280, 720, RGBA, Some(TensorMemory::Dma)).unwrap();
        let mut gl = GLProcessorThreaded::new().unwrap();
        gl.convert(&src, &mut dst, Rotation::None, Flip::None, Crop::no_crop())
            .unwrap();

        // Copy the DMA result into plain memory for comparison.
        let cpu_dst = TensorImage::new(1280, 720, RGBA, None).unwrap();
        cpu_dst
            .tensor()
            .map()
            .unwrap()
            .as_mut_slice()
            .copy_from_slice(dst.tensor().map().unwrap().as_slice());

        compare_images(&reference, &cpu_dst, 0.98, "opengl_yuyv_to_rgba_reference");
    }
}