// oximedia_codec/image.rs
1//! Image I/O for thumbnails and frame extraction.
2//!
3//! This module provides patent-free image encoding and decoding support:
4//!
5//! - **PNG** - Lossless compression, supports transparency
6//! - **JPEG** - Lossy compression (decode only, patent concerns)
7//! - **WebP** - Modern format with both lossy and lossless modes
8//!
9//! # Examples
10//!
11//! ## Decoding
12//!
13//! ```ignore
14//! use oximedia_codec::image::{ImageDecoder, ImageFormat};
15//!
16//! let data = std::fs::read("frame.png")?;
17//! let decoder = ImageDecoder::new(&data)?;
18//! let frame = decoder.decode()?;
19//! println!("Decoded {}x{} frame", frame.width, frame.height);
20//! ```
21//!
22//! ## Encoding
23//!
24//! ```ignore
25//! use oximedia_codec::image::{ImageEncoder, ImageFormat, EncoderConfig};
26//!
27//! let config = EncoderConfig::png();
28//! let encoder = ImageEncoder::new(config);
29//! let data = encoder.encode(&frame)?;
30//! std::fs::write("output.png", &data)?;
31//! ```
32
33use crate::error::{CodecError, CodecResult};
34use crate::frame::{Plane, VideoFrame};
35use bytes::Bytes;
36use oximedia_core::PixelFormat;
37use std::io::Cursor;
38
/// Supported image formats.
///
/// Derives `Hash` (alongside `Eq`) so a format can be used directly as a
/// `HashMap`/`HashSet` key, e.g. for per-format encoder registries.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum ImageFormat {
    /// PNG - Portable Network Graphics (lossless).
    Png,
    /// JPEG - Joint Photographic Experts Group (lossy, decode only).
    Jpeg,
    /// WebP - Modern image format (lossy/lossless).
    WebP,
}
49
50impl ImageFormat {
51    /// Detect format from file signature.
52    ///
53    /// # Errors
54    ///
55    /// Returns error if format cannot be detected.
56    pub fn from_bytes(data: &[u8]) -> CodecResult<Self> {
57        if data.len() < 12 {
58            return Err(CodecError::InvalidData("Data too short".into()));
59        }
60
61        // PNG signature: 89 50 4E 47 0D 0A 1A 0A
62        if data.starts_with(&[0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A]) {
63            return Ok(Self::Png);
64        }
65
66        // JPEG signature: FF D8 FF
67        if data.starts_with(&[0xFF, 0xD8, 0xFF]) {
68            return Ok(Self::Jpeg);
69        }
70
71        // WebP signature: RIFF....WEBP
72        if data.starts_with(b"RIFF") && data.len() >= 12 && &data[8..12] == b"WEBP" {
73            return Ok(Self::WebP);
74        }
75
76        Err(CodecError::UnsupportedFeature(
77            "Unknown image format".into(),
78        ))
79    }
80
81    /// Get file extension for this format.
82    #[must_use]
83    pub const fn extension(&self) -> &'static str {
84        match self {
85            Self::Png => "png",
86            Self::Jpeg => "jpg",
87            Self::WebP => "webp",
88        }
89    }
90
91    /// Check if format supports alpha channel.
92    #[must_use]
93    pub const fn supports_alpha(&self) -> bool {
94        match self {
95            Self::Png | Self::WebP => true,
96            Self::Jpeg => false,
97        }
98    }
99}
100
/// Image encoder configuration.
///
/// Construct via [`EncoderConfig::png`], [`EncoderConfig::webp_lossy`], or
/// [`EncoderConfig::webp_lossless`]; the `Default` impl selects PNG.
#[derive(Clone, Debug)]
pub struct EncoderConfig {
    /// Output format.
    pub format: ImageFormat,
    /// Quality setting (0-100, higher is better).
    /// Only used for lossy formats (WebP lossy mode).
    pub quality: u8,
    /// Use lossless compression (WebP only).
    pub lossless: bool,
}
112
113impl EncoderConfig {
114    /// Create PNG encoder config (lossless).
115    #[must_use]
116    pub const fn png() -> Self {
117        Self {
118            format: ImageFormat::Png,
119            quality: 100,
120            lossless: true,
121        }
122    }
123
124    /// Create WebP encoder config with lossy compression.
125    ///
126    /// # Arguments
127    ///
128    /// * `quality` - Quality setting (0-100, higher is better)
129    #[must_use]
130    pub const fn webp_lossy(quality: u8) -> Self {
131        Self {
132            format: ImageFormat::WebP,
133            quality,
134            lossless: false,
135        }
136    }
137
138    /// Create WebP encoder config with lossless compression.
139    #[must_use]
140    pub const fn webp_lossless() -> Self {
141        Self {
142            format: ImageFormat::WebP,
143            quality: 100,
144            lossless: true,
145        }
146    }
147}
148
impl Default for EncoderConfig {
    /// Defaults to lossless PNG encoding.
    fn default() -> Self {
        Self::png()
    }
}
154
/// Image decoder for converting image files to video frames.
///
/// Holds an owned copy of the encoded bytes plus the format detected from
/// their signature; the actual decode happens in [`ImageDecoder::decode`].
pub struct ImageDecoder {
    // Format detected by `ImageFormat::from_bytes` at construction time.
    format: ImageFormat,
    // Owned copy of the encoded image data.
    data: Bytes,
}
160
impl ImageDecoder {
    /// Create a new image decoder.
    ///
    /// Detects the format from the leading signature bytes and keeps an
    /// owned copy of the data for later decoding.
    ///
    /// # Errors
    ///
    /// Returns error if format cannot be detected.
    pub fn new(data: &[u8]) -> CodecResult<Self> {
        let format = ImageFormat::from_bytes(data)?;
        Ok(Self {
            format,
            // Own the bytes so the decoder outlives the caller's buffer.
            data: Bytes::copy_from_slice(data),
        })
    }

    /// Get the detected format.
    #[must_use]
    pub const fn format(&self) -> ImageFormat {
        self.format
    }

    /// Decode the image to a video frame.
    ///
    /// Dispatches to the format-specific decoder chosen at construction.
    ///
    /// # Errors
    ///
    /// Returns error if decoding fails.
    #[allow(clippy::too_many_lines)]
    pub fn decode(&self) -> CodecResult<VideoFrame> {
        match self.format {
            ImageFormat::Png => self.decode_png(),
            ImageFormat::Jpeg => self.decode_jpeg(),
            ImageFormat::WebP => self.decode_webp(),
        }
    }

    /// Decode a PNG image into a single packed-plane `VideoFrame`.
    #[cfg(feature = "image-io")]
    fn decode_png(&self) -> CodecResult<VideoFrame> {
        let decoder = png::Decoder::new(Cursor::new(&self.data));
        let mut reader = decoder
            .read_info()
            .map_err(|e| CodecError::DecoderError(format!("PNG decode error: {e}")))?;

        let info = reader.info();
        let width = info.width;
        let height = info.height;
        let color_type = info.color_type;

        // Allocate buffer for decoded image
        let buffer_size = reader.output_buffer_size().ok_or_else(|| {
            CodecError::DecoderError("Cannot determine PNG output buffer size".into())
        })?;
        let mut buf = vec![0u8; buffer_size];
        let output_info = reader
            .next_frame(&mut buf)
            .map_err(|e| CodecError::DecoderError(format!("PNG decode error: {e}")))?;

        // Convert to appropriate pixel format
        // NOTE(review): only the color type is inspected, not the bit depth;
        // a 16-bit-per-channel PNG would be mislabeled as an 8-bit format
        // here - confirm upstream guards 8-bit input or add a depth check.
        let (format, data) = match color_type {
            png::ColorType::Rgb => {
                // RGB 8-bit
                (
                    PixelFormat::Rgb24,
                    buf[..output_info.buffer_size()].to_vec(),
                )
            }
            png::ColorType::Rgba => {
                // RGBA 8-bit
                (
                    PixelFormat::Rgba32,
                    buf[..output_info.buffer_size()].to_vec(),
                )
            }
            png::ColorType::Grayscale => {
                // Grayscale 8-bit
                (
                    PixelFormat::Gray8,
                    buf[..output_info.buffer_size()].to_vec(),
                )
            }
            png::ColorType::GrayscaleAlpha => {
                // Convert grayscale+alpha to RGBA
                let size = (width * height) as usize;
                let mut rgba = Vec::with_capacity(size * 4);
                for chunk in buf[..output_info.buffer_size()].chunks_exact(2) {
                    let gray = chunk[0];
                    let alpha = chunk[1];
                    // Replicate the gray sample across R, G, B.
                    rgba.extend_from_slice(&[gray, gray, gray, alpha]);
                }
                (PixelFormat::Rgba32, rgba)
            }
            png::ColorType::Indexed => {
                // Palette expansion is not implemented.
                return Err(CodecError::UnsupportedFeature(
                    "Indexed PNG not supported".into(),
                ))
            }
        };

        // Create frame with single plane (packed format)
        // Rows are tightly packed, so stride is total bytes / row count.
        let stride = data.len() / height as usize;
        let plane = Plane {
            data,
            stride,
            width,
            height,
        };

        let mut frame = VideoFrame::new(format, width, height);
        frame.planes = vec![plane];

        Ok(frame)
    }

    /// Stub when PNG support is compiled out.
    #[cfg(not(feature = "image-io"))]
    fn decode_png(&self) -> CodecResult<VideoFrame> {
        Err(CodecError::UnsupportedFeature(
            "PNG support not enabled".into(),
        ))
    }

    /// Decode a JPEG image into a single packed-plane `VideoFrame`.
    #[cfg(feature = "image-io")]
    fn decode_jpeg(&self) -> CodecResult<VideoFrame> {
        let mut decoder = jpeg_decoder::Decoder::new(Cursor::new(&self.data));
        let pixels = decoder
            .decode()
            .map_err(|e| CodecError::DecoderError(format!("JPEG decode error: {e}")))?;

        // Header info is only populated after a successful decode.
        let info = decoder
            .info()
            .ok_or_else(|| CodecError::DecoderError("No JPEG info available".into()))?;

        let width = u32::from(info.width);
        let height = u32::from(info.height);

        // JPEG decoder outputs RGB or grayscale
        let (format, data) = match info.pixel_format {
            jpeg_decoder::PixelFormat::RGB24 => (PixelFormat::Rgb24, pixels),
            jpeg_decoder::PixelFormat::L8 => (PixelFormat::Gray8, pixels),
            jpeg_decoder::PixelFormat::CMYK32 => {
                // Convert CMYK to RGB
                // NOTE(review): naive (non-ICC) conversion; Adobe JPEGs often
                // store inverted CMYK - confirm jpeg_decoder normalizes this.
                let mut rgb = Vec::with_capacity((width * height * 3) as usize);
                for chunk in pixels.chunks_exact(4) {
                    let c = f32::from(chunk[0]) / 255.0;
                    let m = f32::from(chunk[1]) / 255.0;
                    let y = f32::from(chunk[2]) / 255.0;
                    let k = f32::from(chunk[3]) / 255.0;

                    let r = ((1.0 - c) * (1.0 - k) * 255.0) as u8;
                    let g = ((1.0 - m) * (1.0 - k) * 255.0) as u8;
                    let b = ((1.0 - y) * (1.0 - k) * 255.0) as u8;

                    rgb.extend_from_slice(&[r, g, b]);
                }
                (PixelFormat::Rgb24, rgb)
            }
            _ => {
                return Err(CodecError::UnsupportedFeature(format!(
                    "JPEG pixel format {:?} not supported",
                    info.pixel_format
                )))
            }
        };

        // Rows are tightly packed, so stride is total bytes / row count.
        let stride = data.len() / height as usize;
        let plane = Plane {
            data,
            stride,
            width,
            height,
        };

        let mut frame = VideoFrame::new(format, width, height);
        frame.planes = vec![plane];

        Ok(frame)
    }

    /// Stub when JPEG support is compiled out.
    #[cfg(not(feature = "image-io"))]
    fn decode_jpeg(&self) -> CodecResult<VideoFrame> {
        Err(CodecError::UnsupportedFeature(
            "JPEG support not enabled".into(),
        ))
    }

    /// Decode a WebP image: lossless (VP8L), lossy (VP8), or extended (VP8X).
    #[cfg(feature = "image-io")]
    fn decode_webp(&self) -> CodecResult<VideoFrame> {
        use crate::webp::alpha::decode_alpha;
        use crate::webp::riff::{WebPContainer, WebPEncoding};
        use crate::webp::vp8l_decoder::Vp8lDecoder;

        let container = WebPContainer::parse(&self.data)?;
        let (width, height) = container.dimensions()?;

        match container.encoding {
            WebPEncoding::Lossless => {
                let chunk = container.bitstream_chunk().ok_or_else(|| {
                    CodecError::DecoderError("No VP8L bitstream chunk found".into())
                })?;
                let mut decoder = Vp8lDecoder::new();
                let decoded = decoder.decode(&chunk.data)?;
                Self::decoded_image_to_frame(&decoded)
            }
            WebPEncoding::Lossy => {
                let chunk = container.bitstream_chunk().ok_or_else(|| {
                    CodecError::DecoderError("No VP8 bitstream chunk found".into())
                })?;
                Self::decode_vp8_to_frame(&chunk.data, width, height)
            }
            WebPEncoding::Extended => {
                // Check for VP8L (lossless) bitstream first
                let vp8l_chunk = container
                    .chunks
                    .iter()
                    .find(|c| c.chunk_type == crate::webp::riff::ChunkType::Vp8L);
                let vp8_chunk = container
                    .chunks
                    .iter()
                    .find(|c| c.chunk_type == crate::webp::riff::ChunkType::Vp8);
                let alpha_chunk = container.alpha_chunk();

                if let Some(vp8l) = vp8l_chunk {
                    // Extended with VP8L (lossless with alpha)
                    let mut decoder = Vp8lDecoder::new();
                    let decoded = decoder.decode(&vp8l.data)?;
                    Self::decoded_image_to_frame(&decoded)
                } else if let Some(vp8) = vp8_chunk {
                    // Extended with VP8 (lossy, possibly with separate alpha)
                    let mut frame = Self::decode_vp8_to_frame(&vp8.data, width, height)?;

                    // Merge ALPH chunk alpha into the frame if present
                    if let Some(alph) = alpha_chunk {
                        let alpha_plane = decode_alpha(&alph.data, width, height)?;
                        // Convert RGB24 frame to RGBA32 with the decoded alpha
                        // (non-RGB24 frames are left untouched, dropping alpha).
                        if frame.format == PixelFormat::Rgb24 && !frame.planes.is_empty() {
                            let rgb = &frame.planes[0].data;
                            let mut rgba =
                                Vec::with_capacity((width as usize) * (height as usize) * 4);
                            for (i, rgb_chunk) in rgb.chunks_exact(3).enumerate() {
                                rgba.extend_from_slice(rgb_chunk);
                                // Missing alpha samples default to opaque.
                                let a = alpha_plane.get(i).copied().unwrap_or(255);
                                rgba.push(a);
                            }
                            let stride = (width as usize) * 4;
                            frame = VideoFrame::new(PixelFormat::Rgba32, width, height);
                            frame.planes = vec![Plane {
                                data: rgba,
                                stride,
                                width,
                                height,
                            }];
                        }
                    }
                    Ok(frame)
                } else {
                    Err(CodecError::DecoderError(
                        "Extended WebP has no VP8 or VP8L bitstream".into(),
                    ))
                }
            }
        }
    }

    /// Convert a VP8L `DecodedImage` (ARGB u32 pixels) to a `VideoFrame`.
    ///
    /// Produces RGBA32 when the image has alpha, otherwise RGB24.
    ///
    /// NOTE(review): not gated on `image-io` despite referencing
    /// `crate::webp` types in its signature - confirm that module is
    /// available when the feature is off.
    fn decoded_image_to_frame(
        decoded: &crate::webp::vp8l_decoder::DecodedImage,
    ) -> CodecResult<VideoFrame> {
        let width = decoded.width;
        let height = decoded.height;
        let has_alpha = decoded.has_alpha;

        // Unpack ARGB words into interleaved RGBA bytes.
        let mut rgba = Vec::with_capacity((width as usize) * (height as usize) * 4);
        for &pixel in &decoded.pixels {
            let a = ((pixel >> 24) & 0xFF) as u8;
            let r = ((pixel >> 16) & 0xFF) as u8;
            let g = ((pixel >> 8) & 0xFF) as u8;
            let b = (pixel & 0xFF) as u8;
            rgba.extend_from_slice(&[r, g, b, a]);
        }

        if !has_alpha {
            // Convert to RGB24
            let mut rgb = Vec::with_capacity((width as usize) * (height as usize) * 3);
            for chunk in rgba.chunks_exact(4) {
                rgb.extend_from_slice(&chunk[..3]);
            }
            let stride = rgb.len() / height as usize;
            let plane = Plane {
                data: rgb,
                stride,
                width,
                height,
            };
            let mut frame = VideoFrame::new(PixelFormat::Rgb24, width, height);
            frame.planes = vec![plane];
            Ok(frame)
        } else {
            let stride = rgba.len() / height as usize;
            let plane = Plane {
                data: rgba,
                stride,
                width,
                height,
            };
            let mut frame = VideoFrame::new(PixelFormat::Rgba32, width, height);
            frame.planes = vec![plane];
            Ok(frame)
        }
    }

    /// Decode a VP8 lossy bitstream to an RGB24 `VideoFrame`.
    #[cfg(feature = "vp8")]
    fn decode_vp8_to_frame(data: &[u8], _width: u32, _height: u32) -> CodecResult<VideoFrame> {
        use crate::traits::{DecoderConfig, VideoDecoder};
        use crate::vp8::Vp8Decoder;

        let config = DecoderConfig::default();
        let mut decoder = Vp8Decoder::new(config)?;
        decoder.send_packet(data, 0)?;
        let yuv_frame = decoder
            .receive_frame()?
            .ok_or_else(|| CodecError::DecoderError("VP8 decoder produced no frame".into()))?;

        // VP8 decoder produces YUV420p; convert to RGB24
        if yuv_frame.format == PixelFormat::Yuv420p {
            convert_yuv420p_to_rgb(&yuv_frame)
        } else if yuv_frame.format == PixelFormat::Rgb24 {
            // Already RGB; pass through unchanged.
            Ok(yuv_frame)
        } else {
            Err(CodecError::UnsupportedFeature(format!(
                "VP8 decoder produced unexpected format: {}",
                yuv_frame.format
            )))
        }
    }

    /// Decode a VP8 lossy bitstream to an RGB24 `VideoFrame` (stub when vp8 feature disabled).
    #[cfg(not(feature = "vp8"))]
    fn decode_vp8_to_frame(_data: &[u8], _width: u32, _height: u32) -> CodecResult<VideoFrame> {
        Err(CodecError::UnsupportedFeature(
            "VP8 lossy decoding requires the 'vp8' feature".into(),
        ))
    }

    /// Stub when WebP support is compiled out.
    #[cfg(not(feature = "image-io"))]
    fn decode_webp(&self) -> CodecResult<VideoFrame> {
        Err(CodecError::UnsupportedFeature(
            "WebP support not enabled".into(),
        ))
    }
}
509
/// Image encoder for converting video frames to image files.
///
/// Stateless apart from its configuration; one encoder can encode many frames.
pub struct ImageEncoder {
    // Immutable settings chosen at construction; see `EncoderConfig`.
    config: EncoderConfig,
}
514
impl ImageEncoder {
    /// Create a new image encoder.
    #[must_use]
    pub const fn new(config: EncoderConfig) -> Self {
        Self { config }
    }

    /// Encode a video frame to image data.
    ///
    /// Dispatches on `self.config.format`; JPEG encoding is deliberately
    /// rejected.
    ///
    /// # Errors
    ///
    /// Returns error if encoding fails or frame format is unsupported.
    pub fn encode(&self, frame: &VideoFrame) -> CodecResult<Vec<u8>> {
        match self.config.format {
            ImageFormat::Png => self.encode_png(frame),
            ImageFormat::Jpeg => Err(CodecError::UnsupportedFeature(
                "JPEG encoding not supported (patent concerns)".into(),
            )),
            ImageFormat::WebP => self.encode_webp(frame),
        }
    }

    /// Encode a frame as PNG using its first (packed) plane.
    #[cfg(feature = "image-io")]
    #[allow(clippy::too_many_lines)]
    fn encode_png(&self, frame: &VideoFrame) -> CodecResult<Vec<u8>> {
        let mut output = Vec::new();
        let mut encoder = png::Encoder::new(Cursor::new(&mut output), frame.width, frame.height);

        // Set color type based on pixel format
        let (color_type, bit_depth) = match frame.format {
            PixelFormat::Rgb24 => (png::ColorType::Rgb, png::BitDepth::Eight),
            PixelFormat::Rgba32 => (png::ColorType::Rgba, png::BitDepth::Eight),
            PixelFormat::Gray8 => (png::ColorType::Grayscale, png::BitDepth::Eight),
            // NOTE(review): Gray16 plane bytes are written as-is below; PNG
            // expects big-endian 16-bit samples - confirm frames store Gray16
            // in big-endian order.
            PixelFormat::Gray16 => (png::ColorType::Grayscale, png::BitDepth::Sixteen),
            _ => {
                return Err(CodecError::UnsupportedFeature(format!(
                    "Pixel format {} not supported for PNG encoding",
                    frame.format
                )))
            }
        };

        encoder.set_color(color_type);
        encoder.set_depth(bit_depth);
        encoder.set_compression(png::Compression::default());

        let mut writer = encoder
            .write_header()
            .map_err(|e| CodecError::Internal(format!("PNG encode error: {e}")))?;

        // Get pixel data from frame
        if frame.planes.is_empty() {
            return Err(CodecError::InvalidData("Frame has no planes".into()));
        }

        // Assumes plane rows are tightly packed (stride == width * bpp).
        writer
            .write_image_data(&frame.planes[0].data)
            .map_err(|e| CodecError::Internal(format!("PNG encode error: {e}")))?;

        writer
            .finish()
            .map_err(|e| CodecError::Internal(format!("PNG encode error: {e}")))?;

        Ok(output)
    }

    /// Stub when PNG support is compiled out.
    #[cfg(not(feature = "image-io"))]
    fn encode_png(&self, _frame: &VideoFrame) -> CodecResult<Vec<u8>> {
        Err(CodecError::UnsupportedFeature(
            "PNG support not enabled".into(),
        ))
    }

    /// Encode a frame as WebP, expanding grayscale input to RGB first.
    #[cfg(feature = "image-io")]
    fn encode_webp(&self, frame: &VideoFrame) -> CodecResult<Vec<u8>> {
        // Get RGB/RGBA data from frame
        let (width, height, data) = match frame.format {
            PixelFormat::Rgb24 | PixelFormat::Rgba32 => {
                if frame.planes.is_empty() {
                    return Err(CodecError::InvalidData("Frame has no planes".into()));
                }
                (frame.width, frame.height, &frame.planes[0].data)
            }
            PixelFormat::Gray8 => {
                // Convert grayscale to RGB
                if frame.planes.is_empty() {
                    return Err(CodecError::InvalidData("Frame has no planes".into()));
                }
                let gray_data = &frame.planes[0].data;
                let mut rgb = Vec::with_capacity(gray_data.len() * 3);
                for &gray in gray_data.iter() {
                    rgb.extend_from_slice(&[gray, gray, gray]);
                }
                return self.encode_webp_rgb(frame.width, frame.height, &rgb, false);
            }
            _ => {
                return Err(CodecError::UnsupportedFeature(format!(
                    "Pixel format {} not supported for WebP encoding",
                    frame.format
                )))
            }
        };

        let has_alpha = frame.format == PixelFormat::Rgba32;
        self.encode_webp_rgb(width, height, data, has_alpha)
    }

    /// Encode packed RGB/RGBA bytes into a WebP file per `self.config`.
    ///
    /// NOTE(review): not gated on `image-io` although it uses `crate::webp` -
    /// confirm that module is available when the feature is off.
    fn encode_webp_rgb(
        &self,
        width: u32,
        height: u32,
        data: &[u8],
        has_alpha: bool,
    ) -> CodecResult<Vec<u8>> {
        use crate::webp::alpha::encode_alpha;
        use crate::webp::encoder::WebPLossyEncoder;
        use crate::webp::riff::WebPWriter;
        use crate::webp::vp8l_encoder::Vp8lEncoder;

        if self.config.lossless {
            // Convert RGB/RGBA bytes to ARGB u32 pixels
            let pixel_count = (width as usize) * (height as usize);
            let mut pixels = Vec::with_capacity(pixel_count);

            if has_alpha {
                for chunk in data.chunks_exact(4) {
                    let r = u32::from(chunk[0]);
                    let g = u32::from(chunk[1]);
                    let b = u32::from(chunk[2]);
                    let a = u32::from(chunk[3]);
                    pixels.push((a << 24) | (r << 16) | (g << 8) | b);
                }
            } else {
                // Opaque alpha for plain RGB input.
                for chunk in data.chunks_exact(3) {
                    let r = u32::from(chunk[0]);
                    let g = u32::from(chunk[1]);
                    let b = u32::from(chunk[2]);
                    pixels.push((0xFF << 24) | (r << 16) | (g << 8) | b);
                }
            }

            let encoder = Vp8lEncoder::new(100);
            let vp8l_data = encoder.encode(&pixels, width, height, has_alpha)?;
            Ok(WebPWriter::write_lossless(&vp8l_data))
        } else {
            // Lower bound of the clamp is a no-op for u8; kept for clarity.
            let quality = self.config.quality.clamp(0, 100);
            let lossy_encoder = WebPLossyEncoder::new(quality);

            if has_alpha {
                // Lossy + alpha needs the extended (VP8X) container with ALPH.
                let (vp8_data, alpha_data) = lossy_encoder.encode_rgba(data, width, height)?;
                let alpha_chunk = encode_alpha(&alpha_data, width, height)?;
                Ok(WebPWriter::write_extended(
                    &vp8_data,
                    Some(&alpha_chunk),
                    width,
                    height,
                ))
            } else {
                let vp8_data = lossy_encoder.encode_rgb(data, width, height)?;
                Ok(WebPWriter::write_lossy(&vp8_data))
            }
        }
    }

    /// Stub when WebP support is compiled out.
    #[cfg(not(feature = "image-io"))]
    fn encode_webp(&self, _frame: &VideoFrame) -> CodecResult<Vec<u8>> {
        Err(CodecError::UnsupportedFeature(
            "WebP support not enabled".into(),
        ))
    }
}
686
/// Convert RGB to YUV color space.
///
/// Uses BT.709 coefficients for HD content. Chroma components are biased
/// by 128 so the full result fits in unsigned bytes.
#[must_use]
#[allow(clippy::cast_possible_truncation)]
#[allow(clippy::cast_sign_loss)]
pub fn rgb_to_yuv(r: u8, g: u8, b: u8) -> (u8, u8, u8) {
    // Saturate to the byte range before truncating.
    let to_byte = |x: f32| x.clamp(0.0, 255.0) as u8;

    let (rf, gf, bf) = (f32::from(r), f32::from(g), f32::from(b));

    // BT.709 luma.
    let luma = 0.2126 * rf + 0.7152 * gf + 0.0722 * bf;
    // Chroma from the blue/red differences, re-centered on 128.
    let cb = (bf - luma) / 1.8556 + 128.0;
    let cr = (rf - luma) / 1.5748 + 128.0;

    (to_byte(luma), to_byte(cb), to_byte(cr))
}
708
/// Convert YUV to RGB color space.
///
/// Uses BT.709 coefficients for HD content; inverse of [`rgb_to_yuv`].
#[must_use]
#[allow(clippy::cast_possible_truncation)]
#[allow(clippy::cast_sign_loss)]
pub fn yuv_to_rgb(y: u8, u: u8, v: u8) -> (u8, u8, u8) {
    // Saturate to the byte range before truncating.
    let to_byte = |x: f32| x.clamp(0.0, 255.0) as u8;

    let luma = f32::from(y);
    // Remove the 128 chroma bias before applying the inverse matrix.
    let cb = f32::from(u) - 128.0;
    let cr = f32::from(v) - 128.0;

    let red = luma + 1.5748 * cr;
    let green = luma - 0.1873 * cb - 0.4681 * cr;
    let blue = luma + 1.8556 * cb;

    (to_byte(red), to_byte(green), to_byte(blue))
}
730
731/// Convert a video frame from RGB to YUV420p format.
732///
733/// # Errors
734///
735/// Returns error if frame is not in RGB24 or Rgba32 format.
736pub fn convert_rgb_to_yuv420p(frame: &VideoFrame) -> CodecResult<VideoFrame> {
737    if !matches!(frame.format, PixelFormat::Rgb24 | PixelFormat::Rgba32) {
738        return Err(CodecError::InvalidParameter(
739            "Frame must be RGB24 or Rgba32".into(),
740        ));
741    }
742
743    if frame.planes.is_empty() {
744        return Err(CodecError::InvalidData("Frame has no planes".into()));
745    }
746
747    let width = frame.width as usize;
748    let height = frame.height as usize;
749    let rgb_data = &frame.planes[0].data;
750    let bytes_per_pixel = if frame.format == PixelFormat::Rgb24 {
751        3
752    } else {
753        4
754    };
755
756    // Allocate YUV planes
757    let y_size = width * height;
758    let uv_width = width / 2;
759    let uv_height = height / 2;
760    let uv_size = uv_width * uv_height;
761
762    let mut y_plane = vec![0u8; y_size];
763    let mut u_plane = vec![0u8; uv_size];
764    let mut v_plane = vec![0u8; uv_size];
765
766    // Convert RGB to YUV420p
767    for y in 0..height {
768        for x in 0..width {
769            let rgb_idx = (y * width + x) * bytes_per_pixel;
770            let r = rgb_data[rgb_idx];
771            let g = rgb_data[rgb_idx + 1];
772            let b = rgb_data[rgb_idx + 2];
773
774            let (y_val, u_val, v_val) = rgb_to_yuv(r, g, b);
775            y_plane[y * width + x] = y_val;
776
777            // Subsample U and V (4:2:0)
778            if x % 2 == 0 && y % 2 == 0 {
779                let uv_idx = (y / 2) * uv_width + (x / 2);
780                u_plane[uv_idx] = u_val;
781                v_plane[uv_idx] = v_val;
782            }
783        }
784    }
785
786    let mut yuv_frame = VideoFrame::new(PixelFormat::Yuv420p, frame.width, frame.height);
787    yuv_frame.planes = vec![
788        Plane {
789            data: y_plane,
790            stride: width,
791            width: frame.width,
792            height: frame.height,
793        },
794        Plane {
795            data: u_plane,
796            stride: uv_width,
797            width: frame.width / 2,
798            height: frame.height / 2,
799        },
800        Plane {
801            data: v_plane,
802            stride: uv_width,
803            width: frame.width / 2,
804            height: frame.height / 2,
805        },
806    ];
807    yuv_frame.timestamp = frame.timestamp;
808    yuv_frame.frame_type = frame.frame_type;
809    yuv_frame.color_info = frame.color_info;
810
811    Ok(yuv_frame)
812}
813
814/// Convert a video frame from YUV420p to RGB24 format.
815///
816/// # Errors
817///
818/// Returns error if frame is not in YUV420p format.
819pub fn convert_yuv420p_to_rgb(frame: &VideoFrame) -> CodecResult<VideoFrame> {
820    if frame.format != PixelFormat::Yuv420p {
821        return Err(CodecError::InvalidParameter("Frame must be YUV420p".into()));
822    }
823
824    if frame.planes.len() != 3 {
825        return Err(CodecError::InvalidData("YUV420p requires 3 planes".into()));
826    }
827
828    let width = frame.width as usize;
829    let height = frame.height as usize;
830    let y_data = &frame.planes[0].data;
831    let u_data = &frame.planes[1].data;
832    let v_data = &frame.planes[2].data;
833
834    let rgb_size = width * height * 3;
835    let mut rgb_data = vec![0u8; rgb_size];
836
837    let uv_width = width / 2;
838
839    // Convert YUV420p to RGB
840    for y in 0..height {
841        for x in 0..width {
842            let y_val = y_data[y * width + x];
843            let uv_idx = (y / 2) * uv_width + (x / 2);
844            let u_val = u_data[uv_idx];
845            let v_val = v_data[uv_idx];
846
847            let (r, g, b) = yuv_to_rgb(y_val, u_val, v_val);
848
849            let rgb_idx = (y * width + x) * 3;
850            rgb_data[rgb_idx] = r;
851            rgb_data[rgb_idx + 1] = g;
852            rgb_data[rgb_idx + 2] = b;
853        }
854    }
855
856    let mut rgb_frame = VideoFrame::new(PixelFormat::Rgb24, frame.width, frame.height);
857    rgb_frame.planes = vec![Plane {
858        data: rgb_data,
859        stride: width * 3,
860        width: frame.width,
861        height: frame.height,
862    }];
863    rgb_frame.timestamp = frame.timestamp;
864    rgb_frame.frame_type = frame.frame_type;
865    rgb_frame.color_info = frame.color_info;
866
867    Ok(rgb_frame)
868}