1use crate::error::{CodecError, CodecResult};
34use crate::frame::{Plane, VideoFrame};
35use bytes::Bytes;
36use oximedia_core::PixelFormat;
37use std::io::Cursor;
38
/// Still-image container formats recognised by this module.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ImageFormat {
    /// Portable Network Graphics.
    Png,
    /// JPEG/JFIF (decode only; see `ImageEncoder::encode`).
    Jpeg,
    /// WebP (RIFF container), lossy or lossless.
    WebP,
}
49
50impl ImageFormat {
51 pub fn from_bytes(data: &[u8]) -> CodecResult<Self> {
57 if data.len() < 12 {
58 return Err(CodecError::InvalidData("Data too short".into()));
59 }
60
61 if data.starts_with(&[0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A]) {
63 return Ok(Self::Png);
64 }
65
66 if data.starts_with(&[0xFF, 0xD8, 0xFF]) {
68 return Ok(Self::Jpeg);
69 }
70
71 if data.starts_with(b"RIFF") && data.len() >= 12 && &data[8..12] == b"WEBP" {
73 return Ok(Self::WebP);
74 }
75
76 Err(CodecError::UnsupportedFeature(
77 "Unknown image format".into(),
78 ))
79 }
80
81 #[must_use]
83 pub const fn extension(&self) -> &'static str {
84 match self {
85 Self::Png => "png",
86 Self::Jpeg => "jpg",
87 Self::WebP => "webp",
88 }
89 }
90
91 #[must_use]
93 pub const fn supports_alpha(&self) -> bool {
94 match self {
95 Self::Png | Self::WebP => true,
96 Self::Jpeg => false,
97 }
98 }
99}
100
/// Settings controlling [`ImageEncoder`] output.
#[derive(Clone, Debug)]
pub struct EncoderConfig {
    /// Target container format.
    pub format: ImageFormat,
    /// Quality 0-100 (clamped at encode time). Read only by the lossy WebP
    /// path; PNG encoding ignores it.
    pub quality: u8,
    /// Request lossless compression. Honoured by the WebP encoder only.
    pub lossless: bool,
}
112
113impl EncoderConfig {
114 #[must_use]
116 pub const fn png() -> Self {
117 Self {
118 format: ImageFormat::Png,
119 quality: 100,
120 lossless: true,
121 }
122 }
123
124 #[must_use]
130 pub const fn webp_lossy(quality: u8) -> Self {
131 Self {
132 format: ImageFormat::WebP,
133 quality,
134 lossless: false,
135 }
136 }
137
138 #[must_use]
140 pub const fn webp_lossless() -> Self {
141 Self {
142 format: ImageFormat::WebP,
143 quality: 100,
144 lossless: true,
145 }
146 }
147}
148
impl Default for EncoderConfig {
    /// Defaults to the lossless PNG preset ([`Self::png`]).
    fn default() -> Self {
        Self::png()
    }
}
154
/// Decodes a still image (PNG/JPEG/WebP) into a [`VideoFrame`].
pub struct ImageDecoder {
    /// Format detected from the input's magic bytes at construction time.
    format: ImageFormat,
    /// Full encoded image, copied from the caller's buffer.
    data: Bytes,
}
160
161impl ImageDecoder {
162 pub fn new(data: &[u8]) -> CodecResult<Self> {
168 let format = ImageFormat::from_bytes(data)?;
169 Ok(Self {
170 format,
171 data: Bytes::copy_from_slice(data),
172 })
173 }
174
175 #[must_use]
177 pub const fn format(&self) -> ImageFormat {
178 self.format
179 }
180
181 #[allow(clippy::too_many_lines)]
187 pub fn decode(&self) -> CodecResult<VideoFrame> {
188 match self.format {
189 ImageFormat::Png => self.decode_png(),
190 ImageFormat::Jpeg => self.decode_jpeg(),
191 ImageFormat::WebP => self.decode_webp(),
192 }
193 }
194
195 #[cfg(feature = "image-io")]
196 fn decode_png(&self) -> CodecResult<VideoFrame> {
197 let decoder = png::Decoder::new(Cursor::new(&self.data));
198 let mut reader = decoder
199 .read_info()
200 .map_err(|e| CodecError::DecoderError(format!("PNG decode error: {e}")))?;
201
202 let info = reader.info();
203 let width = info.width;
204 let height = info.height;
205 let color_type = info.color_type;
206
207 let buffer_size = reader.output_buffer_size().ok_or_else(|| {
209 CodecError::DecoderError("Cannot determine PNG output buffer size".into())
210 })?;
211 let mut buf = vec![0u8; buffer_size];
212 let output_info = reader
213 .next_frame(&mut buf)
214 .map_err(|e| CodecError::DecoderError(format!("PNG decode error: {e}")))?;
215
216 let (format, data) = match color_type {
218 png::ColorType::Rgb => {
219 (
221 PixelFormat::Rgb24,
222 buf[..output_info.buffer_size()].to_vec(),
223 )
224 }
225 png::ColorType::Rgba => {
226 (
228 PixelFormat::Rgba32,
229 buf[..output_info.buffer_size()].to_vec(),
230 )
231 }
232 png::ColorType::Grayscale => {
233 (
235 PixelFormat::Gray8,
236 buf[..output_info.buffer_size()].to_vec(),
237 )
238 }
239 png::ColorType::GrayscaleAlpha => {
240 let size = (width * height) as usize;
242 let mut rgba = Vec::with_capacity(size * 4);
243 for chunk in buf[..output_info.buffer_size()].chunks_exact(2) {
244 let gray = chunk[0];
245 let alpha = chunk[1];
246 rgba.extend_from_slice(&[gray, gray, gray, alpha]);
247 }
248 (PixelFormat::Rgba32, rgba)
249 }
250 png::ColorType::Indexed => {
251 return Err(CodecError::UnsupportedFeature(
252 "Indexed PNG not supported".into(),
253 ))
254 }
255 };
256
257 let stride = data.len() / height as usize;
259 let plane = Plane {
260 data,
261 stride,
262 width,
263 height,
264 };
265
266 let mut frame = VideoFrame::new(format, width, height);
267 frame.planes = vec![plane];
268
269 Ok(frame)
270 }
271
272 #[cfg(not(feature = "image-io"))]
273 fn decode_png(&self) -> CodecResult<VideoFrame> {
274 Err(CodecError::UnsupportedFeature(
275 "PNG support not enabled".into(),
276 ))
277 }
278
279 #[cfg(feature = "image-io")]
280 fn decode_jpeg(&self) -> CodecResult<VideoFrame> {
281 let mut decoder = jpeg_decoder::Decoder::new(Cursor::new(&self.data));
282 let pixels = decoder
283 .decode()
284 .map_err(|e| CodecError::DecoderError(format!("JPEG decode error: {e}")))?;
285
286 let info = decoder
287 .info()
288 .ok_or_else(|| CodecError::DecoderError("No JPEG info available".into()))?;
289
290 let width = u32::from(info.width);
291 let height = u32::from(info.height);
292
293 let (format, data) = match info.pixel_format {
295 jpeg_decoder::PixelFormat::RGB24 => (PixelFormat::Rgb24, pixels),
296 jpeg_decoder::PixelFormat::L8 => (PixelFormat::Gray8, pixels),
297 jpeg_decoder::PixelFormat::CMYK32 => {
298 let mut rgb = Vec::with_capacity((width * height * 3) as usize);
300 for chunk in pixels.chunks_exact(4) {
301 let c = f32::from(chunk[0]) / 255.0;
302 let m = f32::from(chunk[1]) / 255.0;
303 let y = f32::from(chunk[2]) / 255.0;
304 let k = f32::from(chunk[3]) / 255.0;
305
306 let r = ((1.0 - c) * (1.0 - k) * 255.0) as u8;
307 let g = ((1.0 - m) * (1.0 - k) * 255.0) as u8;
308 let b = ((1.0 - y) * (1.0 - k) * 255.0) as u8;
309
310 rgb.extend_from_slice(&[r, g, b]);
311 }
312 (PixelFormat::Rgb24, rgb)
313 }
314 _ => {
315 return Err(CodecError::UnsupportedFeature(format!(
316 "JPEG pixel format {:?} not supported",
317 info.pixel_format
318 )))
319 }
320 };
321
322 let stride = data.len() / height as usize;
323 let plane = Plane {
324 data,
325 stride,
326 width,
327 height,
328 };
329
330 let mut frame = VideoFrame::new(format, width, height);
331 frame.planes = vec![plane];
332
333 Ok(frame)
334 }
335
336 #[cfg(not(feature = "image-io"))]
337 fn decode_jpeg(&self) -> CodecResult<VideoFrame> {
338 Err(CodecError::UnsupportedFeature(
339 "JPEG support not enabled".into(),
340 ))
341 }
342
343 #[cfg(feature = "image-io")]
344 fn decode_webp(&self) -> CodecResult<VideoFrame> {
345 let decoder = webp::Decoder::new(&self.data);
347 let decoded = decoder
348 .decode()
349 .ok_or_else(|| CodecError::DecoderError("WebP decode error".into()))?;
350
351 let width = decoded.width();
352 let height = decoded.height();
353
354 let data = decoded.to_owned();
356 let format = if decoded.is_alpha() {
357 PixelFormat::Rgba32
358 } else {
359 let mut rgb = Vec::with_capacity((width * height * 3) as usize);
361 for chunk in data.chunks_exact(4) {
362 rgb.extend_from_slice(&chunk[..3]);
363 }
364 let stride = rgb.len() / height as usize;
365 let plane = Plane {
366 data: rgb,
367 stride,
368 width,
369 height,
370 };
371 let mut frame = VideoFrame::new(PixelFormat::Rgb24, width, height);
372 frame.planes = vec![plane];
373 return Ok(frame);
374 };
375
376 let stride = data.len() / height as usize;
377 let plane = Plane {
378 data,
379 stride,
380 width,
381 height,
382 };
383
384 let mut frame = VideoFrame::new(format, width, height);
385 frame.planes = vec![plane];
386
387 Ok(frame)
388 }
389
390 #[cfg(not(feature = "image-io"))]
391 fn decode_webp(&self) -> CodecResult<VideoFrame> {
392 Err(CodecError::UnsupportedFeature(
393 "WebP support not enabled".into(),
394 ))
395 }
396}
397
/// Encodes [`VideoFrame`]s to still-image bytes per an [`EncoderConfig`].
pub struct ImageEncoder {
    /// Output format and quality settings.
    config: EncoderConfig,
}
402
403impl ImageEncoder {
404 #[must_use]
406 pub const fn new(config: EncoderConfig) -> Self {
407 Self { config }
408 }
409
410 pub fn encode(&self, frame: &VideoFrame) -> CodecResult<Vec<u8>> {
416 match self.config.format {
417 ImageFormat::Png => self.encode_png(frame),
418 ImageFormat::Jpeg => Err(CodecError::UnsupportedFeature(
419 "JPEG encoding not supported (patent concerns)".into(),
420 )),
421 ImageFormat::WebP => self.encode_webp(frame),
422 }
423 }
424
425 #[cfg(feature = "image-io")]
426 #[allow(clippy::too_many_lines)]
427 fn encode_png(&self, frame: &VideoFrame) -> CodecResult<Vec<u8>> {
428 let mut output = Vec::new();
429 let mut encoder = png::Encoder::new(Cursor::new(&mut output), frame.width, frame.height);
430
431 let (color_type, bit_depth) = match frame.format {
433 PixelFormat::Rgb24 => (png::ColorType::Rgb, png::BitDepth::Eight),
434 PixelFormat::Rgba32 => (png::ColorType::Rgba, png::BitDepth::Eight),
435 PixelFormat::Gray8 => (png::ColorType::Grayscale, png::BitDepth::Eight),
436 PixelFormat::Gray16 => (png::ColorType::Grayscale, png::BitDepth::Sixteen),
437 _ => {
438 return Err(CodecError::UnsupportedFeature(format!(
439 "Pixel format {} not supported for PNG encoding",
440 frame.format
441 )))
442 }
443 };
444
445 encoder.set_color(color_type);
446 encoder.set_depth(bit_depth);
447 encoder.set_compression(png::Compression::default());
448
449 let mut writer = encoder
450 .write_header()
451 .map_err(|e| CodecError::Internal(format!("PNG encode error: {e}")))?;
452
453 if frame.planes.is_empty() {
455 return Err(CodecError::InvalidData("Frame has no planes".into()));
456 }
457
458 writer
459 .write_image_data(&frame.planes[0].data)
460 .map_err(|e| CodecError::Internal(format!("PNG encode error: {e}")))?;
461
462 writer
463 .finish()
464 .map_err(|e| CodecError::Internal(format!("PNG encode error: {e}")))?;
465
466 Ok(output)
467 }
468
469 #[cfg(not(feature = "image-io"))]
470 fn encode_png(&self, _frame: &VideoFrame) -> CodecResult<Vec<u8>> {
471 Err(CodecError::UnsupportedFeature(
472 "PNG support not enabled".into(),
473 ))
474 }
475
476 #[cfg(feature = "image-io")]
477 fn encode_webp(&self, frame: &VideoFrame) -> CodecResult<Vec<u8>> {
478 let (width, height, data) = match frame.format {
480 PixelFormat::Rgb24 | PixelFormat::Rgba32 => {
481 if frame.planes.is_empty() {
482 return Err(CodecError::InvalidData("Frame has no planes".into()));
483 }
484 (frame.width, frame.height, &frame.planes[0].data)
485 }
486 PixelFormat::Gray8 => {
487 if frame.planes.is_empty() {
489 return Err(CodecError::InvalidData("Frame has no planes".into()));
490 }
491 let gray_data = &frame.planes[0].data;
492 let mut rgb = Vec::with_capacity(gray_data.len() * 3);
493 for &gray in gray_data.iter() {
494 rgb.extend_from_slice(&[gray, gray, gray]);
495 }
496 return self.encode_webp_rgb(frame.width, frame.height, &rgb, false);
497 }
498 _ => {
499 return Err(CodecError::UnsupportedFeature(format!(
500 "Pixel format {} not supported for WebP encoding",
501 frame.format
502 )))
503 }
504 };
505
506 let has_alpha = frame.format == PixelFormat::Rgba32;
507 self.encode_webp_rgb(width, height, data, has_alpha)
508 }
509
510 #[cfg(feature = "image-io")]
511 fn encode_webp_rgb(
512 &self,
513 width: u32,
514 height: u32,
515 data: &[u8],
516 has_alpha: bool,
517 ) -> CodecResult<Vec<u8>> {
518 let encoder = if has_alpha {
519 webp::Encoder::from_rgba(data, width, height)
520 } else {
521 webp::Encoder::from_rgb(data, width, height)
522 };
523
524 let encoded = if self.config.lossless {
525 encoder.encode_lossless()
526 } else {
527 let quality = self.config.quality.clamp(0, 100);
528 encoder.encode(f32::from(quality))
529 };
530
531 Ok(encoded.to_vec())
532 }
533
534 #[cfg(not(feature = "image-io"))]
535 fn encode_webp(&self, _frame: &VideoFrame) -> CodecResult<Vec<u8>> {
536 Err(CodecError::UnsupportedFeature(
537 "WebP support not enabled".into(),
538 ))
539 }
540}
541
/// Converts a single RGB pixel to full-range YUV using BT.709 coefficients.
#[must_use]
#[allow(clippy::cast_possible_truncation)]
#[allow(clippy::cast_sign_loss)]
pub fn rgb_to_yuv(r: u8, g: u8, b: u8) -> (u8, u8, u8) {
    let (rf, gf, bf) = (f32::from(r), f32::from(g), f32::from(b));

    // Luma first; chroma are the colour differences scaled and re-centred
    // around 128 so they fit an unsigned byte.
    let luma = 0.2126 * rf + 0.7152 * gf + 0.0722 * bf;
    let cb = (bf - luma) / 1.8556 + 128.0;
    let cr = (rf - luma) / 1.5748 + 128.0;

    let quantize = |x: f32| x.clamp(0.0, 255.0) as u8;
    (quantize(luma), quantize(cb), quantize(cr))
}
563
/// Converts a full-range BT.709 YUV sample back to an RGB pixel.
#[must_use]
#[allow(clippy::cast_possible_truncation)]
#[allow(clippy::cast_sign_loss)]
pub fn yuv_to_rgb(y: u8, u: u8, v: u8) -> (u8, u8, u8) {
    // Re-centre chroma around zero before applying the inverse matrix.
    let luma = f32::from(y);
    let cb = f32::from(u) - 128.0;
    let cr = f32::from(v) - 128.0;

    let quantize = |x: f32| x.clamp(0.0, 255.0) as u8;
    (
        quantize(luma + 1.5748 * cr),
        quantize(luma - 0.1873 * cb - 0.4681 * cr),
        quantize(luma + 1.8556 * cb),
    )
}
585
586pub fn convert_rgb_to_yuv420p(frame: &VideoFrame) -> CodecResult<VideoFrame> {
592 if !matches!(frame.format, PixelFormat::Rgb24 | PixelFormat::Rgba32) {
593 return Err(CodecError::InvalidParameter(
594 "Frame must be RGB24 or Rgba32".into(),
595 ));
596 }
597
598 if frame.planes.is_empty() {
599 return Err(CodecError::InvalidData("Frame has no planes".into()));
600 }
601
602 let width = frame.width as usize;
603 let height = frame.height as usize;
604 let rgb_data = &frame.planes[0].data;
605 let bytes_per_pixel = if frame.format == PixelFormat::Rgb24 {
606 3
607 } else {
608 4
609 };
610
611 let y_size = width * height;
613 let uv_width = width / 2;
614 let uv_height = height / 2;
615 let uv_size = uv_width * uv_height;
616
617 let mut y_plane = vec![0u8; y_size];
618 let mut u_plane = vec![0u8; uv_size];
619 let mut v_plane = vec![0u8; uv_size];
620
621 for y in 0..height {
623 for x in 0..width {
624 let rgb_idx = (y * width + x) * bytes_per_pixel;
625 let r = rgb_data[rgb_idx];
626 let g = rgb_data[rgb_idx + 1];
627 let b = rgb_data[rgb_idx + 2];
628
629 let (y_val, u_val, v_val) = rgb_to_yuv(r, g, b);
630 y_plane[y * width + x] = y_val;
631
632 if x % 2 == 0 && y % 2 == 0 {
634 let uv_idx = (y / 2) * uv_width + (x / 2);
635 u_plane[uv_idx] = u_val;
636 v_plane[uv_idx] = v_val;
637 }
638 }
639 }
640
641 let mut yuv_frame = VideoFrame::new(PixelFormat::Yuv420p, frame.width, frame.height);
642 yuv_frame.planes = vec![
643 Plane {
644 data: y_plane,
645 stride: width,
646 width: frame.width,
647 height: frame.height,
648 },
649 Plane {
650 data: u_plane,
651 stride: uv_width,
652 width: frame.width / 2,
653 height: frame.height / 2,
654 },
655 Plane {
656 data: v_plane,
657 stride: uv_width,
658 width: frame.width / 2,
659 height: frame.height / 2,
660 },
661 ];
662 yuv_frame.timestamp = frame.timestamp;
663 yuv_frame.frame_type = frame.frame_type;
664 yuv_frame.color_info = frame.color_info;
665
666 Ok(yuv_frame)
667}
668
669pub fn convert_yuv420p_to_rgb(frame: &VideoFrame) -> CodecResult<VideoFrame> {
675 if frame.format != PixelFormat::Yuv420p {
676 return Err(CodecError::InvalidParameter("Frame must be YUV420p".into()));
677 }
678
679 if frame.planes.len() != 3 {
680 return Err(CodecError::InvalidData("YUV420p requires 3 planes".into()));
681 }
682
683 let width = frame.width as usize;
684 let height = frame.height as usize;
685 let y_data = &frame.planes[0].data;
686 let u_data = &frame.planes[1].data;
687 let v_data = &frame.planes[2].data;
688
689 let rgb_size = width * height * 3;
690 let mut rgb_data = vec![0u8; rgb_size];
691
692 let uv_width = width / 2;
693
694 for y in 0..height {
696 for x in 0..width {
697 let y_val = y_data[y * width + x];
698 let uv_idx = (y / 2) * uv_width + (x / 2);
699 let u_val = u_data[uv_idx];
700 let v_val = v_data[uv_idx];
701
702 let (r, g, b) = yuv_to_rgb(y_val, u_val, v_val);
703
704 let rgb_idx = (y * width + x) * 3;
705 rgb_data[rgb_idx] = r;
706 rgb_data[rgb_idx + 1] = g;
707 rgb_data[rgb_idx + 2] = b;
708 }
709 }
710
711 let mut rgb_frame = VideoFrame::new(PixelFormat::Rgb24, frame.width, frame.height);
712 rgb_frame.planes = vec![Plane {
713 data: rgb_data,
714 stride: width * 3,
715 width: frame.width,
716 height: frame.height,
717 }];
718 rgb_frame.timestamp = frame.timestamp;
719 rgb_frame.frame_type = frame.frame_type;
720 rgb_frame.color_info = frame.color_info;
721
722 Ok(rgb_frame)
723}