use oxideav_core::{PixelFormat, Result, VideoFrame};
use oxideav_pixfmt::ConvertOptions;
use crate::object::Canvas;
use crate::render::RenderedFrame;
use crate::source::{SceneSource, SourceFormat};
/// Convert `frame` to the `target` pixel format.
///
/// Returns the frame untouched when it is already in `target`; otherwise
/// delegates to the pixel-format converter with default options.
pub fn adapt_frame_to(frame: VideoFrame, target: PixelFormat) -> Result<VideoFrame> {
    if frame.format != target {
        return oxideav_pixfmt::convert(&frame, target, &ConvertOptions::default());
    }
    Ok(frame)
}
/// Adapt `frame` to the pixel format demanded by `canvas`.
///
/// Raster canvases carry a pixel format, so the frame is converted to it.
/// Vector canvases have no pixel format; the frame passes through unchanged.
pub fn adapt_frame_to_canvas(frame: VideoFrame, canvas: &Canvas) -> Result<VideoFrame> {
    if let Canvas::Raster { pixel_format, .. } = canvas {
        adapt_frame_to(frame, *pixel_format)
    } else {
        Ok(frame)
    }
}
pub struct AdaptedSource<S: SceneSource> {
inner: S,
target: PixelFormat,
}
impl<S: SceneSource> AdaptedSource<S> {
    /// Wrap `inner` so that every video frame it yields is converted
    /// to `target` on the way out.
    pub fn new(inner: S, target: PixelFormat) -> Self {
        Self { inner, target }
    }

    /// Borrow the wrapped source.
    pub fn inner(&self) -> &S {
        &self.inner
    }

    /// Mutably borrow the wrapped source.
    pub fn inner_mut(&mut self) -> &mut S {
        &mut self.inner
    }
}
impl<S: SceneSource> SceneSource for AdaptedSource<S> {
    /// Report the inner source's format, with any raster canvas rewritten
    /// to carry the adapter's target pixel format. Vector canvases have no
    /// pixel format and are left as-is.
    fn format(&self) -> SourceFormat {
        let mut fmt = self.inner.format();
        match fmt.canvas {
            Canvas::Raster {
                ref mut pixel_format,
                ..
            } => *pixel_format = self.target,
            Canvas::Vector { .. } => {}
        }
        fmt
    }

    /// Pull the next frame from the inner source, converting its video
    /// payload (when present) to the target pixel format.
    fn pull(&mut self) -> Result<Option<RenderedFrame>> {
        match self.inner.pull()? {
            None => Ok(None),
            Some(mut frame) => {
                if let Some(video) = frame.video.take() {
                    frame.video = Some(adapt_frame_to(video, self.target)?);
                }
                Ok(Some(frame))
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::scene::Scene;
    use crate::source::SceneSource;
    use oxideav_core::{Rational, TimeBase, VideoFrame, VideoPlane};

    /// Build a flat mid-gray YUV 4:2:0 frame of the given dimensions.
    fn yuv420p_frame(width: u32, height: u32) -> VideoFrame {
        let luma_len = (width * height) as usize;
        let chroma_stride = (width / 2) as usize;
        let chroma_len = chroma_stride * (height / 2) as usize;
        VideoFrame {
            format: PixelFormat::Yuv420P,
            width,
            height,
            pts: None,
            time_base: TimeBase::new(1, 30),
            planes: vec![
                VideoPlane {
                    stride: width as usize,
                    data: vec![128; luma_len],
                },
                VideoPlane {
                    stride: chroma_stride,
                    data: vec![128; chroma_len],
                },
                VideoPlane {
                    stride: chroma_stride,
                    data: vec![128; chroma_len],
                },
            ],
        }
    }

    #[test]
    fn adapt_to_same_format_is_identity() {
        let frame = yuv420p_frame(8, 8);
        let adapted = adapt_frame_to(frame.clone(), PixelFormat::Yuv420P).unwrap();
        assert_eq!(adapted.format, PixelFormat::Yuv420P);
        assert_eq!(adapted.planes[0].data, frame.planes[0].data);
    }

    #[test]
    fn adapt_to_canvas_vector_passes_through() {
        let vector = Canvas::Vector {
            width: 595.0,
            height: 842.0,
            unit: crate::object::LengthUnit::Point,
        };
        let adapted = adapt_frame_to_canvas(yuv420p_frame(8, 8), &vector).unwrap();
        assert_eq!(adapted.format, PixelFormat::Yuv420P);
    }

    /// A stub source that emits a bounded number of 8x8 gray frames.
    struct StaticSource {
        fmt: SourceFormat,
        frames_left: u32,
    }

    impl SceneSource for StaticSource {
        fn format(&self) -> SourceFormat {
            self.fmt.clone()
        }

        fn pull(&mut self) -> Result<Option<RenderedFrame>> {
            if self.frames_left == 0 {
                Ok(None)
            } else {
                self.frames_left -= 1;
                Ok(Some(RenderedFrame {
                    video: Some(yuv420p_frame(8, 8)),
                    audio: Vec::new(),
                    operations: Vec::new(),
                }))
            }
        }
    }

    #[test]
    fn adapted_source_reports_target_format() {
        let scene = Scene {
            framerate: Rational::new(30, 1),
            ..Scene::default()
        };
        let source = StaticSource {
            fmt: SourceFormat::from_scene(&scene),
            frames_left: 1,
        };
        let adapted = AdaptedSource::new(source, PixelFormat::Rgba);
        let Canvas::Raster { pixel_format, .. } = adapted.format().canvas else {
            panic!("expected Raster");
        };
        assert_eq!(pixel_format, PixelFormat::Rgba);
    }

    #[test]
    fn adapted_source_converts_on_pull() {
        let source = StaticSource {
            fmt: SourceFormat::from_scene(&Scene::default()),
            frames_left: 1,
        };
        let mut adapted = AdaptedSource::new(source, PixelFormat::Rgba);
        let frame = adapted.pull().unwrap().expect("frame");
        let video = frame.video.expect("video payload");
        assert_eq!(video.format, PixelFormat::Rgba);
        assert_eq!(video.width, 8);
        assert_eq!(video.height, 8);
    }
}