use std::path::Path;
use super::encoders::{Codec, Encoder, Quality, create_encoder};
use super::tick::Tick;
use crate::core::{Plot, PlottingError, Result};
/// Settings for rendering a plot animation to a video or GIF file:
/// frame geometry, timing, and encoding options.
#[derive(Clone, Debug)]
pub struct VideoConfig {
    /// Output frame width in pixels.
    pub width: u32,
    /// Output frame height in pixels.
    pub height: u32,
    /// Playback rate in frames per second.
    pub framerate: u32,
    /// Encoder quality preset.
    pub quality: Quality,
    /// Target codec/container for the output file.
    pub codec: Codec,
}
impl Default for VideoConfig {
fn default() -> Self {
Self {
width: 800,
height: 600,
framerate: 30,
quality: Quality::Medium,
codec: Codec::Auto,
}
}
}
impl VideoConfig {
    /// Creates a config with the default settings (800x600 at 30 fps).
    pub fn new() -> Self {
        Self::default()
    }

    /// Sets the output frame size in pixels.
    pub fn dimensions(mut self, width: u32, height: u32) -> Self {
        self.width = width;
        self.height = height;
        self
    }

    /// Sets the playback rate in frames per second.
    pub fn framerate(mut self, fps: u32) -> Self {
        self.framerate = fps;
        self
    }

    /// Sets the encoder quality preset.
    pub fn quality(mut self, quality: Quality) -> Self {
        self.quality = quality;
        self
    }

    /// Sets the codec explicitly instead of inferring it from a file path.
    pub fn codec(mut self, codec: Codec) -> Self {
        self.codec = codec;
        self
    }

    /// Builds a config whose codec is inferred from `path`'s extension,
    /// falling back to GIF when the extension is missing, non-UTF-8, or
    /// unrecognized.
    pub fn from_path<P: AsRef<Path>>(path: P) -> Self {
        let ext = path
            .as_ref()
            .extension()
            .and_then(|e| e.to_str())
            .unwrap_or("gif");
        let codec = Codec::from_extension(ext).unwrap_or(Codec::Gif);
        Self {
            codec,
            ..Default::default()
        }
    }

    /// Per-frame delay in centiseconds (GIF timing units), clamped to at
    /// least 1 cs (i.e. framerates above 100 fps all map to 1 cs).
    /// A framerate of 0 is treated as 1 fps to avoid dividing by zero.
    pub fn frame_delay_cs(&self) -> u16 {
        let fps = self.framerate.max(1) as f64; // guard: framerate may be 0
        ((100.0 / fps).round() as u16).max(1)
    }

    /// Duration of a single frame in seconds. A framerate of 0 is treated
    /// as 1 fps so the result is always finite.
    pub fn frame_duration(&self) -> f64 {
        1.0 / self.framerate.max(1) as f64
    }
}
/// Reusable pixel buffer that renders a `Plot` at a fixed size and exposes
/// the result as packed RGB24 bytes.
pub struct FrameCapture {
    // Current frame width in pixels.
    width: u32,
    // Current frame height in pixels.
    height: u32,
    // Packed RGB24 data; sized to width * height * 3 bytes.
    buffer: Vec<u8>,
}
impl FrameCapture {
    /// Creates a capture buffer for `width` x `height` RGB frames.
    pub fn new(width: u32, height: u32) -> Self {
        // Compute the byte count in usize: width * height * 3 can overflow
        // u32 for large dimensions (e.g. 40000 x 40000).
        let buffer_size = width as usize * height as usize * 3;
        Self {
            width,
            height,
            buffer: vec![0u8; buffer_size],
        }
    }

    /// Returns the current `(width, height)` in pixels.
    pub fn dimensions(&self) -> (u32, u32) {
        (self.width, self.height)
    }

    /// Resizes the internal buffer; a no-op when the dimensions are unchanged.
    pub fn resize(&mut self, width: u32, height: u32) {
        if self.width != width || self.height != height {
            self.width = width;
            self.height = height;
            let buffer_size = width as usize * height as usize * 3;
            self.buffer.resize(buffer_size, 0);
        }
    }

    // Copies the RGB channels of an RGBA pixel stream into `self.buffer`,
    // dropping the alpha byte. `zip` stops at the shorter side, so a
    // short input cannot cause an out-of-bounds panic.
    fn rgba_to_rgb(&mut self, rgba_data: &[u8]) {
        for (dst, src) in self
            .buffer
            .chunks_exact_mut(3)
            .zip(rgba_data.chunks_exact(4))
        {
            dst.copy_from_slice(&src[..3]);
        }
    }

    /// Renders `plot` at this capture's pixel size and returns the frame
    /// as packed RGB24 bytes.
    pub fn capture(&mut self, plot: &Plot) -> Result<&[u8]> {
        let sized_plot = plot.clone().set_output_pixels(self.width, self.height);
        let image = sized_plot.render()?;
        self.rgba_to_rgb(&image.pixels);
        Ok(&self.buffer)
    }

    /// Renders `plot` and returns RGB24 bytes. With
    /// `Some((fig_width, fig_height, dpi))` the plot's figure sizing is used
    /// and this capture adopts whatever pixel dimensions the renderer
    /// produces; with `None` the plot is rendered at this capture's size.
    pub fn capture_with_figure(
        &mut self,
        plot: &Plot,
        figure_size: Option<(f32, f32, u32)>,
    ) -> Result<&[u8]> {
        let sized_plot = if let Some((fig_width, fig_height, dpi)) = figure_size {
            plot.clone().size(fig_width, fig_height).dpi(dpi)
        } else {
            plot.clone().set_output_pixels(self.width, self.height)
        };
        let image = sized_plot.render()?;
        // Track the renderer's actual output size. Compare dimensions rather
        // than byte count so transposed sizes (e.g. 100x50 vs 50x100) are
        // detected even though their buffers are the same length.
        if self.width != image.width || self.height != image.height {
            self.width = image.width;
            self.height = image.height;
            let required_size = image.width as usize * image.height as usize * 3;
            self.buffer.resize(required_size, 0);
        }
        self.rgba_to_rgb(&image.pixels);
        Ok(&self.buffer)
    }

    /// Resizes the buffer to `width` x `height`, then captures `plot`.
    pub fn capture_sized(&mut self, plot: &Plot, width: u32, height: u32) -> Result<&[u8]> {
        self.resize(width, height);
        self.capture(plot)
    }

    /// Returns an owned copy of the current frame buffer.
    pub fn buffer_copy(&self) -> Vec<u8> {
        self.buffer.clone()
    }
}
/// Streams rendered frames into a video/GIF encoder one frame at a time.
pub struct VideoStream {
    // Encoder created from the output path by `create_encoder`.
    encoder: Box<dyn Encoder>,
    // Output settings; width/height are handed to the encoder on the
    // first recorded frame.
    config: VideoConfig,
    // Number of frames recorded so far.
    frame_count: u64,
    // True once the encoder has been initialized by the first frame.
    initialized: bool,
}
impl VideoStream {
    /// Creates a stream writing to `path` with explicit settings.
    ///
    /// # Errors
    /// Fails when no encoder can be created for `path`.
    pub fn new<P: AsRef<Path>>(path: P, config: VideoConfig) -> Result<Self> {
        let encoder = create_encoder(path.as_ref(), config.quality)?;
        Ok(Self {
            encoder,
            config,
            frame_count: 0,
            initialized: false,
        })
    }

    /// Creates a stream whose codec is inferred from `path`'s extension.
    pub fn from_path<P: AsRef<Path>>(path: P) -> Result<Self> {
        let config = VideoConfig::from_path(&path);
        Self::new(path, config)
    }

    /// Returns the stream's configuration.
    pub fn config(&self) -> &VideoConfig {
        &self.config
    }

    /// Returns how many frames have been recorded so far.
    pub fn frame_count(&self) -> u64 {
        self.frame_count
    }

    /// Records one RGB24 frame stamped with `tick`'s time.
    ///
    /// The encoder is lazily initialized with the configured dimensions on
    /// the first call.
    ///
    /// # Errors
    /// Fails when `rgb_data` is not exactly `width * height * 3` bytes
    /// (a malformed frame would otherwise reach the encoder), or when the
    /// encoder itself fails.
    pub fn record_frame(&mut self, rgb_data: &[u8], tick: &Tick) -> Result<()> {
        let expected = self.config.width as usize * self.config.height as usize * 3;
        if rgb_data.len() != expected {
            return Err(PlottingError::RenderError(format!(
                "frame buffer has {} bytes, expected {} for {}x{} RGB",
                rgb_data.len(),
                expected,
                self.config.width,
                self.config.height
            )));
        }
        if !self.initialized {
            self.encoder.init(self.config.width, self.config.height)?;
            self.initialized = true;
        }
        // `as u64` saturates: negative tick times become timestamp 0.
        let timestamp_ms = (tick.time * 1000.0) as u64;
        self.encoder.encode_frame(rgb_data, timestamp_ms)?;
        self.frame_count += 1;
        Ok(())
    }

    /// Records a frame whose dimensions are supplied by the caller.
    ///
    /// Before the encoder is initialized the stream adopts `width` x
    /// `height` as its output size; afterwards the dimensions are fixed.
    ///
    /// # Errors
    /// Fails when the dimensions differ from the initialized stream size
    /// (rather than silently encoding a mismatched frame), or when
    /// [`Self::record_frame`] fails.
    pub fn record_frame_sized(
        &mut self,
        rgb_data: &[u8],
        width: u32,
        height: u32,
        tick: &Tick,
    ) -> Result<()> {
        if !self.initialized {
            self.config.width = width;
            self.config.height = height;
        } else if width != self.config.width || height != self.config.height {
            return Err(PlottingError::RenderError(format!(
                "frame size {}x{} does not match initialized stream size {}x{}",
                width, height, self.config.width, self.config.height
            )));
        }
        self.record_frame(rgb_data, tick)
    }

    /// Finalizes the encoder, consuming the stream.
    ///
    /// # Errors
    /// Fails when no frames were recorded or finalization fails.
    pub fn save(self) -> Result<()> {
        if self.frame_count == 0 {
            return Err(PlottingError::RenderError("No frames recorded".into()));
        }
        self.encoder.finalize()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_video_config_default() {
        let cfg = VideoConfig::default();
        assert_eq!((cfg.width, cfg.height, cfg.framerate), (800, 600, 30));
    }

    #[test]
    fn test_video_config_builder() {
        let cfg = VideoConfig::new()
            .dimensions(1920, 1080)
            .framerate(60)
            .quality(Quality::High);
        assert_eq!((cfg.width, cfg.height), (1920, 1080));
        assert_eq!(cfg.framerate, 60);
        assert_eq!(cfg.quality, Quality::High);
    }

    #[test]
    fn test_video_config_from_path() {
        // Codec is inferred from the file extension.
        assert_eq!(VideoConfig::from_path("test.gif").codec, Codec::Gif);
        assert_eq!(VideoConfig::from_path("test.mp4").codec, Codec::Av1);
    }

    #[test]
    fn test_frame_delay() {
        // (framerate in fps, expected delay in centiseconds)
        let cases: [(u32, u16); 3] = [(30, 3), (60, 2), (10, 10)];
        for (fps, expected) in cases {
            assert_eq!(VideoConfig::new().framerate(fps).frame_delay_cs(), expected);
        }
    }

    #[test]
    fn test_frame_capture_new() {
        let capture = FrameCapture::new(100, 50);
        assert_eq!(capture.dimensions(), (100, 50));
        assert_eq!(capture.buffer.len(), 100 * 50 * 3);
    }

    #[test]
    fn test_frame_capture_resize() {
        let mut capture = FrameCapture::new(100, 100);
        capture.resize(200, 150);
        assert_eq!(capture.dimensions(), (200, 150));
        assert_eq!(capture.buffer.len(), 200 * 150 * 3);
    }
}