use std::path::{Path, PathBuf};
use std::time::Duration;
use crate::image::temporal::{TemporalCoherence, TemporalConfig};
use crate::image::{ColorMode, DitheringMethod, ImageRenderer};
use crate::{BrailleGrid, DotmaxError, Result};
use super::MediaPlayer;
extern crate ffmpeg_next as ffmpeg;
use ffmpeg::format::{input, Pixel};
use ffmpeg::media::Type;
use ffmpeg::software::scaling::{context::Context as ScalingContext, flag::Flags};
use ffmpeg::util::frame::video::Video as VideoFrame;
/// Newtype wrapper around FFmpeg's scaling context so it can be moved
/// across threads (the raw context is not `Send` by default).
struct SendableScaler(ScalingContext);
#[allow(clippy::non_send_fields_in_send_ty)]
// SAFETY: NOTE(review) — assumed sound because the scaler is only ever
// accessed through `&mut self` on `VideoPlayer` (no shared aliasing across
// threads is visible in this file); confirm that libswscale contexts are
// safe to *move* between threads, as this impl asserts.
unsafe impl Send for SendableScaler {}
/// Terminal video player: demuxes and decodes a video file with FFmpeg,
/// scales each frame to the terminal's braille-dot resolution, and renders
/// it to a `BrailleGrid`.
pub struct VideoPlayer {
/// Source file path, kept for error messages and for reopening on reset.
path: PathBuf,
/// Demuxer context for the opened container.
input_context: ffmpeg::format::context::Input,
/// Index of the selected video stream within the container.
video_stream_index: usize,
/// Decoder for the selected video stream.
decoder: ffmpeg::decoder::Video,
/// Converter to RGB24 at the terminal-sized pixel target.
scaler: SendableScaler,
/// Source video width in pixels.
width: u32,
/// Source video height in pixels.
height: u32,
/// Stream frame rate; falls back to 30.0 when the container reports none.
fps: f64,
/// Total duration, when the container reports a positive one.
video_duration: Option<Duration>,
/// Approximate frame count (duration × fps), when duration is known.
estimated_frame_count: Option<usize>,
/// Number of frames produced so far.
current_frame: usize,
/// Set once the stream has been fully decoded and drained.
playback_ended: bool,
/// Terminal size in character cells (columns).
terminal_width: usize,
/// Terminal size in character cells (rows).
terminal_height: usize,
/// Reusable frame holding the most recently decoded picture.
decoded_frame: VideoFrame,
/// Reusable frame holding the RGB24-scaled picture.
rgb_frame: VideoFrame,
/// True after EOF has been sent to the decoder (flush in progress).
eof_sent: bool,
/// Reusable tightly packed RGB byte buffer (row padding stripped).
rgb_buffer: Vec<u8>,
/// Dithering method applied when rendering frames.
dithering: DitheringMethod,
/// Optional threshold forwarded to the image renderer.
threshold: Option<u8>,
/// Brightness adjustment; 1.0 means unchanged and the pass is skipped.
brightness: f32,
/// Contrast adjustment; 1.0 means unchanged and the pass is skipped.
contrast: f32,
/// Gamma adjustment; 1.0 means unchanged and the pass is skipped.
gamma: f32,
/// Output color mode for rendered grids.
color_mode: ColorMode,
/// Frame-to-frame coherence state; reset on rewind.
temporal_coherence: TemporalCoherence,
}
impl std::fmt::Debug for VideoPlayer {
    /// Formats a concise summary of the player; fields without a `Debug`
    /// impl (decoder, scaler, frame buffers, temporal state) are omitted,
    /// which `finish_non_exhaustive` signals with a trailing `..`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut dbg = f.debug_struct("VideoPlayer");
        dbg.field("path", &self.path);
        dbg.field("width", &self.width);
        dbg.field("height", &self.height);
        dbg.field("fps", &self.fps);
        dbg.field("duration", &self.video_duration);
        dbg.field("frame_count", &self.estimated_frame_count);
        dbg.field("current_frame", &self.current_frame);
        dbg.field("dithering", &self.dithering);
        dbg.field("threshold", &self.threshold);
        dbg.field("brightness", &self.brightness);
        dbg.field("contrast", &self.contrast);
        dbg.field("gamma", &self.gamma);
        dbg.field("color_mode", &self.color_mode);
        dbg.finish_non_exhaustive()
    }
}
impl VideoPlayer {
/// Opens `path`, selects the best video stream, builds a decoder and an
/// RGB24 scaler sized to the current terminal, and returns a ready player.
///
/// # Errors
/// Returns `DotmaxError::VideoError` if FFmpeg initialization fails, the
/// file cannot be opened, no video stream exists, or the decoder/scaler
/// cannot be created.
pub fn new(path: impl AsRef<Path>) -> Result<Self> {
let path = path.as_ref().to_path_buf();
ffmpeg::init().map_err(|e| DotmaxError::VideoError {
path: path.clone(),
message: format!("FFmpeg initialization failed: {e}"),
})?;
let input_context = input(&path).map_err(|e| DotmaxError::VideoError {
path: path.clone(),
message: format!("Failed to open video file: {e}"),
})?;
// Let FFmpeg pick the best-ranked video stream in the container.
let video_stream = input_context
.streams()
.best(Type::Video)
.ok_or_else(|| DotmaxError::VideoError {
path: path.clone(),
message: "No video stream found in file".to_string(),
})?;
let video_stream_index = video_stream.index();
let codec_params = video_stream.parameters();
let context = ffmpeg::codec::context::Context::from_parameters(codec_params)
.map_err(|e| DotmaxError::VideoError {
path: path.clone(),
message: format!("Failed to create codec context: {e}"),
})?;
let decoder = context.decoder().video().map_err(|e| DotmaxError::VideoError {
path: path.clone(),
message: format!("Failed to create video decoder: {e}"),
})?;
let width = decoder.width();
let height = decoder.height();
// Average frame rate comes back as a rational; fall back to 30 fps when
// the denominator is zero (unknown rate) to avoid dividing by zero.
let fps = video_stream.avg_frame_rate();
let fps = if fps.denominator() != 0 {
f64::from(fps.numerator()) / f64::from(fps.denominator())
} else {
30.0 };
// Container duration is in AV_TIME_BASE units; convert to microseconds
// for `Duration`. A non-positive value means the duration is unknown.
let video_duration = if input_context.duration() > 0 {
let duration_us =
(input_context.duration() as u64 * 1_000_000) / ffmpeg::ffi::AV_TIME_BASE as u64;
Some(Duration::from_micros(duration_us))
} else {
None
};
// Rough frame count: duration × fps (only when duration is known).
let estimated_frame_count = video_duration.map(|d| (d.as_secs_f64() * fps) as usize);
// Query the terminal size; default to 80x24 when it cannot be read.
let (terminal_width, terminal_height) = crossterm::terminal::size()
.map(|(w, h)| (w as usize, h as usize))
.unwrap_or((80, 24));
// Each braille cell is a 2x4 dot matrix, so the pixel target is
// 2*columns wide by 4*rows tall.
let target_pixel_width = (terminal_width * 2) as u32;
let target_pixel_height = (terminal_height * 4) as u32;
// Scaler converts decoded frames to RGB24 at the terminal-sized target.
let scaler = SendableScaler(
ScalingContext::get(
decoder.format(),
width,
height,
Pixel::RGB24,
target_pixel_width,
target_pixel_height,
Flags::BILINEAR,
)
.map_err(|e| DotmaxError::VideoError {
path: path.clone(),
message: format!("Failed to create scaler: {e}"),
})?,
);
tracing::info!(
"Opened video: {:?}, {}x{} @ {:.2} fps, duration: {:?}",
path,
width,
height,
fps,
video_duration
);
// Preallocate the packed RGB buffer (3 bytes per pixel).
let rgb_buffer_size = (target_pixel_width * target_pixel_height * 3) as usize;
Ok(Self {
path,
input_context,
video_stream_index,
decoder,
scaler,
width,
height,
fps,
video_duration,
estimated_frame_count,
current_frame: 0,
playback_ended: false,
terminal_width,
terminal_height,
decoded_frame: VideoFrame::empty(),
rgb_frame: VideoFrame::empty(),
eof_sent: false,
rgb_buffer: vec![0u8; rgb_buffer_size],
dithering: DitheringMethod::Bayer,
threshold: None, brightness: 1.0,
contrast: 1.0,
gamma: 1.0,
color_mode: ColorMode::Monochrome,
temporal_coherence: TemporalCoherence::new(TemporalConfig::video()),
})
}
/// Source video width in pixels.
#[must_use]
pub const fn width(&self) -> u32 {
self.width
}
/// Source video height in pixels.
#[must_use]
pub const fn height(&self) -> u32 {
self.height
}
/// Frame rate of the video stream (30.0 fallback when unknown).
#[must_use]
pub const fn fps(&self) -> f64 {
self.fps
}
/// Total video duration, if the container reports one.
#[must_use]
pub const fn duration(&self) -> Option<Duration> {
self.video_duration
}
/// Number of frames produced so far.
#[must_use]
pub const fn current_frame_index(&self) -> usize {
self.current_frame
}
/// Builder-style setter for the dithering method.
#[must_use]
pub fn dithering(mut self, method: DitheringMethod) -> Self {
self.dithering = method;
self
}
/// Builder-style setter for the optional render threshold.
#[must_use]
pub fn threshold(mut self, threshold: Option<u8>) -> Self {
self.threshold = threshold;
self
}
/// Builder-style setter for brightness (1.0 = unchanged).
#[must_use]
pub fn brightness(mut self, brightness: f32) -> Self {
self.brightness = brightness;
self
}
/// Builder-style setter for contrast (1.0 = unchanged).
#[must_use]
pub fn contrast(mut self, contrast: f32) -> Self {
self.contrast = contrast;
self
}
/// Builder-style setter for gamma (1.0 = unchanged).
#[must_use]
pub fn gamma(mut self, gamma: f32) -> Self {
self.gamma = gamma;
self
}
/// Builder-style setter for the output color mode.
#[must_use]
pub fn color_mode(mut self, mode: ColorMode) -> Self {
self.color_mode = mode;
self
}
/// Currently configured dithering method.
#[must_use]
pub const fn get_dithering(&self) -> DitheringMethod {
self.dithering
}
/// Currently configured render threshold, if any.
#[must_use]
pub const fn get_threshold(&self) -> Option<u8> {
self.threshold
}
/// Currently configured brightness adjustment.
#[must_use]
pub const fn get_brightness(&self) -> f32 {
self.brightness
}
/// Currently configured contrast adjustment.
#[must_use]
pub const fn get_contrast(&self) -> f32 {
self.contrast
}
/// Currently configured gamma adjustment.
#[must_use]
pub const fn get_gamma(&self) -> f32 {
self.gamma
}
/// Currently configured output color mode.
#[must_use]
pub const fn get_color_mode(&self) -> ColorMode {
self.color_mode
}
/// Sets the dithering method for subsequent frames.
pub fn set_dithering(&mut self, method: DitheringMethod) {
self.dithering = method;
}
/// Sets (or clears) the render threshold for subsequent frames.
pub fn set_threshold(&mut self, threshold: Option<u8>) {
self.threshold = threshold;
}
/// Sets the brightness adjustment for subsequent frames.
pub fn set_brightness(&mut self, brightness: f32) {
self.brightness = brightness;
}
/// Sets the contrast adjustment for subsequent frames.
pub fn set_contrast(&mut self, contrast: f32) {
self.contrast = contrast;
}
/// Sets the gamma adjustment for subsequent frames.
pub fn set_gamma(&mut self, gamma: f32) {
self.gamma = gamma;
}
/// Sets the output color mode for subsequent frames.
pub fn set_color_mode(&mut self, mode: ColorMode) {
self.color_mode = mode;
}
/// Current temporal-coherence configuration.
#[must_use]
pub fn temporal_config(&self) -> &TemporalConfig {
self.temporal_coherence.config()
}
/// Replaces the temporal-coherence configuration.
pub fn set_temporal_config(&mut self, config: TemporalConfig) {
self.temporal_coherence.set_config(config);
}
/// Builder-style setter for the temporal-coherence configuration.
#[must_use]
pub fn temporal_coherence(mut self, config: TemporalConfig) -> Self {
self.temporal_coherence.set_config(config);
self
}
/// Clears accumulated temporal-coherence state.
pub fn reset_temporal_state(&mut self) {
self.temporal_coherence.reset();
}
/// Pulls the next decoded frame into `self.decoded_frame`.
///
/// Returns `Some(Ok(()))` when a frame was decoded, `Some(Err(_))` on a
/// decode error, and `None` once the stream is exhausted.
fn decode_next_frame(&mut self) -> Option<Result<()>> {
if self.playback_ended {
return None;
}
loop {
match self.decoder.receive_frame(&mut self.decoded_frame) {
Ok(()) => {
return Some(Ok(()));
}
// EAGAIN: decoder needs more input; fall through and feed a packet.
Err(ffmpeg::Error::Other { errno }) if errno == ffmpeg::error::EAGAIN => {
}
Err(ffmpeg::Error::Eof) => {
// All buffered frames have been drained after EOF.
self.playback_ended = true;
return None;
}
Err(e) => {
return Some(Err(DotmaxError::VideoError {
path: self.path.clone(),
message: format!("Frame decode error: {e}"),
}));
}
}
// Feed exactly one packet from our video stream, skipping packets
// that belong to other streams (audio, subtitles, ...).
let mut found_video_packet = false;
for (stream, packet) in self.input_context.packets() {
if stream.index() == self.video_stream_index {
if let Err(e) = self.decoder.send_packet(&packet) {
tracing::warn!("Error sending packet to decoder: {}", e);
}
found_video_packet = true;
break;
}
}
if !found_video_packet {
// No packets left. Send EOF once so the decoder can flush its
// remaining buffered frames; reaching here a second time means
// the flush is complete and playback is over.
if self.eof_sent {
self.playback_ended = true;
return None;
}
self.eof_sent = true;
if let Err(e) = self.decoder.send_eof() {
tracing::warn!("Error sending EOF to decoder: {}", e);
}
}
}
}
/// Scales the current decoded frame to RGB24 and renders it into a
/// `BrailleGrid` using the configured image-processing settings.
fn frame_to_grid(&mut self) -> Result<BrailleGrid> {
self.scaler
.0
.run(&self.decoded_frame, &mut self.rgb_frame)
.map_err(|e| DotmaxError::VideoError {
path: self.path.clone(),
message: format!("Frame scaling error: {e}"),
})?;
let data = self.rgb_frame.data(0);
let stride = self.rgb_frame.stride(0);
// Recompute the target from the current terminal size (may have been
// updated by handle_resize since the buffer was allocated).
let target_width = (self.terminal_width * 2) as u32;
let target_height = (self.terminal_height * 4) as u32;
let expected_size = (target_width * target_height * 3) as usize;
if self.rgb_buffer.len() != expected_size {
self.rgb_buffer.resize(expected_size, 0);
}
// Copy row by row: FFmpeg may pad each row (stride >= width * 3), so
// a single copy of the whole plane would include padding bytes.
let mut offset = 0;
for y in 0..target_height {
let row_start = (y as usize) * stride;
let row_len = (target_width as usize) * 3;
self.rgb_buffer[offset..offset + row_len]
.copy_from_slice(&data[row_start..row_start + row_len]);
offset += row_len;
}
// NOTE(review): `from_raw` takes ownership, so this clone allocates a
// full frame-sized Vec on every call; consider a strategy that avoids
// the per-frame allocation.
let img =
image::RgbImage::from_raw(target_width, target_height, self.rgb_buffer.clone())
.ok_or_else(|| DotmaxError::VideoError {
path: self.path.clone(),
message: "Failed to create image from frame data".to_string(),
})?;
let rgba_img = image::DynamicImage::ImageRgb8(img).into_rgba8();
let mut renderer = ImageRenderer::new()
.load_from_rgba(rgba_img)
.resize(self.terminal_width, self.terminal_height, false)? .dithering(self.dithering)
.color_mode(self.color_mode);
if let Some(t) = self.threshold {
renderer = renderer.threshold(t);
}
// Skip adjustment passes that are effectively identity (value == 1.0).
if (self.brightness - 1.0).abs() > f32::EPSILON {
renderer = renderer.brightness(self.brightness)?;
}
if (self.contrast - 1.0).abs() > f32::EPSILON {
renderer = renderer.contrast(self.contrast)?;
}
if (self.gamma - 1.0).abs() > f32::EPSILON {
renderer = renderer.gamma(self.gamma)?;
}
let grid = renderer.render()?;
Ok(grid)
}
/// Per-frame display delay derived from the stream fps; ~30 fps (33 ms)
/// fallback when the rate is zero or negative.
fn frame_delay(&self) -> Duration {
if self.fps > 0.0 {
Duration::from_secs_f64(1.0 / self.fps)
} else {
Duration::from_millis(33) }
}
}
impl MediaPlayer for VideoPlayer {
/// Decodes and renders the next frame, returning it together with the
/// delay to hold it on screen; `None` once playback has ended.
fn next_frame(&mut self) -> Option<Result<(BrailleGrid, Duration)>> {
match self.decode_next_frame() {
Some(Ok(())) => {}
Some(Err(e)) => return Some(Err(e)),
None => return None,
}
let grid = match self.frame_to_grid() {
Ok(g) => g,
Err(e) => return Some(Err(e)),
};
let delay = self.frame_delay();
self.current_frame += 1;
Some(Ok((grid, delay)))
}
/// Rewinds playback to the start: seeks the demuxer to timestamp 0 and,
/// if that fails, falls back to reopening the file from scratch.
fn reset(&mut self) {
if let Err(e) = self
.input_context
.seek(0, std::ops::RangeFull)
{
tracing::warn!("Failed to seek to beginning: {}", e);
// Seek failed (e.g. non-seekable input): rebuild the whole player.
// If reopening also fails, fall through and just reset the state.
if let Ok(new_player) = Self::new(&self.path) {
*self = new_player;
return;
}
}
// Drop frames still buffered in the decoder and clear playback state.
self.decoder.flush();
self.current_frame = 0;
self.playback_ended = false;
self.eof_sent = false;
self.temporal_coherence.reset();
}
/// Estimated total frame count (duration × fps), if duration is known.
fn frame_count(&self) -> Option<usize> {
self.estimated_frame_count
}
/// Videos play through once; no looping.
fn loop_count(&self) -> Option<u16> {
Some(1)
}
/// Rebuilds the scaler and RGB buffer for a new terminal size; on scaler
/// creation failure the previous scaler (and size) is kept.
fn handle_resize(&mut self, width: usize, height: usize) {
if self.terminal_width == width && self.terminal_height == height {
return; }
self.terminal_width = width;
self.terminal_height = height;
// Same 2x4 dots-per-cell mapping used in `new`.
let target_pixel_width = (width * 2) as u32;
let target_pixel_height = (height * 4) as u32;
match ScalingContext::get(
self.decoder.format(),
self.width,
self.height,
Pixel::RGB24,
target_pixel_width,
target_pixel_height,
Flags::BILINEAR,
) {
Ok(new_scaler) => {
self.scaler = SendableScaler(new_scaler);
let rgb_buffer_size = (target_pixel_width * target_pixel_height * 3) as usize;
self.rgb_buffer.resize(rgb_buffer_size, 0);
tracing::debug!("VideoPlayer resized to {}x{}", width, height);
}
Err(e) => {
// NOTE(review): terminal_width/height were already updated above,
// so on failure the stored size and the scaler disagree until the
// next successful resize — confirm this is acceptable.
tracing::warn!("Failed to resize video scaler: {}", e);
}
}
}
}
#[cfg(test)]
mod tests {
use super::*;
// Compile-time assertion that `VideoPlayer: Send` (exercises the
// `unsafe impl Send for SendableScaler`); intentionally never called.
fn _assert_video_player_send() {
fn assert_send<T: Send>() {}
assert_send::<VideoPlayer>();
}
#[test]
fn test_video_player_new_nonexistent() {
// Opening a path that does not exist must surface an error.
let player = VideoPlayer::new("nonexistent_video.mp4");
assert!(player.is_err(), "Should fail for nonexistent file");
}
#[test]
fn test_video_player_new_invalid_file() {
use std::io::Write;
// Write a few junk bytes so the demuxer has something to reject.
let temp_dir = std::env::temp_dir();
let temp_file = temp_dir.join("invalid_video_test.mp4");
let mut file = std::fs::File::create(&temp_file).unwrap();
file.write_all(&[0x00, 0x00, 0x00, 0x00]).unwrap();
drop(file);
let player = VideoPlayer::new(&temp_file);
assert!(player.is_err(), "Should fail for invalid video file");
// Best-effort cleanup; failure to remove is not a test failure.
let _ = std::fs::remove_file(&temp_file);
}
#[test]
fn test_video_player_debug() {
// NOTE(review): this test formats a locally built string and never
// touches `VideoPlayer`'s actual `Debug` impl, so the assertion is a
// tautology. A real test would need a valid video fixture to
// construct a `VideoPlayer` and format it.
let debug_output = format!(
"VideoPlayer path={:?} width={} height={}",
PathBuf::from("test.mp4"),
1920,
1080
);
assert!(debug_output.contains("VideoPlayer"));
}
}