use anyhow::{anyhow, Context, Result};
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use std::ffi::OsStr;
use std::fs;
use std::io::Write;
use std::path::{Path, PathBuf};
use walkdir::WalkDir;
pub mod convert;
pub mod crop;
pub mod loop_detect;
pub mod preprocessing;
pub mod render;
pub mod video;
/// Locations of the external `ffmpeg`/`ffprobe` binaries.
///
/// When a path is left unset, the bare command name is used and resolved
/// through the system `PATH`.
#[derive(Debug, Clone, Default)]
pub struct FfmpegConfig {
    pub ffmpeg_path: Option<PathBuf>,
    pub ffprobe_path: Option<PathBuf>,
}

impl FfmpegConfig {
    /// Config that resolves both tools via `PATH`.
    pub fn new() -> Self {
        Self::default()
    }

    /// Builder-style setter for an explicit ffmpeg binary location.
    pub fn with_ffmpeg<P: Into<PathBuf>>(mut self, path: P) -> Self {
        self.ffmpeg_path = Some(path.into());
        self
    }

    /// Builder-style setter for an explicit ffprobe binary location.
    pub fn with_ffprobe<P: Into<PathBuf>>(mut self, path: P) -> Self {
        self.ffprobe_path = Some(path.into());
        self
    }

    /// Command to invoke for ffmpeg: the configured path, or plain `"ffmpeg"`.
    pub(crate) fn ffmpeg_cmd(&self) -> &OsStr {
        match &self.ffmpeg_path {
            Some(path) => path.as_os_str(),
            None => OsStr::new("ffmpeg"),
        }
    }

    /// Command to invoke for ffprobe: the configured path, or plain `"ffprobe"`.
    pub(crate) fn ffprobe_cmd(&self) -> &OsStr {
        match &self.ffprobe_path {
            Some(path) => path.as_os_str(),
            None => OsStr::new("ffprobe"),
        }
    }
}
/// The pipeline stage a [`Progress`] report refers to.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum ProgressPhase {
    /// Decoding the source video into per-frame images.
    ExtractingFrames,
    /// Pulling the audio track out of the source video.
    ExtractingAudio,
    /// Converting extracted images into ASCII frames.
    ConvertingFrames,
    /// Encoding rendered ASCII frames into an output video.
    RenderingVideo,
    /// All work finished.
    Complete,
}
/// A single progress report delivered to caller-supplied callbacks.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Progress {
    /// Which pipeline stage this report describes.
    pub phase: ProgressPhase,
    /// Units completed so far (frames; microseconds of video during extraction).
    pub completed: usize,
    /// Total units expected; 0 when not yet known.
    pub total: usize,
    /// `completed / total * 100`; 0.0 when the total is unknown.
    pub percentage: f64,
    /// Human-readable status line suitable for display.
    pub message: String,
}
impl Progress {
    /// Shared percentage computation: `done / total * 100`, or 0 when the
    /// total is zero/unknown.
    fn percent(done: f64, total: f64) -> f64 {
        if total > 0.0 {
            (done / total) * 100.0
        } else {
            0.0
        }
    }

    /// Report emitted when frame extraction begins (totals not yet known).
    pub fn extracting_frames() -> Self {
        Self {
            phase: ProgressPhase::ExtractingFrames,
            completed: 0,
            total: 0,
            percentage: 0.0,
            message: "Extracting frames from video...".to_string(),
        }
    }

    /// Report for an in-flight extraction, measured in microseconds of
    /// source video processed versus total duration.
    pub fn extracting_frames_progress(current_time_us: u64, total_duration_us: u64) -> Self {
        let percentage = Self::percent(current_time_us as f64, total_duration_us as f64);
        Self {
            phase: ProgressPhase::ExtractingFrames,
            completed: current_time_us as usize,
            total: total_duration_us as usize,
            percentage,
            message: format!("Extracting frames: {:.1}%", percentage),
        }
    }

    /// Report emitted when audio extraction begins.
    pub fn extracting_audio() -> Self {
        Self {
            phase: ProgressPhase::ExtractingAudio,
            completed: 0,
            total: 0,
            percentage: 0.0,
            message: "Extracting audio from video...".to_string(),
        }
    }

    /// Report for image-to-ASCII conversion progress.
    pub fn converting_frames(completed: usize, total: usize) -> Self {
        let percentage = Self::percent(completed as f64, total as f64);
        Self {
            phase: ProgressPhase::ConvertingFrames,
            completed,
            total,
            percentage,
            message: format!("Converting frame {} of {}", completed, total),
        }
    }

    /// Report for video re-encoding progress.
    pub fn rendering_video(completed: usize, total: usize) -> Self {
        let percentage = Self::percent(completed as f64, total as f64);
        Self {
            phase: ProgressPhase::RenderingVideo,
            completed,
            total,
            percentage,
            message: format!("Rendering frame {} of {}", completed, total),
        }
    }

    /// Terminal report: all frames done, pinned at 100%.
    pub fn complete(total_frames: usize) -> Self {
        Self {
            phase: ProgressPhase::Complete,
            completed: total_frames,
            total: total_frames,
            percentage: 100.0,
            message: format!("Conversion complete: {} frames", total_frames),
        }
    }
}
/// Summary of a completed conversion, also the source for `details.toml`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConversionResult {
    /// Number of frames produced.
    pub frame_count: usize,
    /// Character columns per frame.
    pub columns: u32,
    /// Font ratio the frames were converted with.
    pub font_ratio: f32,
    /// Luminance parameter the frames were converted with.
    pub luminance: u8,
    /// Frame rate, when the input was a video (`None` for image/directory runs).
    pub fps: Option<u32>,
    /// Output mode label, e.g. "text-only", "color-only", "text+color".
    pub output_mode: String,
    /// Whether an audio track was extracted alongside the frames.
    pub audio_extracted: bool,
    /// Directory the frames (and `details.toml`) were written to.
    pub output_dir: PathBuf,
    /// Background color label (currently always "black" where constructed here).
    pub background_color: String,
    /// Foreground color label (currently always "white" where constructed here).
    pub color: String,
}
/// On-disk schema of `details.toml`; serialized only, never read back here.
#[derive(Debug, Serialize)]
struct Details {
    /// Crate version that produced the output (from `CARGO_PKG_VERSION`).
    version: String,
    frames: usize,
    luminance: u8,
    font_ratio: f32,
    columns: u32,
    // Omitted from the TOML entirely when no frame rate applies.
    #[serde(skip_serializing_if = "Option::is_none")]
    fps: Option<u32>,
    output: String,
    audio: bool,
    background_color: String,
    color: String,
}
impl ConversionResult {
    /// Maps this result into the [`Details`] record, stamping in the crate
    /// version captured at compile time.
    fn to_details(&self) -> Details {
        Details {
            version: env!("CARGO_PKG_VERSION").to_string(),
            frames: self.frame_count,
            luminance: self.luminance,
            font_ratio: self.font_ratio,
            columns: self.columns,
            fps: self.fps,
            output: self.output_mode.clone(),
            audio: self.audio_extracted,
            background_color: self.background_color.clone(),
            color: self.color.clone(),
        }
    }

    /// Writes `details.toml` into `self.output_dir` and returns the path written.
    ///
    /// # Errors
    /// Fails if TOML serialization or the filesystem write fails.
    pub fn write_details_file(&self) -> Result<PathBuf> {
        let details_path = self.output_dir.join("details.toml");
        let toml_string = toml::to_string_pretty(&self.to_details()).context("serializing details to TOML")?;
        fs::write(&details_path, &toml_string).with_context(|| format!("writing details file to {}", details_path.display()))?;
        Ok(details_path)
    }

    /// Renders the details record as a TOML string.
    ///
    /// # Panics
    /// Panics if serialization fails; not expected for this plain struct.
    pub fn to_details_string(&self) -> String {
        toml::to_string_pretty(&self.to_details()).expect("failed to serialize details to TOML")
    }
}
/// A named bundle of conversion parameters, selectable via `AppConfig::presets`.
#[derive(Debug, Deserialize, Clone)]
pub struct Preset {
    /// Output width in characters.
    pub columns: u32,
    /// Frame rate used when extracting video frames.
    pub fps: u32,
    /// Font ratio forwarded to the converter.
    /// NOTE(review): presumably the character-cell aspect ratio — confirm in `convert`.
    pub font_ratio: f32,
    /// Luminance parameter forwarded to the converter.
    pub luminance: u8,
}
/// The built-in character ramp used when a config omits `ascii_chars`.
fn default_ascii_chars() -> String {
    String::from(" .'`^,:;Il!i><~+_-?][}{1)(|/tfjrxnuvczXYUJCLQ0OZmwqpdbkhao*#MW&8%B@$")
}
/// Default clip start timestamp when a config omits `default_start`.
fn default_start_str() -> String {
    String::from("0")
}
/// Default clip end timestamp (empty = no explicit end) when a config omits it.
fn default_end_str() -> String {
    "".to_string()
}
/// Application configuration: named presets plus conversion defaults.
///
/// Deserialized from JSON (see `AsciiConverter::from_config_file`); the
/// `#[serde(default = …)]` attributes fill fields a config file omits.
#[derive(Debug, Deserialize, Clone)]
pub struct AppConfig {
    /// Named presets available for lookup.
    pub presets: std::collections::HashMap<String, Preset>,
    /// Name of the preset to use when none is requested.
    pub default_preset: String,
    /// Character ramp used for conversion; must be pure ASCII.
    #[serde(default = "default_ascii_chars")]
    pub ascii_chars: String,
    /// Default clip start timestamp.
    #[serde(default = "default_start_str")]
    pub default_start: String,
    /// Default clip end timestamp; empty means "to the end".
    #[serde(default = "default_end_str")]
    pub default_end: String,
}
impl Default for AppConfig {
fn default() -> Self {
let default_json = r#"{
"presets": {
"default": {"columns": 400, "fps": 30, "font_ratio": 0.7, "luminance": 20},
"small": {"columns": 80, "fps": 24, "font_ratio": 0.44, "luminance": 20},
"large": {"columns": 800, "fps": 60, "font_ratio": 0.7, "luminance": 20}
},
"default_preset": "default",
"ascii_chars": " .'`^,:;Il!i><~+_-?][}{1)(|/tfjrxnuvczXYUJCLQ0OZmwqpdbkhao*#MW&8%B@$",
"default_start": "0",
"default_end": ""
}"#;
serde_json::from_str(default_json).unwrap()
}
}
/// Which channels a conversion emits for each frame.
///
/// `Eq` is derived alongside `PartialEq` (clippy `derive_partial_eq_without_eq`):
/// equality on this fieldless enum is total, and `Eq` lets the type be used
/// where full equivalence is required.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum OutputMode {
    /// ASCII text only.
    TextOnly,
    /// Color information only.
    ColorOnly,
    /// Both text and color.
    TextAndColor,
}
/// Parameters for a single image/frame → ASCII conversion.
#[derive(Debug, Clone)]
pub struct ConversionOptions {
    /// Output width in characters; `None` lets the converter decide.
    pub columns: Option<u32>,
    /// Font ratio forwarded to the converter.
    pub font_ratio: f32,
    /// Luminance parameter forwarded to the converter.
    pub luminance: u8,
    /// Character ramp used for conversion; must be pure ASCII.
    pub ascii_chars: String,
    /// Which channels to emit per frame.
    pub output_mode: OutputMode,
}
impl Default for ConversionOptions {
fn default() -> Self {
Self {
columns: Some(400),
font_ratio: 0.7,
luminance: 20,
ascii_chars: default_ascii_chars(),
output_mode: OutputMode::TextOnly,
}
}
}
impl ConversionOptions {
    /// Builder: set an explicit column count.
    pub fn with_columns(self, columns: u32) -> Self {
        Self { columns: Some(columns), ..self }
    }

    /// Builder: set the font ratio.
    pub fn with_font_ratio(self, font_ratio: f32) -> Self {
        Self { font_ratio, ..self }
    }

    /// Builder: set the luminance parameter.
    pub fn with_luminance(self, luminance: u8) -> Self {
        Self { luminance, ..self }
    }

    /// Builder: set the character ramp.
    pub fn with_ascii_chars(self, ascii_chars: String) -> Self {
        Self { ascii_chars, ..self }
    }

    /// Builder: choose which channels to emit.
    pub fn with_output_mode(self, mode: OutputMode) -> Self {
        Self { output_mode: mode, ..self }
    }

    /// Derives options from a preset; output defaults to text-only.
    pub fn from_preset(preset: &Preset, ascii_chars: String) -> Self {
        Self {
            ascii_chars,
            columns: Some(preset.columns),
            font_ratio: preset.font_ratio,
            luminance: preset.luminance,
            output_mode: OutputMode::TextOnly,
        }
    }
}
/// Parameters for extracting frames (and optionally audio) from a video.
#[derive(Debug, Clone)]
pub struct VideoOptions {
    /// Frames per second to extract.
    pub fps: u32,
    /// Clip start timestamp (ffmpeg time syntax); `None` = from the beginning.
    pub start: Option<String>,
    /// Clip end timestamp; `None` = to the end.
    pub end: Option<String>,
    /// Width of extracted frames in character columns.
    pub columns: u32,
    /// Whether to also pull out the audio track.
    pub extract_audio: bool,
    /// Optional ffmpeg filter string applied before extraction.
    pub preprocess_filter: Option<String>,
}
impl Default for VideoOptions {
fn default() -> Self {
Self {
fps: 30,
start: None,
end: None,
columns: 400,
extract_audio: false,
preprocess_filter: None,
}
}
}
/// Parameters for encoding rendered ASCII frames back into a video.
#[derive(Debug, Clone)]
pub struct ToVideoOptions {
    /// Destination video file.
    pub output_path: PathBuf,
    /// Glyph atlas font size used for rendering.
    pub font_size: f32,
    /// Encoder quality factor passed to ffmpeg (lower = higher quality).
    pub crf: u8,
    /// Whether to mux the extracted audio track into the output.
    pub mux_audio: bool,
    /// Force color rendering on/off; `None` = decide from the frame type.
    pub use_colors: Option<bool>,
}
impl Default for ToVideoOptions {
fn default() -> Self {
Self {
output_path: PathBuf::from("output.mp4"),
font_size: 14.0,
crf: 18,
mux_audio: false,
use_colors: None,
}
}
}
/// High-level entry point tying together configuration, ffmpeg invocation,
/// and the conversion/render pipelines.
pub struct AsciiConverter {
    // Presets and conversion defaults.
    config: AppConfig,
    // Where to find the ffmpeg/ffprobe binaries.
    ffmpeg_config: FfmpegConfig,
}
impl AsciiConverter {
    /// Converter with the built-in default config; ffmpeg/ffprobe resolved via `PATH`.
    pub fn new() -> Self {
        Self { config: AppConfig::default(), ffmpeg_config: FfmpegConfig::default() }
    }

    /// Converter with an explicit config.
    ///
    /// # Errors
    /// Rejects configs whose `ascii_chars` contains non-ASCII characters,
    /// since the converter works on raw bytes.
    pub fn with_config(config: AppConfig) -> Result<Self> {
        if !config.ascii_chars.is_ascii() {
            return Err(anyhow!("Config contains non-ASCII characters in ascii_chars field. This will cause corrupted output. Please use only ASCII characters."));
        }
        Ok(Self { config, ffmpeg_config: FfmpegConfig::default() })
    }

    /// Builder-style override of the ffmpeg/ffprobe locations.
    pub fn with_ffmpeg_config(mut self, ffmpeg_config: FfmpegConfig) -> Self {
        self.ffmpeg_config = ffmpeg_config;
        self
    }

    /// Loads and validates a JSON config file.
    ///
    /// # Errors
    /// Fails on unreadable files, invalid JSON, or non-ASCII `ascii_chars`.
    pub fn from_config_file(path: &Path) -> Result<Self> {
        let text = fs::read_to_string(path)
            .with_context(|| format!("reading config {}", path.display()))?;
        let config: AppConfig = serde_json::from_str(&text).context("parsing config json")?;
        if !config.ascii_chars.is_ascii() {
            return Err(anyhow!(
                "Config file {} contains non-ASCII characters in ascii_chars field. \
                 This will cause corrupted output. Please use only ASCII characters.",
                path.display()
            ));
        }
        Ok(Self { config, ffmpeg_config: FfmpegConfig::default() })
    }

    /// Read-only access to the active configuration.
    pub fn config(&self) -> &AppConfig {
        &self.config
    }

    /// Read-only access to the ffmpeg/ffprobe locations.
    pub fn ffmpeg_config(&self) -> &FfmpegConfig {
        &self.ffmpeg_config
    }

    /// Converts a single image file to ASCII, writing the result to `output`.
    pub fn convert_image(&self, input: &Path, output: &Path, options: &ConversionOptions) -> Result<()> {
        // The converter operates on the ramp's raw bytes (ASCII-only by validation).
        let ascii_chars = options.ascii_chars.as_bytes();
        convert::convert_image_to_ascii(input, output, options.font_ratio, options.luminance, options.columns, ascii_chars, &options.output_mode)
    }

    /// Converts a single image to an in-memory ASCII string.
    /// Note: `options.output_mode` does not apply here — only text is produced.
    pub fn image_to_string(&self, input: &Path, options: &ConversionOptions) -> Result<String> {
        let ascii_chars = options.ascii_chars.as_bytes();
        convert::image_to_ascii_string(input, options.font_ratio, options.luminance, options.columns, ascii_chars)
    }

    /// Video → ASCII frame directory, without progress reporting.
    /// Convenience wrapper over [`Self::convert_video_with_progress`].
    pub fn convert_video(&self, input: &Path, output_dir: &Path, video_opts: &VideoOptions, conv_opts: &ConversionOptions, keep_images: bool) -> Result<ConversionResult> {
        self.convert_video_with_progress(input, output_dir, video_opts, conv_opts, keep_images, None::<fn(usize, usize)>)
    }

    /// Video → ASCII frame directory with an optional `(completed, total)`
    /// callback for the conversion phase.
    ///
    /// Pipeline: extract frames (and optionally audio) into `output_dir`,
    /// convert them in place, then write `details.toml`.
    pub fn convert_video_with_progress<F>(&self, input: &Path, output_dir: &Path, video_opts: &VideoOptions, conv_opts: &ConversionOptions, keep_images: bool, progress_callback: Option<F>) -> Result<ConversionResult> where F: Fn(usize, usize) + Send + Sync {
        fs::create_dir_all(output_dir).context("creating output directory")?;
        video::extract_video_frames(input, output_dir, video_opts.columns, video_opts.fps, video_opts.start.as_deref(), video_opts.end.as_deref(), video_opts.preprocess_filter.as_deref(), &self.ffmpeg_config)?;
        if video_opts.extract_audio {
            video::extract_audio(input, output_dir, video_opts.start.as_deref(), video_opts.end.as_deref(), &self.ffmpeg_config)?;
        }
        let ascii_chars = conv_opts.ascii_chars.as_bytes();
        // Input and output directory are the same: frames are converted in place.
        let total_frames = convert::convert_directory_parallel_with_progress(output_dir, output_dir, conv_opts.font_ratio, conv_opts.luminance, keep_images, ascii_chars, &conv_opts.output_mode, progress_callback)?;
        let output_mode_str = match conv_opts.output_mode {
            OutputMode::TextOnly => "text-only",
            OutputMode::ColorOnly => "color-only",
            OutputMode::TextAndColor => "text+color",
        };
        let result = ConversionResult {
            frame_count: total_frames,
            // Recorded columns prefer the conversion setting; fall back to the
            // extraction width.
            columns: conv_opts.columns.unwrap_or(video_opts.columns),
            font_ratio: conv_opts.font_ratio,
            luminance: conv_opts.luminance,
            fps: Some(video_opts.fps),
            output_mode: output_mode_str.to_string(),
            audio_extracted: video_opts.extract_audio,
            output_dir: output_dir.to_path_buf(),
            background_color: "black".to_string(),
            color: "white".to_string(),
        };
        result.write_details_file()?;
        Ok(result)
    }

    /// Same pipeline as [`Self::convert_video_with_progress`], but reporting
    /// phase-aware [`Progress`] values instead of raw frame counts.
    pub fn convert_video_with_detailed_progress<F>(&self, input: &Path, output_dir: &Path, video_opts: &VideoOptions, conv_opts: &ConversionOptions, keep_images: bool, progress_callback: F) -> Result<ConversionResult> where F: Fn(Progress) + Send + Sync {
        fs::create_dir_all(output_dir).context("creating output directory")?;
        video::extract_video_frames_with_progress(input, output_dir, video_opts, &self.ffmpeg_config, &progress_callback)?;
        if video_opts.extract_audio {
            progress_callback(Progress::extracting_audio());
            video::extract_audio(input, output_dir, video_opts.start.as_deref(), video_opts.end.as_deref(), &self.ffmpeg_config)?;
        }
        let ascii_chars = conv_opts.ascii_chars.as_bytes();
        let total_frames = convert::convert_directory_parallel_with_detailed_progress(output_dir, output_dir, conv_opts.font_ratio, conv_opts.luminance, keep_images, ascii_chars, &conv_opts.output_mode, &progress_callback)?;
        // `Complete` is reported before details.toml is written; the write can
        // still fail afterwards.
        progress_callback(Progress::complete(total_frames));
        let output_mode_str = match conv_opts.output_mode {
            OutputMode::TextOnly => "text-only",
            OutputMode::ColorOnly => "color-only",
            OutputMode::TextAndColor => "text+color",
        };
        let result = ConversionResult {
            frame_count: total_frames,
            columns: conv_opts.columns.unwrap_or(video_opts.columns),
            font_ratio: conv_opts.font_ratio,
            luminance: conv_opts.luminance,
            fps: Some(video_opts.fps),
            output_mode: output_mode_str.to_string(),
            audio_extracted: video_opts.extract_audio,
            output_dir: output_dir.to_path_buf(),
            background_color: "black".to_string(),
            color: "white".to_string(),
        };
        result.write_details_file()?;
        Ok(result)
    }

    /// Converts every image in `input_dir` to ASCII frames in `output_dir`;
    /// returns the number of frames converted.
    pub fn convert_directory(&self, input_dir: &Path, output_dir: &Path, options: &ConversionOptions, keep_images: bool) -> Result<usize> {
        fs::create_dir_all(output_dir)?;
        let ascii_chars = options.ascii_chars.as_bytes();
        convert::convert_directory_parallel(input_dir, output_dir, options.font_ratio, options.luminance, keep_images, ascii_chars, &options.output_mode)
    }

    /// [`Self::convert_directory`] with phase-aware progress reporting.
    pub fn convert_directory_with_progress<F>(&self, input_dir: &Path, output_dir: &Path, options: &ConversionOptions, keep_images: bool, progress_callback: F) -> Result<usize> where F: Fn(Progress) + Send + Sync {
        fs::create_dir_all(output_dir)?;
        let ascii_chars = options.ascii_chars.as_bytes();
        convert::convert_directory_parallel_with_detailed_progress(input_dir, output_dir, options.font_ratio, options.luminance, keep_images, ascii_chars, &options.output_mode, &progress_callback)
    }

    /// Looks up a preset by name in the active config.
    pub fn get_preset(&self, name: &str) -> Option<&Preset> {
        self.config.presets.get(name)
    }

    /// Builds [`ConversionOptions`] from a named preset plus the config's ramp.
    ///
    /// # Errors
    /// Fails when the preset name is unknown.
    pub fn options_from_preset(&self, preset_name: &str) -> Result<ConversionOptions> {
        let preset = self
            .get_preset(preset_name)
            .ok_or_else(|| anyhow!("Preset '{}' not found", preset_name))?;
        Ok(ConversionOptions::from_preset(preset, self.config.ascii_chars.clone()))
    }

    /// Full video → ASCII → video pipeline using a per-process temp directory
    /// that is removed (best-effort) even when the inner pipeline fails.
    pub fn convert_video_to_video<F>(&self, input: &Path, video_opts: &VideoOptions, conv_opts: &ConversionOptions, to_video_opts: &ToVideoOptions, progress_callback: F) -> Result<ConversionResult> where F: Fn(Progress) + Send + Sync {
        // NOTE(review): keyed on process id only — two concurrent runs in the
        // same process would share (and delete) this directory.
        let temp_dir = std::env::temp_dir().join(format!("cascii_tovideo_{}", std::process::id()));
        fs::create_dir_all(&temp_dir).context("creating temp directory")?;
        let result = self.convert_video_to_video_inner(input, video_opts, conv_opts, to_video_opts, &temp_dir, &progress_callback);
        // Cleanup is best-effort; the pipeline result takes precedence.
        let _ = fs::remove_dir_all(&temp_dir);
        result
    }

    /// Inner pipeline: extract frames → convert to ASCII in parallel batches →
    /// rasterize → stream raw RGB into an ffmpeg encoder process.
    fn convert_video_to_video_inner<F>(&self, input: &Path, video_opts: &VideoOptions, conv_opts: &ConversionOptions, to_video_opts: &ToVideoOptions, temp_dir: &Path, progress_callback: &F) -> Result<ConversionResult> where F: Fn(Progress) + Send + Sync {
        use std::sync::atomic::{AtomicUsize, Ordering};
        use std::sync::Arc;
        video::extract_video_frames_with_progress(input, temp_dir, video_opts, &self.ffmpeg_config, progress_callback)?;
        let audio_path = if to_video_opts.mux_audio {
            progress_callback(Progress::extracting_audio());
            video::extract_audio(input, temp_dir, video_opts.start.as_deref(), video_opts.end.as_deref(), &self.ffmpeg_config)?;
            Some(temp_dir.join("audio.mp3"))
        } else {
            None
        };
        // Collect the extracted PNG frames (top level of temp_dir only) and
        // sort by filename to restore frame order.
        let mut png_paths: Vec<PathBuf> = WalkDir::new(temp_dir)
            .min_depth(1)
            .max_depth(1)
            .into_iter()
            .filter_map(|e| e.ok())
            .map(|e| e.into_path())
            .filter(|p| p.extension().map(|e| e == "png").unwrap_or(false))
            .collect();
        png_paths.sort();
        let total_frames = png_paths.len();
        if total_frames == 0 {
            return Err(anyhow!("No frames extracted from video"));
        }
        let atlas = render::build_glyph_atlas(to_video_opts.font_size)?;
        let ascii_chars = conv_opts.ascii_chars.as_bytes();
        // Convert the first frame only to learn the character-grid dimensions;
        // its text is discarded.
        let (first_ascii, first_w, first_h, _) = convert::image_to_ascii_with_colors(&png_paths[0], conv_opts.font_ratio, conv_opts.luminance, conv_opts.columns, ascii_chars)?;
        let _ = first_ascii;
        let mut pixel_w = first_w * atlas.cell_width;
        let mut pixel_h = first_h * atlas.cell_height;
        // Pad to even pixel dimensions — NOTE(review): presumably required by the
        // encoder's pixel format (e.g. yuv420p); confirm in render::spawn_ffmpeg_encoder.
        if pixel_w % 2 != 0 {
            pixel_w += 1;
        }
        if pixel_h % 2 != 0 {
            pixel_h += 1;
        }
        let mut child = render::spawn_ffmpeg_encoder(pixel_w, pixel_h, video_opts.fps, to_video_opts.crf, audio_path.as_deref(), &to_video_opts.output_path, &self.ffmpeg_config)?;
        let mut stdin = child.stdin.take().ok_or_else(|| anyhow!("failed to open ffmpeg stdin pipe"))?;
        let use_colors = conv_opts.output_mode != OutputMode::TextOnly;
        // Convert frames in fixed-size batches: each batch is converted in
        // parallel, then written to ffmpeg sequentially to preserve order.
        let batch_size = 100;
        let completed = Arc::new(AtomicUsize::new(0));
        progress_callback(Progress::rendering_video(0, total_frames));
        for batch_start in (0..total_frames).step_by(batch_size) {
            let batch_end = (batch_start + batch_size).min(total_frames);
            let batch = &png_paths[batch_start..batch_end];
            let frame_data: Vec<convert::AsciiFrameData> = batch
                .par_iter()
                .map(|path| {
                    let (ascii_text, width_chars, height_chars, rgb_colors) = convert::image_to_ascii_with_colors(path, conv_opts.font_ratio, conv_opts.luminance, conv_opts.columns, ascii_chars)?;
                    Ok(convert::AsciiFrameData { ascii_text, width_chars, height_chars, rgb_colors })
                })
                .collect::<Result<Vec<_>>>()?;
            for frame in &frame_data {
                let rgb_buf = render::render_ascii_frame_to_rgb(frame, &atlas, use_colors);
                if let Err(e) = stdin.write_all(&rgb_buf) {
                    // Close the pipe first so ffmpeg exits, then surface its
                    // stderr alongside the write error.
                    drop(stdin);
                    let output = child.wait_with_output().context("waiting for ffmpeg")?;
                    let stderr = String::from_utf8_lossy(&output.stderr);
                    return Err(anyhow!("ffmpeg encoding failed: {} (stderr: {})", e, stderr));
                }
                let current = completed.fetch_add(1, Ordering::SeqCst) + 1;
                let current_percent = if total_frames > 0 {
                    (current * 100) / total_frames
                } else {
                    0
                };
                let last_percent = if current > 1 {
                    ((current - 1) * 100) / total_frames
                } else {
                    0
                };
                // Throttle: only report when the integer percentage advances
                // (or on the very last frame).
                if current_percent > last_percent || current == total_frames {
                    progress_callback(Progress::rendering_video(current, total_frames));
                }
            }
        }
        // Closing stdin signals EOF so ffmpeg can finalize the file.
        drop(stdin);
        let output = child.wait_with_output().context("waiting for ffmpeg")?;
        if !output.status.success() {
            let stderr = String::from_utf8_lossy(&output.stderr);
            return Err(anyhow!("ffmpeg encoding failed: {}", stderr));
        }
        progress_callback(Progress::complete(total_frames));
        let output_mode_str = match conv_opts.output_mode {
            OutputMode::TextOnly => "text-only",
            OutputMode::ColorOnly => "color-only",
            OutputMode::TextAndColor => "text+color",
        };
        Ok(ConversionResult {
            frame_count: total_frames,
            columns: conv_opts.columns.unwrap_or(video_opts.columns),
            font_ratio: conv_opts.font_ratio,
            luminance: conv_opts.luminance,
            fps: Some(video_opts.fps),
            output_mode: output_mode_str.to_string(),
            audio_extracted: to_video_opts.mux_audio,
            output_dir: to_video_opts.output_path.parent().unwrap_or(Path::new(".")).to_path_buf(),
            background_color: "black".to_string(),
            color: "white".to_string(),
        })
    }

    /// Encodes a directory of already-converted ASCII frames into a video.
    ///
    /// Prefers `.cframe` (color) files; falls back to `frame_*.txt` text
    /// frames when none are present.
    pub fn render_frames_to_video<F>(&self, input_dir: &Path, fps: u32, to_video_opts: &ToVideoOptions, progress_callback: F) -> Result<ConversionResult> where F: Fn(Progress) + Send + Sync {
        use std::sync::atomic::{AtomicUsize, Ordering};
        use std::sync::Arc;
        let mut frame_paths: Vec<PathBuf> = WalkDir::new(input_dir)
            .min_depth(1)
            .max_depth(1)
            .into_iter()
            .filter_map(|e| e.ok())
            .map(|e| e.into_path())
            .filter(|p| p.extension().map(|e| e == "cframe").unwrap_or(false))
            .collect();
        let use_cframes = !frame_paths.is_empty();
        if !use_cframes {
            // No color frames: fall back to plain-text frames named "frame_*.txt".
            frame_paths = WalkDir::new(input_dir)
                .min_depth(1)
                .max_depth(1)
                .into_iter()
                .filter_map(|e| e.ok())
                .map(|e| e.into_path())
                .filter(|p| {
                    p.extension().map(|e| e == "txt").unwrap_or(false)
                        && p.file_name()
                            .and_then(|n| n.to_str())
                            .map(|n| n.starts_with("frame_"))
                            .unwrap_or(false)
                })
                .collect();
        }
        // Sort by filename to restore frame order.
        frame_paths.sort();
        let total_frames = frame_paths.len();
        if total_frames == 0 {
            return Err(anyhow!("No .cframe or .txt frame files found in {}", input_dir.display()));
        }
        let atlas = render::build_glyph_atlas(to_video_opts.font_size)?;
        // Read the first frame only to determine the pixel dimensions.
        let first_frame = if use_cframes {
            convert::read_cframe_to_frame_data(&frame_paths[0])?
        } else {
            convert::read_txt_to_frame_data(&frame_paths[0])?
        };
        let mut pixel_w = first_frame.width_chars * atlas.cell_width;
        let mut pixel_h = first_frame.height_chars * atlas.cell_height;
        // Pad to even pixel dimensions — NOTE(review): presumably required by the
        // encoder's pixel format; confirm in render::spawn_ffmpeg_encoder.
        if !pixel_w.is_multiple_of(2) {
            pixel_w += 1;
        }
        if !pixel_h.is_multiple_of(2) {
            pixel_h += 1;
        }
        // Audio is muxed only if a previously extracted audio.mp3 is present.
        let audio_path = if to_video_opts.mux_audio {
            let ap = input_dir.join("audio.mp3");
            if ap.exists() {
                Some(ap)
            } else {
                None
            }
        } else {
            None
        };
        let mut child = render::spawn_ffmpeg_encoder(pixel_w, pixel_h, fps, to_video_opts.crf, audio_path.as_deref(), &to_video_opts.output_path, &self.ffmpeg_config)?;
        let mut stdin = child.stdin.take().ok_or_else(|| anyhow!("failed to open ffmpeg stdin pipe"))?;
        let batch_size = 100;
        let completed = Arc::new(AtomicUsize::new(0));
        // Explicit override wins; otherwise render colors iff we found cframes.
        let render_with_colors = to_video_opts.use_colors.unwrap_or(use_cframes);
        progress_callback(Progress::rendering_video(0, total_frames));
        // Parse frames in parallel batches, then stream them to ffmpeg in order.
        for batch_start in (0..total_frames).step_by(batch_size) {
            let batch_end = (batch_start + batch_size).min(total_frames);
            let batch = &frame_paths[batch_start..batch_end];
            let frame_data: Vec<convert::AsciiFrameData> = batch
                .par_iter()
                .map(|path| {
                    if use_cframes {
                        convert::read_cframe_to_frame_data(path)
                    } else {
                        convert::read_txt_to_frame_data(path)
                    }
                })
                .collect::<Result<Vec<_>>>()?;
            for frame in &frame_data {
                let rgb_buf = render::render_ascii_frame_to_rgb(frame, &atlas, render_with_colors);
                if let Err(e) = stdin.write_all(&rgb_buf) {
                    // Close the pipe so ffmpeg exits, then report its stderr.
                    drop(stdin);
                    let output = child.wait_with_output().context("waiting for ffmpeg")?;
                    let stderr = String::from_utf8_lossy(&output.stderr);
                    return Err(anyhow!("ffmpeg encoding failed: {} (stderr: {})", e, stderr));
                }
                let current = completed.fetch_add(1, Ordering::SeqCst) + 1;
                let current_percent = if total_frames > 0 {
                    (current * 100) / total_frames
                } else {
                    0
                };
                let last_percent = if current > 1 {
                    ((current - 1) * 100) / total_frames
                } else {
                    0
                };
                // Throttle: only report when the integer percentage advances
                // (or on the very last frame).
                if current_percent > last_percent || current == total_frames {
                    progress_callback(Progress::rendering_video(current, total_frames));
                }
            }
        }
        // Closing stdin signals EOF so ffmpeg can finalize the file.
        drop(stdin);
        let output = child.wait_with_output().context("waiting for ffmpeg")?;
        if !output.status.success() {
            let stderr = String::from_utf8_lossy(&output.stderr);
            return Err(anyhow!("ffmpeg encoding failed: {}", stderr));
        }
        progress_callback(Progress::complete(total_frames));
        let mode_str = if use_cframes { "color" } else { "text-only" };
        Ok(ConversionResult {
            frame_count: total_frames,
            columns: first_frame.width_chars,
            // Unknown for pre-rendered frames; recorded as zero.
            font_ratio: 0.0,
            luminance: 0,
            fps: Some(fps),
            output_mode: mode_str.to_string(),
            audio_extracted: audio_path.is_some(),
            output_dir: to_video_opts.output_path.parent().unwrap_or(Path::new(".")).to_path_buf(),
            background_color: "black".to_string(),
            color: "white".to_string(),
        })
    }
}
impl Default for AsciiConverter {
fn default() -> Self {
Self::new()
}
}
pub use crop::{crop_frames, run_trim, CropResult};