// oximedia_transcode/frame_pipeline.rs
1//! Frame-level pipeline execution connecting decoder → filter graph → encoder.
2//!
3//! This module implements the actual frame-by-frame transcode loop that:
4//! 1. Reads compressed packets from a demuxer
5//! 2. Decodes them into raw frames (VideoFrame / AudioFrame)
6//! 3. Applies an optional in-memory filter chain (scale, normalise gain, …)
7//! 4. Re-encodes them with the target codec
8//! 5. Writes encoded packets to the output muxer
9//!
10//! HDR metadata is threaded through at stream-open time using
11//! [`crate::hdr_passthrough::HdrProcessor`].
12//!
13//! When no decode/encode path is available for a codec pair the pipeline
14//! degrades to a stream-copy pass (same as the legacy `Pipeline`).
15
16#![allow(clippy::cast_precision_loss)]
17#![allow(clippy::cast_possible_truncation)]
18
19use std::path::PathBuf;
20use std::time::Instant;
21
22use tracing::{debug, info, warn};
23
24use crate::hdr_passthrough::{HdrMetadata, HdrPassthroughMode, HdrProcessor};
25use crate::{Result, TranscodeError, TranscodeOutput};
26
27// ─── FrameFilterOp ───────────────────────────────────────────────────────────
28
/// An operation applied to a raw video frame in the frame pipeline.
///
/// Ops are applied in order by `apply_video_ops` over interleaved RGBA data.
#[derive(Debug, Clone)]
pub enum VideoFrameOp {
    /// Scale to a target resolution using nearest-neighbour sampling.
    ///
    /// All four RGBA channels are copied per pixel. A zero target dimension
    /// or an identity scale (same size) is skipped.
    Scale {
        /// Target width in pixels.
        width: u32,
        /// Target height in pixels.
        height: u32,
    },
    /// Multiplicative gain applied to the R channel of every RGBA pixel
    /// (used as a luma proxy; values > 1.0 brighten, < 1.0 darken).
    GainAdjust {
        /// Linear gain factor.
        gain: f32,
    },
}
45
/// An operation applied to raw audio data (interleaved little-endian i16 PCM —
/// `apply_audio_ops` interprets the payload as i16 samples only).
#[derive(Debug, Clone)]
pub enum AudioFrameOp {
    /// Apply a constant gain, specified in decibels and converted to a
    /// linear factor as `10^(db/20)`.
    GainDb {
        /// Gain in decibels (can be negative to attenuate).
        db: f64,
    },
}
55
56// ─── FramePipelineConfig ─────────────────────────────────────────────────────
57
/// Configuration for a frame-level transcode pipeline.
#[derive(Debug, Clone)]
pub struct FramePipelineConfig {
    /// Input file path.
    pub input: PathBuf,
    /// Output file path (parent directories are created on demand).
    pub output: PathBuf,
    /// Target video codec identifier (e.g. `"av1"`, `"vp9"`).
    ///
    /// `None` means stream-copy for video.
    pub video_codec: Option<String>,
    /// Target audio codec identifier.
    ///
    /// `None` means stream-copy for audio.
    pub audio_codec: Option<String>,
    /// Video filter operations applied before encoding.
    ///
    /// NOTE(review): the current async loop never consults these —
    /// `apply_video_ops` is `#[allow(dead_code)]` and video packets are
    /// stream-copied. Confirm whether this is intentional.
    pub video_ops: Vec<VideoFrameOp>,
    /// Audio filter operations applied to every audio-stream packet.
    pub audio_ops: Vec<AudioFrameOp>,
    /// HDR metadata handling mode.
    pub hdr_mode: HdrPassthroughMode,
    /// Optional source HDR metadata (from the demuxed stream header).
    pub source_hdr: Option<HdrMetadata>,
    /// Enable hardware acceleration (hint; may be silently ignored).
    pub hw_accel: bool,
    /// Number of encoding threads (0 = auto).
    pub threads: u32,
}
86
87impl FramePipelineConfig {
88    /// Creates a minimal config for a stream-copy (remux) pass.
89    #[must_use]
90    pub fn remux(input: impl Into<PathBuf>, output: impl Into<PathBuf>) -> Self {
91        Self {
92            input: input.into(),
93            output: output.into(),
94            video_codec: None,
95            audio_codec: None,
96            video_ops: Vec::new(),
97            audio_ops: Vec::new(),
98            hdr_mode: HdrPassthroughMode::Passthrough,
99            source_hdr: None,
100            hw_accel: true,
101            threads: 0,
102        }
103    }
104}
105
106// ─── FramePipelineResult ─────────────────────────────────────────────────────
107
/// Statistics collected during a frame-level pipeline run.
#[derive(Debug, Clone, Default)]
pub struct FramePipelineResult {
    /// Total video frames processed.
    ///
    /// In stream-copy mode this counts *packets* on non-audio streams,
    /// not decoded frames.
    pub video_frames: u64,
    /// Total audio frames processed (packets on audio streams in
    /// stream-copy mode).
    pub audio_frames: u64,
    /// Total bytes written to the output file (sum of packet payloads;
    /// container framing overhead is not included).
    pub output_bytes: u64,
    /// Wall-clock time taken for the full pipeline in seconds.
    pub wall_time_secs: f64,
    /// HDR metadata written to the output stream (if any).
    pub output_hdr: Option<HdrMetadata>,
}
122
123impl FramePipelineResult {
124    /// Speed factor: `content_duration / wall_time`.
125    ///
126    /// Returns 1.0 when timing data is unavailable.
127    #[must_use]
128    pub fn speed_factor(&self, content_duration_secs: f64) -> f64 {
129        if self.wall_time_secs > 0.0 && content_duration_secs > 0.0 {
130            content_duration_secs / self.wall_time_secs
131        } else {
132            1.0
133        }
134    }
135}
136
137// ─── Internal helpers ─────────────────────────────────────────────────────────
138
139/// Apply `VideoFrameOp`s to raw RGBA/luma pixel data.
140///
141/// `data` is a flat Vec<u8> of interleaved RGBA pixels (4 bytes each).
142#[allow(dead_code)]
143fn apply_video_ops(data: &mut Vec<u8>, width: &mut u32, height: &mut u32, ops: &[VideoFrameOp]) {
144    for op in ops {
145        match op {
146            VideoFrameOp::Scale {
147                width: dw,
148                height: dh,
149            } => {
150                if *dw == 0 || *dh == 0 || (*dw == *width && *dh == *height) {
151                    continue;
152                }
153                let src_w = *width;
154                let src_h = *height;
155                let dst_w = *dw;
156                let dst_h = *dh;
157
158                let expected_src = (src_w * src_h * 4) as usize;
159                if data.len() < expected_src {
160                    continue; // malformed data — skip
161                }
162
163                let mut dst = vec![0u8; (dst_w * dst_h * 4) as usize];
164                for dy in 0..dst_h {
165                    for dx in 0..dst_w {
166                        let sx = (f64::from(dx) * f64::from(src_w) / f64::from(dst_w)) as u32;
167                        let sy = (f64::from(dy) * f64::from(src_h) / f64::from(dst_h)) as u32;
168                        let src_idx = ((sy * src_w + sx) * 4) as usize;
169                        let dst_idx = ((dy * dst_w + dx) * 4) as usize;
170                        if src_idx + 3 < data.len() {
171                            dst[dst_idx] = data[src_idx];
172                            dst[dst_idx + 1] = data[src_idx + 1];
173                            dst[dst_idx + 2] = data[src_idx + 2];
174                            dst[dst_idx + 3] = data[src_idx + 3];
175                        }
176                    }
177                }
178                *data = dst;
179                *width = dst_w;
180                *height = dst_h;
181            }
182
183            VideoFrameOp::GainAdjust { gain } => {
184                let g = *gain;
185                if (g - 1.0).abs() < f32::EPSILON {
186                    continue;
187                }
188                for byte in data.iter_mut().step_by(4) {
189                    // adjust luma (R channel as proxy for luma in RGBA)
190                    let v = (*byte as f32 * g).clamp(0.0, 255.0) as u8;
191                    *byte = v;
192                }
193            }
194        }
195    }
196}
197
198/// Apply `AudioFrameOp`s to a raw i16 PCM buffer (little-endian, interleaved).
199///
200/// Accepts the packet's [`bytes::Bytes`] payload, converts it to a mutable
201/// buffer, applies all ops in order, and returns the result as a new
202/// [`bytes::Bytes`] value so the caller can reassign `pkt.data`.
203fn apply_audio_ops(data: bytes::Bytes, ops: &[AudioFrameOp]) -> bytes::Bytes {
204    if ops.is_empty() {
205        return data;
206    }
207    let mut buf: Vec<u8> = data.into();
208    for op in ops {
209        match op {
210            AudioFrameOp::GainDb { db } => {
211                if db.abs() < 0.001 {
212                    continue;
213                }
214                let linear = 10f64.powf(*db / 20.0) as f32;
215                let n_samples = buf.len() / 2;
216                for i in 0..n_samples {
217                    let lo = buf[i * 2];
218                    let hi = buf[i * 2 + 1];
219                    let sample = i16::from_le_bytes([lo, hi]) as f32;
220                    let clamped = (sample * linear).clamp(i16::MIN as f32, i16::MAX as f32) as i16;
221                    let bytes = clamped.to_le_bytes();
222                    buf[i * 2] = bytes[0];
223                    buf[i * 2 + 1] = bytes[1];
224                }
225            }
226        }
227    }
228    bytes::Bytes::from(buf)
229}
230
231// ─── FramePipelineExecutor ────────────────────────────────────────────────────
232
/// Orchestrates the frame-level transcode pipeline.
///
/// For codecs supported by `oximedia-codec` (AV1, VP9, VP8) the full
/// decode→filter→encode path is executed.  For unsupported codec pairs
/// the pipeline falls back to packet-level stream-copy so that basic
/// remuxing always works.
pub struct FramePipelineExecutor {
    /// Immutable run configuration (paths, codecs, ops, HDR mode).
    config: FramePipelineConfig,
    /// Resolves output HDR metadata; built from `config.hdr_mode` in `new`.
    hdr_processor: HdrProcessor,
    /// Set at the start of `execute`; used to compute wall-clock time.
    start_time: Option<Instant>,
}
244
245impl FramePipelineExecutor {
246    /// Creates a new executor from the given configuration.
247    #[must_use]
248    pub fn new(config: FramePipelineConfig) -> Self {
249        let hdr_processor = HdrProcessor::new(config.hdr_mode.clone());
250        Self {
251            config,
252            hdr_processor,
253            start_time: None,
254        }
255    }
256
257    /// Resolves output HDR metadata by running the configured processor over
258    /// the source HDR metadata.
259    ///
260    /// # Errors
261    ///
262    /// Returns an error if the HDR conversion is unsupported.
263    pub fn resolve_output_hdr(&self) -> Result<Option<HdrMetadata>> {
264        self.hdr_processor
265            .process(self.config.source_hdr.as_ref())
266            .map_err(|e| TranscodeError::CodecError(format!("HDR processing failed: {e}")))
267    }
268
269    /// Executes the frame pipeline synchronously.
270    ///
271    /// The full execution path is:
272    /// 1. Probe input container format.
273    /// 2. Resolve output HDR metadata.
274    /// 3. For each packet: decode → apply ops → re-encode → write.
275    /// 4. Return a [`FramePipelineResult`] with statistics.
276    ///
277    /// # Errors
278    ///
279    /// Returns an error if I/O, codec, or HDR processing fails.
280    pub fn execute(&mut self) -> Result<FramePipelineResult> {
281        self.start_time = Some(Instant::now());
282
283        // Resolve HDR metadata for the output stream.
284        let output_hdr = self.resolve_output_hdr()?;
285
286        if let Some(ref hdr) = output_hdr {
287            if hdr.is_hdr() {
288                info!(
289                    "Frame pipeline: output will carry HDR metadata (tf={:?})",
290                    hdr.transfer_function
291                );
292            }
293        } else if self
294            .config
295            .source_hdr
296            .as_ref()
297            .map(|h| h.is_hdr())
298            .unwrap_or(false)
299        {
300            info!(
301                "Frame pipeline: HDR metadata stripped from output (mode={:?})",
302                self.config.hdr_mode
303            );
304        }
305
306        // Log codec selection.
307        let video_codec = self
308            .config
309            .video_codec
310            .as_deref()
311            .unwrap_or("(stream-copy)");
312        let audio_codec = self
313            .config
314            .audio_codec
315            .as_deref()
316            .unwrap_or("(stream-copy)");
317        info!(
318            "Frame pipeline: {} → {}  [video: {}  audio: {}]",
319            self.config.input.display(),
320            self.config.output.display(),
321            video_codec,
322            audio_codec
323        );
324
325        // Execute the actual decode/filter/encode loop using the stateless helper.
326        let result = execute_frame_loop(&self.config, output_hdr)?;
327
328        let elapsed = self.start_time.map_or(0.0, |t| t.elapsed().as_secs_f64());
329        info!(
330            "Frame pipeline complete: {} video frames, {} audio frames in {:.2}s",
331            result.video_frames, result.audio_frames, elapsed
332        );
333
334        Ok(FramePipelineResult {
335            wall_time_secs: elapsed,
336            ..result
337        })
338    }
339}
340
341/// Stateless inner loop: open demuxer, process frames, write output.
342///
343/// Uses packet-level remux for the actual container I/O (same approach as
344/// `Pipeline::execute_single_pass`), augmented with in-memory per-frame
345/// processing for audio gain and video scaling.
346fn execute_frame_loop(
347    config: &FramePipelineConfig,
348    output_hdr: Option<HdrMetadata>,
349) -> Result<FramePipelineResult> {
350    // ── Probe input ──────────────────────────────────────────────────────────
351    let in_fmt = {
352        // Use a synchronous tokio runtime to drive the async probing.
353        #[cfg(not(target_arch = "wasm32"))]
354        {
355            let rt = tokio::runtime::Builder::new_current_thread()
356                .enable_all()
357                .build()
358                .map_err(|e| TranscodeError::PipelineError(e.to_string()))?;
359            rt.block_on(probe_input_format(&config.input))?
360        }
361        #[cfg(target_arch = "wasm32")]
362        {
363            return Err(TranscodeError::Unsupported(
364                "Frame pipeline is not supported on wasm32".into(),
365            ));
366        }
367    };
368
369    let out_fmt = out_format_from_path(&config.output);
370
371    debug!(
372        "Frame pipeline formats: input={:?}  output={:?}",
373        in_fmt, out_fmt
374    );
375
376    // Log the resolved output HDR.
377    if let Some(ref hdr) = output_hdr {
378        debug!("Output HDR metadata: {:?}", hdr.transfer_function);
379    }
380
381    // ── Decode/filter/encode loop ─────────────────────────────────────────────
382    // For non-wasm targets we drive everything on a fresh single-thread runtime.
383    #[cfg(not(target_arch = "wasm32"))]
384    {
385        let cfg = config.clone();
386        let rt = tokio::runtime::Builder::new_current_thread()
387            .enable_all()
388            .build()
389            .map_err(|e| TranscodeError::PipelineError(e.to_string()))?;
390
391        rt.block_on(async move { run_async_frame_loop(&cfg, in_fmt, out_fmt).await })
392    }
393    #[cfg(target_arch = "wasm32")]
394    {
395        Err(TranscodeError::Unsupported(
396            "Frame pipeline not available on wasm32".into(),
397        ))
398    }
399}
400
401/// Determine the container format from the file extension.
402fn out_format_from_path(path: &std::path::Path) -> oximedia_container::ContainerFormat {
403    use oximedia_container::ContainerFormat;
404    match path
405        .extension()
406        .and_then(|e| e.to_str())
407        .map(str::to_lowercase)
408        .as_deref()
409    {
410        Some("ogg") | Some("oga") | Some("opus") => ContainerFormat::Ogg,
411        Some("flac") => ContainerFormat::Flac,
412        Some("wav") => ContainerFormat::Wav,
413        _ => ContainerFormat::Matroska,
414    }
415}
416
/// Probes the input container format by reading the first 16 KiB of `path`
/// and handing the bytes to `oximedia_container::probe_format`.
///
/// NOTE(review): a single `read` call is assumed to return enough bytes for
/// magic detection — confirm `MediaSource::read` fills the buffer (or that
/// the prober tolerates short reads).
///
/// # Errors
///
/// Returns `IoError` when the file cannot be opened or read, and
/// `ContainerError` when the format cannot be recognised.
#[cfg(not(target_arch = "wasm32"))]
async fn probe_input_format(path: &std::path::Path) -> Result<oximedia_container::ContainerFormat> {
    use oximedia_container::probe_format;
    use oximedia_io::{FileSource, MediaSource};

    let mut source = FileSource::open(path)
        .await
        .map_err(|e| TranscodeError::IoError(e.to_string()))?;

    // Truncate to the bytes actually read so short files don't feed
    // trailing zeroes to the prober.
    let mut buf = vec![0u8; 16 * 1024];
    let n = source
        .read(&mut buf)
        .await
        .map_err(|e| TranscodeError::IoError(e.to_string()))?;
    buf.truncate(n);

    let result = probe_format(&buf).map_err(|e| TranscodeError::ContainerError(e.to_string()))?;
    Ok(result.format)
}
436
/// Async inner loop: demux → per-packet processing → mux.
///
/// Every packet on an audio stream has `apply_audio_ops` run over its raw
/// payload (the ops interpret the bytes as interleaved little-endian i16
/// PCM); all other packets are forwarded as-is (stream-copy semantics).
///
/// NOTE(review): `config.video_ops` is never consulted here — video packets
/// are pure stream-copy. Confirm whether that is intentional.
///
/// # Errors
///
/// Returns `IoError` for file open/create failures and `ContainerError` for
/// demux/mux failures, empty inputs, or unsupported input formats.
#[cfg(not(target_arch = "wasm32"))]
async fn run_async_frame_loop(
    config: &FramePipelineConfig,
    in_fmt: oximedia_container::ContainerFormat,
    out_fmt: oximedia_container::ContainerFormat,
) -> Result<FramePipelineResult> {
    use oximedia_container::{
        demux::{Demuxer, FlacDemuxer, MatroskaDemuxer, OggDemuxer, WavDemuxer},
        mux::{MatroskaMuxer, MuxerConfig, OggMuxer},
        ContainerFormat, Muxer,
    };
    use oximedia_io::FileSource;

    // Running totals for the result; wall time and HDR are filled in by the
    // caller (`FramePipelineExecutor::execute`).
    let mut video_frames = 0u64;
    let mut audio_frames = 0u64;
    let mut output_bytes = 0u64;

    // Ensure output directory exists.
    if let Some(parent) = config.output.parent() {
        if !parent.as_os_str().is_empty() && !parent.exists() {
            tokio::fs::create_dir_all(parent)
                .await
                .map_err(|e| TranscodeError::IoError(e.to_string()))?;
        }
    }

    let mux_cfg = MuxerConfig::new().with_writing_app("OxiMedia-FramePipeline");

    // Macro-like helper: open the right demuxer, probe, then mux.
    //
    // A macro (rather than a generic fn) is used because each demuxer is a
    // distinct concrete type and the muxer choice branches on `out_fmt`
    // inside the same body.
    macro_rules! run_with_demuxer {
        ($demuxer_type:expr) => {{
            let source = FileSource::open(&config.input)
                .await
                .map_err(|e| TranscodeError::IoError(e.to_string()))?;
            let mut demuxer = $demuxer_type(source);
            // Probe reads stream headers so `streams()` is populated below.
            demuxer
                .probe()
                .await
                .map_err(|e| TranscodeError::ContainerError(e.to_string()))?;

            let streams = demuxer.streams().to_vec();
            if streams.is_empty() {
                return Err(TranscodeError::ContainerError("No streams in input".into()));
            }

            // Indices of audio streams — consulted per packet to decide
            // whether to run the audio ops.
            let audio_stream_indices: Vec<usize> = streams
                .iter()
                .filter(|s| s.is_audio())
                .map(|s| s.index)
                .collect();

            match out_fmt {
                ContainerFormat::Ogg => {
                    let sink = FileSource::create(&config.output)
                        .await
                        .map_err(|e| TranscodeError::IoError(e.to_string()))?;
                    let mut muxer = OggMuxer::new(sink, mux_cfg.clone());
                    // Mirror every input stream onto the output.
                    for s in &streams {
                        muxer
                            .add_stream(s.clone())
                            .map_err(|e| TranscodeError::ContainerError(e.to_string()))?;
                    }
                    muxer
                        .write_header()
                        .await
                        .map_err(|e| TranscodeError::ContainerError(e.to_string()))?;

                    loop {
                        match demuxer.read_packet().await {
                            Ok(mut pkt) => {
                                if pkt.should_discard() {
                                    continue;
                                }
                                if audio_stream_indices.contains(&pkt.stream_index) {
                                    // Bytes::clone is a cheap refcount bump,
                                    // not a buffer copy.
                                    pkt.data = apply_audio_ops(pkt.data.clone(), &config.audio_ops);
                                    audio_frames += 1;
                                } else {
                                    // Non-audio packets are stream-copied.
                                    video_frames += 1;
                                }
                                output_bytes += pkt.data.len() as u64;
                                muxer
                                    .write_packet(&pkt)
                                    .await
                                    .map_err(|e| TranscodeError::ContainerError(e.to_string()))?;
                            }
                            // EOF is the normal end of input, not a failure.
                            Err(e) if e.is_eof() => break,
                            Err(e) => return Err(TranscodeError::ContainerError(e.to_string())),
                        }
                    }
                    muxer
                        .write_trailer()
                        .await
                        .map_err(|e| TranscodeError::ContainerError(e.to_string()))?;
                }
                _ => {
                    // Default to Matroska for everything else.
                    let sink = FileSource::create(&config.output)
                        .await
                        .map_err(|e| TranscodeError::IoError(e.to_string()))?;
                    let mut muxer = MatroskaMuxer::new(sink, mux_cfg.clone());
                    for s in &streams {
                        muxer
                            .add_stream(s.clone())
                            .map_err(|e| TranscodeError::ContainerError(e.to_string()))?;
                    }
                    muxer
                        .write_header()
                        .await
                        .map_err(|e| TranscodeError::ContainerError(e.to_string()))?;

                    // Same packet loop as the Ogg arm above; duplicated
                    // because OggMuxer and MatroskaMuxer are distinct types.
                    loop {
                        match demuxer.read_packet().await {
                            Ok(mut pkt) => {
                                if pkt.should_discard() {
                                    continue;
                                }
                                if audio_stream_indices.contains(&pkt.stream_index) {
                                    pkt.data = apply_audio_ops(pkt.data.clone(), &config.audio_ops);
                                    audio_frames += 1;
                                } else {
                                    video_frames += 1;
                                }
                                output_bytes += pkt.data.len() as u64;
                                muxer
                                    .write_packet(&pkt)
                                    .await
                                    .map_err(|e| TranscodeError::ContainerError(e.to_string()))?;
                            }
                            Err(e) if e.is_eof() => break,
                            Err(e) => return Err(TranscodeError::ContainerError(e.to_string())),
                        }
                    }
                    muxer
                        .write_trailer()
                        .await
                        .map_err(|e| TranscodeError::ContainerError(e.to_string()))?;
                }
            }
        }};
    }

    match in_fmt {
        ContainerFormat::Matroska => run_with_demuxer!(|s| MatroskaDemuxer::new(s)),
        ContainerFormat::Ogg => run_with_demuxer!(|s| OggDemuxer::new(s)),
        ContainerFormat::Wav => run_with_demuxer!(|s| WavDemuxer::new(s)),
        ContainerFormat::Flac => run_with_demuxer!(|s| FlacDemuxer::new(s)),
        other => {
            warn!(
                "Frame pipeline: unsupported input format {:?}, cannot execute",
                other
            );
            return Err(TranscodeError::ContainerError(format!(
                "Unsupported input container for frame pipeline: {:?}",
                other
            )));
        }
    }

    Ok(FramePipelineResult {
        video_frames,
        audio_frames,
        output_bytes,
        wall_time_secs: 0.0, // filled by the caller
        output_hdr: None,    // filled by the caller from HDR resolution
    })
}
607
608/// Build a `TranscodeOutput` from a `FramePipelineResult`.
609#[must_use]
610pub fn pipeline_result_to_output(
611    result: &FramePipelineResult,
612    output_path: &std::path::Path,
613    file_size: u64,
614    content_duration_secs: f64,
615) -> TranscodeOutput {
616    let speed = result.speed_factor(content_duration_secs);
617    TranscodeOutput {
618        output_path: output_path
619            .to_str()
620            .map(String::from)
621            .unwrap_or_else(|| output_path.display().to_string()),
622        file_size,
623        duration: content_duration_secs,
624        video_bitrate: 0,
625        audio_bitrate: 0,
626        encoding_time: result.wall_time_secs,
627        speed_factor: speed,
628    }
629}
630
631// ─── HdrPipelineStage ─────────────────────────────────────────────────────────
632
633/// Attaches HDR metadata from the source to a `FramePipelineConfig` and
634/// selects the appropriate processing mode.
635///
636/// Call this during pipeline setup, before executing the pipeline.
637///
638/// # Errors
639///
640/// Returns an error if the source HDR metadata is invalid.
641pub fn wire_hdr_into_pipeline(
642    config: &mut FramePipelineConfig,
643    source_hdr: Option<HdrMetadata>,
644    mode: HdrPassthroughMode,
645) -> Result<()> {
646    if let Some(ref hdr) = source_hdr {
647        hdr.validate()
648            .map_err(|e| TranscodeError::CodecError(format!("Source HDR invalid: {e}")))?;
649    }
650    config.source_hdr = source_hdr;
651    config.hdr_mode = mode;
652    Ok(())
653}
654
655// ─── Tests ────────────────────────────────────────────────────────────────────
656
// Unit tests: config construction, HDR wiring/resolution, the pure helper
// functions (audio/video ops, format mapping), and result conversion.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::hdr_passthrough::{
        ColourPrimaries, ContentLightLevel, HdrMetadata, MasteringDisplay, TransferFunction,
    };

    // ── Config construction ──────────────────────────────────────────────

    #[test]
    fn test_frame_pipeline_config_remux() {
        let cfg = FramePipelineConfig::remux("/tmp/in.mkv", "/tmp/out.mkv");
        assert_eq!(cfg.input, PathBuf::from("/tmp/in.mkv"));
        assert!(cfg.video_codec.is_none());
        assert!(cfg.audio_codec.is_none());
        assert!(cfg.video_ops.is_empty());
    }

    // ── wire_hdr_into_pipeline ───────────────────────────────────────────

    #[test]
    fn test_wire_hdr_passthrough() {
        let mut cfg = FramePipelineConfig::remux("/tmp/in.mkv", "/tmp/out.mkv");
        let hdr = HdrMetadata::hdr10(
            MasteringDisplay::p3_d65_1000nit(),
            ContentLightLevel::hdr10_default(),
        );
        assert!(wire_hdr_into_pipeline(
            &mut cfg,
            Some(hdr.clone()),
            HdrPassthroughMode::Passthrough
        )
        .is_ok());
        assert!(cfg.source_hdr.is_some());
        assert_eq!(cfg.hdr_mode, HdrPassthroughMode::Passthrough);
    }

    #[test]
    fn test_wire_hdr_strip() {
        let mut cfg = FramePipelineConfig::remux("/tmp/in.mkv", "/tmp/out.mkv");
        let hdr = HdrMetadata::hlg();
        assert!(wire_hdr_into_pipeline(&mut cfg, Some(hdr), HdrPassthroughMode::Strip).is_ok());
    }

    #[test]
    fn test_wire_hdr_convert() {
        let mut cfg = FramePipelineConfig::remux("/tmp/in.mkv", "/tmp/out.mkv");
        let hdr = HdrMetadata::hdr10(
            MasteringDisplay::p3_d65_1000nit(),
            ContentLightLevel::hdr10_default(),
        );
        let mode = HdrPassthroughMode::Convert {
            target_tf: TransferFunction::Hlg,
            target_primaries: ColourPrimaries::Bt2020,
        };
        assert!(wire_hdr_into_pipeline(&mut cfg, Some(hdr), mode).is_ok());
    }

    // ── resolve_output_hdr (per HdrPassthroughMode) ──────────────────────

    #[test]
    fn test_resolve_output_hdr_passthrough() {
        let mut cfg = FramePipelineConfig::remux("/tmp/in.mkv", "/tmp/out.mkv");
        let hdr = HdrMetadata::hlg();
        wire_hdr_into_pipeline(&mut cfg, Some(hdr.clone()), HdrPassthroughMode::Passthrough)
            .expect("wire ok");
        let exec = FramePipelineExecutor::new(cfg);
        let out = exec.resolve_output_hdr().expect("resolve ok");
        assert!(out.is_some());
        assert_eq!(
            out.as_ref().and_then(|m| m.transfer_function),
            Some(TransferFunction::Hlg)
        );
    }

    #[test]
    fn test_resolve_output_hdr_strip() {
        let mut cfg = FramePipelineConfig::remux("/tmp/in.mkv", "/tmp/out.mkv");
        let hdr = HdrMetadata::hdr10(
            MasteringDisplay::p3_d65_1000nit(),
            ContentLightLevel::hdr10_default(),
        );
        wire_hdr_into_pipeline(&mut cfg, Some(hdr), HdrPassthroughMode::Strip).expect("wire ok");
        let exec = FramePipelineExecutor::new(cfg);
        let out = exec.resolve_output_hdr().expect("resolve ok");
        assert!(out.is_none());
    }

    #[test]
    fn test_resolve_output_hdr_convert_pq_to_hlg() {
        let mut cfg = FramePipelineConfig::remux("/tmp/in.mkv", "/tmp/out.mkv");
        let hdr = HdrMetadata::hdr10(
            MasteringDisplay::p3_d65_1000nit(),
            ContentLightLevel::hdr10_default(),
        );
        let mode = HdrPassthroughMode::Convert {
            target_tf: TransferFunction::Hlg,
            target_primaries: ColourPrimaries::Bt2020,
        };
        wire_hdr_into_pipeline(&mut cfg, Some(hdr), mode).expect("wire ok");
        let exec = FramePipelineExecutor::new(cfg);
        let out = exec.resolve_output_hdr().expect("resolve ok");
        assert_eq!(
            out.as_ref().and_then(|m| m.transfer_function),
            Some(TransferFunction::Hlg)
        );
    }

    #[test]
    fn test_resolve_output_hdr_none_source() {
        let cfg = FramePipelineConfig::remux("/tmp/in.mkv", "/tmp/out.mkv");
        let exec = FramePipelineExecutor::new(cfg);
        let out = exec.resolve_output_hdr().expect("resolve ok");
        assert!(out.is_none()); // no source HDR → no output HDR
    }

    // ── apply_audio_ops ──────────────────────────────────────────────────

    #[test]
    fn test_apply_audio_ops_gain() {
        // i16 sample 1000 * 2.0 = 2000 (in LE bytes)
        let sample: i16 = 1000;
        let raw = vec![sample.to_le_bytes()[0], sample.to_le_bytes()[1]];
        let data = apply_audio_ops(
            bytes::Bytes::from(raw),
            &[AudioFrameOp::GainDb { db: 6.0206 }],
        ); // ≈ ×2
        let result = i16::from_le_bytes([data[0], data[1]]);
        // Should be approximately 2000
        assert!(result > 1900 && result < 2100, "result was {result}");
    }

    #[test]
    fn test_apply_audio_ops_no_op() {
        // 0 dB is below the identity threshold, so the sample is untouched.
        let sample: i16 = 500;
        let raw = vec![sample.to_le_bytes()[0], sample.to_le_bytes()[1]];
        let data = apply_audio_ops(bytes::Bytes::from(raw), &[AudioFrameOp::GainDb { db: 0.0 }]);
        let result = i16::from_le_bytes([data[0], data[1]]);
        assert_eq!(result, 500);
    }

    // ── apply_video_ops ──────────────────────────────────────────────────

    #[test]
    fn test_apply_video_ops_scale_identity() {
        let mut data = vec![255u8; 4 * 4 * 4]; // 4×4 RGBA
        let mut w = 4u32;
        let mut h = 4u32;
        apply_video_ops(
            &mut data,
            &mut w,
            &mut h,
            &[VideoFrameOp::Scale {
                width: 4,
                height: 4,
            }],
        );
        assert_eq!(w, 4);
        assert_eq!(h, 4);
        assert_eq!(data.len(), 4 * 4 * 4);
    }

    #[test]
    fn test_apply_video_ops_scale_down() {
        // 4×4 → 2×2
        let mut data = vec![128u8; 4 * 4 * 4];
        let mut w = 4u32;
        let mut h = 4u32;
        apply_video_ops(
            &mut data,
            &mut w,
            &mut h,
            &[VideoFrameOp::Scale {
                width: 2,
                height: 2,
            }],
        );
        assert_eq!(w, 2);
        assert_eq!(h, 2);
        assert_eq!(data.len(), 2 * 2 * 4);
    }

    #[test]
    fn test_apply_video_ops_gain() {
        // 4×4 RGBA, luma = 100
        let mut data: Vec<u8> = (0..16).flat_map(|_| vec![100u8, 0, 0, 255]).collect();
        let mut w = 4u32;
        let mut h = 4u32;
        apply_video_ops(
            &mut data,
            &mut w,
            &mut h,
            &[VideoFrameOp::GainAdjust { gain: 2.0 }],
        );
        // Every R byte should be 200
        assert_eq!(data[0], 200);
        assert_eq!(data[4], 200);
    }

    // ── FramePipelineResult helpers ──────────────────────────────────────

    #[test]
    fn test_pipeline_result_speed_factor() {
        let r = FramePipelineResult {
            wall_time_secs: 10.0,
            ..Default::default()
        };
        assert!((r.speed_factor(30.0) - 3.0).abs() < 1e-9);
    }

    #[test]
    fn test_pipeline_result_speed_factor_zero_time() {
        let r = FramePipelineResult::default();
        assert!((r.speed_factor(30.0) - 1.0).abs() < 1e-9);
    }

    #[test]
    fn test_out_format_from_path() {
        use oximedia_container::ContainerFormat;
        assert!(matches!(
            out_format_from_path(std::path::Path::new("out.ogg")),
            ContainerFormat::Ogg
        ));
        assert!(matches!(
            out_format_from_path(std::path::Path::new("out.mkv")),
            ContainerFormat::Matroska
        ));
        assert!(matches!(
            out_format_from_path(std::path::Path::new("out.webm")),
            ContainerFormat::Matroska
        ));
    }

    #[test]
    fn test_pipeline_result_to_output() {
        let result = FramePipelineResult {
            video_frames: 100,
            audio_frames: 50,
            output_bytes: 1_000_000,
            wall_time_secs: 5.0,
            output_hdr: None,
        };
        let out = pipeline_result_to_output(
            &result,
            std::path::Path::new("/tmp/out.mkv"),
            1_000_000,
            30.0,
        );
        assert_eq!(out.file_size, 1_000_000);
        assert!((out.speed_factor - 6.0).abs() < 1e-9);
        assert_eq!(out.output_path, "/tmp/out.mkv");
    }

    #[test]
    fn test_wire_hdr_inject() {
        // Inject mode produces output HDR even with no source metadata.
        let mut cfg = FramePipelineConfig::remux("/tmp/in.mkv", "/tmp/out.mkv");
        let injected = HdrMetadata::hlg();
        let mode = HdrPassthroughMode::Inject(injected.clone());
        assert!(wire_hdr_into_pipeline(&mut cfg, None, mode).is_ok());
        let exec = FramePipelineExecutor::new(cfg);
        let out = exec.resolve_output_hdr().expect("inject ok");
        assert!(out.is_some());
        assert_eq!(
            out.as_ref().and_then(|m| m.transfer_function),
            Some(TransferFunction::Hlg)
        );
    }
}