// oximedia_transcode/pipeline_context.rs
1//! Frame-level transcode context: decoder/filter-graph/encoder pipeline.
2//!
3//! This module provides the `TranscodeContext` that wires together a
4//! `FrameDecoder`, a `FilterGraph`, and a `FrameEncoder` into a single
5//! execute-loop.  The design is codec-agnostic: callers supply concrete
6//! implementations of the `FrameDecoder` and `FrameEncoder` traits.
7//!
8//! # Example
9//!
10//! ```rust,ignore
11//! use oximedia_transcode::pipeline_context::{
12//!     TranscodeContext, FilterGraph, Frame,
13//! };
14//!
15//! // Supply your own decoder/encoder implementations.
16//! let ctx = TranscodeContext::new(decoder, FilterGraph::new(), encoder);
17//! let stats = ctx.execute()?;
18//! println!("{} frames in, {} frames out", stats.pass.input_frames, stats.pass.output_frames);
19//! ```
20
21#![allow(clippy::module_name_repetitions)]
22
23use std::time::Instant;
24
25use crate::hdr_passthrough::{
26    ColourPrimaries, HdrMetadata, HdrPassthroughMode, HdrProcessor, TransferFunction,
27};
28use crate::{Result, TranscodeError};
29
30// ─── Frame ────────────────────────────────────────────────────────────────────
31
/// A raw decoded frame flowing through the transcode pipeline.
///
/// Frames carry either video (planar YUV pixel data) or audio (interleaved
/// i16 / f32 PCM) depending on the `is_audio` flag.  The `audio`/`video`
/// constructors set the flag and zero the unused video dimensions.
#[derive(Debug, Clone)]
pub struct Frame {
    /// Raw pixel (YUV) or sample (PCM) data.
    pub data: Vec<u8>,
    /// Presentation timestamp in milliseconds.
    pub pts_ms: i64,
    /// `true` for audio frames, `false` for video frames.
    pub is_audio: bool,
    /// Video frame width in pixels (0 for audio frames).
    pub width: u32,
    /// Video frame height in pixels (0 for audio frames).
    pub height: u32,
    /// HDR metadata attached to this video frame, if any.
    pub hdr_meta: Option<HdrMetadata>,
}
51
52impl Frame {
53    /// Creates a new video frame.
54    #[must_use]
55    pub fn video(data: Vec<u8>, pts_ms: i64, width: u32, height: u32) -> Self {
56        Self {
57            data,
58            pts_ms,
59            is_audio: false,
60            width,
61            height,
62            hdr_meta: None,
63        }
64    }
65
66    /// Creates a new audio frame.
67    #[must_use]
68    pub fn audio(data: Vec<u8>, pts_ms: i64) -> Self {
69        Self {
70            data,
71            pts_ms,
72            is_audio: true,
73            width: 0,
74            height: 0,
75            hdr_meta: None,
76        }
77    }
78
79    /// Attaches HDR metadata to a video frame (builder-style).
80    #[must_use]
81    pub fn with_hdr(mut self, meta: HdrMetadata) -> Self {
82        self.hdr_meta = Some(meta);
83        self
84    }
85}
86
87// ─── Traits ───────────────────────────────────────────────────────────────────
88
/// A frame-level decoder that produces raw `Frame` values.
///
/// Implementations wrap a container demuxer + codec decoder.
/// When the input is exhausted `decode_next` returns `None`
/// and `eof` returns `true`.
///
/// Note: the pipeline also treats a `None` from `decode_next` *before*
/// `eof` reports `true` as end-of-stream (see `TranscodeContext::execute`).
pub trait FrameDecoder: Send {
    /// Decode the next frame from the input stream.
    ///
    /// Returns `None` when no more frames are available (end-of-stream).
    fn decode_next(&mut self) -> Option<Frame>;

    /// Returns `true` when the input stream is fully consumed.
    fn eof(&self) -> bool;
}
103
/// A frame-level encoder that converts raw `Frame` values to encoded bytes.
///
/// Implementations wrap a codec encoder and optional container muxer.
pub trait FrameEncoder: Send {
    /// Encode a single decoded frame.
    ///
    /// Returns the encoded byte payload (may be empty for encoders that
    /// buffer internally).
    ///
    /// # Errors
    ///
    /// Returns an error if encoding fails (e.g. invalid frame dimensions,
    /// codec state error).
    fn encode_frame(&mut self, frame: &Frame) -> Result<Vec<u8>>;

    /// Flush any internally buffered frames.
    ///
    /// Must be called at end-of-stream.  Returns any remaining encoded bytes.
    ///
    /// # Errors
    ///
    /// Returns an error if the flush operation fails.
    fn flush(&mut self) -> Result<Vec<u8>>;
}
128
129// ─── FilterOp (private) ───────────────────────────────────────────────────────
130
/// A single filter operation applied to a frame in the `FilterGraph`.
///
/// Video-only ops (`VideoScale`, `HdrPassthrough`) are skipped for audio
/// frames, and `AudioGainDb` is skipped for video frames (see
/// `FilterGraph::apply`).
#[derive(Debug, Clone)]
enum FilterOp {
    /// Scale a video frame to the given resolution (nearest-neighbour).
    VideoScale { width: u32, height: u32 },
    /// Apply a constant linear gain (in dB) to audio samples.
    AudioGainDb(f64),
    /// Apply HDR metadata passthrough / conversion.
    HdrPassthrough(HdrPassthroughMode),
}
141
142// ─── FilterGraph ──────────────────────────────────────────────────────────────
143
/// A composable filter graph applied to frames between decode and encode.
///
/// Operations are applied in the order they were added.  Video ops are
/// skipped for audio frames, and vice-versa.
///
/// The baseline implementation is a pass-through: an empty `FilterGraph`
/// forwards every frame unchanged.
#[derive(Debug, Clone, Default)]
pub struct FilterGraph {
    /// Ordered list of operations; applied front-to-back in `apply`.
    ops: Vec<FilterOp>,
}
155
156impl FilterGraph {
157    /// Creates a new, empty (pass-through) filter graph.
158    #[must_use]
159    pub fn new() -> Self {
160        Self { ops: Vec::new() }
161    }
162
163    /// Adds a video scale operation (nearest-neighbour).
164    #[must_use]
165    pub fn add_video_scale(mut self, width: u32, height: u32) -> Self {
166        self.ops.push(FilterOp::VideoScale { width, height });
167        self
168    }
169
170    /// Adds an audio gain operation (dB).
171    #[must_use]
172    pub fn add_audio_gain_db(mut self, db: f64) -> Self {
173        self.ops.push(FilterOp::AudioGainDb(db));
174        self
175    }
176
177    /// Adds an HDR passthrough / conversion operation.
178    #[must_use]
179    pub fn add_hdr_passthrough(mut self, mode: HdrPassthroughMode) -> Self {
180        self.ops.push(FilterOp::HdrPassthrough(mode));
181        self
182    }
183
184    /// Apply all filter operations to `frame`.
185    ///
186    /// Returns `Ok(Some(frame))` to pass the frame on, or `Ok(None)` to
187    /// drop it (future use — currently never drops).
188    ///
189    /// # Errors
190    ///
191    /// Returns an error if an HDR conversion is unsupported or a filter
192    /// operation encounters invalid data.
193    pub fn apply(&self, mut frame: Frame) -> Result<Option<Frame>> {
194        for op in &self.ops {
195            match op {
196                FilterOp::VideoScale { width, height } => {
197                    if !frame.is_audio {
198                        apply_video_scale(&mut frame, *width, *height);
199                    }
200                }
201                FilterOp::AudioGainDb(db) => {
202                    if frame.is_audio {
203                        apply_audio_gain_db(&mut frame, *db);
204                    }
205                }
206                FilterOp::HdrPassthrough(mode) => {
207                    if !frame.is_audio {
208                        let processor = HdrProcessor::new(mode.clone());
209                        let resolved = processor.process(frame.hdr_meta.as_ref()).map_err(|e| {
210                            TranscodeError::CodecError(format!("HDR filter failed: {e}"))
211                        })?;
212                        frame.hdr_meta = resolved;
213                    }
214                }
215            }
216        }
217        Ok(Some(frame))
218    }
219}
220
221// ─── Internal filter helpers ──────────────────────────────────────────────────
222
/// Nearest-neighbour video scale on RGBA/planar data.
///
/// For YUV420 frames the data layout is: Y plane (W×H bytes) followed by
/// U plane (W/2 × H/2 bytes) and V plane (W/2 × H/2 bytes).  For other
/// layouts the function treats the data as a flat RGBA buffer (4 bytes/pixel)
/// when the length matches W×H×4, otherwise as 1 byte/pixel.
///
/// The layout is *inferred from the buffer length alone* — there is no
/// explicit pixel-format field on `Frame`.  On success, `frame.data`,
/// `frame.width`, and `frame.height` are replaced in-place.
fn apply_video_scale(frame: &mut Frame, dst_w: u32, dst_h: u32) {
    // No-op for degenerate targets or when the frame is already at the
    // requested size.
    if dst_w == 0 || dst_h == 0 || (dst_w == frame.width && dst_h == frame.height) {
        return;
    }
    let src_w = frame.width;
    let src_h = frame.height;
    // Frames with unknown/zero dimensions cannot be scaled.
    if src_w == 0 || src_h == 0 {
        return;
    }

    // YUV420: full-res Y plane plus two quarter-size chroma planes.
    let y_size = (src_w * src_h) as usize;
    let uv_size = y_size / 4;
    let expected_yuv = y_size + uv_size * 2;

    if frame.data.len() == expected_yuv {
        // YUV420 planar
        let dst_y_size = (dst_w * dst_h) as usize;
        let dst_uv_size = dst_y_size / 4;
        let mut out = vec![0u8; dst_y_size + dst_uv_size * 2];

        // Scale Y plane
        scale_plane(
            &frame.data[..y_size],
            src_w,
            src_h,
            &mut out[..dst_y_size],
            dst_w,
            dst_h,
        );
        // Scale U plane (chroma planes are half-resolution in each axis;
        // odd dimensions truncate, matching the /2 plane-size convention).
        let uv_src_w = src_w / 2;
        let uv_src_h = src_h / 2;
        let dst_uv_w = dst_w / 2;
        let dst_uv_h = dst_h / 2;
        scale_plane(
            &frame.data[y_size..y_size + uv_size],
            uv_src_w,
            uv_src_h,
            &mut out[dst_y_size..dst_y_size + dst_uv_size],
            dst_uv_w,
            dst_uv_h,
        );
        // Scale V plane
        scale_plane(
            &frame.data[y_size + uv_size..],
            uv_src_w,
            uv_src_h,
            &mut out[dst_y_size + dst_uv_size..],
            dst_uv_w,
            dst_uv_h,
        );

        frame.data = out;
        frame.width = dst_w;
        frame.height = dst_h;
    } else {
        // Assume RGBA (4 bytes/pixel) or generic planar — scale the whole buffer.
        let bytes_per_pixel = if frame.data.len() == (src_w * src_h * 4) as usize {
            4usize
        } else {
            1usize
        };

        let dst_size = (dst_w * dst_h) as usize * bytes_per_pixel;
        let mut out = vec![0u8; dst_size];

        for dy in 0..dst_h {
            for dx in 0..dst_w {
                // Nearest-neighbour: map each destination pixel back to
                // floor(dst_coord * src_dim / dst_dim) in the source.
                let sx = (f64::from(dx) * f64::from(src_w) / f64::from(dst_w)) as u32;
                let sy = (f64::from(dy) * f64::from(src_h) / f64::from(dst_h)) as u32;
                let src_off = ((sy * src_w + sx) as usize) * bytes_per_pixel;
                let dst_off = ((dy * dst_w + dx) as usize) * bytes_per_pixel;
                // Copy one pixel; bounds-checked so a short source buffer
                // (length not matching either layout) cannot panic.
                for b in 0..bytes_per_pixel {
                    if src_off + b < frame.data.len() && dst_off + b < out.len() {
                        out[dst_off + b] = frame.data[src_off + b];
                    }
                }
            }
        }

        frame.data = out;
        frame.width = dst_w;
        frame.height = dst_h;
    }
}
313
/// Nearest-neighbour resample of one planar luma/chroma plane.
///
/// Each destination pixel is mapped back to its source pixel via
/// `floor(dst_coord * src_dim / dst_dim)`.  Degenerate (zero-sized) planes
/// are a no-op, and out-of-range offsets are skipped rather than panicking.
fn scale_plane(src: &[u8], src_w: u32, src_h: u32, dst: &mut [u8], dst_w: u32, dst_h: u32) {
    let degenerate = src_w == 0 || src_h == 0 || dst_w == 0 || dst_h == 0;
    if degenerate {
        return;
    }
    for row in 0..dst_h {
        // The source row is constant across a destination row; compute once.
        let sy = (f64::from(row) * f64::from(src_h) / f64::from(dst_h)) as u32;
        for col in 0..dst_w {
            let sx = (f64::from(col) * f64::from(src_w) / f64::from(dst_w)) as u32;
            let from = (sy * src_w + sx) as usize;
            let to = (row * dst_w + col) as usize;
            if let (Some(&px), Some(slot)) = (src.get(from), dst.get_mut(to)) {
                *slot = px;
            }
        }
    }
}
331
332/// Apply a dB gain to interleaved i16 PCM LE audio data in-place.
333fn apply_audio_gain_db(frame: &mut Frame, db: f64) {
334    if db.abs() < 0.001 {
335        return;
336    }
337    let linear = 10f64.powf(db / 20.0) as f32;
338    let n_samples = frame.data.len() / 2;
339    for i in 0..n_samples {
340        let lo = frame.data[i * 2];
341        let hi = frame.data[i * 2 + 1];
342        let sample = i16::from_le_bytes([lo, hi]) as f32;
343        let gained = (sample * linear).clamp(i16::MIN as f32, i16::MAX as f32) as i16;
344        let bytes = gained.to_le_bytes();
345        frame.data[i * 2] = bytes[0];
346        frame.data[i * 2 + 1] = bytes[1];
347    }
348}
349
350// ─── PassStats ────────────────────────────────────────────────────────────────
351
/// Per-pass statistics from a `TranscodeContext::execute` call.
///
/// All counters start at zero (`Default`) and are accumulated by the
/// execute loop; `input_frames == video_frames + audio_frames`.
#[derive(Debug, Clone, Default)]
pub struct PassStats {
    /// Total frames that entered the decode→filter→encode loop.
    pub input_frames: u64,
    /// Total frames that were written by the encoder (excluding frames
    /// dropped by the filter graph).
    pub output_frames: u64,
    /// Total raw bytes consumed from decoded frames.
    pub input_bytes: u64,
    /// Total encoded bytes produced by the encoder (including the final
    /// flush).
    pub output_bytes: u64,
    /// Number of video frames processed.
    pub video_frames: u64,
    /// Number of audio frames processed.
    pub audio_frames: u64,
}
369
370// ─── TranscodeStats ───────────────────────────────────────────────────────────
371
/// Statistics returned by `TranscodeContext::execute`.
#[derive(Debug, Clone, Default)]
pub struct TranscodeStats {
    /// Per-frame counts and byte totals.
    pub pass: PassStats,
    /// Wall-clock duration of the execute call in seconds.
    pub wall_time_secs: f64,
}
380
381impl TranscodeStats {
382    /// Compute a speed factor (input_frames / wall_time_secs).
383    ///
384    /// Returns 0.0 when timing data is unavailable.
385    #[must_use]
386    pub fn speed_factor(&self) -> f64 {
387        if self.wall_time_secs > 0.0 && self.pass.input_frames > 0 {
388            self.pass.input_frames as f64 / self.wall_time_secs
389        } else {
390            0.0
391        }
392    }
393}
394
395// ─── HdrPassthroughConfig ─────────────────────────────────────────────────────
396
/// High-level configuration for HDR metadata passthrough in the
/// transcode pipeline.
///
/// This is a simplified overlay on top of `HdrPassthroughMode`: the three
/// boolean fields map to common production requirements without requiring
/// callers to construct the full enum.  Use `to_mode` to resolve the flags
/// into an `HdrPassthroughMode`.
#[derive(Debug, Clone, Default)]
pub struct HdrPassthroughConfig {
    /// Enable HDR metadata passthrough.  When `false`, all HDR metadata
    /// is stripped from the output (and the other flags are ignored).
    pub enabled: bool,
    /// When `true` and `enabled`, convert HDR10 (PQ/ST-2084) input to
    /// HLG (ITU-R BT.2100).  The pixel-level tone-map must be handled
    /// separately by a filter op; this flag only updates the stream-level
    /// transfer-function descriptor.
    pub convert_hdr10_to_hlg: bool,
    /// When `true` and `enabled`, inject SMPTE ST 2086 mastering-display
    /// and CTA-861.3 content-light-level SEI payloads into every output
    /// packet.
    pub inject_sei: bool,
}
418
419impl HdrPassthroughConfig {
420    /// Create a simple pass-through config (no conversion, no SEI injection).
421    #[must_use]
422    pub fn passthrough() -> Self {
423        Self {
424            enabled: true,
425            convert_hdr10_to_hlg: false,
426            inject_sei: false,
427        }
428    }
429
430    /// Create a strip config (remove all HDR metadata).
431    #[must_use]
432    pub fn strip() -> Self {
433        Self {
434            enabled: false,
435            convert_hdr10_to_hlg: false,
436            inject_sei: false,
437        }
438    }
439
440    /// Resolve this config into an `HdrPassthroughMode` for use with
441    /// `HdrProcessor`.
442    #[must_use]
443    pub fn to_mode(&self) -> HdrPassthroughMode {
444        if !self.enabled {
445            return HdrPassthroughMode::Strip;
446        }
447        if self.convert_hdr10_to_hlg {
448            return HdrPassthroughMode::Convert {
449                target_tf: TransferFunction::Hlg,
450                target_primaries: ColourPrimaries::Bt2020,
451            };
452        }
453        HdrPassthroughMode::Passthrough
454    }
455}
456
457// ─── HdrSeiInjector ───────────────────────────────────────────────────────────
458
459/// Stores SMPTE ST 2086 and CTA-861.3 SEI payloads extracted from the input
460/// stream and optionally prepends them to output packet data.
461pub struct HdrSeiInjector {
462    config: HdrPassthroughConfig,
463    /// 24-byte mastering display SEI payload when present.
464    mastering_display_sei: Option<[u8; 24]>,
465    /// 4-byte CLL SEI payload when present.
466    cll_sei: Option<[u8; 4]>,
467}
468
469impl HdrSeiInjector {
470    /// Creates a new injector with the given configuration.
471    #[must_use]
472    pub fn new(config: HdrPassthroughConfig) -> Self {
473        Self {
474            config,
475            mastering_display_sei: None,
476            cll_sei: None,
477        }
478    }
479
480    /// Store HDR metadata from the input stream for later injection into
481    /// output packets.
482    pub fn store_from_metadata(&mut self, meta: &HdrMetadata) {
483        if let Some(md) = &meta.mastering_display {
484            self.mastering_display_sei =
485                Some(crate::hdr_passthrough::encode_mastering_display_sei(md));
486        }
487        if let Some(cll) = &meta.content_light_level {
488            self.cll_sei = Some(crate::hdr_passthrough::encode_cll_sei(cll));
489        }
490    }
491
492    /// Inject stored SEI bytes prepended to `data`.
493    ///
494    /// When `inject_sei` is `false` or no SEI data has been stored, the
495    /// original data is returned unchanged.
496    #[must_use]
497    pub fn inject_into_packet(&self, data: &[u8]) -> Vec<u8> {
498        if !self.config.inject_sei
499            || (self.mastering_display_sei.is_none() && self.cll_sei.is_none())
500        {
501            return data.to_vec();
502        }
503        let mut out = Vec::with_capacity(
504            self.mastering_display_sei.as_ref().map_or(0, |s| s.len())
505                + self.cll_sei.as_ref().map_or(0, |c| c.len())
506                + data.len(),
507        );
508        if let Some(sei) = &self.mastering_display_sei {
509            out.extend_from_slice(sei.as_slice());
510        }
511        if let Some(cll) = &self.cll_sei {
512            out.extend_from_slice(cll.as_slice());
513        }
514        out.extend_from_slice(data);
515        out
516    }
517
518    /// Resolve the output `HdrMetadata` from the input using the configured
519    /// passthrough mode.
520    ///
521    /// # Errors
522    ///
523    /// Returns an error if the HDR conversion is unsupported.
524    pub fn resolve_output_metadata(
525        &self,
526        input: Option<&HdrMetadata>,
527    ) -> Result<Option<HdrMetadata>> {
528        let mode = self.config.to_mode();
529        let processor = HdrProcessor::new(mode);
530        processor
531            .process(input)
532            .map_err(|e| TranscodeError::CodecError(format!("HDR SEI resolve failed: {e}")))
533    }
534
535    /// Returns `true` when SEI injection is enabled and at least one payload
536    /// has been stored.
537    #[must_use]
538    pub fn has_sei_data(&self) -> bool {
539        self.config.inject_sei && (self.mastering_display_sei.is_some() || self.cll_sei.is_some())
540    }
541}
542
543// ─── TranscodeContext ─────────────────────────────────────────────────────────
544
/// Wires a `FrameDecoder`, `FilterGraph`, and `FrameEncoder` together into
/// a single execute loop.
///
/// All three stages are public fields, so callers may swap or inspect them
/// between passes.
///
/// # Execute loop
///
/// ```text
/// while !decoder.eof() {
///     frame = decoder.decode_next()
///     filtered = filter_graph.apply(frame)
///     encoded  = encoder.encode_frame(filtered)
///     accumulate stats
/// }
/// encoder.flush()
/// ```
pub struct TranscodeContext {
    /// The frame decoder (source).
    pub decoder: Box<dyn FrameDecoder>,
    /// The filter graph applied between decode and encode.
    pub filter_graph: FilterGraph,
    /// The frame encoder (sink).
    pub encoder: Box<dyn FrameEncoder>,
}
567
568impl TranscodeContext {
569    /// Creates a new context.
570    #[must_use]
571    pub fn new(
572        decoder: Box<dyn FrameDecoder>,
573        filter_graph: FilterGraph,
574        encoder: Box<dyn FrameEncoder>,
575    ) -> Self {
576        Self {
577            decoder,
578            filter_graph,
579            encoder,
580        }
581    }
582
583    /// Execute the full decode → filter → encode pipeline loop.
584    ///
585    /// Returns `TranscodeStats` containing per-frame counts, byte totals,
586    /// and wall-clock timing.
587    ///
588    /// # Errors
589    ///
590    /// Returns an error if encoding or filter operations fail.
591    pub fn execute(&mut self) -> Result<TranscodeStats> {
592        let start = Instant::now();
593        let mut stats = PassStats::default();
594
595        while !self.decoder.eof() {
596            match self.decoder.decode_next() {
597                Some(frame) => {
598                    stats.input_bytes += frame.data.len() as u64;
599                    stats.input_frames += 1;
600                    if frame.is_audio {
601                        stats.audio_frames += 1;
602                    } else {
603                        stats.video_frames += 1;
604                    }
605
606                    match self.filter_graph.apply(frame)? {
607                        Some(filtered) => {
608                            let encoded = self.encoder.encode_frame(&filtered)?;
609                            stats.output_bytes += encoded.len() as u64;
610                            stats.output_frames += 1;
611                        }
612                        None => {
613                            // Frame was dropped by the filter graph — do not increment output_frames.
614                        }
615                    }
616                }
617                None => {
618                    // Decoder returned None before reporting eof; treat as eof.
619                    break;
620                }
621            }
622        }
623
624        // Flush any buffered frames from the encoder.
625        let flushed = self.encoder.flush()?;
626        stats.output_bytes += flushed.len() as u64;
627
628        Ok(TranscodeStats {
629            pass: stats,
630            wall_time_secs: start.elapsed().as_secs_f64(),
631        })
632    }
633}
634
635// ─── Tests ────────────────────────────────────────────────────────────────────
636
637#[cfg(test)]
638mod tests {
639    use super::*;
640    use crate::hdr_passthrough::{ContentLightLevel, MasteringDisplay, TransferFunction};
641
642    // ── Frame constructors ────────────────────────────────────────────────────
643
644    #[test]
645    fn test_frame_video_defaults() {
646        let f = Frame::video(vec![0u8; 12], 42, 4, 3);
647        assert!(!f.is_audio);
648        assert_eq!(f.width, 4);
649        assert_eq!(f.height, 3);
650        assert_eq!(f.pts_ms, 42);
651        assert!(f.hdr_meta.is_none());
652    }
653
654    #[test]
655    fn test_frame_audio_defaults() {
656        let f = Frame::audio(vec![0u8; 16], 100);
657        assert!(f.is_audio);
658        assert_eq!(f.width, 0);
659        assert_eq!(f.height, 0);
660        assert_eq!(f.pts_ms, 100);
661    }
662
663    #[test]
664    fn test_frame_with_hdr() {
665        let meta = HdrMetadata::hlg();
666        let f = Frame::video(vec![0u8; 4], 0, 2, 2).with_hdr(meta.clone());
667        assert!(f.hdr_meta.is_some());
668        assert_eq!(
669            f.hdr_meta.as_ref().and_then(|m| m.transfer_function),
670            Some(TransferFunction::Hlg)
671        );
672    }
673
674    // ── FilterGraph – pass-through ────────────────────────────────────────────
675
676    #[test]
677    fn test_filter_graph_empty_passthrough_video() {
678        let fg = FilterGraph::new();
679        let frame = Frame::video(vec![1u8, 2, 3, 4], 0, 2, 1);
680        let data_before = frame.data.clone();
681        let result = fg.apply(frame).expect("apply should succeed");
682        assert!(result.is_some());
683        assert_eq!(result.as_ref().map(|f| &f.data), Some(&data_before));
684    }
685
686    #[test]
687    fn test_filter_graph_empty_passthrough_audio() {
688        let fg = FilterGraph::new();
689        let frame = Frame::audio(vec![0x10u8, 0x00, 0x20, 0x00], 0);
690        let data_before = frame.data.clone();
691        let result = fg.apply(frame).expect("apply should succeed");
692        assert!(result.is_some());
693        assert_eq!(result.as_ref().map(|f| &f.data), Some(&data_before));
694    }
695
696    // ── FilterGraph – video scale ─────────────────────────────────────────────
697
698    #[test]
699    fn test_filter_graph_video_scale_rgba() {
700        // 4×4 RGBA frame → scale to 2×2.
701        let src_w = 4u32;
702        let src_h = 4u32;
703        let data = vec![0u8; (src_w * src_h * 4) as usize];
704        let fg = FilterGraph::new().add_video_scale(2, 2);
705        let frame = Frame::video(data, 0, src_w, src_h);
706        let result = fg.apply(frame).expect("scale should succeed");
707        let out = result.expect("should produce a frame");
708        assert_eq!(out.width, 2);
709        assert_eq!(out.height, 2);
710        assert_eq!(out.data.len(), 2 * 2 * 4);
711    }
712
713    #[test]
714    fn test_filter_graph_video_scale_yuv420() {
715        // 4×4 YUV420 frame → scale to 2×2.
716        let w = 4u32;
717        let h = 4u32;
718        let y_size = (w * h) as usize;
719        let uv_size = y_size / 4;
720        let data = vec![200u8; y_size + uv_size * 2]; // bright Y, neutral UV
721        let fg = FilterGraph::new().add_video_scale(2, 2);
722        let frame = Frame::video(data, 0, w, h);
723        let result = fg.apply(frame).expect("yuv420 scale should succeed");
724        let out = result.expect("should produce a frame");
725        assert_eq!(out.width, 2);
726        assert_eq!(out.height, 2);
727        let expected_size = (2 * 2 + 2 * (1 * 1)) as usize; // 4 + 2 = 6
728        assert_eq!(out.data.len(), expected_size);
729    }
730
731    #[test]
732    fn test_filter_graph_video_scale_noop_same_dims() {
733        let data = vec![42u8; 16 * 16 * 4];
734        let fg = FilterGraph::new().add_video_scale(16, 16);
735        let frame = Frame::video(data.clone(), 0, 16, 16);
736        let out = fg.apply(frame).expect("noop scale").expect("frame");
737        assert_eq!(out.data, data);
738        assert_eq!(out.width, 16);
739        assert_eq!(out.height, 16);
740    }
741
742    // ── FilterGraph – audio gain ──────────────────────────────────────────────
743
744    #[test]
745    fn test_filter_graph_audio_gain_double() {
746        // +6.02 dB ≈ ×2; 1000 → ~2000.
747        let sample: i16 = 1000;
748        let mut data = sample.to_le_bytes().to_vec();
749        data.extend_from_slice(&sample.to_le_bytes());
750        let fg = FilterGraph::new().add_audio_gain_db(6.0206);
751        let frame = Frame::audio(data, 0);
752        let out = fg.apply(frame).expect("gain apply").expect("frame");
753        let s0 = i16::from_le_bytes([out.data[0], out.data[1]]);
754        assert!((s0 as i32 - 2000).abs() < 10, "expected ~2000, got {s0}");
755    }
756
757    #[test]
758    fn test_filter_graph_audio_gain_zero_db_noop() {
759        let sample: i16 = 5000;
760        let data = sample.to_le_bytes().to_vec();
761        let fg = FilterGraph::new().add_audio_gain_db(0.0);
762        let frame = Frame::audio(data.clone(), 0);
763        let out = fg.apply(frame).expect("0dB gain").expect("frame");
764        assert_eq!(out.data, data);
765    }
766
767    #[test]
768    fn test_filter_graph_audio_gain_skips_video() {
769        // An audio gain op must not modify video frames.
770        let data = vec![0xFFu8; 16];
771        let fg = FilterGraph::new().add_audio_gain_db(20.0);
772        let frame = Frame::video(data.clone(), 0, 4, 1);
773        let out = fg.apply(frame).expect("skip video").expect("frame");
774        assert_eq!(out.data, data);
775    }
776
777    // ── FilterGraph – HDR passthrough ─────────────────────────────────────────
778
779    #[test]
780    fn test_filter_graph_hdr_strip() {
781        let meta = HdrMetadata::hdr10(
782            MasteringDisplay::p3_d65_1000nit(),
783            ContentLightLevel::hdr10_default(),
784        );
785        let fg = FilterGraph::new().add_hdr_passthrough(HdrPassthroughMode::Strip);
786        let frame = Frame::video(vec![0u8; 4], 0, 2, 1).with_hdr(meta);
787        let out = fg.apply(frame).expect("strip hdr").expect("frame");
788        assert!(out.hdr_meta.is_none(), "HDR should be stripped");
789    }
790
791    #[test]
792    fn test_filter_graph_hdr_passthrough() {
793        let meta = HdrMetadata::hlg();
794        let fg = FilterGraph::new().add_hdr_passthrough(HdrPassthroughMode::Passthrough);
795        let frame = Frame::video(vec![0u8; 4], 0, 2, 1).with_hdr(meta);
796        let out = fg.apply(frame).expect("passthrough hdr").expect("frame");
797        assert!(out.hdr_meta.is_some(), "HDR should be preserved");
798        assert_eq!(
799            out.hdr_meta.as_ref().and_then(|m| m.transfer_function),
800            Some(TransferFunction::Hlg)
801        );
802    }
803
804    // ── PassStats / TranscodeStats ────────────────────────────────────────────
805
806    #[test]
807    fn test_pass_stats_default_zeroed() {
808        let s = PassStats::default();
809        assert_eq!(s.input_frames, 0);
810        assert_eq!(s.output_frames, 0);
811        assert_eq!(s.input_bytes, 0);
812        assert_eq!(s.output_bytes, 0);
813        assert_eq!(s.video_frames, 0);
814        assert_eq!(s.audio_frames, 0);
815    }
816
817    #[test]
818    fn test_transcode_stats_speed_factor_zero_when_no_time() {
819        let stats = TranscodeStats {
820            pass: PassStats {
821                input_frames: 100,
822                ..PassStats::default()
823            },
824            wall_time_secs: 0.0,
825        };
826        assert_eq!(stats.speed_factor(), 0.0);
827    }
828
829    #[test]
830    fn test_transcode_stats_speed_factor_computed() {
831        let stats = TranscodeStats {
832            pass: PassStats {
833                input_frames: 100,
834                ..PassStats::default()
835            },
836            wall_time_secs: 2.0,
837        };
838        assert!((stats.speed_factor() - 50.0).abs() < 0.001);
839    }
840
841    // ── HdrPassthroughConfig ──────────────────────────────────────────────────
842
843    #[test]
844    fn test_hdr_passthrough_config_default() {
845        let cfg = HdrPassthroughConfig::default();
846        assert!(!cfg.enabled);
847        assert!(!cfg.convert_hdr10_to_hlg);
848        assert!(!cfg.inject_sei);
849    }
850
851    #[test]
852    fn test_hdr_passthrough_config_strip_mode() {
853        let cfg = HdrPassthroughConfig::strip();
854        assert!(matches!(cfg.to_mode(), HdrPassthroughMode::Strip));
855    }
856
857    #[test]
858    fn test_hdr_passthrough_config_passthrough_mode() {
859        let cfg = HdrPassthroughConfig::passthrough();
860        assert!(matches!(cfg.to_mode(), HdrPassthroughMode::Passthrough));
861    }
862
863    #[test]
864    fn test_hdr_passthrough_config_convert_hdr10_to_hlg() {
865        let cfg = HdrPassthroughConfig {
866            enabled: true,
867            convert_hdr10_to_hlg: true,
868            inject_sei: false,
869        };
870        let mode = cfg.to_mode();
871        match mode {
872            HdrPassthroughMode::Convert { target_tf, .. } => {
873                assert_eq!(target_tf, TransferFunction::Hlg);
874            }
875            _ => panic!("Expected Convert mode"),
876        }
877    }
878
879    // ── HdrSeiInjector ────────────────────────────────────────────────────────
880
881    #[test]
882    fn test_hdr_sei_injector_no_sei_inject_disabled() {
883        let cfg = HdrPassthroughConfig {
884            enabled: true,
885            inject_sei: false,
886            convert_hdr10_to_hlg: false,
887        };
888        let injector = HdrSeiInjector::new(cfg);
889        let data = vec![0xAAu8, 0xBB, 0xCC];
890        let result = injector.inject_into_packet(&data);
891        assert_eq!(result, data);
892    }
893
894    #[test]
895    fn test_hdr_sei_injector_no_sei_when_no_metadata_stored() {
896        let cfg = HdrPassthroughConfig {
897            enabled: true,
898            inject_sei: true,
899            convert_hdr10_to_hlg: false,
900        };
901        let injector = HdrSeiInjector::new(cfg);
902        let data = vec![0x01u8, 0x02, 0x03];
903        let result = injector.inject_into_packet(&data);
904        // No SEI stored → returns original data.
905        assert_eq!(result, data);
906        assert!(!injector.has_sei_data());
907    }
908
909    #[test]
910    fn test_hdr_sei_injector_stores_metadata_and_injects() {
911        let cfg = HdrPassthroughConfig {
912            enabled: true,
913            inject_sei: true,
914            convert_hdr10_to_hlg: false,
915        };
916        let mut injector = HdrSeiInjector::new(cfg);
917        let meta = HdrMetadata::hdr10(
918            MasteringDisplay::p3_d65_1000nit(),
919            ContentLightLevel::hdr10_default(),
920        );
921        injector.store_from_metadata(&meta);
922        assert!(injector.has_sei_data());
923
924        let payload = vec![0xDEu8, 0xAD];
925        let result = injector.inject_into_packet(&payload);
926        // Should prepend 24 (mastering display) + 4 (CLL) = 28 bytes.
927        assert_eq!(result.len(), 28 + 2);
928        // Tail must be the original payload.
929        assert_eq!(&result[28..], &payload[..]);
930    }
931
932    #[test]
933    fn test_hdr_sei_injector_resolve_passthrough() {
934        let cfg = HdrPassthroughConfig::passthrough();
935        let injector = HdrSeiInjector::new(cfg);
936        let meta = HdrMetadata::hlg();
937        let resolved = injector
938            .resolve_output_metadata(Some(&meta))
939            .expect("resolve should succeed");
940        assert!(resolved.is_some());
941        assert_eq!(
942            resolved.as_ref().and_then(|m| m.transfer_function),
943            Some(TransferFunction::Hlg)
944        );
945    }
946
947    #[test]
948    fn test_hdr_sei_injector_resolve_strip() {
949        let cfg = HdrPassthroughConfig::strip();
950        let injector = HdrSeiInjector::new(cfg);
951        let meta = HdrMetadata::hdr10(
952            MasteringDisplay::p3_d65_1000nit(),
953            ContentLightLevel::hdr10_default(),
954        );
955        let resolved = injector
956            .resolve_output_metadata(Some(&meta))
957            .expect("resolve should succeed");
958        assert!(resolved.is_none(), "strip should produce None");
959    }
960
961    #[test]
962    fn test_hdr_sei_injector_resolve_convert_hdr10_to_hlg() {
963        let cfg = HdrPassthroughConfig {
964            enabled: true,
965            convert_hdr10_to_hlg: true,
966            inject_sei: false,
967        };
968        let injector = HdrSeiInjector::new(cfg);
969        let meta = HdrMetadata::hdr10(
970            MasteringDisplay::p3_d65_1000nit(),
971            ContentLightLevel::hdr10_default(),
972        );
973        let resolved = injector
974            .resolve_output_metadata(Some(&meta))
975            .expect("conversion should succeed");
976        assert_eq!(
977            resolved.as_ref().and_then(|m| m.transfer_function),
978            Some(TransferFunction::Hlg)
979        );
980    }
981}