// ff_filter/graph/filter_step.rs
1//! Internal filter step representation.
2
3use std::time::Duration;
4
5use super::builder::FilterGraphBuilder;
6use super::types::{
7    DrawTextOptions, EqBand, Rgb, ScaleAlgorithm, ToneMap, XfadeTransition, YadifMode,
8};
9use crate::animation::AnimatedValue;
10use crate::blend::BlendMode;
11
// ── FilterStep ────────────────────────────────────────────────────────────────

/// A single step in a filter chain.
///
/// Used by [`crate::FilterGraphBuilder`] to build pipeline filter graphs, and by
/// [`crate::AudioTrack::effects`] to attach per-track effects in a multi-track mix.
#[derive(Debug, Clone)]
pub enum FilterStep {
    /// Trim: keep only frames in `[start, end)` seconds.
    Trim { start: f64, end: f64 },
    /// Scale to a new resolution using the given resampling algorithm.
    Scale {
        /// Target width in pixels.
        width: u32,
        /// Target height in pixels.
        height: u32,
        /// Resampling algorithm used by the scaler.
        algorithm: ScaleAlgorithm,
    },
    /// Crop a rectangular region.
    Crop {
        /// X offset of the top-left corner, in pixels.
        x: u32,
        /// Y offset of the top-left corner, in pixels.
        y: u32,
        /// Width of the cropped region, in pixels.
        width: u32,
        /// Height of the cropped region, in pixels.
        height: u32,
    },
    /// Overlay a second stream at position `(x, y)`.
    Overlay { x: i32, y: i32 },
    /// Fade-in from black starting at `start` seconds, over `duration` seconds.
    FadeIn { start: f64, duration: f64 },
    /// Fade-out to black starting at `start` seconds, over `duration` seconds.
    FadeOut { start: f64, duration: f64 },
    /// Audio fade-in from silence starting at `start` seconds, over `duration` seconds.
    AFadeIn { start: f64, duration: f64 },
    /// Audio fade-out to silence starting at `start` seconds, over `duration` seconds.
    AFadeOut { start: f64, duration: f64 },
    /// Fade-in from white starting at `start` seconds, over `duration` seconds.
    FadeInWhite { start: f64, duration: f64 },
    /// Fade-out to white starting at `start` seconds, over `duration` seconds.
    FadeOutWhite { start: f64, duration: f64 },
    /// Rotate clockwise by `angle_degrees`, filling exposed areas with `fill_color`.
    Rotate {
        /// Clockwise rotation angle in degrees.
        angle_degrees: f64,
        /// Fill color for areas exposed by the rotation (any `FFmpeg` color string).
        fill_color: String,
    },
    /// HDR-to-SDR tone mapping.
    ToneMap(ToneMap),
    /// Adjust audio volume (in dB; negative = quieter).
    Volume(f64),
    /// Mix `n` audio inputs together.
    Amix(usize),
    /// Multi-band parametric equalizer (low-shelf, high-shelf, or peak bands).
    ///
    /// Each band maps to its own `FFmpeg` filter node chained in sequence.
    /// The `bands` vec must not be empty.
    ParametricEq { bands: Vec<EqBand> },
    /// Apply a 3D LUT from a `.cube` or `.3dl` file.
    Lut3d { path: String },
    /// Brightness/contrast/saturation adjustment via `FFmpeg` `eq` filter.
    Eq {
        brightness: f32,
        contrast: f32,
        saturation: f32,
    },
    /// Brightness / contrast / saturation / gamma via `FFmpeg` `eq` filter (optionally animated).
    ///
    /// Arguments are evaluated at [`Duration::ZERO`] for the initial graph build.
    /// Per-frame updates are applied via `avfilter_graph_send_command` in #363.
    EqAnimated {
        /// Brightness offset. Range: −1.0 – 1.0 (neutral: 0.0).
        brightness: AnimatedValue<f64>,
        /// Contrast multiplier. Range: 0.0 – 3.0 (neutral: 1.0).
        contrast: AnimatedValue<f64>,
        /// Saturation multiplier. Range: 0.0 – 3.0 (neutral: 1.0; 0.0 = grayscale).
        saturation: AnimatedValue<f64>,
        /// Global gamma correction. Range: 0.1 – 10.0 (neutral: 1.0).
        gamma: AnimatedValue<f64>,
    },
    /// Three-way color balance (shadows / midtones / highlights) via `FFmpeg` `colorbalance` filter
    /// (optionally animated).
    ///
    /// Each tuple is `(R, G, B)`. Valid range per component: −1.0 – 1.0 (neutral: 0.0).
    ///
    /// Arguments are evaluated at [`Duration::ZERO`] for the initial graph build.
    /// Per-frame updates are applied via `avfilter_graph_send_command` in #363.
    ColorBalanceAnimated {
        /// Shadows (lift) correction per channel. `FFmpeg` params: `"rs"`, `"gs"`, `"bs"`.
        lift: AnimatedValue<(f64, f64, f64)>,
        /// Midtones (gamma) correction per channel. `FFmpeg` params: `"rm"`, `"gm"`, `"bm"`.
        gamma: AnimatedValue<(f64, f64, f64)>,
        /// Highlights (gain) correction per channel. `FFmpeg` params: `"rh"`, `"gh"`, `"bh"`.
        gain: AnimatedValue<(f64, f64, f64)>,
    },
    /// Per-channel RGB color curves adjustment.
    Curves {
        /// Control points for the master curve.
        master: Vec<(f32, f32)>,
        /// Control points for the red channel curve.
        r: Vec<(f32, f32)>,
        /// Control points for the green channel curve.
        g: Vec<(f32, f32)>,
        /// Control points for the blue channel curve.
        b: Vec<(f32, f32)>,
    },
    /// White balance correction via `colorchannelmixer`.
    WhiteBalance { temperature_k: u32, tint: f32 },
    /// Hue rotation by an arbitrary angle.
    Hue { degrees: f32 },
    /// Per-channel gamma correction via `FFmpeg` `eq` filter.
    Gamma { r: f32, g: f32, b: f32 },
    /// Three-way colour corrector (lift / gamma / gain) via `FFmpeg` `curves` filter.
    ThreeWayCC {
        /// Affects shadows (blacks). Neutral: `Rgb::NEUTRAL`.
        lift: Rgb,
        /// Affects midtones. Neutral: `Rgb::NEUTRAL`. All components must be > 0.0.
        gamma: Rgb,
        /// Affects highlights (whites). Neutral: `Rgb::NEUTRAL`.
        gain: Rgb,
    },
    /// Vignette effect via `FFmpeg` `vignette` filter.
    Vignette {
        /// Radius angle in radians (valid range: 0.0 – π/2 ≈ 1.5708). Default: π/5 ≈ 0.628.
        angle: f32,
        /// Horizontal centre of the vignette. `0.0` maps to `w/2`.
        x0: f32,
        /// Vertical centre of the vignette. `0.0` maps to `h/2`.
        y0: f32,
    },
    /// Horizontal flip (mirror left-right).
    HFlip,
    /// Vertical flip (mirror top-bottom).
    VFlip,
    /// Reverse video playback (buffers entire clip in memory — use only on short clips).
    Reverse,
    /// Reverse audio playback (buffers entire clip in memory — use only on short clips).
    AReverse,
    /// Pad to a target resolution with a fill color (letterbox / pillarbox).
    Pad {
        /// Target canvas width in pixels.
        width: u32,
        /// Target canvas height in pixels.
        height: u32,
        /// Horizontal offset of the source frame within the canvas.
        /// Negative values are replaced with `(ow-iw)/2` (centred).
        x: i32,
        /// Vertical offset of the source frame within the canvas.
        /// Negative values are replaced with `(oh-ih)/2` (centred).
        y: i32,
        /// Fill color (any `FFmpeg` color string, e.g. `"black"`, `"0x000000"`).
        color: String,
    },
    /// Scale (preserving aspect ratio) then centre-pad to fill target dimensions
    /// (letterbox or pillarbox as required).
    ///
    /// Implemented as a `scale` filter with `force_original_aspect_ratio=decrease`
    /// followed by a `pad` filter that centres the scaled frame on the canvas.
    FitToAspect {
        /// Target canvas width in pixels.
        width: u32,
        /// Target canvas height in pixels.
        height: u32,
        /// Fill color for the bars (any `FFmpeg` color string, e.g. `"black"`).
        color: String,
    },
    /// Gaussian blur with configurable radius.
    ///
    /// `sigma` is the blur radius. Valid range: 0.0 – 10.0 (values near 0.0 are
    /// nearly a no-op; higher values produce a stronger blur).
    GBlur {
        /// Blur radius (standard deviation). Must be ≥ 0.0.
        sigma: f32,
    },
    /// Crop with optionally animated boundaries (pixels, `f64` for sub-pixel precision).
    ///
    /// Arguments are evaluated at [`Duration::ZERO`] for the initial graph build.
    /// Per-frame updates are applied via `avfilter_graph_send_command` in #363.
    CropAnimated {
        /// X offset of the top-left corner, in pixels.
        x: AnimatedValue<f64>,
        /// Y offset of the top-left corner, in pixels.
        y: AnimatedValue<f64>,
        /// Width of the cropped region. Must evaluate to > 0 at `Duration::ZERO`.
        width: AnimatedValue<f64>,
        /// Height of the cropped region. Must evaluate to > 0 at `Duration::ZERO`.
        height: AnimatedValue<f64>,
    },
    /// Gaussian blur with an optionally animated sigma (blur radius).
    ///
    /// Arguments are evaluated at [`Duration::ZERO`] for the initial graph build.
    /// Per-frame updates are applied via `avfilter_graph_send_command` in #363.
    GBlurAnimated {
        /// Blur radius (standard deviation). Must evaluate to ≥ 0.0 at `Duration::ZERO`.
        sigma: AnimatedValue<f64>,
    },
    /// Sharpen or blur via unsharp mask (luma + chroma strength).
    ///
    /// Positive values sharpen; negative values blur. Valid range for each
    /// component: −1.5 – 1.5.
    Unsharp {
        /// Luma (brightness) sharpening/blurring amount. Range: −1.5 – 1.5.
        luma_strength: f32,
        /// Chroma (colour) sharpening/blurring amount. Range: −1.5 – 1.5.
        chroma_strength: f32,
    },
    /// High Quality 3D noise reduction (`hqdn3d`).
    ///
    /// Typical values: `luma_spatial=4.0`, `chroma_spatial=3.0`,
    /// `luma_tmp=6.0`, `chroma_tmp=4.5`. All values must be ≥ 0.0.
    Hqdn3d {
        /// Spatial luma noise reduction strength. Must be ≥ 0.0.
        luma_spatial: f32,
        /// Spatial chroma noise reduction strength. Must be ≥ 0.0.
        chroma_spatial: f32,
        /// Temporal luma noise reduction strength. Must be ≥ 0.0.
        luma_tmp: f32,
        /// Temporal chroma noise reduction strength. Must be ≥ 0.0.
        chroma_tmp: f32,
    },
    /// Non-local means noise reduction (`nlmeans`).
    ///
    /// `strength` controls the denoising intensity; range 1.0–30.0.
    /// Higher values remove more noise but are significantly more CPU-intensive.
    ///
    /// NOTE: nlmeans is CPU-intensive; avoid for real-time pipelines.
    Nlmeans {
        /// Denoising strength. Must be in the range [1.0, 30.0].
        strength: f32,
    },
    /// Deinterlace using the `yadif` filter.
    Yadif {
        /// Deinterlacing mode controlling output frame rate and spatial checks.
        mode: YadifMode,
    },
    /// Cross-dissolve transition between two video streams (`xfade`).
    ///
    /// Requires two input slots: slot 0 is clip A, slot 1 is clip B.
    /// `duration` is the overlap length in seconds; `offset` is the PTS
    /// offset (in seconds) at which clip B begins.
    XFade {
        /// Transition style.
        transition: XfadeTransition,
        /// Overlap duration in seconds. Must be > 0.0.
        duration: f64,
        /// PTS offset (seconds) where clip B starts.
        offset: f64,
    },
    /// Draw text onto the video using the `drawtext` filter.
    DrawText {
        /// Full set of drawtext parameters.
        opts: DrawTextOptions,
    },
    /// Burn-in SRT subtitles (hard subtitles) using the `subtitles` filter.
    SubtitlesSrt {
        /// Absolute or relative path to the `.srt` file.
        path: String,
    },
    /// Burn-in ASS/SSA styled subtitles using the `ass` filter.
    SubtitlesAss {
        /// Absolute or relative path to the `.ass` or `.ssa` file.
        path: String,
    },
    /// Playback speed change using `setpts` (video) and chained `atempo` (audio).
    ///
    /// `factor > 1.0` = fast motion; `factor < 1.0` = slow motion.
    /// Valid range: 0.1–100.0.
    ///
    /// Video path: `setpts=PTS/{factor}`.
    /// Audio path: the `atempo` filter only accepts [0.5, 2.0] per instance;
    /// `filter_inner` chains multiple instances to cover the full range.
    Speed {
        /// Speed multiplier. Must be in [0.1, 100.0].
        factor: f64,
    },
    /// EBU R128 two-pass loudness normalization.
    ///
    /// Pass 1 measures integrated loudness with `ebur128=peak=true:metadata=1`.
    /// Pass 2 applies a linear volume correction so the output reaches `target_lufs`.
    /// All audio frames are buffered in memory between the two passes — use only
    /// for clips that fit comfortably in RAM.
    LoudnessNormalize {
        /// Target integrated loudness in LUFS (e.g. −23.0). Must be < 0.0.
        target_lufs: f32,
        /// True-peak ceiling in dBTP (e.g. −1.0). Must be ≤ 0.0.
        true_peak_db: f32,
        /// Target loudness range in LU (e.g. 7.0). Must be > 0.0.
        lra: f32,
    },
    /// Peak-level two-pass normalization using `astats`.
    ///
    /// Pass 1 measures the true peak with `astats=metadata=1`.
    /// Pass 2 applies `volume={gain}dB` so the output peak reaches `target_db`.
    /// All audio frames are buffered in memory between passes — use only
    /// for clips that fit comfortably in RAM.
    NormalizePeak {
        /// Target peak level in dBFS (e.g. −1.0). Must be ≤ 0.0.
        target_db: f32,
    },
    /// Noise gate via `FFmpeg`'s `agate` filter.
    ///
    /// Audio below `threshold_db` is attenuated; audio above passes through.
    /// The threshold is converted from dBFS to the linear scale expected by
    /// `agate`'s `threshold` parameter (`linear = 10^(dB/20)`).
    ANoiseGate {
        /// Gate open/close threshold in dBFS (e.g. −40.0).
        threshold_db: f32,
        /// Attack time in milliseconds — how quickly the gate opens. Must be > 0.0.
        attack_ms: f32,
        /// Release time in milliseconds — how quickly the gate closes. Must be > 0.0.
        release_ms: f32,
    },
    /// Dynamic range compressor via `FFmpeg`'s `acompressor` filter.
    ///
    /// Reduces the dynamic range of the audio signal: peaks above
    /// `threshold_db` are attenuated by `ratio`:1.  `makeup_db` applies
    /// additional gain after compression to restore perceived loudness.
    ACompressor {
        /// Compression threshold in dBFS (e.g. −20.0).
        threshold_db: f32,
        /// Compression ratio (e.g. 4.0 = 4:1). Must be ≥ 1.0.
        ratio: f32,
        /// Attack time in milliseconds. Must be > 0.0.
        attack_ms: f32,
        /// Release time in milliseconds. Must be > 0.0.
        release_ms: f32,
        /// Make-up gain in dB applied after compression (e.g. 6.0).
        makeup_db: f32,
    },
    /// Downmix stereo to mono via `FFmpeg`'s `pan` filter.
    ///
    /// Both channels are mixed with equal weight:
    /// `mono|c0=0.5*c0+0.5*c1`.  The output has a single channel.
    StereoToMono,
    /// Remap audio channels using `FFmpeg`'s `channelmap` filter.
    ///
    /// `mapping` is a `|`-separated list of output channel names taken
    /// from input channels, e.g. `"FR|FL"` swaps left and right.
    /// Must not be empty.
    ChannelMap {
        /// `FFmpeg` channelmap mapping expression (e.g. `"FR|FL"`).
        mapping: String,
    },
    /// A/V sync correction via audio delay or advance.
    ///
    /// Positive `ms`: uses `FFmpeg`'s `adelay` filter to shift audio later.
    /// Negative `ms`: uses `FFmpeg`'s `atrim` filter to trim the audio start,
    /// effectively advancing audio by `|ms|` milliseconds.
    /// Zero `ms`: uses `adelay` with zero delay (no-op).
    AudioDelay {
        /// Delay in milliseconds. Positive = delay; negative = advance.
        ms: f64,
    },
    /// Concatenate `n` sequential video input segments via `FFmpeg`'s `concat` filter.
    ///
    /// Requires `n` video input slots (0 through `n-1`). `n` must be ≥ 2.
    ConcatVideo {
        /// Number of video input segments to concatenate. Must be ≥ 2.
        n: u32,
    },
    /// Concatenate `n` sequential audio input segments via `FFmpeg`'s `concat` filter.
    ///
    /// Requires `n` audio input slots (0 through `n-1`). `n` must be ≥ 2.
    ConcatAudio {
        /// Number of audio input segments to concatenate. Must be ≥ 2.
        n: u32,
    },
    /// Freeze a single frame for a configurable duration using `FFmpeg`'s `loop` filter.
    ///
    /// The frame nearest to `pts` seconds is held for `duration` seconds, then
    /// playback resumes. Frame numbers are approximated using a 25 fps assumption;
    /// accuracy depends on the source stream's actual frame rate.
    FreezeFrame {
        /// Timestamp of the frame to freeze, in seconds. Must be >= 0.0.
        pts: f64,
        /// Duration to hold the frozen frame, in seconds. Must be > 0.0.
        duration: f64,
    },
    /// Scrolling text ticker (right-to-left) using the `drawtext` filter.
    ///
    /// The text starts off-screen to the right and scrolls left at
    /// `speed_px_per_sec` pixels per second using the expression
    /// `x = w - t * speed`.
    Ticker {
        /// Text to display. Special characters (`\`, `:`, `'`) are escaped.
        text: String,
        /// Y position as an `FFmpeg` expression, e.g. `"h-50"` or `"10"`.
        y: String,
        /// Horizontal scroll speed in pixels per second (must be > 0.0).
        speed_px_per_sec: f32,
        /// Font size in points.
        font_size: u32,
        /// Font color as an `FFmpeg` color string, e.g. `"white"` or `"0xFFFFFF"`.
        font_color: String,
    },
    /// Join two video clips with a cross-dissolve transition.
    ///
    /// Compound step — expands in `filter_inner` to:
    /// ```text
    /// in0 → trim(end=clip_a_end+dissolve_dur) → setpts → xfade[0]
    /// in1 → trim(start=max(0, clip_b_start−dissolve_dur)) → setpts → xfade[1]
    /// ```
    ///
    /// Requires two video input slots: slot 0 = clip A, slot 1 = clip B.
    /// `clip_a_end` and `dissolve_dur` must be > 0.0.
    JoinWithDissolve {
        /// Timestamp (seconds) where clip A ends. Must be > 0.0.
        clip_a_end: f64,
        /// Timestamp (seconds) where clip B content starts (before the overlap).
        clip_b_start: f64,
        /// Cross-dissolve overlap duration in seconds. Must be > 0.0.
        dissolve_dur: f64,
    },
    /// Composite a PNG image (watermark / logo) over video with optional opacity.
    ///
    /// This is a compound step: internally it creates a `movie` source,
    /// a `lut` alpha-scaling filter, and an `overlay` compositing filter.
    /// The image file is loaded once at graph construction time.
    OverlayImage {
        /// Absolute or relative path to the `.png` file.
        path: String,
        /// Horizontal position as an `FFmpeg` expression, e.g. `"10"` or `"W-w-10"`.
        x: String,
        /// Vertical position as an `FFmpeg` expression, e.g. `"10"` or `"H-h-10"`.
        y: String,
        /// Opacity 0.0 (fully transparent) to 1.0 (fully opaque).
        opacity: f32,
    },

    /// Blend a `top` layer over the current stream (bottom) using the given mode.
    ///
    /// This is a compound step:
    /// - **Normal** mode: `[top]colorchannelmixer=aa=<opacity>[top_faded];
    ///   [bottom][top_faded]overlay=format=auto:shortest=1[out]`
    ///   (the `colorchannelmixer` step is omitted when `opacity == 1.0`).
    /// - All other modes return [`crate::FilterError::InvalidConfig`] from
    ///   [`crate::FilterGraphBuilder::build`] until implemented.
    ///
    /// The `top` builder's steps are applied to the second input slot (`in1`).
    /// `opacity` is clamped to `[0.0, 1.0]` by the builder method.
    ///
    /// `Box<FilterGraphBuilder>` is used to break the otherwise-recursive type:
    /// `FilterStep` → `FilterGraphBuilder` → `Vec<FilterStep>`.
    Blend {
        /// Filter pipeline for the top (foreground) layer.
        top: Box<FilterGraphBuilder>,
        /// How the two layers are combined.
        mode: BlendMode,
        /// Opacity of the top layer in `[0.0, 1.0]`; 1.0 = fully opaque.
        opacity: f32,
    },

    /// Remove pixels matching `color` using `FFmpeg`'s `chromakey` filter,
    /// producing a `yuva420p` output with transparent areas where the key
    /// color was detected.
    ///
    /// Use this for YCbCr-encoded sources (most video).  For RGB sources
    /// use `colorkey` instead.
    ChromaKey {
        /// `FFmpeg` color string, e.g. `"green"`, `"0x00FF00"`, `"#00FF00"`.
        color: String,
        /// Match radius in `[0.0, 1.0]`; higher = more pixels removed.
        similarity: f32,
        /// Edge softness in `[0.0, 1.0]`; `0.0` = hard edge.
        blend: f32,
    },

    /// Remove pixels matching `color` in RGB space using `FFmpeg`'s `colorkey`
    /// filter, producing an `rgba` output with transparent areas where the key
    /// color was detected.
    ///
    /// Use this for RGB-encoded sources.  For YCbCr-encoded video (most video)
    /// use `chromakey` instead.
    ColorKey {
        /// `FFmpeg` color string, e.g. `"green"`, `"0x00FF00"`, `"#00FF00"`.
        color: String,
        /// Match radius in `[0.0, 1.0]`; higher = more pixels removed.
        similarity: f32,
        /// Edge softness in `[0.0, 1.0]`; `0.0` = hard edge.
        blend: f32,
    },

    /// Reduce color spill from the key color on subject edges using `FFmpeg`'s
    /// `hue` filter to desaturate the spill hue region.
    ///
    /// Applies `hue=h=0:s=(1.0 - strength)`.  `strength=0.0` leaves the image
    /// unchanged; `strength=1.0` fully desaturates.
    ///
    /// `key_color` is stored for future use by a more targeted per-hue
    /// implementation.
    SpillSuppress {
        /// `FFmpeg` color string identifying the spill color, e.g. `"green"`.
        key_color: String,
        /// Suppression intensity in `[0.0, 1.0]`; `0.0` = no effect, `1.0` = full suppression.
        strength: f32,
    },

    /// Merge a grayscale `matte` as the alpha channel of the input video using
    /// `FFmpeg`'s `alphamerge` filter.
    ///
    /// White (luma=255) in the matte produces fully opaque output; black (luma=0)
    /// produces fully transparent output.
    ///
    /// This is a compound step: the `matte` builder's pipeline is applied to the
    /// second input slot (`in1`) before the `alphamerge` filter is linked.
    ///
    /// `Box<FilterGraphBuilder>` breaks the otherwise-recursive type, following
    /// the same pattern as [`FilterStep::Blend`].
    AlphaMatte {
        /// Pipeline for the grayscale matte stream (slot 1).
        matte: Box<FilterGraphBuilder>,
    },

    /// Key out pixels by luminance value using `FFmpeg`'s `lumakey` filter.
    ///
    /// Pixels whose normalized luma is within `tolerance` of `threshold` are
    /// made transparent.  When `invert` is `true`, a `geq` filter is appended
    /// to negate the alpha channel, effectively swapping transparent and opaque
    /// regions.
    ///
    /// - `threshold`: luma cutoff in `[0.0, 1.0]`; `0.0` = black, `1.0` = white.
    /// - `tolerance`: match radius around the threshold in `[0.0, 1.0]`.
    /// - `softness`: edge feather width in `[0.0, 1.0]`; `0.0` = hard edge.
    /// - `invert`: when `false`, keys out bright regions (pixels matching the
    ///   threshold); when `true`, the alpha is negated after keying, making
    ///   the complementary region transparent instead.
    ///
    /// Output carries an alpha channel (`yuva420p`).
    LumaKey {
        /// Luma cutoff in `[0.0, 1.0]`.
        threshold: f32,
        /// Match radius around the threshold in `[0.0, 1.0]`.
        tolerance: f32,
        /// Edge feather width in `[0.0, 1.0]`; `0.0` = hard edge.
        softness: f32,
        /// When `true`, the alpha channel is negated after keying.
        invert: bool,
    },

    /// Apply a rectangular alpha mask using `FFmpeg`'s `geq` filter.
    ///
    /// Pixels inside the rectangle defined by (`x`, `y`, `width`, `height`)
    /// are made fully opaque (`alpha=255`); pixels outside are made fully
    /// transparent (`alpha=0`).  When `invert` is `true` the roles are swapped:
    /// inside becomes transparent and outside becomes opaque.
    ///
    /// - `x`, `y`: top-left corner of the rectangle (in pixels).
    /// - `width`, `height`: rectangle dimensions (must be > 0).
    /// - `invert`: when `false`, keeps the interior; when `true`, keeps the
    ///   exterior.
    ///
    /// `width` and `height` are validated in [`build`](FilterGraphBuilder::build);
    /// zero values return [`crate::FilterError::InvalidConfig`].
    ///
    /// The output carries an alpha channel (`rgba`).
    RectMask {
        /// Left edge of the rectangle (pixels from the left).
        x: u32,
        /// Top edge of the rectangle (pixels from the top).
        y: u32,
        /// Width of the rectangle in pixels (must be > 0).
        width: u32,
        /// Height of the rectangle in pixels (must be > 0).
        height: u32,
        /// When `true`, the mask is inverted: outside is opaque, inside is transparent.
        invert: bool,
    },

    /// Feather (soften) the alpha channel edges using a Gaussian blur.
    ///
    /// Splits the stream into a color copy and an alpha copy, blurs the alpha
    /// plane with `gblur=sigma=<radius>`, then re-merges:
    ///
    /// ```text
    /// [in]split=2[color][with_alpha];
    /// [with_alpha]alphaextract[alpha_only];
    /// [alpha_only]gblur=sigma=<radius>[alpha_blurred];
    /// [color][alpha_blurred]alphamerge[out]
    /// ```
    ///
    /// `radius` is the blur kernel half-size in pixels and must be > 0.
    /// Validated in [`build`](FilterGraphBuilder::build); `radius == 0` returns
    /// [`crate::FilterError::InvalidConfig`].
    ///
    /// Typically chained after a keying or masking step
    /// (e.g. [`FilterStep::ChromaKey`], [`FilterStep::RectMask`],
    /// [`FilterStep::PolygonMatte`]).  Applying this step to a fully-opaque
    /// video (no prior alpha) is a no-op because a uniform alpha of 255 blurs
    /// to 255 everywhere.
    FeatherMask {
        /// Gaussian blur kernel half-size in pixels (must be > 0).
        radius: u32,
    },

    /// Apply a polygon alpha mask using `FFmpeg`'s `geq` filter with a
    /// crossing-number point-in-polygon test.
    ///
    /// Pixels inside the polygon are fully opaque (`alpha=255`); pixels outside
    /// are fully transparent (`alpha=0`).  When `invert` is `true` the roles
    /// are swapped.
    ///
    /// - `vertices`: polygon corners as `(x, y)` in `[0.0, 1.0]` (normalised
    ///   to frame size).  Minimum 3, maximum 16.
    /// - `invert`: when `false`, inside = opaque; when `true`, outside = opaque.
    ///
    /// Vertex count and coordinates are validated in
    /// [`build`](FilterGraphBuilder::build); out-of-range values return
    /// [`crate::FilterError::InvalidConfig`].
    ///
    /// The `geq` expression is constructed from the vertex list at graph
    /// build time.  Degenerate polygons (zero area) produce a fully-transparent
    /// mask.  The output carries an alpha channel (`rgba`).
    PolygonMatte {
        /// Polygon corners in normalised `[0.0, 1.0]` frame coordinates.
        vertices: Vec<(f32, f32)>,
        /// When `true`, the mask is inverted: outside is opaque, inside is transparent.
        invert: bool,
    },
}
/// Convert a color temperature in Kelvin to linear RGB multipliers using
/// Tanner Helland's algorithm.
///
/// Returns `(r, g, b)` each in `[0.0, 1.0]`.
fn kelvin_to_rgb(temp_k: u32) -> (f64, f64, f64) {
    // Work in the algorithm's scaled units (Kelvin / 100), clamped to the
    // approximation's valid range of 1000 K – 40 000 K.
    let t = (f64::from(temp_k) / 100.0).clamp(10.0, 400.0);

    // All channel formulas are derived in 0–255 space, so divide by 255 and
    // clamp into the unit interval before returning.
    let clamp01 = |v: f64| v.clamp(0.0, 1.0);

    // Red saturates at or below 6600 K; above, it falls off as a power law.
    let red = if t > 66.0 {
        clamp01(329.698_727_446_4 * (t - 60.0).powf(-0.133_204_759_2) / 255.0)
    } else {
        1.0
    };

    // Green uses a logarithmic fit below 6600 K and a power-law fit above.
    let green = if t > 66.0 {
        clamp01((288.122_169_528_3 * (t - 60.0).powf(-0.075_514_849_2)) / 255.0)
    } else {
        clamp01((99.470_802_586_1 * t.ln() - 161.119_568_166_1) / 255.0)
    };

    // Blue saturates at or above 6600 K, is absent at or below 1900 K, and
    // follows a logarithmic fit in between.
    let blue = match t {
        t if t >= 66.0 => 1.0,
        t if t <= 19.0 => 0.0,
        t => clamp01((138.517_731_223_1 * (t - 10.0).ln() - 305.044_792_730_7) / 255.0),
    };

    (red, green, blue)
}
648
649impl FilterStep {
650    /// Returns the libavfilter filter name for this step.
651    pub(crate) fn filter_name(&self) -> &'static str {
652        match self {
653            Self::Trim { .. } => "trim",
654            Self::Scale { .. } => "scale",
655            Self::Crop { .. } => "crop",
656            Self::Overlay { .. } => "overlay",
657            Self::FadeIn { .. }
658            | Self::FadeOut { .. }
659            | Self::FadeInWhite { .. }
660            | Self::FadeOutWhite { .. } => "fade",
661            Self::AFadeIn { .. } | Self::AFadeOut { .. } => "afade",
662            Self::Rotate { .. } => "rotate",
663            Self::ToneMap(_) => "tonemap",
664            Self::Volume(_) => "volume",
665            Self::Amix(_) => "amix",
666            // ParametricEq is a compound step; "equalizer" is used only by
667            // validate_filter_steps as a best-effort existence check.  The
668            // actual nodes are built by `filter_inner::add_parametric_eq_chain`.
669            Self::ParametricEq { .. } => "equalizer",
670            Self::Lut3d { .. } => "lut3d",
671            Self::Eq { .. } => "eq",
672            Self::EqAnimated { .. } => "eq",
673            Self::ColorBalanceAnimated { .. } => "colorbalance",
674            Self::Curves { .. } => "curves",
675            Self::WhiteBalance { .. } => "colorchannelmixer",
676            Self::Hue { .. } => "hue",
677            Self::Gamma { .. } => "eq",
678            Self::ThreeWayCC { .. } => "curves",
679            Self::Vignette { .. } => "vignette",
680            Self::HFlip => "hflip",
681            Self::VFlip => "vflip",
682            Self::Reverse => "reverse",
683            Self::AReverse => "areverse",
684            Self::Pad { .. } => "pad",
685            // FitToAspect is implemented as scale + pad; "scale" is validated at
686            // build time.  The pad filter is inserted by filter_inner at graph
687            // construction time.
688            Self::FitToAspect { .. } => "scale",
689            Self::GBlur { .. } => "gblur",
690            Self::Unsharp { .. } => "unsharp",
691            Self::Hqdn3d { .. } => "hqdn3d",
692            Self::Nlmeans { .. } => "nlmeans",
693            Self::Yadif { .. } => "yadif",
694            Self::XFade { .. } => "xfade",
695            Self::DrawText { .. } | Self::Ticker { .. } => "drawtext",
696            // "setpts" is checked at build-time; the audio path uses "atempo"
697            // which is verified at graph-construction time in filter_inner.
698            Self::Speed { .. } => "setpts",
699            Self::FreezeFrame { .. } => "loop",
700            Self::LoudnessNormalize { .. } => "ebur128",
701            Self::NormalizePeak { .. } => "astats",
702            Self::ANoiseGate { .. } => "agate",
703            Self::ACompressor { .. } => "acompressor",
704            Self::StereoToMono => "pan",
705            Self::ChannelMap { .. } => "channelmap",
706            // AudioDelay dispatches to adelay (positive) or atrim (negative) at
707            // build time; "adelay" is returned here for validate_filter_steps only.
708            Self::AudioDelay { .. } => "adelay",
709            Self::ConcatVideo { .. } | Self::ConcatAudio { .. } => "concat",
710            // JoinWithDissolve is a compound step (trim+setpts → xfade ← setpts+trim);
711            // "xfade" is used by validate_filter_steps as the primary filter check.
712            Self::JoinWithDissolve { .. } => "xfade",
713            Self::SubtitlesSrt { .. } => "subtitles",
714            Self::SubtitlesAss { .. } => "ass",
715            // OverlayImage is a compound step (movie → lut → overlay); "overlay"
716            // is used only by validate_filter_steps as a best-effort existence
717            // check.  The actual graph construction is handled by
718            // `filter_inner::build::add_overlay_image_step`.
719            Self::OverlayImage { .. } => "overlay",
720            // Blend is a compound step; "overlay" is used as the primary filter
721            // for validate_filter_steps.  Unimplemented modes are caught by
722            // build() before validate_filter_steps is reached.
723            Self::Blend { .. } => "overlay",
724            Self::ChromaKey { .. } => "chromakey",
725            Self::ColorKey { .. } => "colorkey",
726            Self::SpillSuppress { .. } => "hue",
727            // AlphaMatte is a compound step (matte pipeline → alphamerge);
728            // "alphamerge" is used by validate_filter_steps as the primary check.
729            Self::AlphaMatte { .. } => "alphamerge",
730            // LumaKey is a compound step when invert=true (lumakey + geq);
731            // "lumakey" is used here for validate_filter_steps.
732            Self::LumaKey { .. } => "lumakey",
733            // RectMask uses geq to set alpha per-pixel based on rectangle bounds.
734            Self::RectMask { .. } => "geq",
735            // FeatherMask is a compound step (split → alphaextract → gblur → alphamerge);
736            // "alphaextract" is used by validate_filter_steps as the primary check.
737            Self::FeatherMask { .. } => "alphaextract",
738            // PolygonMatte uses geq with a crossing-number point-in-polygon expression.
739            Self::PolygonMatte { .. } => "geq",
740            Self::CropAnimated { .. } => "crop",
741            Self::GBlurAnimated { .. } => "gblur",
742        }
743    }
744
    /// Returns the `args` string passed to `avfilter_graph_create_filter`.
    ///
    /// The returned string is a `key=value` option list in the syntax FFmpeg's
    /// filter parser expects (options separated by `:`).  For compound steps
    /// that bypass the generic build loop (e.g. `ParametricEq`, `OverlayImage`,
    /// `AlphaMatte`, `LoudnessNormalize`), the value returned here is a
    /// best-effort placeholder — see the per-arm comments below.
    pub(crate) fn args(&self) -> String {
        match self {
            // trim takes start/end in seconds.
            Self::Trim { start, end } => format!("start={start}:end={end}"),
            Self::Scale {
                width,
                height,
                algorithm,
            } => format!("w={width}:h={height}:flags={}", algorithm.as_flags_str()),
            Self::Crop {
                x,
                y,
                width,
                height,
            } => {
                format!("x={x}:y={y}:w={width}:h={height}")
            }
            Self::Overlay { x, y } => format!("x={x}:y={y}"),
            Self::FadeIn { start, duration } => {
                format!("type=in:start_time={start}:duration={duration}")
            }
            Self::FadeOut { start, duration } => {
                format!("type=out:start_time={start}:duration={duration}")
            }
            Self::FadeInWhite { start, duration } => {
                format!("type=in:start_time={start}:duration={duration}:color=white")
            }
            Self::FadeOutWhite { start, duration } => {
                format!("type=out:start_time={start}:duration={duration}:color=white")
            }
            // afade uses the same type/start_time/duration options as fade.
            Self::AFadeIn { start, duration } => {
                format!("type=in:start_time={start}:duration={duration}")
            }
            Self::AFadeOut { start, duration } => {
                format!("type=out:start_time={start}:duration={duration}")
            }
            Self::Rotate {
                angle_degrees,
                fill_color,
            } => {
                // The public API takes degrees; the rotate filter expects radians.
                format!(
                    "angle={}:fillcolor={fill_color}",
                    angle_degrees.to_radians()
                )
            }
            Self::ToneMap(algorithm) => format!("tonemap={}", algorithm.as_str()),
            // The `dB` suffix asks the volume filter to interpret the value in
            // decibels rather than as a linear gain.
            Self::Volume(db) => format!("volume={db}dB"),
            Self::Amix(inputs) => format!("inputs={inputs}"),
            // args() for ParametricEq is not used by the build loop (which is
            // bypassed in favour of add_parametric_eq_chain); provided here for
            // completeness using the first band's args.
            Self::ParametricEq { bands } => bands.first().map(EqBand::args).unwrap_or_default(),
            Self::Lut3d { path } => format!("file={path}:interp=trilinear"),
            Self::Eq {
                brightness,
                contrast,
                saturation,
            } => format!("brightness={brightness}:contrast={contrast}:saturation={saturation}"),
            Self::EqAnimated {
                brightness,
                contrast,
                saturation,
                gamma,
            } => {
                // Seed the filter with each animated value sampled at t = 0;
                // the time-varying updates are applied outside this args string.
                let b = brightness.value_at(Duration::ZERO);
                let c = contrast.value_at(Duration::ZERO);
                let s = saturation.value_at(Duration::ZERO);
                let g = gamma.value_at(Duration::ZERO);
                format!("brightness={b}:contrast={c}:saturation={s}:gamma={g}")
            }
            Self::ColorBalanceAnimated { lift, gamma, gain } => {
                // Sample lift/gamma/gain RGB triples at t = 0 and map them onto
                // colorbalance's shadow (rs/gs/bs), midtone (rm/gm/bm) and
                // highlight (rh/gh/bh) options.
                let (rl, gl, bl) = lift.value_at(Duration::ZERO);
                let (rm, gm, bm) = gamma.value_at(Duration::ZERO);
                let (rh, gh, bh) = gain.value_at(Duration::ZERO);
                format!("rs={rl}:gs={gl}:bs={bl}:rm={rm}:gm={gm}:bm={bm}:rh={rh}:gh={gh}:bh={bh}")
            }
            Self::Curves { master, r, g, b } => {
                // Each channel becomes name='x0/y0 x1/y1 …'; channels with an
                // empty point list are omitted from the args entirely.
                let fmt = |pts: &[(f32, f32)]| -> String {
                    pts.iter()
                        .map(|(x, y)| format!("{x}/{y}"))
                        .collect::<Vec<_>>()
                        .join(" ")
                };
                [("master", master.as_slice()), ("r", r), ("g", g), ("b", b)]
                    .iter()
                    .filter(|(_, pts)| !pts.is_empty())
                    .map(|(name, pts)| format!("{name}='{}'", fmt(pts)))
                    .collect::<Vec<_>>()
                    .join(":")
            }
            Self::WhiteBalance {
                temperature_k,
                tint,
            } => {
                // Derive per-channel gains from the colour temperature, then
                // apply the tint as an offset on the green gain only.
                let (r, g, b) = kelvin_to_rgb(*temperature_k);
                let g_adj = (g + f64::from(*tint)).clamp(0.0, 2.0);
                format!("rr={r}:gg={g_adj}:bb={b}")
            }
            Self::Hue { degrees } => format!("h={degrees}"),
            Self::Gamma { r, g, b } => format!("gamma_r={r}:gamma_g={g}:gamma_b={b}"),
            Self::Vignette { angle, x0, y0 } => {
                // 0.0 is treated as "unset": fall back to the frame-centre
                // expressions w/2 and h/2.
                let cx = if *x0 == 0.0 {
                    "w/2".to_string()
                } else {
                    x0.to_string()
                };
                let cy = if *y0 == 0.0 {
                    "h/2".to_string()
                } else {
                    y0.to_string()
                };
                format!("angle={angle}:x0={cx}:y0={cy}")
            }
            Self::ThreeWayCC { lift, gamma, gain } => {
                // Convert lift/gamma/gain to a 3-point per-channel curves representation.
                // The formula maps:
                //   input 0.0 → (lift - 1.0) * gain  (black point)
                //   input 0.5 → (0.5 * lift)^(1/gamma) * gain  (midtone)
                //   input 1.0 → gain  (white point)
                // All neutral (1.0) produces the identity curve 0/0 0.5/0.5 1/1.
                let curve = |l: f32, gm: f32, gn: f32| -> String {
                    let l = f64::from(l);
                    let gm = f64::from(gm);
                    let gn = f64::from(gn);
                    let black = ((l - 1.0) * gn).clamp(0.0, 1.0);
                    let mid = ((0.5 * l).powf(1.0 / gm) * gn).clamp(0.0, 1.0);
                    let white = gn.clamp(0.0, 1.0);
                    format!("0/{black} 0.5/{mid} 1/{white}")
                };
                format!(
                    "r='{}':g='{}':b='{}'",
                    curve(lift.r, gamma.r, gain.r),
                    curve(lift.g, gamma.g, gain.g),
                    curve(lift.b, gamma.b, gain.b),
                )
            }
            // These filters take no options.
            Self::HFlip | Self::VFlip | Self::Reverse | Self::AReverse => String::new(),
            Self::GBlur { sigma } => format!("sigma={sigma}"),
            Self::Unsharp {
                luma_strength,
                chroma_strength,
            } => format!(
                "luma_msize_x=5:luma_msize_y=5:luma_amount={luma_strength}:\
                 chroma_msize_x=5:chroma_msize_y=5:chroma_amount={chroma_strength}"
            ),
            // hqdn3d accepts its four strengths positionally.
            Self::Hqdn3d {
                luma_spatial,
                chroma_spatial,
                luma_tmp,
                chroma_tmp,
            } => format!("{luma_spatial}:{chroma_spatial}:{luma_tmp}:{chroma_tmp}"),
            Self::Nlmeans { strength } => format!("s={strength}"),
            // YadifMode is passed as its integer discriminant.
            Self::Yadif { mode } => format!("mode={}", *mode as i32),
            Self::XFade {
                transition,
                duration,
                offset,
            } => {
                let t = transition.as_str();
                format!("transition={t}:duration={duration}:offset={offset}")
            }
            Self::DrawText { opts } => {
                // Escape special characters recognised by the drawtext filter.
                let escaped = opts
                    .text
                    .replace('\\', "\\\\")
                    .replace(':', "\\:")
                    .replace('\'', "\\'");
                // fontcolor uses the colour@alpha form; opacity is rendered
                // with two decimal places.
                let mut parts = vec![
                    format!("text='{escaped}'"),
                    format!("x={}", opts.x),
                    format!("y={}", opts.y),
                    format!("fontsize={}", opts.font_size),
                    format!("fontcolor={}@{:.2}", opts.font_color, opts.opacity),
                ];
                if let Some(ref ff) = opts.font_file {
                    parts.push(format!("fontfile={ff}"));
                }
                // A box colour implies box=1 plus its colour and border width.
                if let Some(ref bc) = opts.box_color {
                    parts.push("box=1".to_string());
                    parts.push(format!("boxcolor={bc}"));
                    parts.push(format!("boxborderw={}", opts.box_border_width));
                }
                parts.join(":")
            }
            Self::Ticker {
                text,
                y,
                speed_px_per_sec,
                font_size,
                font_color,
            } => {
                // Use the same escaping as DrawText.
                let escaped = text
                    .replace('\\', "\\\\")
                    .replace(':', "\\:")
                    .replace('\'', "\\'");
                // x = w - t * speed: at t=0 the text starts fully off the right
                // edge (x = w) and scrolls left by `speed` pixels per second.
                format!(
                    "text='{escaped}':x=w-t*{speed_px_per_sec}:y={y}:\
                     fontsize={font_size}:fontcolor={font_color}"
                )
            }
            // Video path: divide PTS by factor to change playback speed.
            // Audio path args are built by filter_inner (chained atempo).
            Self::Speed { factor } => format!("PTS/{factor}"),
            // args() is not used by the build loop for LoudnessNormalize (two-pass
            // is handled entirely in filter_inner); provided here for completeness.
            Self::LoudnessNormalize { .. } => "peak=true:metadata=1".to_string(),
            // args() is not used by the build loop for NormalizePeak (two-pass
            // is handled entirely in filter_inner); provided here for completeness.
            Self::NormalizePeak { .. } => "metadata=1".to_string(),
            Self::FreezeFrame { pts, duration } => {
                // The `loop` filter needs a frame index and a loop count, not PTS or
                // wall-clock duration.  We approximate both using 25 fps; accuracy
                // depends on the source stream's actual frame rate.
                #[allow(clippy::cast_possible_truncation)]
                let start = (*pts * 25.0) as i64;
                #[allow(clippy::cast_possible_truncation)]
                let loop_count = (*duration * 25.0) as i64;
                format!("loop={loop_count}:size=1:start={start}")
            }
            // subtitles and ass share the same filename option.
            Self::SubtitlesSrt { path } | Self::SubtitlesAss { path } => {
                format!("filename={path}")
            }
            // args() for OverlayImage returns the overlay positional args (x:y).
            // These are not consumed by add_and_link_step (which is bypassed for
            // this compound step); they exist here only for completeness.
            Self::OverlayImage { x, y, .. } => format!("{x}:{y}"),
            // args() for Blend is not consumed by add_and_link_step (which is
            // bypassed in favour of add_blend_normal_step).  Provided for
            // completeness using the Normal-mode overlay args.
            Self::Blend { .. } => "format=auto:shortest=1".to_string(),
            Self::ChromaKey {
                color,
                similarity,
                blend,
            } => format!("color={color}:similarity={similarity}:blend={blend}"),
            Self::ColorKey {
                color,
                similarity,
                blend,
            } => format!("color={color}:similarity={similarity}:blend={blend}"),
            // hue filter: zero hue shift, saturation scaled down by strength
            // (strength 1.0 → s=0, fully desaturated).
            Self::SpillSuppress { strength, .. } => format!("h=0:s={}", 1.0 - strength),
            // args() is not consumed by add_and_link_step (which is bypassed for
            // this compound step); provided here for completeness.
            Self::AlphaMatte { .. } => String::new(),
            Self::LumaKey {
                threshold,
                tolerance,
                softness,
                ..
            } => format!("threshold={threshold}:tolerance={tolerance}:softness={softness}"),
            // args() is not consumed by add_and_link_step (which is bypassed for
            // this compound step); provided here for completeness.
            Self::FeatherMask { .. } => String::new(),
            Self::RectMask {
                x,
                y,
                width,
                height,
                invert,
            } => {
                // Inclusive pixel bounds: [x, x+width-1] × [y, y+height-1].
                // NOTE(review): assumes width/height >= 1 — if these fields are
                // unsigned, a zero extent would underflow here; confirm that
                // upstream validation rules that out.
                let xw = x + width - 1;
                let yh = y + height - 1;
                let (inside, outside) = if *invert { (0, 255) } else { (255, 0) };
                format!(
                    "r='r(X,Y)':g='g(X,Y)':b='b(X,Y)':\
                     a='if(between(X,{x},{xw})*between(Y,{y},{yh}),{inside},{outside})'"
                )
            }
            Self::PolygonMatte { vertices, invert } => {
                // Build a crossing-number point-in-polygon expression.
                // For each edge (ax,ay)→(bx,by), a horizontal ray from (X,Y) going
                // right crosses the edge when Y is in [min(ay,by), max(ay,by)) and
                // the intersection x > X.  Exact horizontal edges (dy==0) are skipped.
                let n = vertices.len();
                let mut edge_exprs = Vec::new();
                for i in 0..n {
                    let (ax, ay) = vertices[i];
                    let (bx, by) = vertices[(i + 1) % n];
                    let dy = by - ay;
                    if dy == 0.0 {
                        // Horizontal edge — never crosses a horizontal ray; skip.
                        continue;
                    }
                    let min_y = ay.min(by);
                    let max_y = ay.max(by);
                    let dx = bx - ax;
                    // x_intersect = ax*iw + (Y - ay*ih) * dx*iw / (dy*ih)
                    edge_exprs.push(format!(
                        "if(gte(Y,{min_y}*ih)*lt(Y,{max_y}*ih)*gt({ax}*iw+(Y-{ay}*ih)*{dx}*iw/({dy}*ih),X),1,0)"
                    ));
                }
                // An odd number of crossings means the point is inside.
                let sum = if edge_exprs.is_empty() {
                    "0".to_string()
                } else {
                    edge_exprs.join("+")
                };
                let (inside, outside) = if *invert { (0, 255) } else { (255, 0) };
                format!(
                    "r='r(X,Y)':g='g(X,Y)':b='b(X,Y)':\
                     a='if(gt(mod({sum},2),0),{inside},{outside})'"
                )
            }
            Self::FitToAspect { width, height, .. } => {
                // Scale to fit within the target dimensions, preserving the source
                // aspect ratio.  The accompanying pad filter (inserted by
                // filter_inner after this scale filter) centres the result on the
                // target canvas.
                format!("w={width}:h={height}:force_original_aspect_ratio=decrease")
            }
            Self::Pad {
                width,
                height,
                x,
                y,
                color,
            } => {
                // Negative x/y are sentinels meaning "centre on the output canvas".
                let px = if *x < 0 {
                    "(ow-iw)/2".to_string()
                } else {
                    x.to_string()
                };
                let py = if *y < 0 {
                    "(oh-ih)/2".to_string()
                } else {
                    y.to_string()
                };
                format!("width={width}:height={height}:x={px}:y={py}:color={color}")
            }
            Self::ANoiseGate {
                threshold_db,
                attack_ms,
                release_ms,
            } => {
                // `agate` expects threshold as a linear amplitude ratio (0.0–1.0).
                let threshold_linear = 10f32.powf(threshold_db / 20.0);
                format!("threshold={threshold_linear:.6}:attack={attack_ms}:release={release_ms}")
            }
            Self::ACompressor {
                threshold_db,
                ratio,
                attack_ms,
                release_ms,
                makeup_db,
            } => {
                // Unlike agate, acompressor accepts dB values directly via the
                // `dB` unit suffix.
                format!(
                    "threshold={threshold_db}dB:ratio={ratio}:attack={attack_ms}:\
                     release={release_ms}:makeup={makeup_db}dB"
                )
            }
            // pan: downmix both input channels at half gain into one channel.
            Self::StereoToMono => "mono|c0=0.5*c0+0.5*c1".to_string(),
            Self::ChannelMap { mapping } => format!("map={mapping}"),
            // args() is not used directly for AudioDelay — the audio build loop
            // dispatches to add_raw_filter_step with the correct filter name and
            // args based on the sign of ms.  These are provided for completeness.
            Self::AudioDelay { ms } => {
                if *ms >= 0.0 {
                    format!("delays={ms}:all=1")
                } else {
                    // atrim takes seconds; ms is negative here, so -ms/1000 is
                    // the positive amount of leading audio to drop.
                    format!("start={}", -ms / 1000.0)
                }
            }
            Self::ConcatVideo { n } => format!("n={n}:v=1:a=0"),
            Self::ConcatAudio { n } => format!("n={n}:v=0:a=1"),
            // args() for JoinWithDissolve is not used by the build loop (which is
            // bypassed in favour of add_join_with_dissolve_step); provided here for
            // completeness using the xfade args.
            Self::JoinWithDissolve {
                clip_a_end,
                dissolve_dur,
                ..
            } => format!("transition=dissolve:duration={dissolve_dur}:offset={clip_a_end}"),
            Self::CropAnimated {
                x,
                y,
                width,
                height,
            } => {
                // Seed with the animated values sampled at t = 0; the
                // time-varying updates are applied outside this args string.
                let x0 = x.value_at(Duration::ZERO);
                let y0 = y.value_at(Duration::ZERO);
                let w0 = width.value_at(Duration::ZERO);
                let h0 = height.value_at(Duration::ZERO);
                format!("x={x0}:y={y0}:w={w0}:h={h0}")
            }
            Self::GBlurAnimated { sigma } => {
                // Seed with the animated sigma sampled at t = 0.
                let s0 = sigma.value_at(Duration::ZERO);
                format!("sigma={s0}")
            }
        }
    }
1138}