pub enum FilterStep {
Show 65 variants
Trim {
start: f64,
end: f64,
},
Scale {
width: u32,
height: u32,
algorithm: ScaleAlgorithm,
},
Crop {
x: u32,
y: u32,
width: u32,
height: u32,
},
Overlay {
x: i32,
y: i32,
},
FadeIn {
start: f64,
duration: f64,
},
FadeOut {
start: f64,
duration: f64,
},
AFadeIn {
start: f64,
duration: f64,
},
AFadeOut {
start: f64,
duration: f64,
},
FadeInWhite {
start: f64,
duration: f64,
},
FadeOutWhite {
start: f64,
duration: f64,
},
Rotate {
angle_degrees: f64,
fill_color: String,
},
ToneMap(ToneMap),
Volume(f64),
Amix(usize),
ParametricEq {
bands: Vec<EqBand>,
},
Lut3d {
path: String,
},
Eq {
brightness: f32,
contrast: f32,
saturation: f32,
},
EqAnimated {
brightness: AnimatedValue<f64>,
contrast: AnimatedValue<f64>,
saturation: AnimatedValue<f64>,
gamma: AnimatedValue<f64>,
},
ColorBalanceAnimated {
lift: AnimatedValue<(f64, f64, f64)>,
gamma: AnimatedValue<(f64, f64, f64)>,
gain: AnimatedValue<(f64, f64, f64)>,
},
Curves {
master: Vec<(f32, f32)>,
r: Vec<(f32, f32)>,
g: Vec<(f32, f32)>,
b: Vec<(f32, f32)>,
},
WhiteBalance {
temperature_k: u32,
tint: f32,
},
Hue {
degrees: f32,
},
Gamma {
r: f32,
g: f32,
b: f32,
},
ThreeWayCC {
lift: Rgb,
gamma: Rgb,
gain: Rgb,
},
Vignette {
angle: f32,
x0: f32,
y0: f32,
},
HFlip,
VFlip,
Reverse,
AReverse,
Pad {
width: u32,
height: u32,
x: i32,
y: i32,
color: String,
},
FitToAspect {
width: u32,
height: u32,
color: String,
},
GBlur {
sigma: f32,
},
CropAnimated {
x: AnimatedValue<f64>,
y: AnimatedValue<f64>,
width: AnimatedValue<f64>,
height: AnimatedValue<f64>,
},
GBlurAnimated {
sigma: AnimatedValue<f64>,
},
Unsharp {
luma_strength: f32,
chroma_strength: f32,
},
Hqdn3d {
luma_spatial: f32,
chroma_spatial: f32,
luma_tmp: f32,
chroma_tmp: f32,
},
Nlmeans {
strength: f32,
},
Yadif {
mode: YadifMode,
},
XFade {
transition: XfadeTransition,
duration: f64,
offset: f64,
},
DrawText {
opts: DrawTextOptions,
},
SubtitlesSrt {
path: String,
},
SubtitlesAss {
path: String,
},
Speed {
factor: f64,
},
LoudnessNormalize {
target_lufs: f32,
true_peak_db: f32,
lra: f32,
},
NormalizePeak {
target_db: f32,
},
ANoiseGate {
threshold_db: f32,
attack_ms: f32,
release_ms: f32,
},
ACompressor {
threshold_db: f32,
ratio: f32,
attack_ms: f32,
release_ms: f32,
makeup_db: f32,
},
StereoToMono,
ChannelMap {
mapping: String,
},
AudioDelay {
ms: f64,
},
ConcatVideo {
n: u32,
},
ConcatAudio {
n: u32,
},
FreezeFrame {
pts: f64,
duration: f64,
},
Ticker {
text: String,
y: String,
speed_px_per_sec: f32,
font_size: u32,
font_color: String,
},
JoinWithDissolve {
clip_a_end: f64,
clip_b_start: f64,
dissolve_dur: f64,
},
OverlayImage {
path: String,
x: String,
y: String,
opacity: f32,
},
Blend {
top: Box<FilterGraphBuilder>,
mode: BlendMode,
opacity: f32,
},
ChromaKey {
color: String,
similarity: f32,
blend: f32,
},
ColorKey {
color: String,
similarity: f32,
blend: f32,
},
SpillSuppress {
key_color: String,
strength: f32,
},
AlphaMatte {
matte: Box<FilterGraphBuilder>,
},
LumaKey {
threshold: f32,
tolerance: f32,
softness: f32,
invert: bool,
},
RectMask {
x: u32,
y: u32,
width: u32,
height: u32,
invert: bool,
},
FeatherMask {
radius: u32,
},
PolygonMatte {
vertices: Vec<(f32, f32)>,
invert: bool,
},
}Expand description
A single step in a filter chain.
Used by crate::FilterGraphBuilder to build pipeline filter graphs, and by
crate::AudioTrack::effects to attach per-track effects in a multi-track mix.
Variants§
Trim
Trim: keep only frames in [start, end) seconds.
Scale
Scale to a new resolution using the given resampling algorithm.
Crop
Crop a rectangular region.
Overlay
Overlay a second stream at position (x, y).
FadeIn
Fade-in from black starting at start seconds, over duration seconds.
FadeOut
Fade-out to black starting at start seconds, over duration seconds.
AFadeIn
Audio fade-in from silence starting at start seconds, over duration seconds.
AFadeOut
Audio fade-out to silence starting at start seconds, over duration seconds.
FadeInWhite
Fade-in from white starting at start seconds, over duration seconds.
FadeOutWhite
Fade-out to white starting at start seconds, over duration seconds.
Rotate
Rotate clockwise by angle_degrees, filling exposed areas with fill_color.
ToneMap(ToneMap)
HDR-to-SDR tone mapping.
Volume(f64)
Adjust audio volume (in dB; negative = quieter).
Amix(usize)
Mix n audio inputs together.
ParametricEq
Multi-band parametric equalizer (low-shelf, high-shelf, or peak bands).
Each band maps to its own FFmpeg filter node chained in sequence.
The bands vec must not be empty.
Lut3d
Apply a 3D LUT from a .cube or .3dl file.
Eq
Brightness/contrast/saturation adjustment via FFmpeg eq filter.
EqAnimated
Brightness / contrast / saturation / gamma via FFmpeg eq filter (optionally animated).
Arguments are evaluated at Duration::ZERO for the initial graph build.
Per-frame updates are applied via avfilter_graph_send_command in #363.
Fields
brightness: AnimatedValue<f64>Brightness offset. Range: −1.0 – 1.0 (neutral: 0.0).
contrast: AnimatedValue<f64>Contrast multiplier. Range: 0.0 – 3.0 (neutral: 1.0).
saturation: AnimatedValue<f64>Saturation multiplier. Range: 0.0 – 3.0 (neutral: 1.0; 0.0 = grayscale).
gamma: AnimatedValue<f64>Global gamma correction. Range: 0.1 – 10.0 (neutral: 1.0).
ColorBalanceAnimated
Three-way color balance (shadows / midtones / highlights) via FFmpeg colorbalance filter
(optionally animated).
Each tuple is (R, G, B). Valid range per component: −1.0 – 1.0 (neutral: 0.0).
Arguments are evaluated at Duration::ZERO for the initial graph build.
Per-frame updates are applied via avfilter_graph_send_command in #363.
Fields
lift: AnimatedValue<(f64, f64, f64)>Shadows (lift) correction per channel. FFmpeg params: "rs", "gs", "bs".
gamma: AnimatedValue<(f64, f64, f64)>Midtones (gamma) correction per channel. FFmpeg params: "rm", "gm", "bm".
gain: AnimatedValue<(f64, f64, f64)>Highlights (gain) correction per channel. FFmpeg params: "rh", "gh", "bh".
Curves
Per-channel RGB color curves adjustment.
WhiteBalance
White balance correction via colorchannelmixer.
Hue
Hue rotation by an arbitrary angle.
Gamma
Per-channel gamma correction via FFmpeg eq filter.
ThreeWayCC
Three-way colour corrector (lift / gamma / gain) via FFmpeg curves filter.
Fields
Vignette
Vignette effect via FFmpeg vignette filter.
Fields
HFlip
Horizontal flip (mirror left-right).
VFlip
Vertical flip (mirror top-bottom).
Reverse
Reverse video playback (buffers entire clip in memory — use only on short clips).
AReverse
Reverse audio playback (buffers entire clip in memory — use only on short clips).
Pad
Pad to a target resolution with a fill color (letterbox / pillarbox).
Fields
x: i32Horizontal offset of the source frame within the canvas.
Negative values are replaced with (ow-iw)/2 (centred).
FitToAspect
Scale (preserving aspect ratio) then centre-pad to fill target dimensions (letterbox or pillarbox as required).
Implemented as a scale filter with force_original_aspect_ratio=decrease
followed by a pad filter that centres the scaled frame on the canvas.
Fields
GBlur
Gaussian blur with configurable radius.
sigma is the blur radius. Valid range: 0.0 – 10.0 (values near 0.0 are
nearly a no-op; higher values produce a stronger blur).
CropAnimated
Crop with optionally animated boundaries (pixels, f64 for sub-pixel precision).
Arguments are evaluated at Duration::ZERO for the initial graph build.
Per-frame updates are applied via avfilter_graph_send_command in #363.
Fields
x: AnimatedValue<f64>X offset of the top-left corner, in pixels.
y: AnimatedValue<f64>Y offset of the top-left corner, in pixels.
width: AnimatedValue<f64>Width of the cropped region. Must evaluate to > 0 at Duration::ZERO.
height: AnimatedValue<f64>Height of the cropped region. Must evaluate to > 0 at Duration::ZERO.
GBlurAnimated
Gaussian blur with an optionally animated sigma (blur radius).
Arguments are evaluated at Duration::ZERO for the initial graph build.
Per-frame updates are applied via avfilter_graph_send_command in #363.
Fields
sigma: AnimatedValue<f64>Blur radius (standard deviation). Must evaluate to ≥ 0.0 at Duration::ZERO.
Unsharp
Sharpen or blur via unsharp mask (luma + chroma strength).
Positive values sharpen; negative values blur. Valid range for each component: −1.5 – 1.5.
Fields
Hqdn3d
High Quality 3D noise reduction (hqdn3d).
Typical values: luma_spatial=4.0, chroma_spatial=3.0,
luma_tmp=6.0, chroma_tmp=4.5. All values must be ≥ 0.0.
Fields
Nlmeans
Non-local means noise reduction (nlmeans).
strength controls the denoising intensity; range 1.0–30.0.
Higher values remove more noise but are significantly more CPU-intensive.
NOTE: nlmeans is CPU-intensive; avoid for real-time pipelines.
Yadif
Deinterlace using the yadif filter.
XFade
Cross-dissolve transition between two video streams (xfade).
Requires two input slots: slot 0 is clip A, slot 1 is clip B.
duration is the overlap length in seconds; offset is the PTS
offset (in seconds) at which clip B begins.
Fields
transition: XfadeTransitionTransition style.
DrawText
Draw text onto the video using the drawtext filter.
Fields
opts: DrawTextOptionsFull set of drawtext parameters.
SubtitlesSrt
Burn-in SRT subtitles (hard subtitles) using the subtitles filter.
SubtitlesAss
Burn-in ASS/SSA styled subtitles using the ass filter.
Speed
Playback speed change using setpts (video) and chained atempo (audio).
factor > 1.0 = fast motion; factor < 1.0 = slow motion.
Valid range: 0.1–100.0.
Video path: setpts=PTS/{factor}.
Audio path: the atempo filter only accepts [0.5, 2.0] per instance;
filter_inner chains multiple instances to cover the full range.
LoudnessNormalize
EBU R128 two-pass loudness normalization.
Pass 1 measures integrated loudness with ebur128=peak=true:metadata=1.
Pass 2 applies a linear volume correction so the output reaches target_lufs.
All audio frames are buffered in memory between the two passes — use only
for clips that fit comfortably in RAM.
Fields
NormalizePeak
Peak-level two-pass normalization using astats.
Pass 1 measures the true peak with astats=metadata=1.
Pass 2 applies volume={gain}dB so the output peak reaches target_db.
All audio frames are buffered in memory between passes — use only
for clips that fit comfortably in RAM.
ANoiseGate
Noise gate via FFmpeg’s agate filter.
Audio below threshold_db is attenuated; audio above passes through.
The threshold is converted from dBFS to the linear scale expected by
agate’s threshold parameter (linear = 10^(dB/20)).
Fields
ACompressor
Dynamic range compressor via FFmpeg’s acompressor filter.
Reduces the dynamic range of the audio signal: peaks above
threshold_db are attenuated by ratio:1. makeup_db applies
additional gain after compression to restore perceived loudness.
Fields
StereoToMono
Downmix stereo to mono via FFmpeg’s pan filter.
Both channels are mixed with equal weight:
mono|c0=0.5*c0+0.5*c1. The output has a single channel.
ChannelMap
Remap audio channels using FFmpeg’s channelmap filter.
mapping is a |-separated list of output channel names taken
from input channels, e.g. "FR|FL" swaps left and right.
Must not be empty.
AudioDelay
A/V sync correction via audio delay or advance.
Positive ms: uses FFmpeg’s adelay filter to shift audio later.
Negative ms: uses FFmpeg’s atrim filter to trim the audio start,
effectively advancing audio by |ms| milliseconds.
Zero ms: uses adelay with zero delay (no-op).
ConcatVideo
Concatenate n sequential video input segments via FFmpeg’s concat filter.
Requires n video input slots (0 through n-1). n must be ≥ 2.
ConcatAudio
Concatenate n sequential audio input segments via FFmpeg’s concat filter.
Requires n audio input slots (0 through n-1). n must be ≥ 2.
FreezeFrame
Freeze a single frame for a configurable duration using FFmpeg’s loop filter.
The frame nearest to pts seconds is held for duration seconds, then
playback resumes. Frame numbers are approximated using a 25 fps assumption;
accuracy depends on the source stream’s actual frame rate.
Fields
Ticker
Scrolling text ticker (right-to-left) using the drawtext filter.
The text starts off-screen to the right and scrolls left at
speed_px_per_sec pixels per second using the expression
x = w - t * speed.
Fields
JoinWithDissolve
Join two video clips with a cross-dissolve transition.
Compound step — expands in filter_inner to:
in0 → trim(end=clip_a_end+dissolve_dur) → setpts → xfade[0]
in1 → trim(start=max(0, clip_b_start−dissolve_dur)) → setpts → xfade[1]
Requires two video input slots: slot 0 = clip A, slot 1 = clip B.
clip_a_end and dissolve_dur must be > 0.0.
Fields
OverlayImage
Composite a PNG image (watermark / logo) over video with optional opacity.
This is a compound step: internally it creates a movie source,
a lut alpha-scaling filter, and an overlay compositing filter.
The image file is loaded once at graph construction time.
Fields
Blend
Blend a top layer over the current stream (bottom) using the given mode.
This is a compound step:
- Normal mode:
[top]colorchannelmixer=aa=<opacity>[top_faded]; [bottom][top_faded]overlay=format=auto:shortest=1[out]
(the colorchannelmixer step is omitted when opacity == 1.0).
- All other modes return crate::FilterError::InvalidConfig from crate::FilterGraphBuilder::build until implemented.
The top builder’s steps are applied to the second input slot (in1).
opacity is clamped to [0.0, 1.0] by the builder method.
Box<FilterGraphBuilder> is used to break the otherwise-recursive type:
FilterStep → FilterGraphBuilder → Vec<FilterStep>.
Fields
top: Box<FilterGraphBuilder>Filter pipeline for the top (foreground) layer.
ChromaKey
Remove pixels matching color using FFmpeg’s chromakey filter,
producing a yuva420p output with transparent areas where the key
color was detected.
Use this for YCbCr-encoded sources (most video). For RGB sources
use colorkey instead.
Fields
ColorKey
Remove pixels matching color in RGB space using FFmpeg’s colorkey
filter, producing an rgba output with transparent areas where the key
color was detected.
Use this for RGB-encoded sources. For YCbCr-encoded video (most video)
use chromakey instead.
Fields
SpillSuppress
Reduce color spill from the key color on subject edges using FFmpeg’s
hue filter to desaturate the spill hue region.
Applies hue=h=0:s=(1.0 - strength). strength=0.0 leaves the image
unchanged; strength=1.0 fully desaturates.
key_color is stored for future use by a more targeted per-hue
implementation.
Fields
AlphaMatte
Merge a grayscale matte as the alpha channel of the input video using
FFmpeg’s alphamerge filter.
White (luma=255) in the matte produces fully opaque output; black (luma=0) produces fully transparent output.
This is a compound step: the matte builder’s pipeline is applied to the
second input slot (in1) before the alphamerge filter is linked.
Box<FilterGraphBuilder> breaks the otherwise-recursive type, following
the same pattern as FilterStep::Blend.
Fields
matte: Box<FilterGraphBuilder>Pipeline for the grayscale matte stream (slot 1).
LumaKey
Key out pixels by luminance value using FFmpeg’s lumakey filter.
Pixels whose normalized luma is within tolerance of threshold are
made transparent. When invert is true, a geq filter is appended
to negate the alpha channel, effectively swapping transparent and opaque
regions.
threshold: luma cutoff in [0.0, 1.0]; 0.0 = black, 1.0 = white.
tolerance: match radius around the threshold in [0.0, 1.0].
softness: edge feather width in [0.0, 1.0]; 0.0 = hard edge.
invert: when false, keys out bright regions (pixels matching the threshold); when true, the alpha is negated after keying, making the complementary region transparent instead.
Output carries an alpha channel (yuva420p).
Fields
RectMask
Apply a rectangular alpha mask using FFmpeg’s geq filter.
Pixels inside the rectangle defined by (x, y, width, height)
are made fully opaque (alpha=255); pixels outside are made fully
transparent (alpha=0). When invert is true the roles are swapped:
inside becomes transparent and outside becomes opaque.
x, y: top-left corner of the rectangle (in pixels).
width, height: rectangle dimensions (must be > 0).
invert: when false, keeps the interior; when true, keeps the exterior.
width and height are validated in build;
zero values return crate::FilterError::InvalidConfig.
The output carries an alpha channel (rgba).
Fields
FeatherMask
Feather (soften) the alpha channel edges using a Gaussian blur.
Splits the stream into a color copy and an alpha copy, blurs the alpha
plane with gblur=sigma=<radius>, then re-merges:
[in]split=2[color][with_alpha];
[with_alpha]alphaextract[alpha_only];
[alpha_only]gblur=sigma=<radius>[alpha_blurred];
[color][alpha_blurred]alphamerge[out]
radius is the blur kernel half-size in pixels and must be > 0.
Validated in build; radius == 0 returns
crate::FilterError::InvalidConfig.
Typically chained after a keying or masking step
(e.g. FilterStep::ChromaKey, FilterStep::RectMask,
FilterStep::PolygonMatte). Applying this step to a fully-opaque
video (no prior alpha) is a no-op because a uniform alpha of 255 blurs
to 255 everywhere.
PolygonMatte
Apply a polygon alpha mask using FFmpeg’s geq filter with a
crossing-number point-in-polygon test.
Pixels inside the polygon are fully opaque (alpha=255); pixels outside
are fully transparent (alpha=0). When invert is true the roles
are swapped.
vertices: polygon corners as (x, y) in [0.0, 1.0] (normalised to frame size). Minimum 3, maximum 16.
invert: when false, inside = opaque; when true, outside = opaque.
Vertex count and coordinates are validated in
build; out-of-range values return
crate::FilterError::InvalidConfig.
The geq expression is constructed from the vertex list at graph
build time. Degenerate polygons (zero area) produce a fully-transparent
mask. The output carries an alpha channel (rgba).
Trait Implementations§
impl Clone for FilterStep
fn clone(&self) -> FilterStep
    Returns a copy of the value.
fn clone_from(&mut self, source: &Self) — stable since 1.0.0
    Performs copy-assignment from source.