Struct ffmpeg_sidecar::command::FfmpegCommand
source · pub struct FfmpegCommand { /* private fields */ }
Expand description
A wrapper around std::process::Command with some convenient preset
argument sets and customization for ffmpeg specifically.
The rustdoc on each method includes relevant information from the FFmpeg
documentation: https://ffmpeg.org/ffmpeg.html. Refer there for the
exhaustive list of possible arguments.
Implementations§
source§impl FfmpegCommand
impl FfmpegCommand
Alias for the -hide_banner argument.
Suppress printing banner.
All FFmpeg tools will normally show a copyright notice, build options and library versions. This option can be used to suppress printing this information.
sourcepub fn format<S: AsRef<str>>(&mut self, format: S) -> &mut Self
pub fn format<S: AsRef<str>>(&mut self, format: S) -> &mut Self
Alias for -f argument, the format name.
Force input or output file format. The format is normally auto detected for input files and guessed from the file extension for output files, so this option is not needed in most cases.
Examples found in repository?
13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42
fn main() {
let mut ffmpeg = FfmpegCommand::new()
.realtime()
.format("lavfi")
.input("testsrc=size=1920x1080:rate=60")
.codec_video("rawvideo")
.format("avi")
.output("-")
.spawn()
.unwrap();
let mut ffplay = Command::new("ffplay")
.args("-i -".split(' '))
.stdin(Stdio::piped())
.spawn()
.unwrap();
let mut ffmpeg_stdout = ffmpeg.take_stdout().unwrap();
let mut ffplay_stdin = ffplay.stdin.take().unwrap();
// pipe from ffmpeg stdout to ffplay stdin
let buf = &mut [0u8; 4096];
loop {
let n = ffmpeg_stdout.read(buf).unwrap();
if n == 0 {
break;
}
ffplay_stdin.write_all(&buf[..n]).unwrap();
}
}sourcepub fn input<S: AsRef<str>>(&mut self, path_or_url: S) -> &mut Self
pub fn input<S: AsRef<str>>(&mut self, path_or_url: S) -> &mut Self
Alias for -i argument, the input file path or URL.
To take input from stdin, use the value - or pipe:0.
Examples found in repository?
13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42
fn main() {
let mut ffmpeg = FfmpegCommand::new()
.realtime()
.format("lavfi")
.input("testsrc=size=1920x1080:rate=60")
.codec_video("rawvideo")
.format("avi")
.output("-")
.spawn()
.unwrap();
let mut ffplay = Command::new("ffplay")
.args("-i -".split(' '))
.stdin(Stdio::piped())
.spawn()
.unwrap();
let mut ffmpeg_stdout = ffmpeg.take_stdout().unwrap();
let mut ffplay_stdin = ffplay.stdin.take().unwrap();
// pipe from ffmpeg stdout to ffplay stdin
let buf = &mut [0u8; 4096];
loop {
let n = ffmpeg_stdout.read(buf).unwrap();
if n == 0 {
break;
}
ffplay_stdin.write_all(&buf[..n]).unwrap();
}
}More examples
13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66
fn main() {
// Create an H265 source video as a starting point
let input_path = "output/h265.mp4";
if !Path::new(input_path).exists() {
create_h265_source(input_path);
}
// One instance decodes H265 to raw frames
let mut input = FfmpegCommand::new()
.input(input_path)
.rawvideo()
.spawn()
.unwrap();
// Frames can be transformed by Iterator `.map()`.
// This example is a no-op, with frames passed through unaltered.
let transformed_frames = input.iter().unwrap().filter_frames().map(|f| f);
// You could easily add some "middleware" processing here:
// - overlay or composite another RGB image (or even another Ffmpeg Iterator)
// - apply a filter like blur or convolution
// Note: some of these operations are also possible with FFmpeg's (somewhat arcane)
// `filtergraph` API, but doing it in Rust gives you much finer-grained
// control, debuggability, and modularity -- you can pull in any Rust crate
// you need.
// A second instance encodes the updated frames back to H265
let mut output = FfmpegCommand::new()
.args([
"-f", "rawvideo", "-pix_fmt", "rgb24", "-s", "600x800", "-r", "30",
]) // note: should be possible to infer these params from the source input stream
.input("-")
.args(["-c:v", "libx265"])
.args(["-y", "output/h265_overlay.mp4"])
.spawn()
.unwrap();
// Connect the two instances
let mut stdin = output.take_stdin().unwrap();
thread::spawn(move || {
// `for_each` blocks through the end of the iterator,
// so we run it in another thread.
transformed_frames.for_each(|f| {
stdin.write(&f.data).ok();
});
});
// On the main thread, run the output instance to completion
output.iter().unwrap().for_each(|e| match e {
FfmpegEvent::LogError(e) => println!("Error: {}", e),
FfmpegEvent::Progress(p) => println!("Progress: {} / 00:00:15", p.time),
_ => {}
});
}sourcepub fn output<S: AsRef<str>>(&mut self, path_or_url: S) -> &mut Self
pub fn output<S: AsRef<str>>(&mut self, path_or_url: S) -> &mut Self
Alias for the output file path or URL.
To send output to stdout, use the value - or pipe:1.
Since this is the last argument in the command and has no - flag
preceding it, it is equivalent to calling .arg() directly. However,
using this command helps label the purpose of the argument, and makes the
code more readable at a glance.
Examples found in repository?
13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42
fn main() {
let mut ffmpeg = FfmpegCommand::new()
.realtime()
.format("lavfi")
.input("testsrc=size=1920x1080:rate=60")
.codec_video("rawvideo")
.format("avi")
.output("-")
.spawn()
.unwrap();
let mut ffplay = Command::new("ffplay")
.args("-i -".split(' '))
.stdin(Stdio::piped())
.spawn()
.unwrap();
let mut ffmpeg_stdout = ffmpeg.take_stdout().unwrap();
let mut ffplay_stdin = ffplay.stdin.take().unwrap();
// pipe from ffmpeg stdout to ffplay stdin
let buf = &mut [0u8; 4096];
loop {
let n = ffmpeg_stdout.read(buf).unwrap();
if n == 0 {
break;
}
ffplay_stdin.write_all(&buf[..n]).unwrap();
}
}sourcepub fn overwrite(&mut self) -> &mut Self
pub fn overwrite(&mut self) -> &mut Self
Alias for -y argument: overwrite output files without asking.
sourcepub fn no_overwrite(&mut self) -> &mut Self
pub fn no_overwrite(&mut self) -> &mut Self
Alias for -n argument: do not overwrite output files, and exit
immediately if a specified output file already exists.
sourcepub fn codec_video<S: AsRef<str>>(&mut self, codec: S) -> &mut Self
pub fn codec_video<S: AsRef<str>>(&mut self, codec: S) -> &mut Self
Alias for -c:v argument.
Select an encoder (when used before an output file) or a decoder (when
used before an input file) for one or more streams. codec is the name of
a decoder/encoder or a special value copy (output only) to indicate that
the stream is not to be re-encoded.
Examples found in repository?
13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42
fn main() {
let mut ffmpeg = FfmpegCommand::new()
.realtime()
.format("lavfi")
.input("testsrc=size=1920x1080:rate=60")
.codec_video("rawvideo")
.format("avi")
.output("-")
.spawn()
.unwrap();
let mut ffplay = Command::new("ffplay")
.args("-i -".split(' '))
.stdin(Stdio::piped())
.spawn()
.unwrap();
let mut ffmpeg_stdout = ffmpeg.take_stdout().unwrap();
let mut ffplay_stdin = ffplay.stdin.take().unwrap();
// pipe from ffmpeg stdout to ffplay stdin
let buf = &mut [0u8; 4096];
loop {
let n = ffmpeg_stdout.read(buf).unwrap();
if n == 0 {
break;
}
ffplay_stdin.write_all(&buf[..n]).unwrap();
}
}sourcepub fn codec_audio<S: AsRef<str>>(&mut self, codec: S) -> &mut Self
pub fn codec_audio<S: AsRef<str>>(&mut self, codec: S) -> &mut Self
Alias for -c:a argument.
Select an encoder (when used before an output file) or a decoder (when
used before an input file) for one or more streams. codec is the name of
a decoder/encoder or a special value copy (output only) to indicate that
the stream is not to be re-encoded.
sourcepub fn duration<S: AsRef<str>>(&mut self, duration: S) -> &mut Self
pub fn duration<S: AsRef<str>>(&mut self, duration: S) -> &mut Self
Alias for -t argument.
When used as an input option (before -i), limit the duration of data
read from the input file.
When used as an output option (before an output url), stop writing the output after its duration reaches duration.
duration must be a time duration specification, see (ffmpeg-utils)the
Time duration section in the ffmpeg-utils(1)
manual.
-to and -t are mutually exclusive and -t has priority.
sourcepub fn to<S: AsRef<str>>(&mut self, position: S) -> &mut Self
pub fn to<S: AsRef<str>>(&mut self, position: S) -> &mut Self
Alias for -to argument.
Stop writing the output or reading the input at position. position
must be a time duration specification, see (ffmpeg-utils)the Time
duration section in the ffmpeg-utils(1)
manual.
-to and -t (aka duration()) are mutually exclusive and -t has
priority.
sourcepub fn limit_file_size(&mut self, size_in_bytes: u32) -> &mut Self
pub fn limit_file_size(&mut self, size_in_bytes: u32) -> &mut Self
Alias for -fs argument.
Set the file size limit, expressed in bytes. No further chunk of bytes is written after the limit is exceeded. The size of the output file is slightly more than the requested file size.
sourcepub fn seek<S: AsRef<str>>(&mut self, position: S) -> &mut Self
pub fn seek<S: AsRef<str>>(&mut self, position: S) -> &mut Self
Alias for -ss argument.
When used as an input option (before -i), seeks in this input file to
position. Note that in most formats it is not possible to seek exactly, so
ffmpeg will seek to the closest seek point before position. When
transcoding and -accurate_seek is enabled (the default), this extra
segment between the seek point and position will be decoded and
discarded. When doing stream copy or when -noaccurate_seek is used, it
will be preserved.
When used as an output option (before an output url), decodes but discards
input until the timestamps reach position.
position must be a time duration specification, see (ffmpeg-utils)the
Time duration section in the ffmpeg-utils(1)
manual.
sourcepub fn seek_eof<S: AsRef<str>>(&mut self, position: S) -> &mut Self
pub fn seek_eof<S: AsRef<str>>(&mut self, position: S) -> &mut Self
Alias for -sseof argument.
Like the -ss option but relative to the “end of file”. That is, negative
values are earlier in the file, 0 is at EOF.
sourcepub fn filter<S: AsRef<str>>(&mut self, filtergraph: S) -> &mut Self
pub fn filter<S: AsRef<str>>(&mut self, filtergraph: S) -> &mut Self
Alias for -filter argument.
Create the filtergraph specified by filtergraph and use it to filter the
stream.
filtergraph is a description of the filtergraph to apply to the stream,
and must have a single input and a single output of the same type of the
stream. In the filtergraph, the input is associated to the label in, and
the output to the label out. See the ffmpeg-filters manual for more
information about the filtergraph syntax.
See the -filter_complex
option if
you want to create filtergraphs with multiple inputs and/or outputs.
sourcepub fn frames(&mut self, framecount: u32) -> &mut Self
pub fn frames(&mut self, framecount: u32) -> &mut Self
Alias for -frames:v argument.
Stop writing to the stream after framecount frames.
See also: -frames:a (audio), -frames:d (data).
sourcepub fn rate(&mut self, fps: f32) -> &mut Self
pub fn rate(&mut self, fps: f32) -> &mut Self
Alias for -r argument.
Set frame rate (Hz value, fraction or abbreviation).
As an input option, ignore any timestamps stored in the file and instead
generate timestamps assuming constant frame rate fps. This is not the
same as the -framerate option used for some input formats like image2 or
v4l2 (it used to be the same in older versions of FFmpeg). If in doubt use
-framerate instead of the input option -r.
sourcepub fn size(&mut self, width: u32, height: u32) -> &mut Self
pub fn size(&mut self, width: u32, height: u32) -> &mut Self
Alias for -s argument.
Set frame size.
As an input option, this is a shortcut for the video_size private
option, recognized by some demuxers for which the frame size is either not
stored in the file or is configurable – e.g. raw video or video grabbers.
As an output option, this inserts the scale video filter to the end of
the corresponding filtergraph. Please use the scale filter directly to
insert it at the beginning or some other place.
The format is 'wxh' (default - same as source).
sourcepub fn no_video(&mut self) -> &mut Self
pub fn no_video(&mut self) -> &mut Self
Alias for -vn argument.
As an input option, blocks all video streams of a file from being filtered
or being automatically selected or mapped for any output. See -discard
option to disable streams individually.
As an output option, disables video recording i.e. automatic selection or
mapping of any video stream. For full manual control see the -map
option.
sourcepub fn pix_fmt<S: AsRef<str>>(&mut self, format: S) -> &mut Self
pub fn pix_fmt<S: AsRef<str>>(&mut self, format: S) -> &mut Self
Alias for -pix_fmt argument.
Set pixel format. Use -pix_fmts to show all the supported pixel formats.
If the selected pixel format can not be selected, ffmpeg will print a
warning and select the best pixel format supported by the encoder. If
pix_fmt is prefixed by a +, ffmpeg will exit with an error if the
requested pixel format can not be selected, and automatic conversions
inside filtergraphs are disabled. If pix_fmt is a single +, ffmpeg
selects the same pixel format as the input (or graph output) and automatic
conversions are disabled.
sourcepub fn hwaccel<S: AsRef<str>>(&mut self, hwaccel: S) -> &mut Self
pub fn hwaccel<S: AsRef<str>>(&mut self, hwaccel: S) -> &mut Self
Alias for -hwaccel argument.
Use hardware acceleration to decode the matching stream(s). The allowed values of hwaccel are:
- none: Do not use any hardware acceleration (the default).
- auto: Automatically select the hardware acceleration method.
- vdpau: Use VDPAU (Video Decode and Presentation API for Unix) hardware acceleration.
- dxva2: Use DXVA2 (DirectX Video Acceleration) hardware acceleration.
- d3d11va: Use D3D11VA (DirectX Video Acceleration) hardware acceleration.
- vaapi: Use VAAPI (Video Acceleration API) hardware acceleration.
- qsv: Use the Intel QuickSync Video acceleration for video transcoding.
  - Unlike most other values, this option does not enable accelerated decoding (that is used automatically whenever a qsv decoder is selected), but accelerated transcoding, without copying the frames into the system memory.
- For it to work, both the decoder and the encoder must support QSV acceleration and no filters must be used.
This option has no effect if the selected hwaccel is not available or not supported by the chosen decoder.
Note that most acceleration methods are intended for playback and will not
be faster than software decoding on modern CPUs. Additionally, ffmpeg
will usually need to copy the decoded frames from the GPU memory into the
system memory, resulting in further performance loss. This option is thus
mainly useful for testing.
sourcepub fn no_audio(&mut self) -> &mut Self
pub fn no_audio(&mut self) -> &mut Self
Alias for -an argument.
As an input option, blocks all audio streams of a file from being filtered
or being automatically selected or mapped for any output. See -discard
option to disable streams individually.
As an output option, disables audio recording i.e. automatic selection or
mapping of any audio stream. For full manual control see the -map
option.
sourcepub fn map<S: AsRef<str>>(&mut self, map_string: S) -> &mut Self
pub fn map<S: AsRef<str>>(&mut self, map_string: S) -> &mut Self
Alias for -map argument.
Create one or more streams in the output file. This option has two forms
for specifying the data source(s): the first selects one or more streams
from some input file (specified with -i), the second takes an output
from some complex filtergraph (specified with -filter_complex or
-filter_complex_script).
In the first form, an output stream is created for every stream from the input file with the index input_file_id. If stream_specifier is given, only those streams that match the specifier are used (see the Stream specifiers section for the stream_specifier syntax).
A - character before the stream identifier creates a “negative” mapping.
It disables matching streams from already created mappings.
A trailing ? after the stream index will allow the map to be optional:
if the map matches no streams the map will be ignored instead of failing.
Note the map will still fail if an invalid input file index is used; such
as if the map refers to a non-existent input.
An alternative [linklabel] form will map outputs from complex filter
graphs (see the -filter_complex option) to the output file. linklabel
must correspond to a defined output link label in the graph.
This option may be specified multiple times, each adding more streams to
the output file. Any given input stream may also be mapped any number of
times as a source for different output streams, e.g. in order to use
different encoding options and/or filters. The streams are created in the
output in the same order in which the -map options are given on the
commandline.
Using this option disables the default mappings for this output file.
sourcepub fn readrate(&mut self, speed: f32) -> &mut Self
pub fn readrate(&mut self, speed: f32) -> &mut Self
Alias for -readrate argument.
Limit input read speed.
Its value is a floating-point positive number which represents the maximum
duration of media, in seconds, that should be ingested in one second of
wallclock time. Default value is zero and represents no imposed limitation
on speed of ingestion. Value 1 represents real-time speed and is
equivalent to -re.
Mainly used to simulate a capture device or live input stream (e.g. when reading from a file). Should not be used with a low value when input is an actual capture device or live stream as it may cause packet loss.
It is useful for when flow speed of output packets is important, such as live streaming.
sourcepub fn realtime(&mut self) -> &mut Self
pub fn realtime(&mut self) -> &mut Self
Alias for -re.
Read input at native frame rate. This is equivalent to setting -readrate 1.
Examples found in repository?
13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42
fn main() {
let mut ffmpeg = FfmpegCommand::new()
.realtime()
.format("lavfi")
.input("testsrc=size=1920x1080:rate=60")
.codec_video("rawvideo")
.format("avi")
.output("-")
.spawn()
.unwrap();
let mut ffplay = Command::new("ffplay")
.args("-i -".split(' '))
.stdin(Stdio::piped())
.spawn()
.unwrap();
let mut ffmpeg_stdout = ffmpeg.take_stdout().unwrap();
let mut ffplay_stdin = ffplay.stdin.take().unwrap();
// pipe from ffmpeg stdout to ffplay stdin
let buf = &mut [0u8; 4096];
loop {
let n = ffmpeg_stdout.read(buf).unwrap();
if n == 0 {
break;
}
ffplay_stdin.write_all(&buf[..n]).unwrap();
}
}sourcepub fn fps_mode<S: AsRef<str>>(&mut self, parameter: S) -> &mut Self
pub fn fps_mode<S: AsRef<str>>(&mut self, parameter: S) -> &mut Self
Alias for -fps_mode argument.
Set video sync method / framerate mode. vsync is applied to all output video streams but can be overridden for a stream by setting fps_mode. vsync is deprecated and will be removed in the future.
For compatibility reasons some of the values for vsync can be specified as numbers (shown in parentheses in the following table).
- passthrough (0): Each frame is passed with its timestamp from the demuxer to the muxer.
- cfr (1): Frames will be duplicated and dropped to achieve exactly the requested constant frame rate.
- vfr (2): Frames are passed through with their timestamp or dropped so as to prevent 2 frames from having the same timestamp.
- drop: As passthrough but destroys all timestamps, making the muxer generate fresh timestamps based on frame-rate.
- auto (-1): Chooses between cfr and vfr depending on muxer capabilities. This is the default method.
sourcepub fn bitstream_filter_video<S: AsRef<str>>(
&mut self,
bitstream_filters: S
) -> &mut Self
pub fn bitstream_filter_video<S: AsRef<str>>( &mut self, bitstream_filters: S ) -> &mut Self
Alias for -bsf:v argument.
Set bitstream filters for matching streams. bitstream_filters is a
comma-separated list of bitstream filters. Use the -bsfs option to get
the list of bitstream filters.
See also: -bsf:s (subtitles), -bsf:a (audio), -bsf:d (data)
sourcepub fn filter_complex<S: AsRef<str>>(&mut self, filtergraph: S) -> &mut Self
pub fn filter_complex<S: AsRef<str>>(&mut self, filtergraph: S) -> &mut Self
Alias for -filter_complex argument.
Define a complex filtergraph, i.e. one with arbitrary number of inputs
and/or outputs. For simple graphs – those with one input and one output of
the same type – see the -filter options. filtergraph is a description
of the filtergraph, as described in the “Filtergraph syntax” section of
the ffmpeg-filters manual.
Input link labels must refer to input streams using the
[file_index:stream_specifier] syntax (i.e. the same as -map uses). If
stream_specifier matches multiple streams, the first one will be used.
An unlabeled input will be connected to the first unused input stream of
the matching type.
Output link labels are referred to with -map. Unlabeled outputs are
added to the first output file.
Note that with this option it is possible to use only lavfi sources without normal input files.
sourcepub fn testsrc(&mut self) -> &mut Self
pub fn testsrc(&mut self) -> &mut Self
Generate a procedural test video. Equivalent to ffmpeg -f lavfi -i testsrc=duration=10.
Examples found in repository?
10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31
fn main() {
// similar to `std::process::Command`
let mut command = FfmpegCommand::new();
command
.testsrc() // generate a test pattern video
.rawvideo(); // pipe raw video output
// similar to `std::process::Child`
let mut child: FfmpegChild = command.spawn().unwrap();
// Iterator over all messages and output
let iter: FfmpegIterator = child.iter().unwrap();
iter.for_each(|event: FfmpegEvent| {
match event {
FfmpegEvent::OutputFrame(frame) => {
let _pixels = frame.data; // <- raw RGB pixels! 🎨
}
FfmpegEvent::Error(e) => eprintln!("Error: {}", e),
_ => {}
}
});
}sourcepub fn rawvideo(&mut self) -> &mut Self
pub fn rawvideo(&mut self) -> &mut Self
Preset for emitting raw decoded video frames on stdout. Equivalent to -f rawvideo -pix_fmt rgb24 -.
Examples found in repository?
10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31
fn main() {
// similar to `std::process::Command`
let mut command = FfmpegCommand::new();
command
.testsrc() // generate a test pattern video
.rawvideo(); // pipe raw video output
// similar to `std::process::Child`
let mut child: FfmpegChild = command.spawn().unwrap();
// Iterator over all messages and output
let iter: FfmpegIterator = child.iter().unwrap();
iter.for_each(|event: FfmpegEvent| {
match event {
FfmpegEvent::OutputFrame(frame) => {
let _pixels = frame.data; // <- raw RGB pixels! 🎨
}
FfmpegEvent::Error(e) => eprintln!("Error: {}", e),
_ => {}
}
});
}More examples
13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66
fn main() {
// Create an H265 source video as a starting point
let input_path = "output/h265.mp4";
if !Path::new(input_path).exists() {
create_h265_source(input_path);
}
// One instance decodes H265 to raw frames
let mut input = FfmpegCommand::new()
.input(input_path)
.rawvideo()
.spawn()
.unwrap();
// Frames can be transformed by Iterator `.map()`.
// This example is a no-op, with frames passed through unaltered.
let transformed_frames = input.iter().unwrap().filter_frames().map(|f| f);
// You could easily add some "middleware" processing here:
// - overlay or composite another RGB image (or even another Ffmpeg Iterator)
// - apply a filter like blur or convolution
// Note: some of these operations are also possible with FFmpeg's (somewhat arcane)
// `filtergraph` API, but doing it in Rust gives you much finer-grained
// control, debuggability, and modularity -- you can pull in any Rust crate
// you need.
// A second instance encodes the updated frames back to H265
let mut output = FfmpegCommand::new()
.args([
"-f", "rawvideo", "-pix_fmt", "rgb24", "-s", "600x800", "-r", "30",
]) // note: should be possible to infer these params from the source input stream
.input("-")
.args(["-c:v", "libx265"])
.args(["-y", "output/h265_overlay.mp4"])
.spawn()
.unwrap();
// Connect the two instances
let mut stdin = output.take_stdin().unwrap();
thread::spawn(move || {
// `for_each` blocks through the end of the iterator,
// so we run it in another thread.
transformed_frames.for_each(|f| {
stdin.write(&f.data).ok();
});
});
// On the main thread, run the output instance to completion
output.iter().unwrap().for_each(|e| match e {
FfmpegEvent::LogError(e) => println!("Error: {}", e),
FfmpegEvent::Progress(p) => println!("Progress: {} / 00:00:15", p.time),
_ => {}
});
}sourcepub fn pipe_stdout(&mut self) -> &mut Self
pub fn pipe_stdout(&mut self) -> &mut Self
Configure the ffmpeg command to produce output on stdout.
Synchronizes two changes:
- Pass pipe:1 to the ffmpeg command (“output on stdout”)
- Set the stdout field of the inner Command to Stdio::piped()
sourcepub fn arg<S: AsRef<OsStr>>(&mut self, arg: S) -> &mut Self
pub fn arg<S: AsRef<OsStr>>(&mut self, arg: S) -> &mut Self
Adds an argument to pass to the program.
Identical to arg in std::process::Command.
Examples found in repository?
69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84
fn create_h265_source(path_str: &str) {
println!("Creating H265 source video: {}", path_str);
FfmpegCommand::new()
.args("-f lavfi -i testsrc=size=600x800:rate=30:duration=15 -c:v libx265".split(' '))
.arg(path_str)
.spawn()
.unwrap()
.iter()
.unwrap()
.for_each(|e| match e {
FfmpegEvent::LogError(e) => println!("Error: {}", e),
FfmpegEvent::Progress(p) => println!("Progress: {} / 00:00:15", p.time),
_ => {}
});
println!("Created H265 source video: {}", path_str);
}sourcepub fn args<I, S>(&mut self, args: I) -> &mut Self where
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
pub fn args<I, S>(&mut self, args: I) -> &mut Self where I: IntoIterator<Item = S>, S: AsRef<OsStr>,
Adds multiple arguments to pass to the program.
Identical to args in std::process::Command.
Examples found in repository?
9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25
fn main() {
let fps = 60;
let duration = 10;
let total_frames = fps * duration;
let arg_string = format!(
"-f lavfi -i testsrc=duration={}:size=1920x1080:rate={} -y output/test.mp4",
duration, fps
);
FfmpegCommand::new()
.args(arg_string.split(' '))
.spawn()
.unwrap()
.iter()
.unwrap()
.filter_progress()
.for_each(|progress| println!("{}%", (progress.frame * 100) / total_frames));
}More examples
13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84
fn main() {
// Create an H265 source video as a starting point
let input_path = "output/h265.mp4";
if !Path::new(input_path).exists() {
create_h265_source(input_path);
}
// One instance decodes H265 to raw frames
let mut input = FfmpegCommand::new()
.input(input_path)
.rawvideo()
.spawn()
.unwrap();
// Frames can be transformed by Iterator `.map()`.
// This example is a no-op, with frames passed through unaltered.
let transformed_frames = input.iter().unwrap().filter_frames().map(|f| f);
// You could easily add some "middleware" processing here:
// - overlay or composite another RGB image (or even another Ffmpeg Iterator)
// - apply a filter like blur or convolution
// Note: some of these operations are also possible with FFmpeg's (somewhat arcane)
// `filtergraph` API, but doing it in Rust gives you much finer-grained
// control, debuggability, and modularity -- you can pull in any Rust crate
// you need.
// A second instance encodes the updated frames back to H265
let mut output = FfmpegCommand::new()
.args([
"-f", "rawvideo", "-pix_fmt", "rgb24", "-s", "600x800", "-r", "30",
]) // note: should be possible to infer these params from the source input stream
.input("-")
.args(["-c:v", "libx265"])
.args(["-y", "output/h265_overlay.mp4"])
.spawn()
.unwrap();
// Connect the two instances
let mut stdin = output.take_stdin().unwrap();
thread::spawn(move || {
// `for_each` blocks through the end of the iterator,
// so we run it in another thread.
transformed_frames.for_each(|f| {
stdin.write(&f.data).ok();
});
});
// On the main thread, run the output instance to completion
output.iter().unwrap().for_each(|e| match e {
FfmpegEvent::LogError(e) => println!("Error: {}", e),
FfmpegEvent::Progress(p) => println!("Progress: {} / 00:00:15", p.time),
_ => {}
});
}
/// Create a H265 source video from scratch
fn create_h265_source(path_str: &str) {
println!("Creating H265 source video: {}", path_str);
FfmpegCommand::new()
.args("-f lavfi -i testsrc=size=600x800:rate=30:duration=15 -c:v libx265".split(' '))
.arg(path_str)
.spawn()
.unwrap()
.iter()
.unwrap()
.for_each(|e| match e {
FfmpegEvent::LogError(e) => println!("Error: {}", e),
FfmpegEvent::Progress(p) => println!("Progress: {} / 00:00:15", p.time),
_ => {}
});
println!("Created H265 source video: {}", path_str);
}sourcepub fn get_args(&self) -> CommandArgs<'_>
pub fn get_args(&self) -> CommandArgs<'_>
Returns an iterator of the arguments that will be passed to the program.
Identical to get_args in std::process::Command.
sourcepub fn spawn(&mut self) -> Result<FfmpegChild>
pub fn spawn(&mut self) -> Result<FfmpegChild>
Spawn the ffmpeg command as a child process, wrapping it in a
FfmpegChild interface.
Identical to spawn in std::process::Command.
Examples found in repository?
9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25
fn main() {
let fps = 60;
let duration = 10;
let total_frames = fps * duration;
let arg_string = format!(
"-f lavfi -i testsrc=duration={}:size=1920x1080:rate={} -y output/test.mp4",
duration, fps
);
FfmpegCommand::new()
.args(arg_string.split(' '))
.spawn()
.unwrap()
.iter()
.unwrap()
.filter_progress()
.for_each(|progress| println!("{}%", (progress.frame * 100) / total_frames));
}More examples
10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31
fn main() {
// similar to `std::process::Command`
let mut command = FfmpegCommand::new();
command
.testsrc() // generate a test pattern video
.rawvideo(); // pipe raw video output
// similar to `std::process::Child`
let mut child: FfmpegChild = command.spawn().unwrap();
// Iterator over all messages and output
let iter: FfmpegIterator = child.iter().unwrap();
iter.for_each(|event: FfmpegEvent| {
match event {
FfmpegEvent::OutputFrame(frame) => {
let _pixels = frame.data; // <- raw RGB pixels! 🎨
}
FfmpegEvent::Error(e) => eprintln!("Error: {}", e),
_ => {}
}
});
}13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42
fn main() {
let mut ffmpeg = FfmpegCommand::new()
.realtime()
.format("lavfi")
.input("testsrc=size=1920x1080:rate=60")
.codec_video("rawvideo")
.format("avi")
.output("-")
.spawn()
.unwrap();
let mut ffplay = Command::new("ffplay")
.args("-i -".split(' '))
.stdin(Stdio::piped())
.spawn()
.unwrap();
let mut ffmpeg_stdout = ffmpeg.take_stdout().unwrap();
let mut ffplay_stdin = ffplay.stdin.take().unwrap();
// pipe from ffmpeg stdout to ffplay stdin
let buf = &mut [0u8; 4096];
loop {
let n = ffmpeg_stdout.read(buf).unwrap();
if n == 0 {
break;
}
ffplay_stdin.write_all(&buf[..n]).unwrap();
}
}13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84
fn main() {
// Create an H265 source video as a starting point
let input_path = "output/h265.mp4";
if !Path::new(input_path).exists() {
create_h265_source(input_path);
}
// One instance decodes H265 to raw frames
let mut input = FfmpegCommand::new()
.input(input_path)
.rawvideo()
.spawn()
.unwrap();
// Frames can be transformed by Iterator `.map()`.
// This example is a no-op, with frames passed through unaltered.
let transformed_frames = input.iter().unwrap().filter_frames().map(|f| f);
// You could easily add some "middleware" processing here:
// - overlay or composite another RGB image (or even another Ffmpeg Iterator)
// - apply a filter like blur or convolution
// Note: some of these operations are also possible with FFmpeg's (somewhat arcane)
// `filtergraph` API, but doing it in Rust gives you much finer-grained
// control, debuggability, and modularity -- you can pull in any Rust crate
// you need.
// A second instance encodes the updated frames back to H265
let mut output = FfmpegCommand::new()
.args([
"-f", "rawvideo", "-pix_fmt", "rgb24", "-s", "600x800", "-r", "30",
]) // note: should be possible to infer these params from the source input stream
.input("-")
.args(["-c:v", "libx265"])
.args(["-y", "output/h265_overlay.mp4"])
.spawn()
.unwrap();
// Connect the two instances
let mut stdin = output.take_stdin().unwrap();
thread::spawn(move || {
// `for_each` blocks through the end of the iterator,
// so we run it in another thread.
transformed_frames.for_each(|f| {
stdin.write(&f.data).ok();
});
});
// On the main thread, run the output instance to completion
output.iter().unwrap().for_each(|e| match e {
FfmpegEvent::LogError(e) => println!("Error: {}", e),
FfmpegEvent::Progress(p) => println!("Progress: {} / 00:00:15", p.time),
_ => {}
});
}
/// Create a H265 source video from scratch
fn create_h265_source(path_str: &str) {
println!("Creating H265 source video: {}", path_str);
FfmpegCommand::new()
.args("-f lavfi -i testsrc=size=600x800:rate=30:duration=15 -c:v libx265".split(' '))
.arg(path_str)
.spawn()
.unwrap()
.iter()
.unwrap()
.for_each(|e| match e {
FfmpegEvent::LogError(e) => println!("Error: {}", e),
FfmpegEvent::Progress(p) => println!("Progress: {} / 00:00:15", p.time),
_ => {}
});
println!("Created H265 source video: {}", path_str);
}

source
pub fn print_command(&mut self) -> &mut Self
pub fn print_command(&mut self) -> &mut Self
Print a command that can be copy-pasted to run in the terminal. Requires
&mut self so that it chains seamlessly with other methods in the
interface.
source
pub fn new() -> Self
pub fn new() -> Self
Examples found in repository?
fn main() {
let fps = 60;
let duration = 10;
let total_frames = fps * duration;
let arg_string = format!(
"-f lavfi -i testsrc=duration={}:size=1920x1080:rate={} -y output/test.mp4",
duration, fps
);
FfmpegCommand::new()
.args(arg_string.split(' '))
.spawn()
.unwrap()
.iter()
.unwrap()
.filter_progress()
.for_each(|progress| println!("{}%", (progress.frame * 100) / total_frames));
}

More examples
fn main() {
// similar to `std::process::Command`
let mut command = FfmpegCommand::new();
command
.testsrc() // generate a test pattern video
.rawvideo(); // pipe raw video output
// similar to `std::process::Child`
let mut child: FfmpegChild = command.spawn().unwrap();
// Iterator over all messages and output
let iter: FfmpegIterator = child.iter().unwrap();
iter.for_each(|event: FfmpegEvent| {
match event {
FfmpegEvent::OutputFrame(frame) => {
let _pixels = frame.data; // <- raw RGB pixels! 🎨
}
FfmpegEvent::Error(e) => eprintln!("Error: {}", e),
_ => {}
}
});
}
fn main() {
let mut ffmpeg = FfmpegCommand::new()
.realtime()
.format("lavfi")
.input("testsrc=size=1920x1080:rate=60")
.codec_video("rawvideo")
.format("avi")
.output("-")
.spawn()
.unwrap();
let mut ffplay = Command::new("ffplay")
.args("-i -".split(' '))
.stdin(Stdio::piped())
.spawn()
.unwrap();
let mut ffmpeg_stdout = ffmpeg.take_stdout().unwrap();
let mut ffplay_stdin = ffplay.stdin.take().unwrap();
// pipe from ffmpeg stdout to ffplay stdin
let buf = &mut [0u8; 4096];
loop {
let n = ffmpeg_stdout.read(buf).unwrap();
if n == 0 {
break;
}
ffplay_stdin.write_all(&buf[..n]).unwrap();
}
}
fn main() {
// Create an H265 source video as a starting point
let input_path = "output/h265.mp4";
if !Path::new(input_path).exists() {
create_h265_source(input_path);
}
// One instance decodes H265 to raw frames
let mut input = FfmpegCommand::new()
.input(input_path)
.rawvideo()
.spawn()
.unwrap();
// Frames can be transformed by Iterator `.map()`.
// This example is a no-op, with frames passed through unaltered.
let transformed_frames = input.iter().unwrap().filter_frames().map(|f| f);
// You could easily add some "middleware" processing here:
// - overlay or composite another RGB image (or even another Ffmpeg Iterator)
// - apply a filter like blur or convolution
// Note: some of these operations are also possible with FFmpeg's (somewhat arcane)
// `filtergraph` API, but doing it in Rust gives you much finer-grained
// control, debuggability, and modularity -- you can pull in any Rust crate
// you need.
// A second instance encodes the updated frames back to H265
let mut output = FfmpegCommand::new()
.args([
"-f", "rawvideo", "-pix_fmt", "rgb24", "-s", "600x800", "-r", "30",
]) // note: should be possible to infer these params from the source input stream
.input("-")
.args(["-c:v", "libx265"])
.args(["-y", "output/h265_overlay.mp4"])
.spawn()
.unwrap();
// Connect the two instances
let mut stdin = output.take_stdin().unwrap();
thread::spawn(move || {
// `for_each` blocks through the end of the iterator,
// so we run it in another thread.
transformed_frames.for_each(|f| {
stdin.write(&f.data).ok();
});
});
// On the main thread, run the output instance to completion
output.iter().unwrap().for_each(|e| match e {
FfmpegEvent::LogError(e) => println!("Error: {}", e),
FfmpegEvent::Progress(p) => println!("Progress: {} / 00:00:15", p.time),
_ => {}
});
}
/// Create a H265 source video from scratch
fn create_h265_source(path_str: &str) {
println!("Creating H265 source video: {}", path_str);
FfmpegCommand::new()
.args("-f lavfi -i testsrc=size=600x800:rate=30:duration=15 -c:v libx265".split(' '))
.arg(path_str)
.spawn()
.unwrap()
.iter()
.unwrap()
.for_each(|e| match e {
FfmpegEvent::LogError(e) => println!("Error: {}", e),
FfmpegEvent::Progress(p) => println!("Progress: {} / 00:00:15", p.time),
_ => {}
});
println!("Created H265 source video: {}", path_str);
}

pub fn new_with_path<S: AsRef<OsStr>>(path_to_ffmpeg_binary: S) -> Self
source
pub fn as_inner_mut(&mut self) -> &mut Command
pub fn as_inner_mut(&mut self) -> &mut Command
Escape hatch to mutably access the inner Command.