pub struct FfmpegCommand { /* private fields */ }
Expand description
A wrapper around std::process::Command with some convenient preset argument sets and customization for ffmpeg specifically.
The rustdoc on each method includes relevant information from the FFmpeg documentation: <https://ffmpeg.org/ffmpeg.html>. Refer there for the exhaustive list of possible arguments.
Implementations§
Source§impl FfmpegCommand
impl FfmpegCommand
Alias for the -hide_banner argument.
Suppress printing banner.
All FFmpeg tools will normally show a copyright notice, build options and library versions. This option can be used to suppress printing this information.
Examples found in repository?
10fn main() -> Result<()> {
11 // Set up a TCP listener
12 const TCP_PORT: u32 = 3000;
13 let (exit_sender, exit_receiver) = channel::<()>();
14 let listener_thread = thread::spawn(|| listen_for_connections(TCP_PORT, exit_receiver));
15
16 // Wait for the listener to start
17 thread::sleep(Duration::from_millis(1000));
18
19 // Prepare an FFmpeg command with separate outputs for video, audio, and subtitles.
20 FfmpegCommand::new()
21 // Global flags
22 .hide_banner()
23 .overwrite() // <- overwrite required on windows
24 // Generate test video
25 .format("lavfi")
26 .input("testsrc=size=1920x1080:rate=60:duration=10")
27 // Generate test audio
28 .format("lavfi")
29 .input("sine=frequency=1000:duration=10")
30 // Generate test subtitles
31 .format("srt")
32 .input(
33 "data:text/plain;base64,MQ0KMDA6MDA6MDAsMDAwIC0tPiAwMDowMDoxMCw1MDANCkhlbGxvIFdvcmxkIQ==",
34 )
35 // Video output
36 .map("0:v")
37 .format("rawvideo")
38 .pix_fmt("rgb24")
39 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
40 // Audio output
41 .map("1:a")
42 .format("s16le")
43 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
44 // Subtitles output
45 .map("2:s")
46 .format("srt")
47 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
48 .print_command()
49 .spawn()?
50 .iter()?
51 .for_each(|event| match event {
52 // Verify output size from FFmpeg logs (video/audio KiB)
53 FfmpegEvent::Log(LogLevel::Info, msg) if msg.starts_with("[out#") => {
54 println!("{msg}");
55 }
56
57 // Log any unexpected errors
58 FfmpegEvent::Log(LogLevel::Warning | LogLevel::Error | LogLevel::Fatal, msg) => {
59 eprintln!("{msg}");
60 }
61
62 // _ => {}
63 e => {
64 println!("{:?}", e);
65 }
66 });
67 exit_sender.send(())?;
68 listener_thread.join().unwrap()?;
69 Ok(())
70}
More examples
10pub fn main() -> Result<()> {
11 if cfg!(not(windows)) {
12 eprintln!("Note: Methods for capturing audio are platform-specific and this demo is intended for Windows.");
13 eprintln!("On Linux or Mac, you need to switch from the `dshow` format to a different one supported on your platform.");
14 eprintln!("Make sure to also include format-specific arguments such as `-audio_buffer_size`.");
15 eprintln!("Pull requests are welcome to make this demo cross-platform!");
16 }
17
18 // First step: find default audio input device
19 // Runs an `ffmpeg -list_devices` command and selects the first one found
20 // Sample log output: [dshow @ 000001c9babdb000] "Headset Microphone (Arctis 7 Chat)" (audio)
21
22 let audio_device = FfmpegCommand::new()
23 .hide_banner()
24 .args(&["-list_devices", "true"])
25 .format("dshow")
26 .input("dummy")
27 .spawn()?
28 .iter()?
29 .into_ffmpeg_stderr()
30 .find(|line| line.contains("(audio)"))
31 .map(|line| line.split('\"').nth(1).map(|s| s.to_string()))
32 .context("No audio device found")?
33 .context("Failed to parse audio device")?;
34
35 println!("Listening to audio device: {}", audio_device);
36
37 // Second step: Capture audio and analyze w/ `ebur128` audio filter
38 // Loudness metadata will be printed to the FFmpeg logs
39 // Docs: <https://ffmpeg.org/ffmpeg-filters.html#ebur128-1>
40
41 let iter = FfmpegCommand::new()
42 .format("dshow")
43 .args("-audio_buffer_size 50".split(' ')) // reduces latency to 50ms (dshow-specific)
44 .input(format!("audio={audio_device}"))
45 .args("-af ebur128=metadata=1,ametadata=print".split(' '))
46 .format("null")
47 .output("-")
48 .spawn()?
49 .iter()?;
50
51 // Note: even though the audio device name may have spaces, it should *not* be
52 // in quotes (""). Quotes are only needed on the command line to separate
53 // different arguments. Since Rust invokes the command directly without a
54 // shell interpreter, args are already divided up correctly. Any quotes
55 // would be included in the device name instead and the command would fail.
56 // <https://github.com/fluent-ffmpeg/node-fluent-ffmpeg/issues/648#issuecomment-866242144>
57
58 let mut first_volume_event = true;
59 for event in iter {
60 match event {
61 FfmpegEvent::Error(e) | FfmpegEvent::Log(LogLevel::Error | LogLevel::Fatal, e) => {
62 eprintln!("{e}");
63 }
64 FfmpegEvent::Log(LogLevel::Info, msg) if msg.contains("lavfi.r128.M=") => {
65 if let Some(volume) = msg.split("lavfi.r128.M=").last() {
66 // Sample log output: [Parsed_ametadata_1 @ 0000024c27effdc0] [info] lavfi.r128.M=-120.691
67 // M = "momentary loudness"; a sliding time window of 400ms
68 // Volume scale is roughly -70 to 0 LUFS. Anything below -70 is silence.
69 // See <https://en.wikipedia.org/wiki/EBU_R_128#Metering>
70 let volume_f32 = volume.parse::<f32>().context("Failed to parse volume")?;
71 let volume_normalized: usize = max(((volume_f32 / 5.0).round() as i32) + 14, 0) as usize;
72 let volume_percent = ((volume_normalized as f32 / 14.0) * 100.0).round();
73
74 // Clear previous line of output
75 if !first_volume_event {
76 print!("\x1b[1A\x1b[2K");
77 } else {
78 first_volume_event = false;
79 }
80
81 // Blinking red dot to indicate recording
82 let time = std::time::SystemTime::now()
83 .duration_since(std::time::UNIX_EPOCH)
84 .unwrap()
85 .as_secs();
86 let recording_indicator = if time % 2 == 0 { "🔴" } else { " " };
87
88 println!(
89 "{} {} {}%",
90 recording_indicator,
91 repeat('█').take(volume_normalized).collect::<String>(),
92 volume_percent
93 );
94 }
95 }
96 _ => {}
97 }
98 }
99
100 Ok(())
101}
12fn main() -> anyhow::Result<()> {
13 use anyhow::Result;
14 use ffmpeg_sidecar::command::FfmpegCommand;
15 use ffmpeg_sidecar::event::{FfmpegEvent, LogLevel};
16 use ffmpeg_sidecar::named_pipes::NamedPipe;
17 use ffmpeg_sidecar::pipe_name;
18 use std::io::Read;
19 use std::sync::mpsc;
20 use std::thread;
21
22 const VIDEO_PIPE_NAME: &str = pipe_name!("ffmpeg_video");
23 const AUDIO_PIPE_NAME: &str = pipe_name!("ffmpeg_audio");
24 const SUBTITLES_PIPE_NAME: &str = pipe_name!("ffmpeg_subtitles");
25
26 // Prepare an FFmpeg command with separate outputs for video, audio, and subtitles.
27 let mut command = FfmpegCommand::new();
28 command
29 // Global flags
30 .hide_banner()
31 .overwrite() // <- overwrite required on windows
32 // Generate test video
33 .format("lavfi")
34 .input("testsrc=size=1920x1080:rate=60:duration=10")
35 // Generate test audio
36 .format("lavfi")
37 .input("sine=frequency=1000:duration=10")
38 // Generate test subtitles
39 .format("srt")
40 .input(
41 "data:text/plain;base64,MQ0KMDA6MDA6MDAsMDAwIC0tPiAwMDowMDoxMCw1MDANCkhlbGxvIFdvcmxkIQ==",
42 )
43 // Video output
44 .map("0:v")
45 .format("rawvideo")
46 .pix_fmt("rgb24")
47 .output(VIDEO_PIPE_NAME)
48 // Audio output
49 .map("1:a")
50 .format("s16le")
51 .output(AUDIO_PIPE_NAME)
52 // Subtitles output
53 .map("2:s")
54 .format("srt")
55 .output(SUBTITLES_PIPE_NAME);
56
57 // Create a separate thread for each output pipe
58 let threads = [VIDEO_PIPE_NAME, AUDIO_PIPE_NAME, SUBTITLES_PIPE_NAME]
59 .iter()
60 .cloned()
61 .map(|pipe_name| {
62 // It's important to create the named pipe on the main thread before
63 // sending it elsewhere so that any errors are caught at the top level.
64 let mut pipe = NamedPipe::new(pipe_name)?;
65 println!("[{pipe_name}] pipe created");
66 let (ready_sender, ready_receiver) = mpsc::channel::<()>();
67 let thread = thread::spawn(move || -> Result<()> {
68 // Wait for FFmpeg to start writing
69 // Only needed for Windows, since Unix will block until a writer has connected
70 println!("[{pipe_name}] waiting for ready signal");
71 ready_receiver.recv()?;
72
73 // Read continuously until finished
74 // Note that if the stream of output is interrupted or paused,
75 // you may need additional logic to keep the read loop alive.
76 println!("[{pipe_name}] reading from pipe");
77 let mut buf = vec![0; 1920 * 1080 * 3];
78 let mut total_bytes_read = 0;
79
80 // In the case of subtitles, we'll decode the string contents directly
81 let mut text_content = if pipe_name == SUBTITLES_PIPE_NAME {
82 Some("".to_string())
83 } else {
84 None
85 };
86
87 loop {
88 match pipe.read(&mut buf) {
89 Ok(bytes_read) => {
90 total_bytes_read += bytes_read;
91
92 // read bytes into string
93 if let Some(cur_str) = &mut text_content {
94 let s = std::str::from_utf8(&buf[..bytes_read]).unwrap();
95 text_content = Some(format!("{}{}", cur_str, s));
96 }
97
98 if bytes_read == 0 {
99 break;
100 }
101 }
102 Err(err) => {
103 if err.kind() != std::io::ErrorKind::BrokenPipe {
104 return Err(err.into());
105 } else {
106 break;
107 }
108 }
109 }
110 }
111
112 // Log how many bytes were received over this pipe.
113 // You can visually compare this to the FFmpeg log output to confirm
114 // that all the expected bytes were captured.
115 let size_str = if total_bytes_read < 1024 {
116 format!("{}B", total_bytes_read)
117 } else {
118 format!("{}KiB", total_bytes_read / 1024)
119 };
120
121 if let Some(text_content) = text_content {
122 println!("[{pipe_name}] subtitle text content: ");
123 println!("{}", text_content.trim());
124 }
125
126 println!("[{pipe_name}] done reading ({size_str} total)");
127 Ok(())
128 });
129
130 Ok((thread, ready_sender))
131 })
132 .collect::<Result<Vec<_>>>()?;
133
134 // Start FFmpeg
135 let mut ready_signal_sent = false;
136 command
137 .print_command()
138 .spawn()?
139 .iter()?
140 .for_each(|event| match event {
141 // Signal threads when output is ready
142 FfmpegEvent::Progress(_) if !ready_signal_sent => {
143 threads.iter().for_each(|(_, sender)| {
144 sender.send(()).ok();
145 });
146 ready_signal_sent = true;
147 }
148
149 // Verify output size from FFmpeg logs (video/audio KiB)
150 FfmpegEvent::Log(LogLevel::Info, msg) if msg.starts_with("[out#") => {
151 println!("{msg}");
152 }
153
154 // Log any unexpected errors
155 FfmpegEvent::Log(LogLevel::Warning | LogLevel::Error | LogLevel::Fatal, msg) => {
156 eprintln!("{msg}");
157 }
158
159 _ => {}
160 });
161
162 for (thread, _) in threads {
163 thread.join().unwrap()?;
164 }
165
166 Ok(())
167}
Source§ pub fn format<S: AsRef<str>>(&mut self, format: S) -> &mut Self
pub fn format<S: AsRef<str>>(&mut self, format: S) -> &mut Self
Alias for the -f argument, the format name.
Force input or output file format. The format is normally auto detected for input files and guessed from the file extension for output files, so this option is not needed in most cases.
Examples found in repository?
13fn main() {
14 let mut ffmpeg = FfmpegCommand::new()
15 .realtime()
16 .format("lavfi")
17 .input("testsrc=size=1920x1080:rate=60")
18 .codec_video("rawvideo")
19 .format("avi")
20 .output("-")
21 .spawn()
22 .unwrap();
23
24 let mut ffplay = Command::new("ffplay")
25 .args("-i -".split(' '))
26 .stdin(Stdio::piped())
27 .spawn()
28 .unwrap();
29
30 let mut ffmpeg_stdout = ffmpeg.take_stdout().unwrap();
31 let mut ffplay_stdin = ffplay.stdin.take().unwrap();
32
33 // pipe from ffmpeg stdout to ffplay stdin
34 let buf = &mut [0u8; 4096];
35 loop {
36 let n = ffmpeg_stdout.read(buf).unwrap();
37 if n == 0 {
38 break;
39 }
40 ffplay_stdin.write_all(&buf[..n]).unwrap();
41 }
42}
More examples
9fn main() -> Result<()> {
10 let iter = FfmpegCommand::new()
11 .format("lavfi")
12 .arg("-re") // "realtime"
13 .input(format!(
14 "testsrc=size={OUTPUT_WIDTH}x{OUTPUT_HEIGHT}:rate={OUTPUT_FRAMERATE}"
15 ))
16 .rawvideo()
17 .spawn()?
18 .iter()?
19 .filter_frames();
20
21 for frame in iter {
22 // clear the previous frame
23 if frame.frame_num > 0 {
24 for _ in 0..frame.height {
25 print!("\x1B[{}A", 1);
26 }
27 }
28
29 // Print the pixels colored with ANSI codes
30 for y in 0..frame.height {
31 for x in 0..frame.width {
32 let idx = (y * frame.width + x) as usize * 3;
33 let r = frame.data[idx] as u32;
34 let g = frame.data[idx + 1] as u32;
35 let b = frame.data[idx + 2] as u32;
36 print!("\x1B[48;2;{r};{g};{b}m ");
37 }
38 println!("\x1B[0m");
39 }
40 }
41
42 Ok(())
43}
10fn main() -> Result<()> {
11 // Set up a TCP listener
12 const TCP_PORT: u32 = 3000;
13 let (exit_sender, exit_receiver) = channel::<()>();
14 let listener_thread = thread::spawn(|| listen_for_connections(TCP_PORT, exit_receiver));
15
16 // Wait for the listener to start
17 thread::sleep(Duration::from_millis(1000));
18
19 // Prepare an FFmpeg command with separate outputs for video, audio, and subtitles.
20 FfmpegCommand::new()
21 // Global flags
22 .hide_banner()
23 .overwrite() // <- overwrite required on windows
24 // Generate test video
25 .format("lavfi")
26 .input("testsrc=size=1920x1080:rate=60:duration=10")
27 // Generate test audio
28 .format("lavfi")
29 .input("sine=frequency=1000:duration=10")
30 // Generate test subtitles
31 .format("srt")
32 .input(
33 "data:text/plain;base64,MQ0KMDA6MDA6MDAsMDAwIC0tPiAwMDowMDoxMCw1MDANCkhlbGxvIFdvcmxkIQ==",
34 )
35 // Video output
36 .map("0:v")
37 .format("rawvideo")
38 .pix_fmt("rgb24")
39 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
40 // Audio output
41 .map("1:a")
42 .format("s16le")
43 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
44 // Subtitles output
45 .map("2:s")
46 .format("srt")
47 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
48 .print_command()
49 .spawn()?
50 .iter()?
51 .for_each(|event| match event {
52 // Verify output size from FFmpeg logs (video/audio KiB)
53 FfmpegEvent::Log(LogLevel::Info, msg) if msg.starts_with("[out#") => {
54 println!("{msg}");
55 }
56
57 // Log any unexpected errors
58 FfmpegEvent::Log(LogLevel::Warning | LogLevel::Error | LogLevel::Fatal, msg) => {
59 eprintln!("{msg}");
60 }
61
62 // _ => {}
63 e => {
64 println!("{:?}", e);
65 }
66 });
67 exit_sender.send(())?;
68 listener_thread.join().unwrap()?;
69 Ok(())
70}
10pub fn main() -> Result<()> {
11 if cfg!(not(windows)) {
12 eprintln!("Note: Methods for capturing audio are platform-specific and this demo is intended for Windows.");
13 eprintln!("On Linux or Mac, you need to switch from the `dshow` format to a different one supported on your platform.");
14 eprintln!("Make sure to also include format-specific arguments such as `-audio_buffer_size`.");
15 eprintln!("Pull requests are welcome to make this demo cross-platform!");
16 }
17
18 // First step: find default audio input device
19 // Runs an `ffmpeg -list_devices` command and selects the first one found
20 // Sample log output: [dshow @ 000001c9babdb000] "Headset Microphone (Arctis 7 Chat)" (audio)
21
22 let audio_device = FfmpegCommand::new()
23 .hide_banner()
24 .args(&["-list_devices", "true"])
25 .format("dshow")
26 .input("dummy")
27 .spawn()?
28 .iter()?
29 .into_ffmpeg_stderr()
30 .find(|line| line.contains("(audio)"))
31 .map(|line| line.split('\"').nth(1).map(|s| s.to_string()))
32 .context("No audio device found")?
33 .context("Failed to parse audio device")?;
34
35 println!("Listening to audio device: {}", audio_device);
36
37 // Second step: Capture audio and analyze w/ `ebur128` audio filter
38 // Loudness metadata will be printed to the FFmpeg logs
39 // Docs: <https://ffmpeg.org/ffmpeg-filters.html#ebur128-1>
40
41 let iter = FfmpegCommand::new()
42 .format("dshow")
43 .args("-audio_buffer_size 50".split(' ')) // reduces latency to 50ms (dshow-specific)
44 .input(format!("audio={audio_device}"))
45 .args("-af ebur128=metadata=1,ametadata=print".split(' '))
46 .format("null")
47 .output("-")
48 .spawn()?
49 .iter()?;
50
51 // Note: even though the audio device name may have spaces, it should *not* be
52 // in quotes (""). Quotes are only needed on the command line to separate
53 // different arguments. Since Rust invokes the command directly without a
54 // shell interpreter, args are already divided up correctly. Any quotes
55 // would be included in the device name instead and the command would fail.
56 // <https://github.com/fluent-ffmpeg/node-fluent-ffmpeg/issues/648#issuecomment-866242144>
57
58 let mut first_volume_event = true;
59 for event in iter {
60 match event {
61 FfmpegEvent::Error(e) | FfmpegEvent::Log(LogLevel::Error | LogLevel::Fatal, e) => {
62 eprintln!("{e}");
63 }
64 FfmpegEvent::Log(LogLevel::Info, msg) if msg.contains("lavfi.r128.M=") => {
65 if let Some(volume) = msg.split("lavfi.r128.M=").last() {
66 // Sample log output: [Parsed_ametadata_1 @ 0000024c27effdc0] [info] lavfi.r128.M=-120.691
67 // M = "momentary loudness"; a sliding time window of 400ms
68 // Volume scale is roughly -70 to 0 LUFS. Anything below -70 is silence.
69 // See <https://en.wikipedia.org/wiki/EBU_R_128#Metering>
70 let volume_f32 = volume.parse::<f32>().context("Failed to parse volume")?;
71 let volume_normalized: usize = max(((volume_f32 / 5.0).round() as i32) + 14, 0) as usize;
72 let volume_percent = ((volume_normalized as f32 / 14.0) * 100.0).round();
73
74 // Clear previous line of output
75 if !first_volume_event {
76 print!("\x1b[1A\x1b[2K");
77 } else {
78 first_volume_event = false;
79 }
80
81 // Blinking red dot to indicate recording
82 let time = std::time::SystemTime::now()
83 .duration_since(std::time::UNIX_EPOCH)
84 .unwrap()
85 .as_secs();
86 let recording_indicator = if time % 2 == 0 { "🔴" } else { " " };
87
88 println!(
89 "{} {} {}%",
90 recording_indicator,
91 repeat('█').take(volume_normalized).collect::<String>(),
92 volume_percent
93 );
94 }
95 }
96 _ => {}
97 }
98 }
99
100 Ok(())
101}
12fn main() -> anyhow::Result<()> {
13 use anyhow::Result;
14 use ffmpeg_sidecar::command::FfmpegCommand;
15 use ffmpeg_sidecar::event::{FfmpegEvent, LogLevel};
16 use ffmpeg_sidecar::named_pipes::NamedPipe;
17 use ffmpeg_sidecar::pipe_name;
18 use std::io::Read;
19 use std::sync::mpsc;
20 use std::thread;
21
22 const VIDEO_PIPE_NAME: &str = pipe_name!("ffmpeg_video");
23 const AUDIO_PIPE_NAME: &str = pipe_name!("ffmpeg_audio");
24 const SUBTITLES_PIPE_NAME: &str = pipe_name!("ffmpeg_subtitles");
25
26 // Prepare an FFmpeg command with separate outputs for video, audio, and subtitles.
27 let mut command = FfmpegCommand::new();
28 command
29 // Global flags
30 .hide_banner()
31 .overwrite() // <- overwrite required on windows
32 // Generate test video
33 .format("lavfi")
34 .input("testsrc=size=1920x1080:rate=60:duration=10")
35 // Generate test audio
36 .format("lavfi")
37 .input("sine=frequency=1000:duration=10")
38 // Generate test subtitles
39 .format("srt")
40 .input(
41 "data:text/plain;base64,MQ0KMDA6MDA6MDAsMDAwIC0tPiAwMDowMDoxMCw1MDANCkhlbGxvIFdvcmxkIQ==",
42 )
43 // Video output
44 .map("0:v")
45 .format("rawvideo")
46 .pix_fmt("rgb24")
47 .output(VIDEO_PIPE_NAME)
48 // Audio output
49 .map("1:a")
50 .format("s16le")
51 .output(AUDIO_PIPE_NAME)
52 // Subtitles output
53 .map("2:s")
54 .format("srt")
55 .output(SUBTITLES_PIPE_NAME);
56
57 // Create a separate thread for each output pipe
58 let threads = [VIDEO_PIPE_NAME, AUDIO_PIPE_NAME, SUBTITLES_PIPE_NAME]
59 .iter()
60 .cloned()
61 .map(|pipe_name| {
62 // It's important to create the named pipe on the main thread before
63 // sending it elsewhere so that any errors are caught at the top level.
64 let mut pipe = NamedPipe::new(pipe_name)?;
65 println!("[{pipe_name}] pipe created");
66 let (ready_sender, ready_receiver) = mpsc::channel::<()>();
67 let thread = thread::spawn(move || -> Result<()> {
68 // Wait for FFmpeg to start writing
69 // Only needed for Windows, since Unix will block until a writer has connected
70 println!("[{pipe_name}] waiting for ready signal");
71 ready_receiver.recv()?;
72
73 // Read continuously until finished
74 // Note that if the stream of output is interrupted or paused,
75 // you may need additional logic to keep the read loop alive.
76 println!("[{pipe_name}] reading from pipe");
77 let mut buf = vec![0; 1920 * 1080 * 3];
78 let mut total_bytes_read = 0;
79
80 // In the case of subtitles, we'll decode the string contents directly
81 let mut text_content = if pipe_name == SUBTITLES_PIPE_NAME {
82 Some("".to_string())
83 } else {
84 None
85 };
86
87 loop {
88 match pipe.read(&mut buf) {
89 Ok(bytes_read) => {
90 total_bytes_read += bytes_read;
91
92 // read bytes into string
93 if let Some(cur_str) = &mut text_content {
94 let s = std::str::from_utf8(&buf[..bytes_read]).unwrap();
95 text_content = Some(format!("{}{}", cur_str, s));
96 }
97
98 if bytes_read == 0 {
99 break;
100 }
101 }
102 Err(err) => {
103 if err.kind() != std::io::ErrorKind::BrokenPipe {
104 return Err(err.into());
105 } else {
106 break;
107 }
108 }
109 }
110 }
111
112 // Log how many bytes were received over this pipe.
113 // You can visually compare this to the FFmpeg log output to confirm
114 // that all the expected bytes were captured.
115 let size_str = if total_bytes_read < 1024 {
116 format!("{}B", total_bytes_read)
117 } else {
118 format!("{}KiB", total_bytes_read / 1024)
119 };
120
121 if let Some(text_content) = text_content {
122 println!("[{pipe_name}] subtitle text content: ");
123 println!("{}", text_content.trim());
124 }
125
126 println!("[{pipe_name}] done reading ({size_str} total)");
127 Ok(())
128 });
129
130 Ok((thread, ready_sender))
131 })
132 .collect::<Result<Vec<_>>>()?;
133
134 // Start FFmpeg
135 let mut ready_signal_sent = false;
136 command
137 .print_command()
138 .spawn()?
139 .iter()?
140 .for_each(|event| match event {
141 // Signal threads when output is ready
142 FfmpegEvent::Progress(_) if !ready_signal_sent => {
143 threads.iter().for_each(|(_, sender)| {
144 sender.send(()).ok();
145 });
146 ready_signal_sent = true;
147 }
148
149 // Verify output size from FFmpeg logs (video/audio KiB)
150 FfmpegEvent::Log(LogLevel::Info, msg) if msg.starts_with("[out#") => {
151 println!("{msg}");
152 }
153
154 // Log any unexpected errors
155 FfmpegEvent::Log(LogLevel::Warning | LogLevel::Error | LogLevel::Fatal, msg) => {
156 eprintln!("{msg}");
157 }
158
159 _ => {}
160 });
161
162 for (thread, _) in threads {
163 thread.join().unwrap()?;
164 }
165
166 Ok(())
167}
Source§ pub fn input<S: AsRef<str>>(&mut self, path_or_url: S) -> &mut Self
pub fn input<S: AsRef<str>>(&mut self, path_or_url: S) -> &mut Self
Alias for the -i argument, the input file path or URL.
To take input from stdin, use the value `-` or `pipe:0`.
Examples found in repository?
13fn main() {
14 let mut ffmpeg = FfmpegCommand::new()
15 .realtime()
16 .format("lavfi")
17 .input("testsrc=size=1920x1080:rate=60")
18 .codec_video("rawvideo")
19 .format("avi")
20 .output("-")
21 .spawn()
22 .unwrap();
23
24 let mut ffplay = Command::new("ffplay")
25 .args("-i -".split(' '))
26 .stdin(Stdio::piped())
27 .spawn()
28 .unwrap();
29
30 let mut ffmpeg_stdout = ffmpeg.take_stdout().unwrap();
31 let mut ffplay_stdin = ffplay.stdin.take().unwrap();
32
33 // pipe from ffmpeg stdout to ffplay stdin
34 let buf = &mut [0u8; 4096];
35 loop {
36 let n = ffmpeg_stdout.read(buf).unwrap();
37 if n == 0 {
38 break;
39 }
40 ffplay_stdin.write_all(&buf[..n]).unwrap();
41 }
42}
More examples
9fn main() -> Result<()> {
10 let iter = FfmpegCommand::new()
11 .format("lavfi")
12 .arg("-re") // "realtime"
13 .input(format!(
14 "testsrc=size={OUTPUT_WIDTH}x{OUTPUT_HEIGHT}:rate={OUTPUT_FRAMERATE}"
15 ))
16 .rawvideo()
17 .spawn()?
18 .iter()?
19 .filter_frames();
20
21 for frame in iter {
22 // clear the previous frame
23 if frame.frame_num > 0 {
24 for _ in 0..frame.height {
25 print!("\x1B[{}A", 1);
26 }
27 }
28
29 // Print the pixels colored with ANSI codes
30 for y in 0..frame.height {
31 for x in 0..frame.width {
32 let idx = (y * frame.width + x) as usize * 3;
33 let r = frame.data[idx] as u32;
34 let g = frame.data[idx + 1] as u32;
35 let b = frame.data[idx + 2] as u32;
36 print!("\x1B[48;2;{r};{g};{b}m ");
37 }
38 println!("\x1B[0m");
39 }
40 }
41
42 Ok(())
43}
10fn main() -> Result<()> {
11 // Set up a TCP listener
12 const TCP_PORT: u32 = 3000;
13 let (exit_sender, exit_receiver) = channel::<()>();
14 let listener_thread = thread::spawn(|| listen_for_connections(TCP_PORT, exit_receiver));
15
16 // Wait for the listener to start
17 thread::sleep(Duration::from_millis(1000));
18
19 // Prepare an FFmpeg command with separate outputs for video, audio, and subtitles.
20 FfmpegCommand::new()
21 // Global flags
22 .hide_banner()
23 .overwrite() // <- overwrite required on windows
24 // Generate test video
25 .format("lavfi")
26 .input("testsrc=size=1920x1080:rate=60:duration=10")
27 // Generate test audio
28 .format("lavfi")
29 .input("sine=frequency=1000:duration=10")
30 // Generate test subtitles
31 .format("srt")
32 .input(
33 "data:text/plain;base64,MQ0KMDA6MDA6MDAsMDAwIC0tPiAwMDowMDoxMCw1MDANCkhlbGxvIFdvcmxkIQ==",
34 )
35 // Video output
36 .map("0:v")
37 .format("rawvideo")
38 .pix_fmt("rgb24")
39 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
40 // Audio output
41 .map("1:a")
42 .format("s16le")
43 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
44 // Subtitles output
45 .map("2:s")
46 .format("srt")
47 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
48 .print_command()
49 .spawn()?
50 .iter()?
51 .for_each(|event| match event {
52 // Verify output size from FFmpeg logs (video/audio KiB)
53 FfmpegEvent::Log(LogLevel::Info, msg) if msg.starts_with("[out#") => {
54 println!("{msg}");
55 }
56
57 // Log any unexpected errors
58 FfmpegEvent::Log(LogLevel::Warning | LogLevel::Error | LogLevel::Fatal, msg) => {
59 eprintln!("{msg}");
60 }
61
62 // _ => {}
63 e => {
64 println!("{:?}", e);
65 }
66 });
67 exit_sender.send(())?;
68 listener_thread.join().unwrap()?;
69 Ok(())
70}
16fn main() {
17 // Create an H265 source video as a starting point
18 let input_path = "output/h265.mp4";
19 if !Path::new(input_path).exists() {
20 create_h265_source(input_path);
21 }
22
23 // One instance decodes H265 to raw frames
24 let mut input = FfmpegCommand::new()
25 .input(input_path)
26 .rawvideo()
27 .spawn()
28 .unwrap();
29
30 // Frames can be transformed by Iterator `.map()`.
31 // This example is a no-op, with frames passed through unaltered.
32 let transformed_frames = input.iter().unwrap().filter_frames();
33
34 // You could easily add some "middleware" processing here:
35 // - overlay or composite another RGB image (or even another Ffmpeg Iterator)
36 // - apply a filter like blur or convolution
37 // Note: some of these operations are also possible with FFmpeg's (somewhat arcane)
38 // `filtergraph` API, but doing it in Rust gives you much finer-grained
39 // control, debuggability, and modularity -- you can pull in any Rust crate
40 // you need.
41
42 // A second instance encodes the updated frames back to H265
43 let mut output = FfmpegCommand::new()
44 .args([
45 "-f", "rawvideo", "-pix_fmt", "rgb24", "-s", "600x800", "-r", "30",
46 ]) // note: should be possible to infer these params from the source input stream
47 .input("-")
48 .args(["-c:v", "libx265"])
49 .args(["-y", "output/h265_overlay.mp4"])
50 .spawn()
51 .unwrap();
52
53 // Connect the two instances
54 let mut stdin = output.take_stdin().unwrap();
55 thread::spawn(move || {
56 // `for_each` blocks through the end of the iterator,
57 // so we run it in another thread.
58 transformed_frames.for_each(|f| {
59 stdin.write_all(&f.data).ok();
60 });
61 });
62
63 // On the main thread, run the output instance to completion
64 output.iter().unwrap().for_each(|e| match e {
65 FfmpegEvent::Log(LogLevel::Error, e) => println!("Error: {}", e),
66 FfmpegEvent::Progress(p) => println!("Progress: {} / 00:00:15", p.time),
67 _ => {}
68 });
69}
10pub fn main() -> Result<()> {
11 if cfg!(not(windows)) {
12 eprintln!("Note: Methods for capturing audio are platform-specific and this demo is intended for Windows.");
13 eprintln!("On Linux or Mac, you need to switch from the `dshow` format to a different one supported on your platform.");
14 eprintln!("Make sure to also include format-specific arguments such as `-audio_buffer_size`.");
15 eprintln!("Pull requests are welcome to make this demo cross-platform!");
16 }
17
18 // First step: find default audio input device
19 // Runs an `ffmpeg -list_devices` command and selects the first one found
20 // Sample log output: [dshow @ 000001c9babdb000] "Headset Microphone (Arctis 7 Chat)" (audio)
21
22 let audio_device = FfmpegCommand::new()
23 .hide_banner()
24 .args(&["-list_devices", "true"])
25 .format("dshow")
26 .input("dummy")
27 .spawn()?
28 .iter()?
29 .into_ffmpeg_stderr()
30 .find(|line| line.contains("(audio)"))
31 .map(|line| line.split('\"').nth(1).map(|s| s.to_string()))
32 .context("No audio device found")?
33 .context("Failed to parse audio device")?;
34
35 println!("Listening to audio device: {}", audio_device);
36
37 // Second step: Capture audio and analyze w/ `ebur128` audio filter
38 // Loudness metadata will be printed to the FFmpeg logs
39 // Docs: <https://ffmpeg.org/ffmpeg-filters.html#ebur128-1>
40
41 let iter = FfmpegCommand::new()
42 .format("dshow")
43 .args("-audio_buffer_size 50".split(' ')) // reduces latency to 50ms (dshow-specific)
44 .input(format!("audio={audio_device}"))
45 .args("-af ebur128=metadata=1,ametadata=print".split(' '))
46 .format("null")
47 .output("-")
48 .spawn()?
49 .iter()?;
50
51 // Note: even though the audio device name may have spaces, it should *not* be
52 // in quotes (""). Quotes are only needed on the command line to separate
53 // different arguments. Since Rust invokes the command directly without a
54 // shell interpreter, args are already divided up correctly. Any quotes
55 // would be included in the device name instead and the command would fail.
56 // <https://github.com/fluent-ffmpeg/node-fluent-ffmpeg/issues/648#issuecomment-866242144>
57
58 let mut first_volume_event = true;
59 for event in iter {
60 match event {
61 FfmpegEvent::Error(e) | FfmpegEvent::Log(LogLevel::Error | LogLevel::Fatal, e) => {
62 eprintln!("{e}");
63 }
64 FfmpegEvent::Log(LogLevel::Info, msg) if msg.contains("lavfi.r128.M=") => {
65 if let Some(volume) = msg.split("lavfi.r128.M=").last() {
66 // Sample log output: [Parsed_ametadata_1 @ 0000024c27effdc0] [info] lavfi.r128.M=-120.691
67 // M = "momentary loudness"; a sliding time window of 400ms
68 // Volume scale is roughly -70 to 0 LUFS. Anything below -70 is silence.
69 // See <https://en.wikipedia.org/wiki/EBU_R_128#Metering>
70 let volume_f32 = volume.parse::<f32>().context("Failed to parse volume")?;
71 let volume_normalized: usize = max(((volume_f32 / 5.0).round() as i32) + 14, 0) as usize;
72 let volume_percent = ((volume_normalized as f32 / 14.0) * 100.0).round();
73
74 // Clear previous line of output
75 if !first_volume_event {
76 print!("\x1b[1A\x1b[2K");
77 } else {
78 first_volume_event = false;
79 }
80
81 // Blinking red dot to indicate recording
82 let time = std::time::SystemTime::now()
83 .duration_since(std::time::UNIX_EPOCH)
84 .unwrap()
85 .as_secs();
86 let recording_indicator = if time % 2 == 0 { "🔴" } else { " " };
87
88 println!(
89 "{} {} {}%",
90 recording_indicator,
91 repeat('█').take(volume_normalized).collect::<String>(),
92 volume_percent
93 );
94 }
95 }
96 _ => {}
97 }
98 }
99
100 Ok(())
101}
12fn main() -> anyhow::Result<()> {
13 use anyhow::Result;
14 use ffmpeg_sidecar::command::FfmpegCommand;
15 use ffmpeg_sidecar::event::{FfmpegEvent, LogLevel};
16 use ffmpeg_sidecar::named_pipes::NamedPipe;
17 use ffmpeg_sidecar::pipe_name;
18 use std::io::Read;
19 use std::sync::mpsc;
20 use std::thread;
21
22 const VIDEO_PIPE_NAME: &str = pipe_name!("ffmpeg_video");
23 const AUDIO_PIPE_NAME: &str = pipe_name!("ffmpeg_audio");
24 const SUBTITLES_PIPE_NAME: &str = pipe_name!("ffmpeg_subtitles");
25
26 // Prepare an FFmpeg command with separate outputs for video, audio, and subtitles.
27 let mut command = FfmpegCommand::new();
28 command
29 // Global flags
30 .hide_banner()
31 .overwrite() // <- overwrite required on windows
32 // Generate test video
33 .format("lavfi")
34 .input("testsrc=size=1920x1080:rate=60:duration=10")
35 // Generate test audio
36 .format("lavfi")
37 .input("sine=frequency=1000:duration=10")
38 // Generate test subtitles
39 .format("srt")
40 .input(
41 "data:text/plain;base64,MQ0KMDA6MDA6MDAsMDAwIC0tPiAwMDowMDoxMCw1MDANCkhlbGxvIFdvcmxkIQ==",
42 )
43 // Video output
44 .map("0:v")
45 .format("rawvideo")
46 .pix_fmt("rgb24")
47 .output(VIDEO_PIPE_NAME)
48 // Audio output
49 .map("1:a")
50 .format("s16le")
51 .output(AUDIO_PIPE_NAME)
52 // Subtitles output
53 .map("2:s")
54 .format("srt")
55 .output(SUBTITLES_PIPE_NAME);
56
57 // Create a separate thread for each output pipe
58 let threads = [VIDEO_PIPE_NAME, AUDIO_PIPE_NAME, SUBTITLES_PIPE_NAME]
59 .iter()
60 .cloned()
61 .map(|pipe_name| {
62 // It's important to create the named pipe on the main thread before
63 // sending it elsewhere so that any errors are caught at the top level.
64 let mut pipe = NamedPipe::new(pipe_name)?;
65 println!("[{pipe_name}] pipe created");
66 let (ready_sender, ready_receiver) = mpsc::channel::<()>();
67 let thread = thread::spawn(move || -> Result<()> {
68 // Wait for FFmpeg to start writing
69 // Only needed for Windows, since Unix will block until a writer has connected
70 println!("[{pipe_name}] waiting for ready signal");
71 ready_receiver.recv()?;
72
73 // Read continuously until finished
74 // Note that if the stream of output is interrupted or paused,
75 // you may need additional logic to keep the read loop alive.
76 println!("[{pipe_name}] reading from pipe");
77 let mut buf = vec![0; 1920 * 1080 * 3];
78 let mut total_bytes_read = 0;
79
80 // In the case of subtitles, we'll decode the string contents directly
81 let mut text_content = if pipe_name == SUBTITLES_PIPE_NAME {
82 Some("".to_string())
83 } else {
84 None
85 };
86
87 loop {
88 match pipe.read(&mut buf) {
89 Ok(bytes_read) => {
90 total_bytes_read += bytes_read;
91
92 // read bytes into string
93 if let Some(cur_str) = &mut text_content {
94 let s = std::str::from_utf8(&buf[..bytes_read]).unwrap();
95 text_content = Some(format!("{}{}", cur_str, s));
96 }
97
98 if bytes_read == 0 {
99 break;
100 }
101 }
102 Err(err) => {
103 if err.kind() != std::io::ErrorKind::BrokenPipe {
104 return Err(err.into());
105 } else {
106 break;
107 }
108 }
109 }
110 }
111
112 // Log how many bytes were received over this pipe.
113 // You can visually compare this to the FFmpeg log output to confirm
114 // that all the expected bytes were captured.
115 let size_str = if total_bytes_read < 1024 {
116 format!("{}B", total_bytes_read)
117 } else {
118 format!("{}KiB", total_bytes_read / 1024)
119 };
120
121 if let Some(text_content) = text_content {
122 println!("[{pipe_name}] subtitle text content: ");
123 println!("{}", text_content.trim());
124 }
125
126 println!("[{pipe_name}] done reading ({size_str} total)");
127 Ok(())
128 });
129
130 Ok((thread, ready_sender))
131 })
132 .collect::<Result<Vec<_>>>()?;
133
134 // Start FFmpeg
135 let mut ready_signal_sent = false;
136 command
137 .print_command()
138 .spawn()?
139 .iter()?
140 .for_each(|event| match event {
141 // Signal threads when output is ready
142 FfmpegEvent::Progress(_) if !ready_signal_sent => {
143 threads.iter().for_each(|(_, sender)| {
144 sender.send(()).ok();
145 });
146 ready_signal_sent = true;
147 }
148
149 // Verify output size from FFmpeg logs (video/audio KiB)
150 FfmpegEvent::Log(LogLevel::Info, msg) if msg.starts_with("[out#") => {
151 println!("{msg}");
152 }
153
154 // Log any unexpected errors
155 FfmpegEvent::Log(LogLevel::Warning | LogLevel::Error | LogLevel::Fatal, msg) => {
156 eprintln!("{msg}");
157 }
158
159 _ => {}
160 });
161
162 for (thread, _) in threads {
163 thread.join().unwrap()?;
164 }
165
166 Ok(())
167}
Sourcepub fn output<S: AsRef<str>>(&mut self, path_or_url: S) -> &mut Self
pub fn output<S: AsRef<str>>(&mut self, path_or_url: S) -> &mut Self
Alias for the output file path or URL.
To send output to stdout, use the value -
or pipe:1
.
Since this is the last argument in the command and has no -
flag
preceding it, it is equivalent to calling .arg()
directly. However,
using this command helps label the purpose of the argument, and makes the
code more readable at a glance.
Examples found in repository?
4fn main() {
5 let mut ffmpeg_runner = FfmpegCommand::new()
6 .testsrc()
7 .args(["-metadata", "title=some cool title"])
8 .overwrite() // -y
9 .output("output/metadata.mp4")
10 .print_command()
11 .spawn()
12 .unwrap();
13
14 ffmpeg_runner
15 .iter()
16 .unwrap()
17 .for_each(|e| {
18 match e {
19 FfmpegEvent::Progress(FfmpegProgress { frame, .. }) =>
20 println!("Current frame: {frame}"),
21 FfmpegEvent::Log(_level, msg) =>
22 println!("[ffmpeg] {msg}"),
23 _ => {}
24 }
25 });
26}
More examples
13fn main() {
14 let mut ffmpeg = FfmpegCommand::new()
15 .realtime()
16 .format("lavfi")
17 .input("testsrc=size=1920x1080:rate=60")
18 .codec_video("rawvideo")
19 .format("avi")
20 .output("-")
21 .spawn()
22 .unwrap();
23
24 let mut ffplay = Command::new("ffplay")
25 .args("-i -".split(' '))
26 .stdin(Stdio::piped())
27 .spawn()
28 .unwrap();
29
30 let mut ffmpeg_stdout = ffmpeg.take_stdout().unwrap();
31 let mut ffplay_stdin = ffplay.stdin.take().unwrap();
32
33 // pipe from ffmpeg stdout to ffplay stdin
34 let buf = &mut [0u8; 4096];
35 loop {
36 let n = ffmpeg_stdout.read(buf).unwrap();
37 if n == 0 {
38 break;
39 }
40 ffplay_stdin.write_all(&buf[..n]).unwrap();
41 }
42}
10fn main() -> Result<()> {
11 // Set up a TCP listener
12 const TCP_PORT: u32 = 3000;
13 let (exit_sender, exit_receiver) = channel::<()>();
14 let listener_thread = thread::spawn(|| listen_for_connections(TCP_PORT, exit_receiver));
15
16 // Wait for the listener to start
17 thread::sleep(Duration::from_millis(1000));
18
19 // Prepare an FFmpeg command with separate outputs for video, audio, and subtitles.
20 FfmpegCommand::new()
21 // Global flags
22 .hide_banner()
23 .overwrite() // <- overwrite required on windows
24 // Generate test video
25 .format("lavfi")
26 .input("testsrc=size=1920x1080:rate=60:duration=10")
27 // Generate test audio
28 .format("lavfi")
29 .input("sine=frequency=1000:duration=10")
30 // Generate test subtitles
31 .format("srt")
32 .input(
33 "data:text/plain;base64,MQ0KMDA6MDA6MDAsMDAwIC0tPiAwMDowMDoxMCw1MDANCkhlbGxvIFdvcmxkIQ==",
34 )
35 // Video output
36 .map("0:v")
37 .format("rawvideo")
38 .pix_fmt("rgb24")
39 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
40 // Audio output
41 .map("1:a")
42 .format("s16le")
43 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
44 // Subtitles output
45 .map("2:s")
46 .format("srt")
47 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
48 .print_command()
49 .spawn()?
50 .iter()?
51 .for_each(|event| match event {
52 // Verify output size from FFmpeg logs (video/audio KiB)
53 FfmpegEvent::Log(LogLevel::Info, msg) if msg.starts_with("[out#") => {
54 println!("{msg}");
55 }
56
57 // Log any unexpected errors
58 FfmpegEvent::Log(LogLevel::Warning | LogLevel::Error | LogLevel::Fatal, msg) => {
59 eprintln!("{msg}");
60 }
61
62 // _ => {}
63 e => {
64 println!("{:?}", e);
65 }
66 });
67 exit_sender.send(())?;
68 listener_thread.join().unwrap()?;
69 Ok(())
70}
10pub fn main() -> Result<()> {
11 if cfg!(not(windows)) {
12 eprintln!("Note: Methods for capturing audio are platform-specific and this demo is intended for Windows.");
13 eprintln!("On Linux or Mac, you need to switch from the `dshow` format to a different one supported on your platform.");
14 eprintln!("Make sure to also include format-specific arguments such as `-audio_buffer_size`.");
15 eprintln!("Pull requests are welcome to make this demo cross-platform!");
16 }
17
18 // First step: find default audio input device
19 // Runs an `ffmpeg -list_devices` command and selects the first one found
20 // Sample log output: [dshow @ 000001c9babdb000] "Headset Microphone (Arctis 7 Chat)" (audio)
21
22 let audio_device = FfmpegCommand::new()
23 .hide_banner()
24 .args(&["-list_devices", "true"])
25 .format("dshow")
26 .input("dummy")
27 .spawn()?
28 .iter()?
29 .into_ffmpeg_stderr()
30 .find(|line| line.contains("(audio)"))
31 .map(|line| line.split('\"').nth(1).map(|s| s.to_string()))
32 .context("No audio device found")?
33 .context("Failed to parse audio device")?;
34
35 println!("Listening to audio device: {}", audio_device);
36
37 // Second step: Capture audio and analyze w/ `ebur128` audio filter
38 // Loudness metadata will be printed to the FFmpeg logs
39 // Docs: <https://ffmpeg.org/ffmpeg-filters.html#ebur128-1>
40
41 let iter = FfmpegCommand::new()
42 .format("dshow")
43 .args("-audio_buffer_size 50".split(' ')) // reduces latency to 50ms (dshow-specific)
44 .input(format!("audio={audio_device}"))
45 .args("-af ebur128=metadata=1,ametadata=print".split(' '))
46 .format("null")
47 .output("-")
48 .spawn()?
49 .iter()?;
50
51 // Note: even though the audio device name may have spaces, it should *not* be
52 // in quotes (""). Quotes are only needed on the command line to separate
53 // different arguments. Since Rust invokes the command directly without a
54 // shell interpreter, args are already divided up correctly. Any quotes
55 // would be included in the device name instead and the command would fail.
56 // <https://github.com/fluent-ffmpeg/node-fluent-ffmpeg/issues/648#issuecomment-866242144>
57
58 let mut first_volume_event = true;
59 for event in iter {
60 match event {
61 FfmpegEvent::Error(e) | FfmpegEvent::Log(LogLevel::Error | LogLevel::Fatal, e) => {
62 eprintln!("{e}");
63 }
64 FfmpegEvent::Log(LogLevel::Info, msg) if msg.contains("lavfi.r128.M=") => {
65 if let Some(volume) = msg.split("lavfi.r128.M=").last() {
66 // Sample log output: [Parsed_ametadata_1 @ 0000024c27effdc0] [info] lavfi.r128.M=-120.691
67 // M = "momentary loudness"; a sliding time window of 400ms
68 // Volume scale is roughly -70 to 0 LUFS. Anything below -70 is silence.
69 // See <https://en.wikipedia.org/wiki/EBU_R_128#Metering>
70 let volume_f32 = volume.parse::<f32>().context("Failed to parse volume")?;
71 let volume_normalized: usize = max(((volume_f32 / 5.0).round() as i32) + 14, 0) as usize;
72 let volume_percent = ((volume_normalized as f32 / 14.0) * 100.0).round();
73
74 // Clear previous line of output
75 if !first_volume_event {
76 print!("\x1b[1A\x1b[2K");
77 } else {
78 first_volume_event = false;
79 }
80
81 // Blinking red dot to indicate recording
82 let time = std::time::SystemTime::now()
83 .duration_since(std::time::UNIX_EPOCH)
84 .unwrap()
85 .as_secs();
86 let recording_indicator = if time % 2 == 0 { "🔴" } else { " " };
87
88 println!(
89 "{} {} {}%",
90 recording_indicator,
91 repeat('█').take(volume_normalized).collect::<String>(),
92 volume_percent
93 );
94 }
95 }
96 _ => {}
97 }
98 }
99
100 Ok(())
101}
12fn main() -> anyhow::Result<()> {
13 use anyhow::Result;
14 use ffmpeg_sidecar::command::FfmpegCommand;
15 use ffmpeg_sidecar::event::{FfmpegEvent, LogLevel};
16 use ffmpeg_sidecar::named_pipes::NamedPipe;
17 use ffmpeg_sidecar::pipe_name;
18 use std::io::Read;
19 use std::sync::mpsc;
20 use std::thread;
21
22 const VIDEO_PIPE_NAME: &str = pipe_name!("ffmpeg_video");
23 const AUDIO_PIPE_NAME: &str = pipe_name!("ffmpeg_audio");
24 const SUBTITLES_PIPE_NAME: &str = pipe_name!("ffmpeg_subtitles");
25
26 // Prepare an FFmpeg command with separate outputs for video, audio, and subtitles.
27 let mut command = FfmpegCommand::new();
28 command
29 // Global flags
30 .hide_banner()
31 .overwrite() // <- overwrite required on windows
32 // Generate test video
33 .format("lavfi")
34 .input("testsrc=size=1920x1080:rate=60:duration=10")
35 // Generate test audio
36 .format("lavfi")
37 .input("sine=frequency=1000:duration=10")
38 // Generate test subtitles
39 .format("srt")
40 .input(
41 "data:text/plain;base64,MQ0KMDA6MDA6MDAsMDAwIC0tPiAwMDowMDoxMCw1MDANCkhlbGxvIFdvcmxkIQ==",
42 )
43 // Video output
44 .map("0:v")
45 .format("rawvideo")
46 .pix_fmt("rgb24")
47 .output(VIDEO_PIPE_NAME)
48 // Audio output
49 .map("1:a")
50 .format("s16le")
51 .output(AUDIO_PIPE_NAME)
52 // Subtitles output
53 .map("2:s")
54 .format("srt")
55 .output(SUBTITLES_PIPE_NAME);
56
57 // Create a separate thread for each output pipe
58 let threads = [VIDEO_PIPE_NAME, AUDIO_PIPE_NAME, SUBTITLES_PIPE_NAME]
59 .iter()
60 .cloned()
61 .map(|pipe_name| {
62 // It's important to create the named pipe on the main thread before
63 // sending it elsewhere so that any errors are caught at the top level.
64 let mut pipe = NamedPipe::new(pipe_name)?;
65 println!("[{pipe_name}] pipe created");
66 let (ready_sender, ready_receiver) = mpsc::channel::<()>();
67 let thread = thread::spawn(move || -> Result<()> {
68 // Wait for FFmpeg to start writing
69 // Only needed for Windows, since Unix will block until a writer has connected
70 println!("[{pipe_name}] waiting for ready signal");
71 ready_receiver.recv()?;
72
73 // Read continuously until finished
74 // Note that if the stream of output is interrupted or paused,
75 // you may need additional logic to keep the read loop alive.
76 println!("[{pipe_name}] reading from pipe");
77 let mut buf = vec![0; 1920 * 1080 * 3];
78 let mut total_bytes_read = 0;
79
80 // In the case of subtitles, we'll decode the string contents directly
81 let mut text_content = if pipe_name == SUBTITLES_PIPE_NAME {
82 Some("".to_string())
83 } else {
84 None
85 };
86
87 loop {
88 match pipe.read(&mut buf) {
89 Ok(bytes_read) => {
90 total_bytes_read += bytes_read;
91
92 // read bytes into string
93 if let Some(cur_str) = &mut text_content {
94 let s = std::str::from_utf8(&buf[..bytes_read]).unwrap();
95 text_content = Some(format!("{}{}", cur_str, s));
96 }
97
98 if bytes_read == 0 {
99 break;
100 }
101 }
102 Err(err) => {
103 if err.kind() != std::io::ErrorKind::BrokenPipe {
104 return Err(err.into());
105 } else {
106 break;
107 }
108 }
109 }
110 }
111
112 // Log how many bytes were received over this pipe.
113 // You can visually compare this to the FFmpeg log output to confirm
114 // that all the expected bytes were captured.
115 let size_str = if total_bytes_read < 1024 {
116 format!("{}B", total_bytes_read)
117 } else {
118 format!("{}KiB", total_bytes_read / 1024)
119 };
120
121 if let Some(text_content) = text_content {
122 println!("[{pipe_name}] subtitle text content: ");
123 println!("{}", text_content.trim());
124 }
125
126 println!("[{pipe_name}] done reading ({size_str} total)");
127 Ok(())
128 });
129
130 Ok((thread, ready_sender))
131 })
132 .collect::<Result<Vec<_>>>()?;
133
134 // Start FFmpeg
135 let mut ready_signal_sent = false;
136 command
137 .print_command()
138 .spawn()?
139 .iter()?
140 .for_each(|event| match event {
141 // Signal threads when output is ready
142 FfmpegEvent::Progress(_) if !ready_signal_sent => {
143 threads.iter().for_each(|(_, sender)| {
144 sender.send(()).ok();
145 });
146 ready_signal_sent = true;
147 }
148
149 // Verify output size from FFmpeg logs (video/audio KiB)
150 FfmpegEvent::Log(LogLevel::Info, msg) if msg.starts_with("[out#") => {
151 println!("{msg}");
152 }
153
154 // Log any unexpected errors
155 FfmpegEvent::Log(LogLevel::Warning | LogLevel::Error | LogLevel::Fatal, msg) => {
156 eprintln!("{msg}");
157 }
158
159 _ => {}
160 });
161
162 for (thread, _) in threads {
163 thread.join().unwrap()?;
164 }
165
166 Ok(())
167}
Sourcepub fn overwrite(&mut self) -> &mut Self
pub fn overwrite(&mut self) -> &mut Self
Alias for -y
argument: overwrite output files without asking.
Examples found in repository?
4fn main() {
5 let mut ffmpeg_runner = FfmpegCommand::new()
6 .testsrc()
7 .args(["-metadata", "title=some cool title"])
8 .overwrite() // -y
9 .output("output/metadata.mp4")
10 .print_command()
11 .spawn()
12 .unwrap();
13
14 ffmpeg_runner
15 .iter()
16 .unwrap()
17 .for_each(|e| {
18 match e {
19 FfmpegEvent::Progress(FfmpegProgress { frame, .. }) =>
20 println!("Current frame: {frame}"),
21 FfmpegEvent::Log(_level, msg) =>
22 println!("[ffmpeg] {msg}"),
23 _ => {}
24 }
25 });
26}
More examples
10fn main() -> Result<()> {
11 // Set up a TCP listener
12 const TCP_PORT: u32 = 3000;
13 let (exit_sender, exit_receiver) = channel::<()>();
14 let listener_thread = thread::spawn(|| listen_for_connections(TCP_PORT, exit_receiver));
15
16 // Wait for the listener to start
17 thread::sleep(Duration::from_millis(1000));
18
19 // Prepare an FFmpeg command with separate outputs for video, audio, and subtitles.
20 FfmpegCommand::new()
21 // Global flags
22 .hide_banner()
23 .overwrite() // <- overwrite required on windows
24 // Generate test video
25 .format("lavfi")
26 .input("testsrc=size=1920x1080:rate=60:duration=10")
27 // Generate test audio
28 .format("lavfi")
29 .input("sine=frequency=1000:duration=10")
30 // Generate test subtitles
31 .format("srt")
32 .input(
33 "data:text/plain;base64,MQ0KMDA6MDA6MDAsMDAwIC0tPiAwMDowMDoxMCw1MDANCkhlbGxvIFdvcmxkIQ==",
34 )
35 // Video output
36 .map("0:v")
37 .format("rawvideo")
38 .pix_fmt("rgb24")
39 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
40 // Audio output
41 .map("1:a")
42 .format("s16le")
43 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
44 // Subtitles output
45 .map("2:s")
46 .format("srt")
47 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
48 .print_command()
49 .spawn()?
50 .iter()?
51 .for_each(|event| match event {
52 // Verify output size from FFmpeg logs (video/audio KiB)
53 FfmpegEvent::Log(LogLevel::Info, msg) if msg.starts_with("[out#") => {
54 println!("{msg}");
55 }
56
57 // Log any unexpected errors
58 FfmpegEvent::Log(LogLevel::Warning | LogLevel::Error | LogLevel::Fatal, msg) => {
59 eprintln!("{msg}");
60 }
61
62 // _ => {}
63 e => {
64 println!("{:?}", e);
65 }
66 });
67 exit_sender.send(())?;
68 listener_thread.join().unwrap()?;
69 Ok(())
70}
12fn main() -> anyhow::Result<()> {
13 use anyhow::Result;
14 use ffmpeg_sidecar::command::FfmpegCommand;
15 use ffmpeg_sidecar::event::{FfmpegEvent, LogLevel};
16 use ffmpeg_sidecar::named_pipes::NamedPipe;
17 use ffmpeg_sidecar::pipe_name;
18 use std::io::Read;
19 use std::sync::mpsc;
20 use std::thread;
21
22 const VIDEO_PIPE_NAME: &str = pipe_name!("ffmpeg_video");
23 const AUDIO_PIPE_NAME: &str = pipe_name!("ffmpeg_audio");
24 const SUBTITLES_PIPE_NAME: &str = pipe_name!("ffmpeg_subtitles");
25
26 // Prepare an FFmpeg command with separate outputs for video, audio, and subtitles.
27 let mut command = FfmpegCommand::new();
28 command
29 // Global flags
30 .hide_banner()
31 .overwrite() // <- overwrite required on windows
32 // Generate test video
33 .format("lavfi")
34 .input("testsrc=size=1920x1080:rate=60:duration=10")
35 // Generate test audio
36 .format("lavfi")
37 .input("sine=frequency=1000:duration=10")
38 // Generate test subtitles
39 .format("srt")
40 .input(
41 "data:text/plain;base64,MQ0KMDA6MDA6MDAsMDAwIC0tPiAwMDowMDoxMCw1MDANCkhlbGxvIFdvcmxkIQ==",
42 )
43 // Video output
44 .map("0:v")
45 .format("rawvideo")
46 .pix_fmt("rgb24")
47 .output(VIDEO_PIPE_NAME)
48 // Audio output
49 .map("1:a")
50 .format("s16le")
51 .output(AUDIO_PIPE_NAME)
52 // Subtitles output
53 .map("2:s")
54 .format("srt")
55 .output(SUBTITLES_PIPE_NAME);
56
57 // Create a separate thread for each output pipe
58 let threads = [VIDEO_PIPE_NAME, AUDIO_PIPE_NAME, SUBTITLES_PIPE_NAME]
59 .iter()
60 .cloned()
61 .map(|pipe_name| {
62 // It's important to create the named pipe on the main thread before
63 // sending it elsewhere so that any errors are caught at the top level.
64 let mut pipe = NamedPipe::new(pipe_name)?;
65 println!("[{pipe_name}] pipe created");
66 let (ready_sender, ready_receiver) = mpsc::channel::<()>();
67 let thread = thread::spawn(move || -> Result<()> {
68 // Wait for FFmpeg to start writing
69 // Only needed for Windows, since Unix will block until a writer has connected
70 println!("[{pipe_name}] waiting for ready signal");
71 ready_receiver.recv()?;
72
73 // Read continuously until finished
74 // Note that if the stream of output is interrupted or paused,
75 // you may need additional logic to keep the read loop alive.
76 println!("[{pipe_name}] reading from pipe");
77 let mut buf = vec![0; 1920 * 1080 * 3];
78 let mut total_bytes_read = 0;
79
80 // In the case of subtitles, we'll decode the string contents directly
81 let mut text_content = if pipe_name == SUBTITLES_PIPE_NAME {
82 Some("".to_string())
83 } else {
84 None
85 };
86
87 loop {
88 match pipe.read(&mut buf) {
89 Ok(bytes_read) => {
90 total_bytes_read += bytes_read;
91
92 // read bytes into string
93 if let Some(cur_str) = &mut text_content {
94 let s = std::str::from_utf8(&buf[..bytes_read]).unwrap();
95 text_content = Some(format!("{}{}", cur_str, s));
96 }
97
98 if bytes_read == 0 {
99 break;
100 }
101 }
102 Err(err) => {
103 if err.kind() != std::io::ErrorKind::BrokenPipe {
104 return Err(err.into());
105 } else {
106 break;
107 }
108 }
109 }
110 }
111
112 // Log how many bytes were received over this pipe.
113 // You can visually compare this to the FFmpeg log output to confirm
114 // that all the expected bytes were captured.
115 let size_str = if total_bytes_read < 1024 {
116 format!("{}B", total_bytes_read)
117 } else {
118 format!("{}KiB", total_bytes_read / 1024)
119 };
120
121 if let Some(text_content) = text_content {
122 println!("[{pipe_name}] subtitle text content: ");
123 println!("{}", text_content.trim());
124 }
125
126 println!("[{pipe_name}] done reading ({size_str} total)");
127 Ok(())
128 });
129
130 Ok((thread, ready_sender))
131 })
132 .collect::<Result<Vec<_>>>()?;
133
134 // Start FFmpeg
135 let mut ready_signal_sent = false;
136 command
137 .print_command()
138 .spawn()?
139 .iter()?
140 .for_each(|event| match event {
141 // Signal threads when output is ready
142 FfmpegEvent::Progress(_) if !ready_signal_sent => {
143 threads.iter().for_each(|(_, sender)| {
144 sender.send(()).ok();
145 });
146 ready_signal_sent = true;
147 }
148
149 // Verify output size from FFmpeg logs (video/audio KiB)
150 FfmpegEvent::Log(LogLevel::Info, msg) if msg.starts_with("[out#") => {
151 println!("{msg}");
152 }
153
154 // Log any unexpected errors
155 FfmpegEvent::Log(LogLevel::Warning | LogLevel::Error | LogLevel::Fatal, msg) => {
156 eprintln!("{msg}");
157 }
158
159 _ => {}
160 });
161
162 for (thread, _) in threads {
163 thread.join().unwrap()?;
164 }
165
166 Ok(())
167}
Sourcepub fn no_overwrite(&mut self) -> &mut Self
pub fn no_overwrite(&mut self) -> &mut Self
Alias for -n
argument: do not overwrite output files, and exit
immediately if a specified output file already exists.
Sourcepub fn codec_video<S: AsRef<str>>(&mut self, codec: S) -> &mut Self
pub fn codec_video<S: AsRef<str>>(&mut self, codec: S) -> &mut Self
Alias for -c:v
argument.
Select an encoder (when used before an output file) or a decoder (when
used before an input file) for one or more video streams. codec
is the
name of a decoder/encoder or a special value copy
(output only) to
indicate that the stream is not to be re-encoded.
Examples found in repository?
13fn main() {
14 let mut ffmpeg = FfmpegCommand::new()
15 .realtime()
16 .format("lavfi")
17 .input("testsrc=size=1920x1080:rate=60")
18 .codec_video("rawvideo")
19 .format("avi")
20 .output("-")
21 .spawn()
22 .unwrap();
23
24 let mut ffplay = Command::new("ffplay")
25 .args("-i -".split(' '))
26 .stdin(Stdio::piped())
27 .spawn()
28 .unwrap();
29
30 let mut ffmpeg_stdout = ffmpeg.take_stdout().unwrap();
31 let mut ffplay_stdin = ffplay.stdin.take().unwrap();
32
33 // pipe from ffmpeg stdout to ffplay stdin
34 let buf = &mut [0u8; 4096];
35 loop {
36 let n = ffmpeg_stdout.read(buf).unwrap();
37 if n == 0 {
38 break;
39 }
40 ffplay_stdin.write_all(&buf[..n]).unwrap();
41 }
42}
Sourcepub fn codec_audio<S: AsRef<str>>(&mut self, codec: S) -> &mut Self
pub fn codec_audio<S: AsRef<str>>(&mut self, codec: S) -> &mut Self
Alias for -c:a
argument.
Select an encoder (when used before an output file) or a decoder (when
used before an input file) for one or more audio streams. codec
is the
name of a decoder/encoder or a special value copy
(output only) to
indicate that the stream is not to be re-encoded.
Sourcepub fn codec_subtitle<S: AsRef<str>>(&mut self, codec: S) -> &mut Self
pub fn codec_subtitle<S: AsRef<str>>(&mut self, codec: S) -> &mut Self
Alias for -c:s
argument.
Select an encoder (when used before an output file) or a decoder (when
used before an input file) for one or more subtitle streams. codec
is
the name of a decoder/encoder or a special value copy
(output only) to
indicate that the stream is not to be re-encoded.
Sourcepub fn duration<S: AsRef<str>>(&mut self, duration: S) -> &mut Self
pub fn duration<S: AsRef<str>>(&mut self, duration: S) -> &mut Self
Alias for -t
argument.
When used as an input option (before -i
), limit the duration of data
read from the input file.
When used as an output option (before an output url), stop writing the output after its duration reaches duration.
duration
must be a time duration specification, see (ffmpeg-utils)the
Time duration section in the ffmpeg-utils(1)
manual.
-to
and -t
are mutually exclusive and -t has priority.
Sourcepub fn to<S: AsRef<str>>(&mut self, position: S) -> &mut Self
pub fn to<S: AsRef<str>>(&mut self, position: S) -> &mut Self
Alias for -to
argument.
Stop writing the output or reading the input at position
. position
must be a time duration specification, see (ffmpeg-utils)the Time
duration section in the ffmpeg-utils(1)
manual.
-to
and -t
(aka duration()
) are mutually exclusive and -t
has
priority.
Sourcepub fn limit_file_size(&mut self, size_in_bytes: u32) -> &mut Self
pub fn limit_file_size(&mut self, size_in_bytes: u32) -> &mut Self
Alias for -fs
argument.
Set the file size limit, expressed in bytes. No further chunk of bytes is written after the limit is exceeded. The size of the output file is slightly more than the requested file size.
Sourcepub fn seek<S: AsRef<str>>(&mut self, position: S) -> &mut Self
pub fn seek<S: AsRef<str>>(&mut self, position: S) -> &mut Self
Alias for -ss
argument.
When used as an input option (before -i
), seeks in this input file to
position. Note that in most formats it is not possible to seek exactly, so
ffmpeg
will seek to the closest seek point before position
. When
transcoding and -accurate_seek
is enabled (the default), this extra
segment between the seek point and position
will be decoded and
discarded. When doing stream copy or when -noaccurate_seek
is used, it
will be preserved.
When used as an output option (before an output url), decodes but discards
input until the timestamps reach position
.
position
must be a time duration specification, see (ffmpeg-utils)the
Time duration section in the ffmpeg-utils(1)
manual.
Sourcepub fn seek_eof<S: AsRef<str>>(&mut self, position: S) -> &mut Self
pub fn seek_eof<S: AsRef<str>>(&mut self, position: S) -> &mut Self
Alias for -sseof
argument.
Like the -ss
option but relative to the “end of file”. That is, negative
values are earlier in the file; 0 is at EOF.
Sourcepub fn filter<S: AsRef<str>>(&mut self, filtergraph: S) -> &mut Self
pub fn filter<S: AsRef<str>>(&mut self, filtergraph: S) -> &mut Self
Alias for -filter
argument.
Create the filtergraph specified by filtergraph
and use it to filter the
stream.
filtergraph
is a description of the filtergraph to apply to the stream,
and must have a single input and a single output of the same type of the
stream. In the filtergraph, the input is associated to the label in
, and
the output to the label out
. See the ffmpeg-filters manual for more
information about the filtergraph syntax.
See the -filter_complex
option if
you want to create filtergraphs with multiple inputs and/or outputs.
Sourcepub fn crf(&mut self, crf: u32) -> &mut Self
pub fn crf(&mut self, crf: u32) -> &mut Self
Alias for -crf:v
argument.
Set CRF (Constant Rate Factor) for quality-based VBR (Variable BitRate)
Use this rate control mode if you want to keep the best quality and care less about the file size. Lower values mean better quality with a bigger average bitrate (0 usually means lossless).
Possible values depend on codec:
- 0-51 for h264 (default is 23), see ffmpeg encoding guide for h264 for more details
- 0-51 for h265 (default is 28), see ffmpeg encoding guide for h265 for more details
- 0-63 for vp9 (no default, 31 is recommended for 1080p HD video), see ffmpeg encoding guide for vp9 for more details
- 0-63 for av1(libaom-av1) (no default), see ffmpeg encoding guide for libaom for more details
- 0-63 for av1(libsvtav1) (default is 30), see ffmpeg encoding guide for svt-av1 for more details
Sourcepub fn frames(&mut self, framecount: u32) -> &mut Self
pub fn frames(&mut self, framecount: u32) -> &mut Self
Alias for -frames:v
argument.
Stop writing to the stream after framecount
frames.
See also: -frames:a
(audio), -frames:d
(data).
Sourcepub fn preset<S: AsRef<str>>(&mut self, preset: S) -> &mut Self
pub fn preset<S: AsRef<str>>(&mut self, preset: S) -> &mut Self
Alias for -preset:v
argument.
Set preset which is basically trade-off between encoding speed and compression ratio.
For h264 and h265 allowed values are:
- ultrafast
- superfast
- veryfast
- faster
- medium (default preset)
- slow
- slower
- veryslow
- placebo
For svt-av1, supported values are 0-13 (a higher number providing a higher encoding speed). Prior to version 0.9.0, valid values were 0-8.
For libaom supported values 0-11 (higher number providing a higher encoding speed)
VP9 has no presets
Sourcepub fn rate(&mut self, fps: f32) -> &mut Self
pub fn rate(&mut self, fps: f32) -> &mut Self
Alias for -r
argument.
Set frame rate (Hz value, fraction or abbreviation).
As an input option, ignore any timestamps stored in the file and instead
generate timestamps assuming constant frame rate fps
. This is not the
same as the -framerate
option used for some input formats like image2 or
v4l2 (it used to be the same in older versions of FFmpeg). If in doubt use
-framerate
instead of the input option -r
.
Sourcepub fn size(&mut self, width: u32, height: u32) -> &mut Self
pub fn size(&mut self, width: u32, height: u32) -> &mut Self
Alias for -s
argument.
Set frame size.
As an input option, this is a shortcut for the video_size
private
option, recognized by some demuxers for which the frame size is either not
stored in the file or is configurable – e.g. raw video or video grabbers.
As an output option, this inserts the scale
video filter to the end of
the corresponding filtergraph. Please use the scale
filter directly to
insert it at the beginning or some other place.
The format is 'wxh'
(default - same as source).
Sourcepub fn no_video(&mut self) -> &mut Self
pub fn no_video(&mut self) -> &mut Self
Alias for -vn
argument.
As an input option, blocks all video streams of a file from being filtered
or being automatically selected or mapped for any output. See -discard
option to disable streams individually.
As an output option, disables video recording i.e. automatic selection or
mapping of any video stream. For full manual control see the -map
option.
Sourcepub fn pix_fmt<S: AsRef<str>>(&mut self, format: S) -> &mut Self
pub fn pix_fmt<S: AsRef<str>>(&mut self, format: S) -> &mut Self
Alias for -pix_fmt
argument.
Set pixel format. Use -pix_fmts
to show all the supported pixel formats.
If the selected pixel format can not be selected, ffmpeg will print a
warning and select the best pixel format supported by the encoder. If
pix_fmt is prefixed by a +
, ffmpeg will exit with an error if the
requested pixel format can not be selected, and automatic conversions
inside filtergraphs are disabled. If pix_fmt is a single +
, ffmpeg
selects the same pixel format as the input (or graph output) and automatic
conversions are disabled.
Examples found in repository?
10fn main() -> Result<()> {
11 // Set up a TCP listener
12 const TCP_PORT: u32 = 3000;
13 let (exit_sender, exit_receiver) = channel::<()>();
14 let listener_thread = thread::spawn(|| listen_for_connections(TCP_PORT, exit_receiver));
15
16 // Wait for the listener to start
17 thread::sleep(Duration::from_millis(1000));
18
19 // Prepare an FFmpeg command with separate outputs for video, audio, and subtitles.
20 FfmpegCommand::new()
21 // Global flags
22 .hide_banner()
23 .overwrite() // <- overwrite required on windows
24 // Generate test video
25 .format("lavfi")
26 .input("testsrc=size=1920x1080:rate=60:duration=10")
27 // Generate test audio
28 .format("lavfi")
29 .input("sine=frequency=1000:duration=10")
30 // Generate test subtitles
31 .format("srt")
32 .input(
33 "data:text/plain;base64,MQ0KMDA6MDA6MDAsMDAwIC0tPiAwMDowMDoxMCw1MDANCkhlbGxvIFdvcmxkIQ==",
34 )
35 // Video output
36 .map("0:v")
37 .format("rawvideo")
38 .pix_fmt("rgb24")
39 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
40 // Audio output
41 .map("1:a")
42 .format("s16le")
43 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
44 // Subtitles output
45 .map("2:s")
46 .format("srt")
47 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
48 .print_command()
49 .spawn()?
50 .iter()?
51 .for_each(|event| match event {
52 // Verify output size from FFmpeg logs (video/audio KiB)
53 FfmpegEvent::Log(LogLevel::Info, msg) if msg.starts_with("[out#") => {
54 println!("{msg}");
55 }
56
57 // Log any unexpected errors
58 FfmpegEvent::Log(LogLevel::Warning | LogLevel::Error | LogLevel::Fatal, msg) => {
59 eprintln!("{msg}");
60 }
61
62 // _ => {}
63 e => {
64 println!("{:?}", e);
65 }
66 });
67 exit_sender.send(())?;
68 listener_thread.join().unwrap()?;
69 Ok(())
70}
More examples
12fn main() -> anyhow::Result<()> {
13 use anyhow::Result;
14 use ffmpeg_sidecar::command::FfmpegCommand;
15 use ffmpeg_sidecar::event::{FfmpegEvent, LogLevel};
16 use ffmpeg_sidecar::named_pipes::NamedPipe;
17 use ffmpeg_sidecar::pipe_name;
18 use std::io::Read;
19 use std::sync::mpsc;
20 use std::thread;
21
22 const VIDEO_PIPE_NAME: &str = pipe_name!("ffmpeg_video");
23 const AUDIO_PIPE_NAME: &str = pipe_name!("ffmpeg_audio");
24 const SUBTITLES_PIPE_NAME: &str = pipe_name!("ffmpeg_subtitles");
25
26 // Prepare an FFmpeg command with separate outputs for video, audio, and subtitles.
27 let mut command = FfmpegCommand::new();
28 command
29 // Global flags
30 .hide_banner()
31 .overwrite() // <- overwrite required on windows
32 // Generate test video
33 .format("lavfi")
34 .input("testsrc=size=1920x1080:rate=60:duration=10")
35 // Generate test audio
36 .format("lavfi")
37 .input("sine=frequency=1000:duration=10")
38 // Generate test subtitles
39 .format("srt")
40 .input(
41 "data:text/plain;base64,MQ0KMDA6MDA6MDAsMDAwIC0tPiAwMDowMDoxMCw1MDANCkhlbGxvIFdvcmxkIQ==",
42 )
43 // Video output
44 .map("0:v")
45 .format("rawvideo")
46 .pix_fmt("rgb24")
47 .output(VIDEO_PIPE_NAME)
48 // Audio output
49 .map("1:a")
50 .format("s16le")
51 .output(AUDIO_PIPE_NAME)
52 // Subtitles output
53 .map("2:s")
54 .format("srt")
55 .output(SUBTITLES_PIPE_NAME);
56
57 // Create a separate thread for each output pipe
58 let threads = [VIDEO_PIPE_NAME, AUDIO_PIPE_NAME, SUBTITLES_PIPE_NAME]
59 .iter()
60 .cloned()
61 .map(|pipe_name| {
62 // It's important to create the named pipe on the main thread before
63 // sending it elsewhere so that any errors are caught at the top level.
64 let mut pipe = NamedPipe::new(pipe_name)?;
65 println!("[{pipe_name}] pipe created");
66 let (ready_sender, ready_receiver) = mpsc::channel::<()>();
67 let thread = thread::spawn(move || -> Result<()> {
68 // Wait for FFmpeg to start writing
69 // Only needed for Windows, since Unix will block until a writer has connected
70 println!("[{pipe_name}] waiting for ready signal");
71 ready_receiver.recv()?;
72
73 // Read continuously until finished
74 // Note that if the stream of output is interrupted or paused,
75 // you may need additional logic to keep the read loop alive.
76 println!("[{pipe_name}] reading from pipe");
77 let mut buf = vec![0; 1920 * 1080 * 3];
78 let mut total_bytes_read = 0;
79
80 // In the case of subtitles, we'll decode the string contents directly
81 let mut text_content = if pipe_name == SUBTITLES_PIPE_NAME {
82 Some("".to_string())
83 } else {
84 None
85 };
86
87 loop {
88 match pipe.read(&mut buf) {
89 Ok(bytes_read) => {
90 total_bytes_read += bytes_read;
91
92 // read bytes into string
93 if let Some(cur_str) = &mut text_content {
94 let s = std::str::from_utf8(&buf[..bytes_read]).unwrap();
95 text_content = Some(format!("{}{}", cur_str, s));
96 }
97
98 if bytes_read == 0 {
99 break;
100 }
101 }
102 Err(err) => {
103 if err.kind() != std::io::ErrorKind::BrokenPipe {
104 return Err(err.into());
105 } else {
106 break;
107 }
108 }
109 }
110 }
111
112 // Log how many bytes were received over this pipe.
113 // You can visually compare this to the FFmpeg log output to confirm
114 // that all the expected bytes were captured.
115 let size_str = if total_bytes_read < 1024 {
116 format!("{}B", total_bytes_read)
117 } else {
118 format!("{}KiB", total_bytes_read / 1024)
119 };
120
121 if let Some(text_content) = text_content {
122 println!("[{pipe_name}] subtitle text content: ");
123 println!("{}", text_content.trim());
124 }
125
126 println!("[{pipe_name}] done reading ({size_str} total)");
127 Ok(())
128 });
129
130 Ok((thread, ready_sender))
131 })
132 .collect::<Result<Vec<_>>>()?;
133
134 // Start FFmpeg
135 let mut ready_signal_sent = false;
136 command
137 .print_command()
138 .spawn()?
139 .iter()?
140 .for_each(|event| match event {
141 // Signal threads when output is ready
142 FfmpegEvent::Progress(_) if !ready_signal_sent => {
143 threads.iter().for_each(|(_, sender)| {
144 sender.send(()).ok();
145 });
146 ready_signal_sent = true;
147 }
148
149 // Verify output size from FFmpeg logs (video/audio KiB)
150 FfmpegEvent::Log(LogLevel::Info, msg) if msg.starts_with("[out#") => {
151 println!("{msg}");
152 }
153
154 // Log any unexpected errors
155 FfmpegEvent::Log(LogLevel::Warning | LogLevel::Error | LogLevel::Fatal, msg) => {
156 eprintln!("{msg}");
157 }
158
159 _ => {}
160 });
161
162 for (thread, _) in threads {
163 thread.join().unwrap()?;
164 }
165
166 Ok(())
167}
Source
pub fn hwaccel<S: AsRef<str>>(&mut self, hwaccel: S) -> &mut Self
pub fn hwaccel<S: AsRef<str>>(&mut self, hwaccel: S) -> &mut Self
Alias for -hwaccel
argument.
Use hardware acceleration to decode the matching stream(s). The allowed values of hwaccel are:
none: Do not use any hardware acceleration (the default).
auto: Automatically select the hardware acceleration method.
vdpau: Use VDPAU (Video Decode and Presentation API for Unix) hardware acceleration.
dxva2: Use DXVA2 (DirectX Video Acceleration) hardware acceleration.
d3d11va: Use D3D11VA (DirectX Video Acceleration) hardware acceleration.
vaapi: Use VAAPI (Video Acceleration API) hardware acceleration.
qsv: Use the Intel QuickSync Video acceleration for video transcoding.
- Unlike most other values, this option does not enable accelerated decoding (that is used automatically whenever a qsv decoder is selected), but accelerated transcoding, without copying the frames into the system memory.
- For it to work, both the decoder and the encoder must support QSV acceleration and no filters must be used.
This option has no effect if the selected hwaccel is not available or not supported by the chosen decoder.
Note that most acceleration methods are intended for playback and will not
be faster than software decoding on modern CPUs. Additionally, ffmpeg
will usually need to copy the decoded frames from the GPU memory into the
system memory, resulting in further performance loss. This option is thus
mainly useful for testing.
Source
pub fn no_audio(&mut self) -> &mut Self
pub fn no_audio(&mut self) -> &mut Self
Alias for -an
argument.
As an input option, blocks all audio streams of a file from being filtered
or being automatically selected or mapped for any output. See -discard
option to disable streams individually.
As an output option, disables audio recording i.e. automatic selection or
mapping of any audio stream. For full manual control see the -map
option.
Source
pub fn map<S: AsRef<str>>(&mut self, map_string: S) -> &mut Self
pub fn map<S: AsRef<str>>(&mut self, map_string: S) -> &mut Self
Alias for -map
argument.
Create one or more streams in the output file. This option has two forms
for specifying the data source(s): the first selects one or more streams
from some input file (specified with -i
), the second takes an output
from some complex filtergraph (specified with -filter_complex
or
-filter_complex_script
).
In the first form, an output stream is created for every stream from the input file with the index input_file_id. If stream_specifier is given, only those streams that match the specifier are used (see the Stream specifiers section for the stream_specifier syntax).
A -
character before the stream identifier creates a “negative” mapping.
It disables matching streams from already created mappings.
A trailing ?
after the stream index will allow the map to be optional:
if the map matches no streams the map will be ignored instead of failing.
Note the map will still fail if an invalid input file index is used; such
as if the map refers to a non-existent input.
An alternative [linklabel]
form will map outputs from complex filter
graphs (see the -filter_complex
option) to the output file. linklabel
must correspond to a defined output link label in the graph.
This option may be specified multiple times, each adding more streams to
the output file. Any given input stream may also be mapped any number of
times as a source for different output streams, e.g. in order to use
different encoding options and/or filters. The streams are created in the
output in the same order in which the -map
options are given on the
commandline.
Using this option disables the default mappings for this output file.
Examples found in repository?
10fn main() -> Result<()> {
11 // Set up a TCP listener
12 const TCP_PORT: u32 = 3000;
13 let (exit_sender, exit_receiver) = channel::<()>();
14 let listener_thread = thread::spawn(|| listen_for_connections(TCP_PORT, exit_receiver));
15
16 // Wait for the listener to start
17 thread::sleep(Duration::from_millis(1000));
18
19 // Prepare an FFmpeg command with separate outputs for video, audio, and subtitles.
20 FfmpegCommand::new()
21 // Global flags
22 .hide_banner()
23 .overwrite() // <- overwrite required on windows
24 // Generate test video
25 .format("lavfi")
26 .input("testsrc=size=1920x1080:rate=60:duration=10")
27 // Generate test audio
28 .format("lavfi")
29 .input("sine=frequency=1000:duration=10")
30 // Generate test subtitles
31 .format("srt")
32 .input(
33 "data:text/plain;base64,MQ0KMDA6MDA6MDAsMDAwIC0tPiAwMDowMDoxMCw1MDANCkhlbGxvIFdvcmxkIQ==",
34 )
35 // Video output
36 .map("0:v")
37 .format("rawvideo")
38 .pix_fmt("rgb24")
39 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
40 // Audio output
41 .map("1:a")
42 .format("s16le")
43 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
44 // Subtitles output
45 .map("2:s")
46 .format("srt")
47 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
48 .print_command()
49 .spawn()?
50 .iter()?
51 .for_each(|event| match event {
52 // Verify output size from FFmpeg logs (video/audio KiB)
53 FfmpegEvent::Log(LogLevel::Info, msg) if msg.starts_with("[out#") => {
54 println!("{msg}");
55 }
56
57 // Log any unexpected errors
58 FfmpegEvent::Log(LogLevel::Warning | LogLevel::Error | LogLevel::Fatal, msg) => {
59 eprintln!("{msg}");
60 }
61
62 // _ => {}
63 e => {
64 println!("{:?}", e);
65 }
66 });
67 exit_sender.send(())?;
68 listener_thread.join().unwrap()?;
69 Ok(())
70}
More examples
12fn main() -> anyhow::Result<()> {
13 use anyhow::Result;
14 use ffmpeg_sidecar::command::FfmpegCommand;
15 use ffmpeg_sidecar::event::{FfmpegEvent, LogLevel};
16 use ffmpeg_sidecar::named_pipes::NamedPipe;
17 use ffmpeg_sidecar::pipe_name;
18 use std::io::Read;
19 use std::sync::mpsc;
20 use std::thread;
21
22 const VIDEO_PIPE_NAME: &str = pipe_name!("ffmpeg_video");
23 const AUDIO_PIPE_NAME: &str = pipe_name!("ffmpeg_audio");
24 const SUBTITLES_PIPE_NAME: &str = pipe_name!("ffmpeg_subtitles");
25
26 // Prepare an FFmpeg command with separate outputs for video, audio, and subtitles.
27 let mut command = FfmpegCommand::new();
28 command
29 // Global flags
30 .hide_banner()
31 .overwrite() // <- overwrite required on windows
32 // Generate test video
33 .format("lavfi")
34 .input("testsrc=size=1920x1080:rate=60:duration=10")
35 // Generate test audio
36 .format("lavfi")
37 .input("sine=frequency=1000:duration=10")
38 // Generate test subtitles
39 .format("srt")
40 .input(
41 "data:text/plain;base64,MQ0KMDA6MDA6MDAsMDAwIC0tPiAwMDowMDoxMCw1MDANCkhlbGxvIFdvcmxkIQ==",
42 )
43 // Video output
44 .map("0:v")
45 .format("rawvideo")
46 .pix_fmt("rgb24")
47 .output(VIDEO_PIPE_NAME)
48 // Audio output
49 .map("1:a")
50 .format("s16le")
51 .output(AUDIO_PIPE_NAME)
52 // Subtitles output
53 .map("2:s")
54 .format("srt")
55 .output(SUBTITLES_PIPE_NAME);
56
57 // Create a separate thread for each output pipe
58 let threads = [VIDEO_PIPE_NAME, AUDIO_PIPE_NAME, SUBTITLES_PIPE_NAME]
59 .iter()
60 .cloned()
61 .map(|pipe_name| {
62 // It's important to create the named pipe on the main thread before
63 // sending it elsewhere so that any errors are caught at the top level.
64 let mut pipe = NamedPipe::new(pipe_name)?;
65 println!("[{pipe_name}] pipe created");
66 let (ready_sender, ready_receiver) = mpsc::channel::<()>();
67 let thread = thread::spawn(move || -> Result<()> {
68 // Wait for FFmpeg to start writing
69 // Only needed for Windows, since Unix will block until a writer has connected
70 println!("[{pipe_name}] waiting for ready signal");
71 ready_receiver.recv()?;
72
73 // Read continuously until finished
74 // Note that if the stream of output is interrupted or paused,
75 // you may need additional logic to keep the read loop alive.
76 println!("[{pipe_name}] reading from pipe");
77 let mut buf = vec![0; 1920 * 1080 * 3];
78 let mut total_bytes_read = 0;
79
80 // In the case of subtitles, we'll decode the string contents directly
81 let mut text_content = if pipe_name == SUBTITLES_PIPE_NAME {
82 Some("".to_string())
83 } else {
84 None
85 };
86
87 loop {
88 match pipe.read(&mut buf) {
89 Ok(bytes_read) => {
90 total_bytes_read += bytes_read;
91
92 // read bytes into string
93 if let Some(cur_str) = &mut text_content {
94 let s = std::str::from_utf8(&buf[..bytes_read]).unwrap();
95 text_content = Some(format!("{}{}", cur_str, s));
96 }
97
98 if bytes_read == 0 {
99 break;
100 }
101 }
102 Err(err) => {
103 if err.kind() != std::io::ErrorKind::BrokenPipe {
104 return Err(err.into());
105 } else {
106 break;
107 }
108 }
109 }
110 }
111
112 // Log how many bytes were received over this pipe.
113 // You can visually compare this to the FFmpeg log output to confirm
114 // that all the expected bytes were captured.
115 let size_str = if total_bytes_read < 1024 {
116 format!("{}B", total_bytes_read)
117 } else {
118 format!("{}KiB", total_bytes_read / 1024)
119 };
120
121 if let Some(text_content) = text_content {
122 println!("[{pipe_name}] subtitle text content: ");
123 println!("{}", text_content.trim());
124 }
125
126 println!("[{pipe_name}] done reading ({size_str} total)");
127 Ok(())
128 });
129
130 Ok((thread, ready_sender))
131 })
132 .collect::<Result<Vec<_>>>()?;
133
134 // Start FFmpeg
135 let mut ready_signal_sent = false;
136 command
137 .print_command()
138 .spawn()?
139 .iter()?
140 .for_each(|event| match event {
141 // Signal threads when output is ready
142 FfmpegEvent::Progress(_) if !ready_signal_sent => {
143 threads.iter().for_each(|(_, sender)| {
144 sender.send(()).ok();
145 });
146 ready_signal_sent = true;
147 }
148
149 // Verify output size from FFmpeg logs (video/audio KiB)
150 FfmpegEvent::Log(LogLevel::Info, msg) if msg.starts_with("[out#") => {
151 println!("{msg}");
152 }
153
154 // Log any unexpected errors
155 FfmpegEvent::Log(LogLevel::Warning | LogLevel::Error | LogLevel::Fatal, msg) => {
156 eprintln!("{msg}");
157 }
158
159 _ => {}
160 });
161
162 for (thread, _) in threads {
163 thread.join().unwrap()?;
164 }
165
166 Ok(())
167}
Source
pub fn readrate(&mut self, speed: f32) -> &mut Self
pub fn readrate(&mut self, speed: f32) -> &mut Self
Alias for -readrate
argument.
Limit input read speed.
Its value is a floating-point positive number which represents the maximum
duration of media, in seconds, that should be ingested in one second of
wallclock time. Default value is zero and represents no imposed limitation
on speed of ingestion. Value 1
represents real-time speed and is
equivalent to -re
.
Mainly used to simulate a capture device or live input stream (e.g. when reading from a file). Should not be used with a low value when input is an actual capture device or live stream as it may cause packet loss.
It is useful for when flow speed of output packets is important, such as live streaming.
Source
pub fn realtime(&mut self) -> &mut Self
pub fn realtime(&mut self) -> &mut Self
Alias for -re
.
Read input at native frame rate. This is equivalent to setting -readrate 1
.
Examples found in repository?
13fn main() {
14 let mut ffmpeg = FfmpegCommand::new()
15 .realtime()
16 .format("lavfi")
17 .input("testsrc=size=1920x1080:rate=60")
18 .codec_video("rawvideo")
19 .format("avi")
20 .output("-")
21 .spawn()
22 .unwrap();
23
24 let mut ffplay = Command::new("ffplay")
25 .args("-i -".split(' '))
26 .stdin(Stdio::piped())
27 .spawn()
28 .unwrap();
29
30 let mut ffmpeg_stdout = ffmpeg.take_stdout().unwrap();
31 let mut ffplay_stdin = ffplay.stdin.take().unwrap();
32
33 // pipe from ffmpeg stdout to ffplay stdin
34 let buf = &mut [0u8; 4096];
35 loop {
36 let n = ffmpeg_stdout.read(buf).unwrap();
37 if n == 0 {
38 break;
39 }
40 ffplay_stdin.write_all(&buf[..n]).unwrap();
41 }
42}
Source
pub fn fps_mode<S: AsRef<str>>(&mut self, parameter: S) -> &mut Self
pub fn fps_mode<S: AsRef<str>>(&mut self, parameter: S) -> &mut Self
Alias for -fps_mode
argument.
Set video sync method / framerate mode. vsync is applied to all output video streams but can be overridden for a stream by setting fps_mode. vsync is deprecated and will be removed in the future.
For compatibility reasons some of the values for vsync can be specified as numbers (shown in parentheses in the following table).
passthrough (0): Each frame is passed with its timestamp from the demuxer to the muxer.
cfr (1): Frames will be duplicated and dropped to achieve exactly the requested constant frame rate.
vfr (2): Frames are passed through with their timestamp or dropped so as to prevent 2 frames from having the same timestamp.
drop: As passthrough but destroys all timestamps, making the muxer generate fresh timestamps based on frame-rate.
auto (-1): Chooses between cfr and vfr depending on muxer capabilities. This is the default method.
Source
pub fn bitstream_filter_video<S: AsRef<str>>(
&mut self,
bitstream_filters: S,
) -> &mut Self
pub fn bitstream_filter_video<S: AsRef<str>>( &mut self, bitstream_filters: S, ) -> &mut Self
Alias for -bsf:v
argument.
Set bitstream filters for matching streams. bitstream_filters
is a
comma-separated list of bitstream filters. Use the -bsfs
option to get
the list of bitstream filters.
See also: -bsf:s
(subtitles), -bsf:a
(audio), -bsf:d
(data)
Source
pub fn filter_complex<S: AsRef<str>>(&mut self, filtergraph: S) -> &mut Self
pub fn filter_complex<S: AsRef<str>>(&mut self, filtergraph: S) -> &mut Self
Alias for -filter_complex
argument.
Define a complex filtergraph, i.e. one with arbitrary number of inputs
and/or outputs. For simple graphs – those with one input and one output of
the same type – see the -filter
options. filtergraph
is a description
of the filtergraph, as described in the “Filtergraph syntax” section of
the ffmpeg-filters manual.
Input link labels must refer to input streams using the
[file_index:stream_specifier]
syntax (i.e. the same as -map
uses). If
stream_specifier
matches multiple streams, the first one will be used.
An unlabeled input will be connected to the first unused input stream of
the matching type.
Output link labels are referred to with -map
. Unlabeled outputs are
added to the first output file.
Note that with this option it is possible to use only lavfi sources without normal input files.
Source
pub fn testsrc(&mut self) -> &mut Self
pub fn testsrc(&mut self) -> &mut Self
Generate a procedural test video. Equivalent to ffmpeg -f lavfi -i testsrc=duration=10
. It also inherits defaults from the testsrc
filter
in FFmpeg: 320x240
size and 25
fps.
Examples found in repository?
4fn main() {
5 let mut ffmpeg_runner = FfmpegCommand::new()
6 .testsrc()
7 .args(["-metadata", "title=some cool title"])
8 .overwrite() // -y
9 .output("output/metadata.mp4")
10 .print_command()
11 .spawn()
12 .unwrap();
13
14 ffmpeg_runner
15 .iter()
16 .unwrap()
17 .for_each(|e| {
18 match e {
19 FfmpegEvent::Progress(FfmpegProgress { frame, .. }) =>
20 println!("Current frame: {frame}"),
21 FfmpegEvent::Log(_level, msg) =>
22 println!("[ffmpeg] {msg}"),
23 _ => {}
24 }
25 });
26}
More examples
8fn main() -> anyhow::Result<()> {
9 // Run an FFmpeg command that generates a test video
10 let iter = FfmpegCommand::new() // <- Builder API like `std::process::Command`
11 .testsrc() // <- Discoverable aliases for FFmpeg args
12 .rawvideo() // <- Convenient argument presets
13 .spawn()? // <- Ordinary `std::process::Child`
14 .iter()?; // <- Blocking iterator over logs and output
15
16 // Use a regular "for" loop to read decoded video data
17 for frame in iter.filter_frames() {
18 println!("frame: {}x{}", frame.width, frame.height);
19 let _pixels: Vec<u8> = frame.data; // <- raw RGB pixels! 🎨
20 }
21
22 Ok(())
23}
Source
pub fn rawvideo(&mut self) -> &mut Self
pub fn rawvideo(&mut self) -> &mut Self
Preset for emitting raw decoded video frames on stdout. Equivalent to -f rawvideo -pix_fmt rgb24 -
.
Examples found in repository?
8fn main() -> anyhow::Result<()> {
9 // Run an FFmpeg command that generates a test video
10 let iter = FfmpegCommand::new() // <- Builder API like `std::process::Command`
11 .testsrc() // <- Discoverable aliases for FFmpeg args
12 .rawvideo() // <- Convenient argument presets
13 .spawn()? // <- Ordinary `std::process::Child`
14 .iter()?; // <- Blocking iterator over logs and output
15
16 // Use a regular "for" loop to read decoded video data
17 for frame in iter.filter_frames() {
18 println!("frame: {}x{}", frame.width, frame.height);
19 let _pixels: Vec<u8> = frame.data; // <- raw RGB pixels! 🎨
20 }
21
22 Ok(())
23}
More examples
9fn main() -> Result<()> {
10 let iter = FfmpegCommand::new()
11 .format("lavfi")
12 .arg("-re") // "realtime"
13 .input(format!(
14 "testsrc=size={OUTPUT_WIDTH}x{OUTPUT_HEIGHT}:rate={OUTPUT_FRAMERATE}"
15 ))
16 .rawvideo()
17 .spawn()?
18 .iter()?
19 .filter_frames();
20
21 for frame in iter {
22 // clear the previous frame
23 if frame.frame_num > 0 {
24 for _ in 0..frame.height {
25 print!("\x1B[{}A", 1);
26 }
27 }
28
29 // Print the pixels colored with ANSI codes
30 for y in 0..frame.height {
31 for x in 0..frame.width {
32 let idx = (y * frame.width + x) as usize * 3;
33 let r = frame.data[idx] as u32;
34 let g = frame.data[idx + 1] as u32;
35 let b = frame.data[idx + 2] as u32;
36 print!("\x1B[48;2;{r};{g};{b}m ");
37 }
38 println!("\x1B[0m");
39 }
40 }
41
42 Ok(())
43}
16fn main() {
17 // Create an H265 source video as a starting point
18 let input_path = "output/h265.mp4";
19 if !Path::new(input_path).exists() {
20 create_h265_source(input_path);
21 }
22
23 // One instance decodes H265 to raw frames
24 let mut input = FfmpegCommand::new()
25 .input(input_path)
26 .rawvideo()
27 .spawn()
28 .unwrap();
29
30 // Frames can be transformed by Iterator `.map()`.
31 // This example is a no-op, with frames passed through unaltered.
32 let transformed_frames = input.iter().unwrap().filter_frames();
33
34 // You could easily add some "middleware" processing here:
35 // - overlay or composite another RGB image (or even another Ffmpeg Iterator)
36 // - apply a filter like blur or convolution
37 // Note: some of these operations are also possible with FFmpeg's (somewhat arcane)
38 // `filtergraph` API, but doing it in Rust gives you much finer-grained
39 // control, debuggability, and modularity -- you can pull in any Rust crate
40 // you need.
41
42 // A second instance encodes the updated frames back to H265
43 let mut output = FfmpegCommand::new()
44 .args([
45 "-f", "rawvideo", "-pix_fmt", "rgb24", "-s", "600x800", "-r", "30",
46 ]) // note: should be possible to infer these params from the source input stream
47 .input("-")
48 .args(["-c:v", "libx265"])
49 .args(["-y", "output/h265_overlay.mp4"])
50 .spawn()
51 .unwrap();
52
53 // Connect the two instances
54 let mut stdin = output.take_stdin().unwrap();
55 thread::spawn(move || {
56 // `for_each` blocks through the end of the iterator,
57 // so we run it in another thread.
58 transformed_frames.for_each(|f| {
59 stdin.write_all(&f.data).ok();
60 });
61 });
62
63 // On the main thread, run the output instance to completion
64 output.iter().unwrap().for_each(|e| match e {
65 FfmpegEvent::Log(LogLevel::Error, e) => println!("Error: {}", e),
66 FfmpegEvent::Progress(p) => println!("Progress: {} / 00:00:15", p.time),
67 _ => {}
68 });
69}
Source
pub fn pipe_stdout(&mut self) -> &mut Self
pub fn pipe_stdout(&mut self) -> &mut Self
Configure the ffmpeg command to produce output on stdout.
Synchronizes two changes:
- Pass pipe:1 to the ffmpeg command (“output on stdout”)
- Set the stdout field of the inner Command to Stdio::piped()
Source
pub fn arg<S: AsRef<OsStr>>(&mut self, arg: S) -> &mut Self
pub fn arg<S: AsRef<OsStr>>(&mut self, arg: S) -> &mut Self
Adds an argument to pass to the program.
Identical to arg
in std::process::Command
.
Examples found in repository?
72fn create_h265_source(path_str: &str) {
73 println!("Creating H265 source video: {}", path_str);
74 FfmpegCommand::new()
75 .args("-f lavfi -i testsrc=size=600x800:rate=30:duration=15 -c:v libx265".split(' '))
76 .arg(path_str)
77 .spawn()
78 .unwrap()
79 .iter()
80 .unwrap()
81 .for_each(|e| match e {
82 FfmpegEvent::Log(LogLevel::Error, e) => println!("Error: {}", e),
83 FfmpegEvent::Progress(p) => println!("Progress: {} / 00:00:15", p.time),
84 _ => {}
85 });
86 println!("Created H265 source video: {}", path_str);
87}
More examples
9fn main() -> Result<()> {
10 let iter = FfmpegCommand::new()
11 .format("lavfi")
12 .arg("-re") // "realtime"
13 .input(format!(
14 "testsrc=size={OUTPUT_WIDTH}x{OUTPUT_HEIGHT}:rate={OUTPUT_FRAMERATE}"
15 ))
16 .rawvideo()
17 .spawn()?
18 .iter()?
19 .filter_frames();
20
21 for frame in iter {
22 // clear the previous frame
23 if frame.frame_num > 0 {
24 for _ in 0..frame.height {
25 print!("\x1B[{}A", 1);
26 }
27 }
28
29 // Print the pixels colored with ANSI codes
30 for y in 0..frame.height {
31 for x in 0..frame.width {
32 let idx = (y * frame.width + x) as usize * 3;
33 let r = frame.data[idx] as u32;
34 let g = frame.data[idx + 1] as u32;
35 let b = frame.data[idx + 2] as u32;
36 print!("\x1B[48;2;{r};{g};{b}m ");
37 }
38 println!("\x1B[0m");
39 }
40 }
41
42 Ok(())
43}
Source
pub fn args<I, S>(&mut self, args: I) -> &mut Self
pub fn args<I, S>(&mut self, args: I) -> &mut Self
Adds multiple arguments to pass to the program.
Identical to args
in std::process::Command
.
Examples found in repository?
9fn main() {
10 let fps = 60;
11 let duration = 10;
12 let total_frames = fps * duration;
13 let arg_string = format!(
14 "-f lavfi -i testsrc=duration={}:size=1920x1080:rate={} -y output/test.mp4",
15 duration, fps
16 );
17 FfmpegCommand::new()
18 .args(arg_string.split(' '))
19 .spawn()
20 .unwrap()
21 .iter()
22 .unwrap()
23 .filter_progress()
24 .for_each(|progress| println!("{}%", (progress.frame * 100) / total_frames));
25}
More examples
4fn main() {
5 let mut ffmpeg_runner = FfmpegCommand::new()
6 .testsrc()
7 .args(["-metadata", "title=some cool title"])
8 .overwrite() // -y
9 .output("output/metadata.mp4")
10 .print_command()
11 .spawn()
12 .unwrap();
13
14 ffmpeg_runner
15 .iter()
16 .unwrap()
17 .for_each(|e| {
18 match e {
19 FfmpegEvent::Progress(FfmpegProgress { frame, .. }) =>
20 println!("Current frame: {frame}"),
21 FfmpegEvent::Log(_level, msg) =>
22 println!("[ffmpeg] {msg}"),
23 _ => {}
24 }
25 });
26}
16fn main() {
17 // Create an H265 source video as a starting point
18 let input_path = "output/h265.mp4";
19 if !Path::new(input_path).exists() {
20 create_h265_source(input_path);
21 }
22
23 // One instance decodes H265 to raw frames
24 let mut input = FfmpegCommand::new()
25 .input(input_path)
26 .rawvideo()
27 .spawn()
28 .unwrap();
29
30 // Frames can be transformed by Iterator `.map()`.
31 // This example is a no-op, with frames passed through unaltered.
32 let transformed_frames = input.iter().unwrap().filter_frames();
33
34 // You could easily add some "middleware" processing here:
35 // - overlay or composite another RGB image (or even another Ffmpeg Iterator)
36 // - apply a filter like blur or convolution
37 // Note: some of these operations are also possible with FFmpeg's (somewhat arcane)
38 // `filtergraph` API, but doing it in Rust gives you much finer-grained
39 // control, debuggability, and modularity -- you can pull in any Rust crate
40 // you need.
41
42 // A second instance encodes the updated frames back to H265
43 let mut output = FfmpegCommand::new()
44 .args([
45 "-f", "rawvideo", "-pix_fmt", "rgb24", "-s", "600x800", "-r", "30",
46 ]) // note: should be possible to infer these params from the source input stream
47 .input("-")
48 .args(["-c:v", "libx265"])
49 .args(["-y", "output/h265_overlay.mp4"])
50 .spawn()
51 .unwrap();
52
53 // Connect the two instances
54 let mut stdin = output.take_stdin().unwrap();
55 thread::spawn(move || {
56 // `for_each` blocks through the end of the iterator,
57 // so we run it in another thread.
58 transformed_frames.for_each(|f| {
59 stdin.write_all(&f.data).ok();
60 });
61 });
62
63 // On the main thread, run the output instance to completion
64 output.iter().unwrap().for_each(|e| match e {
65 FfmpegEvent::Log(LogLevel::Error, e) => println!("Error: {}", e),
66 FfmpegEvent::Progress(p) => println!("Progress: {} / 00:00:15", p.time),
67 _ => {}
68 });
69}
70
71/// Create a H265 source video from scratch
72fn create_h265_source(path_str: &str) {
73 println!("Creating H265 source video: {}", path_str);
74 FfmpegCommand::new()
75 .args("-f lavfi -i testsrc=size=600x800:rate=30:duration=15 -c:v libx265".split(' '))
76 .arg(path_str)
77 .spawn()
78 .unwrap()
79 .iter()
80 .unwrap()
81 .for_each(|e| match e {
82 FfmpegEvent::Log(LogLevel::Error, e) => println!("Error: {}", e),
83 FfmpegEvent::Progress(p) => println!("Progress: {} / 00:00:15", p.time),
84 _ => {}
85 });
86 println!("Created H265 source video: {}", path_str);
87}
10pub fn main() -> Result<()> {
11 if cfg!(not(windows)) {
12 eprintln!("Note: Methods for capturing audio are platform-specific and this demo is intended for Windows.");
13 eprintln!("On Linux or Mac, you need to switch from the `dshow` format to a different one supported on your platform.");
14 eprintln!("Make sure to also include format-specific arguments such as `-audio_buffer_size`.");
15 eprintln!("Pull requests are welcome to make this demo cross-platform!");
16 }
17
18 // First step: find default audio input device
19 // Runs an `ffmpeg -list_devices` command and selects the first one found
20 // Sample log output: [dshow @ 000001c9babdb000] "Headset Microphone (Arctis 7 Chat)" (audio)
21
22 let audio_device = FfmpegCommand::new()
23 .hide_banner()
24 .args(&["-list_devices", "true"])
25 .format("dshow")
26 .input("dummy")
27 .spawn()?
28 .iter()?
29 .into_ffmpeg_stderr()
30 .find(|line| line.contains("(audio)"))
31 .map(|line| line.split('\"').nth(1).map(|s| s.to_string()))
32 .context("No audio device found")?
33 .context("Failed to parse audio device")?;
34
35 println!("Listening to audio device: {}", audio_device);
36
37 // Second step: Capture audio and analyze w/ `ebur128` audio filter
38 // Loudness metadata will be printed to the FFmpeg logs
39 // Docs: <https://ffmpeg.org/ffmpeg-filters.html#ebur128-1>
40
41 let iter = FfmpegCommand::new()
42 .format("dshow")
43 .args("-audio_buffer_size 50".split(' ')) // reduces latency to 50ms (dshow-specific)
44 .input(format!("audio={audio_device}"))
45 .args("-af ebur128=metadata=1,ametadata=print".split(' '))
46 .format("null")
47 .output("-")
48 .spawn()?
49 .iter()?;
50
51 // Note: even though the audio device name may have spaces, it should *not* be
52 // in quotes (""). Quotes are only needed on the command line to separate
53 // different arguments. Since Rust invokes the command directly without a
54 // shell interpreter, args are already divided up correctly. Any quotes
55 // would be included in the device name instead and the command would fail.
56 // <https://github.com/fluent-ffmpeg/node-fluent-ffmpeg/issues/648#issuecomment-866242144>
57
58 let mut first_volume_event = true;
59 for event in iter {
60 match event {
61 FfmpegEvent::Error(e) | FfmpegEvent::Log(LogLevel::Error | LogLevel::Fatal, e) => {
62 eprintln!("{e}");
63 }
64 FfmpegEvent::Log(LogLevel::Info, msg) if msg.contains("lavfi.r128.M=") => {
65 if let Some(volume) = msg.split("lavfi.r128.M=").last() {
66 // Sample log output: [Parsed_ametadata_1 @ 0000024c27effdc0] [info] lavfi.r128.M=-120.691
67 // M = "momentary loudness"; a sliding time window of 400ms
68 // Volume scale is roughly -70 to 0 LUFS. Anything below -70 is silence.
69 // See <https://en.wikipedia.org/wiki/EBU_R_128#Metering>
70 let volume_f32 = volume.parse::<f32>().context("Failed to parse volume")?;
71 let volume_normalized: usize = max(((volume_f32 / 5.0).round() as i32) + 14, 0) as usize;
72 let volume_percent = ((volume_normalized as f32 / 14.0) * 100.0).round();
73
74 // Clear previous line of output
75 if !first_volume_event {
76 print!("\x1b[1A\x1b[2K");
77 } else {
78 first_volume_event = false;
79 }
80
81 // Blinking red dot to indicate recording
82 let time = std::time::SystemTime::now()
83 .duration_since(std::time::UNIX_EPOCH)
84 .unwrap()
85 .as_secs();
86 let recording_indicator = if time % 2 == 0 { "🔴" } else { " " };
87
88 println!(
89 "{} {} {}%",
90 recording_indicator,
91 repeat('█').take(volume_normalized).collect::<String>(),
92 volume_percent
93 );
94 }
95 }
96 _ => {}
97 }
98 }
99
100 Ok(())
101}
pub fn get_args(&self) -> CommandArgs<'_>
Returns an iterator of the arguments that will be passed to the program.
Identical to get_args in std::process::Command.
pub fn spawn(&mut self) -> Result<FfmpegChild>
Spawn the ffmpeg command as a child process, wrapping it in a
FfmpegChild
interface.
Please note that if the result is not consumed with wait(),
the process is not cleaned up correctly, resulting in a zombie process
until your main thread exits.
Identical to spawn in std::process::Command.
Examples found in repository?
9fn main() {
10 let fps = 60;
11 let duration = 10;
12 let total_frames = fps * duration;
13 let arg_string = format!(
14 "-f lavfi -i testsrc=duration={}:size=1920x1080:rate={} -y output/test.mp4",
15 duration, fps
16 );
17 FfmpegCommand::new()
18 .args(arg_string.split(' '))
19 .spawn()
20 .unwrap()
21 .iter()
22 .unwrap()
23 .filter_progress()
24 .for_each(|progress| println!("{}%", (progress.frame * 100) / total_frames));
25}
More examples
4fn main() {
5 let mut ffmpeg_runner = FfmpegCommand::new()
6 .testsrc()
7 .args(["-metadata", "title=some cool title"])
8 .overwrite() // -y
9 .output("output/metadata.mp4")
10 .print_command()
11 .spawn()
12 .unwrap();
13
14 ffmpeg_runner
15 .iter()
16 .unwrap()
17 .for_each(|e| {
18 match e {
19 FfmpegEvent::Progress(FfmpegProgress { frame, .. }) =>
20 println!("Current frame: {frame}"),
21 FfmpegEvent::Log(_level, msg) =>
22 println!("[ffmpeg] {msg}"),
23 _ => {}
24 }
25 });
26}
8fn main() -> anyhow::Result<()> {
9 // Run an FFmpeg command that generates a test video
10 let iter = FfmpegCommand::new() // <- Builder API like `std::process::Command`
11 .testsrc() // <- Discoverable aliases for FFmpeg args
12 .rawvideo() // <- Convenient argument presets
13 .spawn()? // <- Ordinary `std::process::Child`
14 .iter()?; // <- Blocking iterator over logs and output
15
16 // Use a regular "for" loop to read decoded video data
17 for frame in iter.filter_frames() {
18 println!("frame: {}x{}", frame.width, frame.height);
19 let _pixels: Vec<u8> = frame.data; // <- raw RGB pixels! 🎨
20 }
21
22 Ok(())
23}
13fn main() {
14 let mut ffmpeg = FfmpegCommand::new()
15 .realtime()
16 .format("lavfi")
17 .input("testsrc=size=1920x1080:rate=60")
18 .codec_video("rawvideo")
19 .format("avi")
20 .output("-")
21 .spawn()
22 .unwrap();
23
24 let mut ffplay = Command::new("ffplay")
25 .args("-i -".split(' '))
26 .stdin(Stdio::piped())
27 .spawn()
28 .unwrap();
29
30 let mut ffmpeg_stdout = ffmpeg.take_stdout().unwrap();
31 let mut ffplay_stdin = ffplay.stdin.take().unwrap();
32
33 // pipe from ffmpeg stdout to ffplay stdin
34 let buf = &mut [0u8; 4096];
35 loop {
36 let n = ffmpeg_stdout.read(buf).unwrap();
37 if n == 0 {
38 break;
39 }
40 ffplay_stdin.write_all(&buf[..n]).unwrap();
41 }
42}
9fn main() -> Result<()> {
10 let iter = FfmpegCommand::new()
11 .format("lavfi")
12 .arg("-re") // "realtime"
13 .input(format!(
14 "testsrc=size={OUTPUT_WIDTH}x{OUTPUT_HEIGHT}:rate={OUTPUT_FRAMERATE}"
15 ))
16 .rawvideo()
17 .spawn()?
18 .iter()?
19 .filter_frames();
20
21 for frame in iter {
22 // clear the previous frame
23 if frame.frame_num > 0 {
24 for _ in 0..frame.height {
25 print!("\x1B[{}A", 1);
26 }
27 }
28
29 // Print the pixels colored with ANSI codes
30 for y in 0..frame.height {
31 for x in 0..frame.width {
32 let idx = (y * frame.width + x) as usize * 3;
33 let r = frame.data[idx] as u32;
34 let g = frame.data[idx + 1] as u32;
35 let b = frame.data[idx + 2] as u32;
36 print!("\x1B[48;2;{r};{g};{b}m ");
37 }
38 println!("\x1B[0m");
39 }
40 }
41
42 Ok(())
43}
10fn main() -> Result<()> {
11 // Set up a TCP listener
12 const TCP_PORT: u32 = 3000;
13 let (exit_sender, exit_receiver) = channel::<()>();
14 let listener_thread = thread::spawn(|| listen_for_connections(TCP_PORT, exit_receiver));
15
16 // Wait for the listener to start
17 thread::sleep(Duration::from_millis(1000));
18
19 // Prepare an FFmpeg command with separate outputs for video, audio, and subtitles.
20 FfmpegCommand::new()
21 // Global flags
22 .hide_banner()
23 .overwrite() // <- overwrite required on windows
24 // Generate test video
25 .format("lavfi")
26 .input("testsrc=size=1920x1080:rate=60:duration=10")
27 // Generate test audio
28 .format("lavfi")
29 .input("sine=frequency=1000:duration=10")
30 // Generate test subtitles
31 .format("srt")
32 .input(
33 "data:text/plain;base64,MQ0KMDA6MDA6MDAsMDAwIC0tPiAwMDowMDoxMCw1MDANCkhlbGxvIFdvcmxkIQ==",
34 )
35 // Video output
36 .map("0:v")
37 .format("rawvideo")
38 .pix_fmt("rgb24")
39 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
40 // Audio output
41 .map("1:a")
42 .format("s16le")
43 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
44 // Subtitles output
45 .map("2:s")
46 .format("srt")
47 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
48 .print_command()
49 .spawn()?
50 .iter()?
51 .for_each(|event| match event {
52 // Verify output size from FFmpeg logs (video/audio KiB)
53 FfmpegEvent::Log(LogLevel::Info, msg) if msg.starts_with("[out#") => {
54 println!("{msg}");
55 }
56
57 // Log any unexpected errors
58 FfmpegEvent::Log(LogLevel::Warning | LogLevel::Error | LogLevel::Fatal, msg) => {
59 eprintln!("{msg}");
60 }
61
62 // _ => {}
63 e => {
64 println!("{:?}", e);
65 }
66 });
67 exit_sender.send(())?;
68 listener_thread.join().unwrap()?;
69 Ok(())
70}
pub fn print_command(&mut self) -> &mut Self
Print a command that can be copy-pasted to run in the terminal. Requires
&mut self
so that it chains seamlessly with other methods in the
interface. Sample output:
ffmpeg \
-f lavfi \
-i testsrc=duration=10 output/test.mp4
Examples found in repository?
4fn main() {
5 let mut ffmpeg_runner = FfmpegCommand::new()
6 .testsrc()
7 .args(["-metadata", "title=some cool title"])
8 .overwrite() // -y
9 .output("output/metadata.mp4")
10 .print_command()
11 .spawn()
12 .unwrap();
13
14 ffmpeg_runner
15 .iter()
16 .unwrap()
17 .for_each(|e| {
18 match e {
19 FfmpegEvent::Progress(FfmpegProgress { frame, .. }) =>
20 println!("Current frame: {frame}"),
21 FfmpegEvent::Log(_level, msg) =>
22 println!("[ffmpeg] {msg}"),
23 _ => {}
24 }
25 });
26}
More examples
10fn main() -> Result<()> {
11 // Set up a TCP listener
12 const TCP_PORT: u32 = 3000;
13 let (exit_sender, exit_receiver) = channel::<()>();
14 let listener_thread = thread::spawn(|| listen_for_connections(TCP_PORT, exit_receiver));
15
16 // Wait for the listener to start
17 thread::sleep(Duration::from_millis(1000));
18
19 // Prepare an FFmpeg command with separate outputs for video, audio, and subtitles.
20 FfmpegCommand::new()
21 // Global flags
22 .hide_banner()
23 .overwrite() // <- overwrite required on windows
24 // Generate test video
25 .format("lavfi")
26 .input("testsrc=size=1920x1080:rate=60:duration=10")
27 // Generate test audio
28 .format("lavfi")
29 .input("sine=frequency=1000:duration=10")
30 // Generate test subtitles
31 .format("srt")
32 .input(
33 "data:text/plain;base64,MQ0KMDA6MDA6MDAsMDAwIC0tPiAwMDowMDoxMCw1MDANCkhlbGxvIFdvcmxkIQ==",
34 )
35 // Video output
36 .map("0:v")
37 .format("rawvideo")
38 .pix_fmt("rgb24")
39 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
40 // Audio output
41 .map("1:a")
42 .format("s16le")
43 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
44 // Subtitles output
45 .map("2:s")
46 .format("srt")
47 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
48 .print_command()
49 .spawn()?
50 .iter()?
51 .for_each(|event| match event {
52 // Verify output size from FFmpeg logs (video/audio KiB)
53 FfmpegEvent::Log(LogLevel::Info, msg) if msg.starts_with("[out#") => {
54 println!("{msg}");
55 }
56
57 // Log any unexpected errors
58 FfmpegEvent::Log(LogLevel::Warning | LogLevel::Error | LogLevel::Fatal, msg) => {
59 eprintln!("{msg}");
60 }
61
62 // _ => {}
63 e => {
64 println!("{:?}", e);
65 }
66 });
67 exit_sender.send(())?;
68 listener_thread.join().unwrap()?;
69 Ok(())
70}
12fn main() -> anyhow::Result<()> {
13 use anyhow::Result;
14 use ffmpeg_sidecar::command::FfmpegCommand;
15 use ffmpeg_sidecar::event::{FfmpegEvent, LogLevel};
16 use ffmpeg_sidecar::named_pipes::NamedPipe;
17 use ffmpeg_sidecar::pipe_name;
18 use std::io::Read;
19 use std::sync::mpsc;
20 use std::thread;
21
22 const VIDEO_PIPE_NAME: &str = pipe_name!("ffmpeg_video");
23 const AUDIO_PIPE_NAME: &str = pipe_name!("ffmpeg_audio");
24 const SUBTITLES_PIPE_NAME: &str = pipe_name!("ffmpeg_subtitles");
25
26 // Prepare an FFmpeg command with separate outputs for video, audio, and subtitles.
27 let mut command = FfmpegCommand::new();
28 command
29 // Global flags
30 .hide_banner()
31 .overwrite() // <- overwrite required on windows
32 // Generate test video
33 .format("lavfi")
34 .input("testsrc=size=1920x1080:rate=60:duration=10")
35 // Generate test audio
36 .format("lavfi")
37 .input("sine=frequency=1000:duration=10")
38 // Generate test subtitles
39 .format("srt")
40 .input(
41 "data:text/plain;base64,MQ0KMDA6MDA6MDAsMDAwIC0tPiAwMDowMDoxMCw1MDANCkhlbGxvIFdvcmxkIQ==",
42 )
43 // Video output
44 .map("0:v")
45 .format("rawvideo")
46 .pix_fmt("rgb24")
47 .output(VIDEO_PIPE_NAME)
48 // Audio output
49 .map("1:a")
50 .format("s16le")
51 .output(AUDIO_PIPE_NAME)
52 // Subtitles output
53 .map("2:s")
54 .format("srt")
55 .output(SUBTITLES_PIPE_NAME);
56
57 // Create a separate thread for each output pipe
58 let threads = [VIDEO_PIPE_NAME, AUDIO_PIPE_NAME, SUBTITLES_PIPE_NAME]
59 .iter()
60 .cloned()
61 .map(|pipe_name| {
62 // It's important to create the named pipe on the main thread before
63 // sending it elsewhere so that any errors are caught at the top level.
64 let mut pipe = NamedPipe::new(pipe_name)?;
65 println!("[{pipe_name}] pipe created");
66 let (ready_sender, ready_receiver) = mpsc::channel::<()>();
67 let thread = thread::spawn(move || -> Result<()> {
68 // Wait for FFmpeg to start writing
69 // Only needed for Windows, since Unix will block until a writer has connected
70 println!("[{pipe_name}] waiting for ready signal");
71 ready_receiver.recv()?;
72
73 // Read continuously until finished
74 // Note that if the stream of output is interrupted or paused,
75 // you may need additional logic to keep the read loop alive.
76 println!("[{pipe_name}] reading from pipe");
77 let mut buf = vec![0; 1920 * 1080 * 3];
78 let mut total_bytes_read = 0;
79
80 // In the case of subtitles, we'll decode the string contents directly
81 let mut text_content = if pipe_name == SUBTITLES_PIPE_NAME {
82 Some("".to_string())
83 } else {
84 None
85 };
86
87 loop {
88 match pipe.read(&mut buf) {
89 Ok(bytes_read) => {
90 total_bytes_read += bytes_read;
91
92 // read bytes into string
93 if let Some(cur_str) = &mut text_content {
94 let s = std::str::from_utf8(&buf[..bytes_read]).unwrap();
95 text_content = Some(format!("{}{}", cur_str, s));
96 }
97
98 if bytes_read == 0 {
99 break;
100 }
101 }
102 Err(err) => {
103 if err.kind() != std::io::ErrorKind::BrokenPipe {
104 return Err(err.into());
105 } else {
106 break;
107 }
108 }
109 }
110 }
111
112 // Log how many bytes were received over this pipe.
113 // You can visually compare this to the FFmpeg log output to confirm
114 // that all the expected bytes were captured.
115 let size_str = if total_bytes_read < 1024 {
116 format!("{}B", total_bytes_read)
117 } else {
118 format!("{}KiB", total_bytes_read / 1024)
119 };
120
121 if let Some(text_content) = text_content {
122 println!("[{pipe_name}] subtitle text content: ");
123 println!("{}", text_content.trim());
124 }
125
126 println!("[{pipe_name}] done reading ({size_str} total)");
127 Ok(())
128 });
129
130 Ok((thread, ready_sender))
131 })
132 .collect::<Result<Vec<_>>>()?;
133
134 // Start FFmpeg
135 let mut ready_signal_sent = false;
136 command
137 .print_command()
138 .spawn()?
139 .iter()?
140 .for_each(|event| match event {
141 // Signal threads when output is ready
142 FfmpegEvent::Progress(_) if !ready_signal_sent => {
143 threads.iter().for_each(|(_, sender)| {
144 sender.send(()).ok();
145 });
146 ready_signal_sent = true;
147 }
148
149 // Verify output size from FFmpeg logs (video/audio KiB)
150 FfmpegEvent::Log(LogLevel::Info, msg) if msg.starts_with("[out#") => {
151 println!("{msg}");
152 }
153
154 // Log any unexpected errors
155 FfmpegEvent::Log(LogLevel::Warning | LogLevel::Error | LogLevel::Fatal, msg) => {
156 eprintln!("{msg}");
157 }
158
159 _ => {}
160 });
161
162 for (thread, _) in threads {
163 thread.join().unwrap()?;
164 }
165
166 Ok(())
167}
pub fn create_no_window(&mut self) -> &mut Self
Disable creating a new console window for the spawned process on Windows. Has no effect on other platforms. This can be useful when spawning a command from a GUI program.
This is called automatically in the constructor. To override, use
CommandExt::creation_flags()
directly on the inner Command
.
pub fn new() -> Self
Examples found in repository?
9fn main() {
10 let fps = 60;
11 let duration = 10;
12 let total_frames = fps * duration;
13 let arg_string = format!(
14 "-f lavfi -i testsrc=duration={}:size=1920x1080:rate={} -y output/test.mp4",
15 duration, fps
16 );
17 FfmpegCommand::new()
18 .args(arg_string.split(' '))
19 .spawn()
20 .unwrap()
21 .iter()
22 .unwrap()
23 .filter_progress()
24 .for_each(|progress| println!("{}%", (progress.frame * 100) / total_frames));
25}
More examples
4fn main() {
5 let mut ffmpeg_runner = FfmpegCommand::new()
6 .testsrc()
7 .args(["-metadata", "title=some cool title"])
8 .overwrite() // -y
9 .output("output/metadata.mp4")
10 .print_command()
11 .spawn()
12 .unwrap();
13
14 ffmpeg_runner
15 .iter()
16 .unwrap()
17 .for_each(|e| {
18 match e {
19 FfmpegEvent::Progress(FfmpegProgress { frame, .. }) =>
20 println!("Current frame: {frame}"),
21 FfmpegEvent::Log(_level, msg) =>
22 println!("[ffmpeg] {msg}"),
23 _ => {}
24 }
25 });
26}
8fn main() -> anyhow::Result<()> {
9 // Run an FFmpeg command that generates a test video
10 let iter = FfmpegCommand::new() // <- Builder API like `std::process::Command`
11 .testsrc() // <- Discoverable aliases for FFmpeg args
12 .rawvideo() // <- Convenient argument presets
13 .spawn()? // <- Ordinary `std::process::Child`
14 .iter()?; // <- Blocking iterator over logs and output
15
16 // Use a regular "for" loop to read decoded video data
17 for frame in iter.filter_frames() {
18 println!("frame: {}x{}", frame.width, frame.height);
19 let _pixels: Vec<u8> = frame.data; // <- raw RGB pixels! 🎨
20 }
21
22 Ok(())
23}
13fn main() {
14 let mut ffmpeg = FfmpegCommand::new()
15 .realtime()
16 .format("lavfi")
17 .input("testsrc=size=1920x1080:rate=60")
18 .codec_video("rawvideo")
19 .format("avi")
20 .output("-")
21 .spawn()
22 .unwrap();
23
24 let mut ffplay = Command::new("ffplay")
25 .args("-i -".split(' '))
26 .stdin(Stdio::piped())
27 .spawn()
28 .unwrap();
29
30 let mut ffmpeg_stdout = ffmpeg.take_stdout().unwrap();
31 let mut ffplay_stdin = ffplay.stdin.take().unwrap();
32
33 // pipe from ffmpeg stdout to ffplay stdin
34 let buf = &mut [0u8; 4096];
35 loop {
36 let n = ffmpeg_stdout.read(buf).unwrap();
37 if n == 0 {
38 break;
39 }
40 ffplay_stdin.write_all(&buf[..n]).unwrap();
41 }
42}
9fn main() -> Result<()> {
10 let iter = FfmpegCommand::new()
11 .format("lavfi")
12 .arg("-re") // "realtime"
13 .input(format!(
14 "testsrc=size={OUTPUT_WIDTH}x{OUTPUT_HEIGHT}:rate={OUTPUT_FRAMERATE}"
15 ))
16 .rawvideo()
17 .spawn()?
18 .iter()?
19 .filter_frames();
20
21 for frame in iter {
22 // clear the previous frame
23 if frame.frame_num > 0 {
24 for _ in 0..frame.height {
25 print!("\x1B[{}A", 1);
26 }
27 }
28
29 // Print the pixels colored with ANSI codes
30 for y in 0..frame.height {
31 for x in 0..frame.width {
32 let idx = (y * frame.width + x) as usize * 3;
33 let r = frame.data[idx] as u32;
34 let g = frame.data[idx + 1] as u32;
35 let b = frame.data[idx + 2] as u32;
36 print!("\x1B[48;2;{r};{g};{b}m ");
37 }
38 println!("\x1B[0m");
39 }
40 }
41
42 Ok(())
43}
10fn main() -> Result<()> {
11 // Set up a TCP listener
12 const TCP_PORT: u32 = 3000;
13 let (exit_sender, exit_receiver) = channel::<()>();
14 let listener_thread = thread::spawn(|| listen_for_connections(TCP_PORT, exit_receiver));
15
16 // Wait for the listener to start
17 thread::sleep(Duration::from_millis(1000));
18
19 // Prepare an FFmpeg command with separate outputs for video, audio, and subtitles.
20 FfmpegCommand::new()
21 // Global flags
22 .hide_banner()
23 .overwrite() // <- overwrite required on windows
24 // Generate test video
25 .format("lavfi")
26 .input("testsrc=size=1920x1080:rate=60:duration=10")
27 // Generate test audio
28 .format("lavfi")
29 .input("sine=frequency=1000:duration=10")
30 // Generate test subtitles
31 .format("srt")
32 .input(
33 "data:text/plain;base64,MQ0KMDA6MDA6MDAsMDAwIC0tPiAwMDowMDoxMCw1MDANCkhlbGxvIFdvcmxkIQ==",
34 )
35 // Video output
36 .map("0:v")
37 .format("rawvideo")
38 .pix_fmt("rgb24")
39 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
40 // Audio output
41 .map("1:a")
42 .format("s16le")
43 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
44 // Subtitles output
45 .map("2:s")
46 .format("srt")
47 .output(format!("tcp://127.0.0.1:{TCP_PORT}"))
48 .print_command()
49 .spawn()?
50 .iter()?
51 .for_each(|event| match event {
52 // Verify output size from FFmpeg logs (video/audio KiB)
53 FfmpegEvent::Log(LogLevel::Info, msg) if msg.starts_with("[out#") => {
54 println!("{msg}");
55 }
56
57 // Log any unexpected errors
58 FfmpegEvent::Log(LogLevel::Warning | LogLevel::Error | LogLevel::Fatal, msg) => {
59 eprintln!("{msg}");
60 }
61
62 // _ => {}
63 e => {
64 println!("{:?}", e);
65 }
66 });
67 exit_sender.send(())?;
68 listener_thread.join().unwrap()?;
69 Ok(())
70}
pub fn new_with_path<S: AsRef<OsStr>>(path_to_ffmpeg_binary: S) -> Self
pub fn as_inner_mut(&mut self) -> &mut Command
Escape hatch to mutably access the inner Command
.