ez_ffmpeg/core/mod.rs
//! The **core** module provides the foundational building blocks for configuring and running FFmpeg
//! pipelines. It encompasses:
//!
//! - **Input & Output Handling** (in [`context`]): Structures and logic (`Input`, `Output`) for
//!   specifying where media data originates and where it should be written.
//! - **Filter Descriptions**: Define filter graphs with `FilterComplex` or attach custom [`FrameFilter`](filter::frame_filter::FrameFilter)
//!   implementations at the input/output stage.
//! - **Stream and Device Queries** (in [`stream_info`] and [`device`]): Utilities for retrieving
//!   information about media streams and available input devices.
//! - **Hardware Acceleration** (in [`hwaccel`]): Enumerate/configure GPU-accelerated codecs (CUDA, VAAPI, etc.).
//! - **Codec Discovery** (in [`codec`]): List encoders/decoders supported by FFmpeg.
//! - **Custom Filters** (in [`filter`]): Implement user-defined [`FrameFilter`](filter::frame_filter::FrameFilter) logic for frames.
//! - **Lifecycle Orchestration** (in [`scheduler`]): [`FfmpegScheduler`](scheduler::ffmpeg_scheduler::FfmpegScheduler) that runs the configured pipeline
//!   (synchronously or asynchronously if the `async` feature is enabled).
//!
//! # Submodules
//!
//! - [`context`]: Houses [`FfmpegContext`](context::ffmpeg_context::FfmpegContext)—the central struct for assembling inputs, outputs, and filters.
//! - [`scheduler`]: Defines [`FfmpegScheduler`](scheduler::ffmpeg_scheduler::FfmpegScheduler), managing the execution of an `FfmpegContext` pipeline.
//! - [`container_info`]: Utilities to extract information about the container, such as duration and format details.
//! - [`stream_info`]: Inspect media streams (e.g., find video/audio streams in a file).
//! - [`device`]: Query audio/video input devices (cameras, microphones, etc.) on various platforms.
//! - [`hwaccel`]: Helpers for hardware-accelerated encoding/decoding setup.
//! - [`codec`]: Tools to discover which encoders/decoders your FFmpeg build supports.
//! - [`filter`]: Query FFmpeg's built-in filters and infrastructure for building custom frame-processing filters.
//!
//! # Example Workflow
//!
//! 1. **Build a context** using [`FfmpegContext::builder()`](crate::core::context::ffmpeg_context::FfmpegContext::builder),
//!    specifying your input, any filters, and your output.
//! 2. **Create a scheduler** with [`FfmpegScheduler::new`](crate::core::scheduler::ffmpeg_scheduler::FfmpegScheduler::new),
//!    then call `.start()` to begin processing.
//! 3. **Wait** (or `.await` if the `async` feature is enabled) for the job to complete. Use the returned
//!    `Result` to detect success or failure.
//! # Example
//! ```rust
//! use ez_ffmpeg::core::context::ffmpeg_context::FfmpegContext;
//! use ez_ffmpeg::core::scheduler::ffmpeg_scheduler::FfmpegScheduler;
//!
//! fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     // 1. Build an FfmpegContext with an input, a simple filter, and an output
//!     let context = FfmpegContext::builder()
//!         .input("test.mp4")
//!         .filter_desc("hue=s=0") // Example: desaturate video
//!         .output("output.mp4")
//!         .build()?;
//!
//!     // 2. Create a scheduler and start the job
//!     let scheduler = FfmpegScheduler::new(context).start()?;
//!
//!     // 3. Block until it's finished
//!     scheduler.wait()?;
//!     Ok(())
//! }
//! ```

/// The **context** module provides tools for assembling an entire FFmpeg pipeline,
/// culminating in the [`FfmpegContext`](context::ffmpeg_context::FfmpegContext). This includes:
///
/// - **Inputs**: [`Input`](context::input::Input) objects representing files, URLs, or custom I/O callbacks.
/// - **Outputs**: [`Output`](context::output::Output) objects representing target files, streams, or custom sinks.
/// - **Filter Descriptions**: Simple inline filters via `filter_desc` or more complex
///   [`FilterComplex`](context::filter_complex::FilterComplex) graphs.
/// - **Builders**: e.g., [`FfmpegContextBuilder`](context::ffmpeg_context_builder::FfmpegContextBuilder) for constructing a complete context
///   with multiple inputs, outputs, and filter settings.
///
/// Once you’ve built an [`FfmpegContext`](context::ffmpeg_context::FfmpegContext), you can execute it via the [`FfmpegScheduler`](scheduler::ffmpeg_scheduler::FfmpegScheduler).
///
/// # Example
///
/// ```rust
/// use ez_ffmpeg::core::context::ffmpeg_context::FfmpegContext;
///
/// // Build an FFmpeg context with one input, some filter settings, and one output.
/// let context = FfmpegContext::builder()
///     .input("test.mp4")
///     .filter_desc("hue=s=0")
///     .output("output.mp4")
///     .build()
///     .unwrap();
/// // The context now holds all info needed for an FFmpeg job.
/// ```
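///
/// If you want to configure the output beyond a plain path, you can construct the
/// [`Output`](context::output::Output) value explicitly and hand it to the builder. This is a
/// minimal sketch: it relies only on the `Output: From<&str>` conversion and the builder calls
/// already shown in the [`filter`] module example further below, and the import paths are taken
/// from this module's doc links (they may differ slightly in your version of the crate).
///
/// ```rust
/// use ez_ffmpeg::core::context::ffmpeg_context::FfmpegContext;
/// use ez_ffmpeg::core::context::output::Output;
///
/// // Build the Output explicitly instead of passing a bare path string.
/// let output: Output = "output.mp4".into();
///
/// let context = FfmpegContext::builder()
///     .input("test.mp4")
///     .output(output)
///     .build()
///     .unwrap();
/// ```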
pub mod context;

/// The **scheduler** module orchestrates the execution of a configured [`FfmpegContext`](context::ffmpeg_context::FfmpegContext).
/// It provides the [`FfmpegScheduler`](scheduler::ffmpeg_scheduler::FfmpegScheduler) struct, which:
///
/// - **Starts** the FFmpeg pipeline via [`FfmpegScheduler::start()`](scheduler::ffmpeg_scheduler::FfmpegScheduler<crate::core::scheduler::ffmpeg_scheduler::Initialization>::start()).
/// - **Manages** thread or subprocess creation, ensuring all streams and filters run.
/// - **Waits** for completion (blocking or asynchronous, depending on whether the `async` feature is enabled).
/// - **Returns** the final result, indicating success or failure.
///
/// # Synchronous Example
///
/// ```rust
/// use ez_ffmpeg::core::context::ffmpeg_context::FfmpegContext;
/// use ez_ffmpeg::core::scheduler::ffmpeg_scheduler::FfmpegScheduler;
///
/// let context = FfmpegContext::builder()
///     .input("test.mp4")
///     .filter_desc("hue=s=0")
///     .output("output.mp4")
///     .build()
///     .unwrap();
///
/// let result = FfmpegScheduler::new(context)
///     .start()
///     .unwrap()
///     .wait();
///
/// assert!(result.is_ok(), "FFmpeg job failed unexpectedly");
/// ```
///
/// # Asynchronous Example (requires `async` feature)
///
/// ```rust,ignore
/// use ez_ffmpeg::core::context::ffmpeg_context::FfmpegContext;
/// use ez_ffmpeg::core::scheduler::ffmpeg_scheduler::FfmpegScheduler;
///
/// #[tokio::main]
/// async fn main() {
///     let context = FfmpegContext::builder()
///         .input("test.mp4")
///         .output("output.mp4")
///         .build()
///         .unwrap();
///
///     let scheduler = FfmpegScheduler::new(context)
///         .start()
///         .expect("Failed to start FFmpeg job");
///
///     // Asynchronous wait
///     scheduler.await.expect("FFmpeg job failed unexpectedly");
/// }
/// ```
pub mod scheduler;

/// The **container_info** module provides utilities for retrieving metadata related to the media container,
/// such as duration, format, and other general properties of the media file.
///
/// This module helps to query the overall properties of a media container file (e.g., `.mp4`, `.avi`, `.mkv`)
/// without diving into individual streams (audio, video, etc.). It is useful when you need information
/// about the file as a whole, such as total duration, format type, and container-specific properties.
///
/// # Examples
///
/// ```rust
/// use ez_ffmpeg::core::container_info::{get_duration_us, get_format, get_metadata};
///
/// // Retrieve the duration in microseconds for the media file "test.mp4"
/// let duration = get_duration_us("test.mp4").unwrap();
/// println!("Duration: {} us", duration);
///
/// // Retrieve the format name for "test.mp4"
/// let format = get_format("test.mp4").unwrap();
/// println!("Format: {}", format);
///
/// // Retrieve the metadata for "test.mp4"
/// let metadata = get_metadata("test.mp4").unwrap();
/// for (key, value) in metadata {
///     println!("{}: {}", key, value);
/// }
/// ```
///
/// These helper functions return container-level metadata and report any errors that may arise
/// (e.g., if the file can't be opened or if there is an issue reading the data).
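///
/// Since `get_duration_us` reports the duration in microseconds, converting it to seconds is a
/// small arithmetic follow-up (a minimal sketch building on the example above):
///
/// ```rust
/// use ez_ffmpeg::core::container_info::get_duration_us;
///
/// let duration_us = get_duration_us("test.mp4").unwrap();
/// // 1 second = 1_000_000 microseconds.
/// let duration_secs = duration_us as f64 / 1_000_000.0;
/// println!("Duration: {:.2} s", duration_secs);
/// ```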
pub mod container_info;

/// The **stream_info** module provides utilities to retrieve detailed information
/// about media streams (video, audio, and more) from an input source (e.g., a local file
/// path, an RTMP URL, etc.). It queries FFmpeg for metadata regarding stream types, codec
/// parameters, duration, and other relevant details.
///
/// # Examples
///
/// ```rust
/// use ez_ffmpeg::core::stream_info::{find_all_stream_infos, find_audio_stream_info, find_video_stream_info};
///
/// // Retrieve information about the first video stream in "test.mp4"
/// let maybe_video_info = find_video_stream_info("test.mp4").unwrap();
/// if let Some(video_info) = maybe_video_info {
///     println!("Found video stream: {:?}", video_info);
/// } else {
///     println!("No video stream found.");
/// }
///
/// // Retrieve information about the first audio stream in "test.mp4"
/// let maybe_audio_info = find_audio_stream_info("test.mp4").unwrap();
/// if let Some(audio_info) = maybe_audio_info {
///     println!("Found audio stream: {:?}", audio_info);
/// } else {
///     println!("No audio stream found.");
/// }
///
/// // Retrieve information about all streams (video, audio, etc.) in "test.mp4"
/// let all_infos = find_all_stream_infos("test.mp4").unwrap();
/// println!("Total streams found: {}", all_infos.len());
/// for info in all_infos {
///     println!("{:?}", info);
/// }
/// ```
///
/// These helper functions return `Result<Option<StreamInfo>, Error>` or `Result<Vec<StreamInfo>, Error>`
/// depending on the call, allowing you to differentiate between “no stream found” (returns `Ok(None)`)
/// and encountering an actual error (returns `Err(...)`).
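///
/// Because the single-stream helpers return `Result<Option<StreamInfo>, _>`, a `match` makes all
/// three outcomes explicit. A minimal sketch (the error is printed via `Debug`, which the
/// `unwrap()` calls above already rely on):
///
/// ```rust
/// use ez_ffmpeg::core::stream_info::find_video_stream_info;
///
/// match find_video_stream_info("test.mp4") {
///     Ok(Some(info)) => println!("Video stream: {:?}", info),
///     Ok(None) => println!("The file has no video stream."),
///     Err(e) => eprintln!("Failed to probe the file: {:?}", e),
/// }
/// ```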
pub mod stream_info;

/// The **device** module provides cross-platform methods to query available audio and video
/// input devices on the system. Depending on the target operating system, it internally
/// delegates to different platform APIs or FFmpeg’s device capabilities:
///
/// - **macOS**: Leverages AVFoundation for enumerating devices such as cameras ("vide")
///   and microphones ("soun").
/// - **Other OSes**: Uses FFmpeg’s `avdevice` to list input devices for video and audio.
///
/// These functions can be used to programmatically discover devices before choosing one
/// for capture or recording in an FFmpeg-based pipeline.
///
/// # Examples
///
/// ```rust
/// use ez_ffmpeg::core::device::{get_input_audio_devices, get_input_video_devices};
///
/// // Query video input devices (e.g., cameras)
/// let video_devices = get_input_video_devices().unwrap();
/// for device in &video_devices {
///     println!("Available video device: {}", device);
/// }
///
/// // Query audio input devices (e.g., microphones)
/// let audio_devices = get_input_audio_devices().unwrap();
/// for device in &audio_devices {
///     println!("Available audio device: {}", device);
/// }
/// ```
///
/// # Notes
///
/// - If the query process fails (e.g., missing permissions or no devices available),
///   the functions return an appropriate error from `crate::error`.
/// - On macOS, the `AVFoundation` framework is used directly. On other platforms, FFmpeg’s
///   `avdevice` functionality is used. Implementation details differ, but the returned
///   results have a uniform format: a list of human-readable device names.
/// - For more advanced device details (e.g., supported formats or resolutions), you may need
///   to perform additional FFmpeg queries or platform-specific calls.
pub mod device;

/// The **hwaccel** module provides functionality for working with hardware-accelerated
/// codecs in FFmpeg. It allows you to detect and configure various hardware devices
/// (like NVENC, VAAPI, DXVA2, or VideoToolbox) so that FFmpeg can offload encoding or
/// decoding tasks to GPU or specialized hardware.
///
/// # Public API
///
/// - [`get_hwaccels()`](hwaccel::get_hwaccels): Enumerates the hardware acceleration backends available on the
///   current system, returning a list of [`HWAccelInfo`](hwaccel::HWAccelInfo) items. Each item contains a
///   readable name (e.g., `"cuda"`, `"vaapi"`) and the corresponding `AVHWDeviceType`.
///
/// # Example
///
/// ```rust
/// use ez_ffmpeg::core::hwaccel::get_hwaccels;
///
/// // Query hardware acceleration backends
/// let hwaccels = get_hwaccels();
/// for accel in hwaccels {
///     println!("Found HW Accel: {} (type: {:?})", accel.name, accel.hw_device_type);
/// }
/// ```
///
/// # Notes
///
/// - While only [`get_hwaccels()`](hwaccel::get_hwaccels) is directly exposed, internally the module contains
///   various helpers to initialize and manage hardware devices (e.g., `hw_device_init_from_string`).
///   These are used behind the scenes or in more advanced scenarios where explicit control
///   over device creation is required.
/// - Hardware acceleration support depends on both FFmpeg’s compilation configuration
///   and the underlying system drivers/frameworks. Not all listed accelerations may be
///   fully functional on every platform.
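///
/// Since each [`HWAccelInfo`](hwaccel::HWAccelInfo) exposes a human-readable `name`, checking for a
/// particular backend before configuring a pipeline is straightforward. A minimal sketch that looks
/// for CUDA (the `"cuda"` name is just an illustrative choice):
///
/// ```rust
/// use ez_ffmpeg::core::hwaccel::get_hwaccels;
///
/// // Decide at runtime whether a CUDA-capable FFmpeg build is available.
/// let cuda_available = get_hwaccels().iter().any(|accel| accel.name == "cuda");
/// if cuda_available {
///     println!("CUDA hardware acceleration is available.");
/// } else {
///     println!("Falling back to software encoding/decoding.");
/// }
/// ```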
pub mod hwaccel;

/// The **codec** module provides helpers for enumerating and querying FFmpeg’s
/// available audio/video **encoders** and **decoders**. This can be useful for
/// discovering which codecs are supported in your current FFmpeg build, along
/// with their core attributes.
///
/// # Public API
///
/// - [`get_encoders()`](codec::get_encoders): Returns a list of [`CodecInfo`](codec::CodecInfo) representing all
///   encoders (e.g., H.264, AAC) recognized by FFmpeg.
/// - [`get_decoders()`](codec::get_decoders): Returns a list of [`CodecInfo`](codec::CodecInfo) representing all
///   decoders (e.g., H.264, AAC) recognized by FFmpeg.
///
/// # Example
///
/// ```rust
/// use ez_ffmpeg::core::codec::{get_decoders, get_encoders};
///
/// // List all available encoders
/// let encoders = get_encoders();
/// for enc in &encoders {
///     println!("Encoder: {} - {}", enc.codec_name, enc.codec_long_name);
/// }
///
/// // List all available decoders
/// let decoders = get_decoders();
/// for dec in &decoders {
///     println!("Decoder: {} - {}", dec.codec_name, dec.codec_long_name);
/// }
/// ```
///
/// # Data Structures
///
/// - [`CodecInfo`](codec::CodecInfo): Contains user-friendly fields such as:
///   - `codec_name` / `codec_long_name`
///   - `desc_name`: The descriptor name from FFmpeg.
///   - `media_type` (audio/video/subtitle, etc.)
///   - `codec_id` (internal FFmpeg ID)
///   - `codec_capabilities` (bitmask indicating codec features)
///
/// # Notes
///
/// - The underlying `Codec` struct is for internal use only, bridging to
///   the raw FFmpeg APIs. In most cases, you only need the higher-level [`CodecInfo`](codec::CodecInfo)
///   data returned by the public functions above.
/// - The available encoders/decoders can vary depending on your FFmpeg build
///   and any external libraries installed on the system.
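///
/// A common follow-up is to check whether one specific encoder is present in the current build.
/// A minimal sketch using the `codec_name` field (the `"libx264"` name is just an illustrative
/// choice):
///
/// ```rust
/// use ez_ffmpeg::core::codec::get_encoders;
///
/// // Look up a specific encoder by name before relying on it.
/// let encoders = get_encoders();
/// match encoders.iter().find(|enc| enc.codec_name == "libx264") {
///     Some(enc) => println!("H.264 encoding is available via {}", enc.codec_long_name),
///     None => println!("libx264 is not available in this FFmpeg build."),
/// }
/// ```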
pub mod codec;

/// The **filter** module provides a flexible framework for custom frame processing
/// within the FFmpeg pipeline, along with the ability to query FFmpeg's built-in filters.
/// It introduces the [`FrameFilter`](filter::frame_filter::FrameFilter) trait, which defines how to apply transformations
/// (e.g., scaling, color adjustments, GPU-accelerated effects) to decoded frames.
/// You can attach these filters to either the input or the output side
/// (depending on your desired pipeline design) so that frames are automatically
/// processed in your FFmpeg workflow.
///
/// # FFmpeg Built-in Filters
///
/// ```rust
/// use ez_ffmpeg::core::filter::get_filters;
///
/// // Query available FFmpeg filters
/// let filters = get_filters();
/// for filter in filters {
///     println!("Filter: {} - {}", filter.name, filter.description);
/// }
/// ```
///
/// # Defining and Using a Custom Filter
///
/// Below is a minimal example showing how to implement a custom filter and attach it to
/// an `Output` so that every frame is processed before encoding. You could likewise
/// attach it to an `Input` if you want the frames processed immediately after decoding.
///
/// ```rust
/// // NOTE: the import paths for `FrameFilter`, `FfmpegContext`, `FfmpegScheduler`, and `Output`
/// // follow this module's doc links; the paths for `FrameFilterContext`, `FramePipelineBuilder`,
/// // and `Frame` are assumptions and may need adjusting for your version of the crate.
/// use ez_ffmpeg::core::context::ffmpeg_context::FfmpegContext;
/// use ez_ffmpeg::core::context::output::Output;
/// use ez_ffmpeg::core::filter::frame_filter::FrameFilter;
/// use ez_ffmpeg::core::filter::frame_filter_context::FrameFilterContext;
/// use ez_ffmpeg::core::filter::frame_pipeline_builder::FramePipelineBuilder;
/// use ez_ffmpeg::core::scheduler::ffmpeg_scheduler::FfmpegScheduler;
/// use ez_ffmpeg::Frame;
/// use ffmpeg_sys_next::AVMediaType;
///
/// // 1. Define your custom filter by implementing the FrameFilter trait.
/// struct FlipFilter;
///
/// impl FrameFilter for FlipFilter {
///     fn media_type(&self) -> AVMediaType {
///         // This filter operates on video frames.
///         AVMediaType::AVMEDIA_TYPE_VIDEO
///     }
///
///     fn filter_frame(
///         &mut self,
///         mut frame: Frame,
///         _ctx: &FrameFilterContext,
///     ) -> Result<Option<Frame>, String> {
///         unsafe {
///             if frame.as_ptr().is_null() || frame.is_empty() {
///                 return Ok(Some(frame));
///             }
///         }
///
///         // Here you would implement the logic to transform the frame.
///         // As a trivial example, we just return the original frame.
///         // (Replace this with your actual transformation code.)
///
///         Ok(Some(frame))
///     }
/// }
///
/// fn main() -> Result<(), Box<dyn std::error::Error>> {
///     // 2. Create a pipeline builder for video frames.
///     let mut pipeline_builder: FramePipelineBuilder = AVMediaType::AVMEDIA_TYPE_VIDEO.into();
///
///     // 3. Add your custom filter to the pipeline, giving it a unique name.
///     pipeline_builder = pipeline_builder.filter("flip-filter", Box::new(FlipFilter));
///
///     // 4. Attach the pipeline to an Output (could also attach to an Input).
///     let mut output: Output = "output.mp4".into();
///     output.add_frame_pipeline(pipeline_builder);
///
///     // 5. Build the FFmpeg context with both input and output.
///     let context = FfmpegContext::builder()
///         .input("input.mp4")
///         .output(output)
///         .build()?;
///
///     // 6. Run the FFmpeg job via the scheduler.
///     FfmpegScheduler::new(context)
///         .start()?
///         .wait()?;
///
///     Ok(())
/// }
/// ```
///
/// In this example:
/// 1. We define a **`FlipFilter`** that implements the [`FrameFilter`](filter::frame_filter::FrameFilter) trait and specifies
///    `AVMediaType::AVMEDIA_TYPE_VIDEO`.
/// 2. We create a **`FramePipelineBuilder`** for `VIDEO` frames and add our filter to it.
/// 3. We attach that pipeline to the **`Output`** configuration, so frames will be processed
///    (in this case, “flipped”) before encoding.
/// 4. Finally, we build the FFmpeg context and run it with the **`FfmpegScheduler`**.
///
/// # More Advanced Filters
///
/// For a more complex, GPU-accelerated example, see the **OpenGL**-based filters in the
/// [`opengl` module](crate::opengl). There, you can use custom GLSL shaders to apply
/// sophisticated transformations or visual effects on video frames.
///
/// # Trait Overview
///
/// The [`FrameFilter`](filter::frame_filter::FrameFilter) trait exposes several methods you can override:
/// - [`FrameFilter::media_type()`](filter::frame_filter::FrameFilter::media_type): Indicates which media type (video, audio, etc.) this filter handles.
/// - [`FrameFilter::init()`](filter::frame_filter::FrameFilter::init): Called once when the filter is first created (e.g., allocate resources).
/// - [`FrameFilter::filter_frame()`](filter::frame_filter::FrameFilter::filter_frame): The primary method for transforming an incoming frame.
/// - [`FrameFilter::request_frame()`](filter::frame_filter::FrameFilter::request_frame): If your filter generates frames on its own, you can override this.
/// - [`FrameFilter::uninit()`](filter::frame_filter::FrameFilter::uninit): Called during cleanup when the filter is removed or the pipeline ends.
///
/// By chaining multiple filters in a pipeline, you can create sophisticated processing
/// chains for your media data.
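///
/// Chaining simply means calling `filter` repeatedly on the same `FramePipelineBuilder`. A minimal
/// sketch (ignored here because it reuses the `FlipFilter` type from the example above, purely for
/// illustration):
///
/// ```rust,ignore
/// // Chain two custom filters on the same video pipeline.
/// let pipeline_builder: FramePipelineBuilder = AVMediaType::AVMEDIA_TYPE_VIDEO.into();
/// let pipeline_builder = pipeline_builder
///     .filter("flip-filter", Box::new(FlipFilter))
///     .filter("second-pass", Box::new(FlipFilter));
/// ```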
pub mod filter;

static INIT_FFMPEG: std::sync::Once = std::sync::Once::new();

extern "C" fn cleanup() {
    unsafe {
        hwaccel::hw_device_free_all();
        ffmpeg_sys_next::avformat_network_deinit();
    }

    log::debug!("FFmpeg cleaned up");
}

// The following type definitions for `VaListType` are inspired by the Rust standard library's
// implementation of `va_list` (see std::ffi::va_list::VaListImpl). These definitions ensure compatibility
// with platform-specific ABI requirements when interfacing with C variadic functions.

#[cfg(any(
    all(
        not(target_arch = "aarch64"),
        not(target_arch = "powerpc"),
        not(target_arch = "s390x"),
        not(target_arch = "x86_64")
    ),
    all(target_arch = "aarch64", target_vendor = "apple"),
    target_family = "wasm",
    target_os = "uefi",
    windows,
))]
type VaListType = *mut libc::c_char;

#[cfg(all(target_arch = "x86_64", not(target_os = "uefi"), not(windows)))]
type VaListType = *mut ffmpeg_sys_next::__va_list_tag;

#[cfg(all(
    target_arch = "aarch64",
    not(target_vendor = "apple"),
    not(target_os = "uefi"),
    not(windows),
))]
pub type VaListType = *mut ffmpeg_sys_next::__va_list_tag_aarch64;

#[cfg(all(target_arch = "powerpc", not(target_os = "uefi"), not(windows)))]
pub type VaListType = *mut ffmpeg_sys_next::__va_list_tag_powerpc;

#[cfg(target_arch = "s390x")]
pub type VaListType = *mut ffmpeg_sys_next::__va_list_tag_s390x;

unsafe extern "C" fn ffmpeg_log_callback(
    ptr: *mut libc::c_void,
    level: libc::c_int,
    fmt: *const libc::c_char,
    args: VaListType,
) {
    // Create a fixed-size buffer to hold the formatted log message.
    let mut buffer = [0u8; 1024];
    // 'print_prefix' is used internally by av_log_format_line to decide whether to print a prefix.
    let mut print_prefix = 1;

    // Call FFmpeg's av_log_format_line to format the variable arguments into the buffer.
    ffmpeg_sys_next::av_log_format_line(
        ptr,
        level,
        fmt,
        args,
        buffer.as_mut_ptr() as *mut libc::c_char,
        buffer.len() as libc::c_int,
        &mut print_prefix,
    );

    // Convert the C string in the buffer to a Rust &str.
    if let Ok(msg) = std::ffi::CStr::from_ptr(buffer.as_ptr() as *const libc::c_char).to_str() {
        // Trim any trailing newline characters (\n or \r).
        let trimmed_msg = msg.trim_end_matches(|c| c == '\n' || c == '\r');

        // Map FFmpeg log levels to the corresponding Rust log levels.
        if level <= ffmpeg_sys_next::AV_LOG_ERROR {
            log::error!("FFmpeg: {}", trimmed_msg);
        } else if level <= ffmpeg_sys_next::AV_LOG_WARNING {
            log::warn!("FFmpeg: {}", trimmed_msg);
        } else if level <= ffmpeg_sys_next::AV_LOG_INFO {
            log::info!("FFmpeg: {}", trimmed_msg);
        }
    }
}

fn initialize_ffmpeg() {
    INIT_FFMPEG.call_once(|| {
        unsafe {
            libc::atexit(cleanup as extern "C" fn());
            ffmpeg_sys_next::avdevice_register_all();
            ffmpeg_sys_next::avformat_network_init();
            ffmpeg_sys_next::av_log_set_callback(Some(ffmpeg_log_callback));
        }
        log::info!("FFmpeg initialized.");
    });
}