
ez_ffmpeg/core/context/mod.rs

use crate::error::AllocFrameError;
use ffmpeg_sys_next::AVMediaType::{
    AVMEDIA_TYPE_ATTACHMENT, AVMEDIA_TYPE_AUDIO, AVMEDIA_TYPE_DATA, AVMEDIA_TYPE_SUBTITLE,
    AVMEDIA_TYPE_VIDEO,
};
use ffmpeg_sys_next::{
    av_freep, avcodec_free_context, avformat_close_input, avformat_free_context, avio_closep,
    avio_context_free, AVCodecContext, AVCodecParameters, AVFormatContext, AVIOContext,
    AVMediaType, AVRational, AVStream, AVFMT_NOFILE,
};
use std::ffi::c_void;
use std::ptr::null_mut;


/// The **ffmpeg_context** module is responsible for assembling FFmpeg’s configuration:
/// inputs, outputs, codecs, filters, and other parameters needed to construct a
/// complete media processing pipeline.
///
/// # Example
/// ```rust,ignore
///
/// // Build an FFmpeg context with one input, some filter settings, and one output
/// let context = FfmpegContext::builder()
///     .input("test.mp4")
///     .filter_desc("hue=s=0")
///     .output("output.mp4")
///     .build()
///     .unwrap();
/// // The context now holds all info needed for an FFmpeg job.
/// ```
pub mod ffmpeg_context;

/// The **ffmpeg_context_builder** module defines the builder pattern for creating
/// [`FfmpegContext`](ffmpeg_context::FfmpegContext) objects.
///
/// It exposes the [`FfmpegContextBuilder`](ffmpeg_context_builder::FfmpegContextBuilder) struct, which allows you to:
/// - Configure multiple [`Input`](input::Input) and
///   [`Output`](output::Output) streams.
/// - Attach filter descriptions via [`FilterComplex`](crate::core::context::filter_complex::FilterComplex)
///   or inline strings (e.g., `"scale=1280:720"`, `"hue=s=0"`).
/// - Produce a finished `FfmpegContext` that can then be executed by
///   [`FfmpegScheduler`](crate::core::scheduler::ffmpeg_scheduler::FfmpegScheduler).
///
/// # Examples
///
/// ```rust,ignore
/// // 1. Create a builder (usually via FfmpegContext::builder())
/// let builder = FfmpegContext::builder();
///
/// // 2. Add inputs, outputs, and filters
/// let ffmpeg_context = builder
///     .input("input.mp4")
///     .filter_desc("hue=s=0")
///     .output("output.mp4")
///     .build()
///     .expect("Failed to build FfmpegContext");
///
/// // 3. Use `ffmpeg_context` with FfmpegScheduler (e.g., `.start()` and `.wait()`).
/// ```
pub mod ffmpeg_context_builder;

/// The **input** module defines the [`Input`](crate::core::context::input::Input) struct,
/// representing an FFmpeg input source. An input can be:
/// - A file path or URL (e.g., `"video.mp4"`, `rtmp://example.com/live/stream`).
/// - A **custom data source** via a `read_callback` (and optionally `seek_callback`) for
///   advanced scenarios like in-memory buffers or network protocols.
///
/// You can also specify **frame pipelines** to apply custom [`FrameFilter`](crate::core::filter::frame_filter::FrameFilter)
/// transformations **after decoding** but **before** the frames move on to the rest of the pipeline.
///
/// # Example
///
/// ```rust,ignore
/// use ez_ffmpeg::core::context::input::Input;
///
/// // Basic file or network URL:
/// let file_input: Input = "example.mp4".into();
///
/// // Or a custom read callback:
/// let custom_input = Input::new_by_read_callback(|buf| {
///     // Fill `buf` with data from your source
///     // Return the number of bytes read, or negative for errors
///     0
/// });
/// ```
pub mod input;

/// The **output** module defines the [`Output`](crate::core::context::output::Output) struct,
/// representing an FFmpeg output destination. An output may be:
/// - A file path or URL (e.g., `"output.mp4"`, `rtmp://...`).
/// - A **custom write callback** that processes encoded data (e.g., storing it
///   in-memory or sending it over a custom network protocol).
///
/// You can specify additional details such as:
/// - **Container format** (e.g., `"mp4"`, `"flv"`, `"mkv"`).
/// - **Video/Audio/Subtitle codecs** (e.g., `"h264"`, `"aac"`, `"mov_text"`).
/// - **Frame pipelines** to apply [`FrameFilter`](crate::core::filter::frame_filter::FrameFilter)
///   transformations **before encoding**.
///
/// # Example
///
/// ```rust,ignore
/// use ez_ffmpeg::core::context::output::Output;
///
/// // Basic file/URL output:
/// let file_output: Output = "output.mp4".into();
///
/// // Or a custom write callback:
/// let custom_output = Output::new_by_write_callback(|encoded_data| {
///     // Write `encoded_data` somewhere
///     encoded_data.len() as i32
/// }).set_format("mp4");
/// ```
pub mod output;

/// The **filter_complex** module defines the [`FilterComplex`](crate::core::context::filter_complex::FilterComplex)
/// struct, which encapsulates one or more FFmpeg filter descriptions (e.g., `"scale=1280:720"`,
/// `"hue=s=0"`, etc.). You can use `FilterComplex` to construct more advanced or multi-step
/// filter graphs than simple inline strings allow.
///
/// `FilterComplex` can also associate a particular hardware device (e.g., for GPU-based
/// filtering) via `hw_device`.
///
/// # Example
///
/// ```rust,ignore
/// use ez_ffmpeg::core::context::filter_complex::FilterComplex;
///
/// // Build a FilterComplex from a string:
/// let my_filters = FilterComplex::from("scale=1280:720");
///
/// // Optionally specify a hardware device (e.g., "cuda"):
/// // my_filters.set_hw_device("cuda");
/// ```
pub mod filter_complex;


pub(super) mod decoder_stream;
pub(super) mod demuxer;
pub(super) mod encoder_stream;
pub(super) mod filter_graph;
pub(super) mod input_filter;
pub(super) mod muxer;
pub(super) mod obj_pool;
pub(super) mod output_filter;

/// The **null_output** module provides a custom null output implementation for FFmpeg
/// that discards all data while supporting seeking.
///
/// It exposes the [`create_null_output`](null_output::create_null_output) function, which returns an
/// [`Output`](crate::core::context::output::Output) object configured to:
/// - Discard all written data, behaving like `/dev/null`.
/// - Maintain a seekable position state using atomic operations for thread-safe, high-performance access.
/// - Support scenarios such as testing or processing streaming inputs (e.g., RTMP) where no output file is needed.
///
/// # Usage Scenario
/// This module is useful when processing FFmpeg input streams without generating an output file, such as
/// when handling RTMP streams that require a seekable output format like MP4, even if the output is discarded.
///
/// # Examples
///
/// ```rust,ignore
/// use ez_ffmpeg::Output;
/// let output: Output = create_null_output();
/// // Pass `output` to an FFmpeg context for processing
/// ```
///
/// # Performance
/// - Utilizes `AtomicU64` with `Relaxed` ordering for lock-free position tracking, ensuring efficient concurrent access.
/// - Write and seek operations are optimized to minimize overhead by avoiding locks.
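///
/// A minimal sketch of this position-tracking idea (illustrative only, not the
/// actual implementation; the closure signatures here are assumptions):
///
/// ```rust,ignore
/// use std::sync::atomic::{AtomicU64, Ordering};
/// use std::sync::Arc;
///
/// let pos = Arc::new(AtomicU64::new(0));
///
/// // Write: discard the data, advance the tracked position.
/// let write_pos = Arc::clone(&pos);
/// let write = move |buf: &[u8]| -> i32 {
///     write_pos.fetch_add(buf.len() as u64, Ordering::Relaxed);
///     buf.len() as i32
/// };
///
/// // Seek: jump the tracked position to the requested offset.
/// let seek_pos = Arc::clone(&pos);
/// let seek = move |offset: u64| -> i64 {
///     seek_pos.store(offset, Ordering::Relaxed);
///     offset as i64
/// };
/// ```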
///
/// # Notes
/// - The default output format is "mp4", but this can be modified using `set_format` as needed.
/// - Write operations assume individual buffers do not exceed `i32::MAX` bytes, which aligns with typical FFmpeg usage.
pub mod null_output;

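/// Owning wrapper around a raw `AVCodecContext` pointer. The context is freed with
/// `avcodec_free_context` when the wrapper is dropped or when a new pointer is
/// installed via `replace`.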
pub(crate) struct CodecContext {
    inner: *mut AVCodecContext,
}

unsafe impl Send for CodecContext {}
unsafe impl Sync for CodecContext {}

impl CodecContext {
    pub(crate) fn new(avcodec_context: *mut AVCodecContext) -> Self {
        Self {
            inner: avcodec_context,
        }
    }

    pub(crate) fn replace(&mut self, avcodec_context: *mut AVCodecContext) -> *mut AVCodecContext {
        let mut tmp = self.inner;
        if !tmp.is_null() {
            unsafe {
                avcodec_free_context(&mut tmp);
            }
        }
        self.inner = avcodec_context;
        tmp
    }

    pub(crate) fn null() -> Self {
        Self { inner: null_mut() }
    }

    pub(crate) fn as_mut_ptr(&self) -> *mut AVCodecContext {
        self.inner
    }

    pub(crate) fn as_ptr(&self) -> *const AVCodecContext {
        self.inner as *const AVCodecContext
    }
}

impl Drop for CodecContext {
    fn drop(&mut self) {
        unsafe {
            avcodec_free_context(&mut self.inner);
        }
    }
}

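/// Thin `Send`/`Sync` wrapper around a raw `AVStream` pointer. The stream itself is
/// owned by its `AVFormatContext`, so dropping this wrapper performs no cleanup.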
#[derive(Copy, Clone)]
pub(crate) struct Stream {
    pub(crate) inner: *mut AVStream,
}

unsafe impl Send for Stream {}
unsafe impl Sync for Stream {}

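/// A decoded frame bundled with the per-frame metadata (`FrameData`) that travels
/// with it through the pipeline.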
pub(crate) struct FrameBox {
    pub(crate) frame: ffmpeg_next::Frame,
    // stream copy or filtergraph
    pub(crate) frame_data: FrameData,
}

unsafe impl Send for FrameBox {}
unsafe impl Sync for FrameBox {}

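/// Allocates an empty `ffmpeg_next::Frame`, returning `AllocFrameError::OutOfMemory`
/// if FFmpeg cannot allocate the underlying `AVFrame`.
///
/// # Example
/// ```rust,ignore
/// let frame = frame_alloc().expect("failed to allocate frame");
/// ```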
pub fn frame_alloc() -> crate::error::Result<ffmpeg_next::Frame> {
    unsafe {
        let frame = ffmpeg_next::Frame::empty();
        if frame.as_ptr().is_null() {
            return Err(AllocFrameError::OutOfMemory.into());
        }
        Ok(frame)
    }
}

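/// Creates a `Frame` that wraps a null pointer. Callers can detect such a frame by
/// checking `frame.as_ptr().is_null()`.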
pub fn null_frame() -> ffmpeg_next::Frame {
    unsafe { ffmpeg_next::Frame::wrap(null_mut()) }
}

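/// Per-frame side data: source stream properties (framerate, bit depth, dimensions,
/// subtitle header) plus the index of the filtergraph input the frame is routed to.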
#[derive(Clone)]
pub(crate) struct FrameData {
    pub(crate) framerate: Option<AVRational>,
    pub(crate) bits_per_raw_sample: i32,
    pub(crate) input_stream_width: i32,
    pub(crate) input_stream_height: i32,
    pub(crate) subtitle_header_size: i32,
    pub(crate) subtitle_header: *mut u8,

    pub(crate) fg_input_index: usize,
}

unsafe impl Send for FrameData {}
unsafe impl Sync for FrameData {}

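/// A demuxed packet bundled with the `PacketData` metadata that accompanies it
/// through the pipeline.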
pub(crate) struct PacketBox {
    pub(crate) packet: ffmpeg_next::Packet,
    pub(crate) packet_data: PacketData,
}

unsafe impl Send for PacketBox {}
unsafe impl Sync for PacketBox {}

// optionally attached as opaque_ref to decoded AVFrames
#[derive(Clone)]
pub(crate) struct PacketData {
    // demuxer-estimated dts in AV_TIME_BASE_Q,
    // to be used when real dts is missing
    pub(crate) dts_est: i64,
    pub(crate) codec_type: AVMediaType,
    pub(crate) output_stream_index: i32,
    pub(crate) is_copy: bool,
    pub(crate) codecpar: *mut AVCodecParameters,
}

unsafe impl Send for PacketData {}
unsafe impl Sync for PacketData {}

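/// Owning wrapper around a raw `AVFormatContext`. On drop, input contexts are closed
/// with `avformat_close_input` and output contexts are freed with
/// `avformat_free_context`; when a custom read/write callback was installed
/// (`is_set_callback`), its boxed closure and AVIO resources are released as well.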
pub(crate) struct AVFormatContextBox {
    pub(crate) fmt_ctx: *mut AVFormatContext,
    pub(crate) is_input: bool,
    pub(crate) is_set_callback: bool,
}
unsafe impl Send for AVFormatContextBox {}
unsafe impl Sync for AVFormatContextBox {}

impl AVFormatContextBox {
    pub(crate) fn new(
        fmt_ctx: *mut AVFormatContext,
        is_input: bool,
        is_set_callback: bool,
    ) -> Self {
        Self {
            fmt_ctx,
            is_input,
            is_set_callback,
        }
    }
}

impl Drop for AVFormatContextBox {
    fn drop(&mut self) {
        if self.fmt_ctx.is_null() {
            return;
        }
        if self.is_input {
            in_fmt_ctx_free(self.fmt_ctx, self.is_set_callback)
        } else {
            out_fmt_ctx_free(self.fmt_ctx, self.is_set_callback)
        }
    }
}

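/// Frees an output `AVFormatContext`. If a custom write callback was installed, its
/// opaque closure and AVIO buffer are released first; otherwise any open AVIO handle
/// is closed before the context itself is freed.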
pub(crate) fn out_fmt_ctx_free(out_fmt_ctx: *mut AVFormatContext, is_set_write_callback: bool) {
    if out_fmt_ctx.is_null() {
        return;
    }
    unsafe {
        if is_set_write_callback {
            free_output_opaque((*out_fmt_ctx).pb);
        } else if (*out_fmt_ctx).flags & AVFMT_NOFILE == 0 {
            let mut pb = (*out_fmt_ctx).pb;
            if !pb.is_null() {
                avio_closep(&mut pb);
            }
        }
        avformat_free_context(out_fmt_ctx);
    }
}

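/// Releases the AVIO buffer and the boxed write-callback closure stored in `opaque`,
/// then frees the `AVIOContext` itself.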
unsafe fn free_output_opaque(mut avio_ctx: *mut AVIOContext) {
    if avio_ctx.is_null() {
        return;
    }
    if !(*avio_ctx).buffer.is_null() {
        av_freep(&mut (*avio_ctx).buffer as *mut _ as *mut c_void);
    }
    let opaque_ptr = (*avio_ctx).opaque as *mut Box<dyn FnMut(&[u8]) -> i32>;
    if !opaque_ptr.is_null() {
        let _ = Box::from_raw(opaque_ptr);
    }
    avio_context_free(&mut avio_ctx);
}

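/// Closes an input `AVFormatContext`. If a custom read callback was installed, its
/// boxed closure and AVIO resources are released before the input is closed.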
pub(crate) fn in_fmt_ctx_free(mut in_fmt_ctx: *mut AVFormatContext, is_set_read_callback: bool) {
    if in_fmt_ctx.is_null() {
        return;
    }
    if is_set_read_callback {
        unsafe {
            free_input_opaque((*in_fmt_ctx).pb);
        }
    }
    unsafe {
        avformat_close_input(&mut in_fmt_ctx);
    }
}

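/// Releases the boxed read-callback closure stored in `opaque`, then frees the AVIO
/// buffer and the `AVIOContext` itself.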
unsafe fn free_input_opaque(mut avio_ctx: *mut AVIOContext) {
    if !avio_ctx.is_null() {
        let opaque_ptr = (*avio_ctx).opaque as *mut Box<dyn FnMut(&mut [u8]) -> i32>;
        if !opaque_ptr.is_null() {
            let _ = Box::from_raw(opaque_ptr);
        }
        av_freep(&mut (*avio_ctx).buffer as *mut _ as *mut c_void);
        avio_context_free(&mut avio_ctx);
    }
}

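/// Maps a media type and input index to an FFmpeg filtergraph link label, e.g.
/// `AVMEDIA_TYPE_VIDEO` with index `0` yields `Some("0:v")`; `AVMEDIA_TYPE_UNKNOWN`
/// and `AVMEDIA_TYPE_NB` return `None`.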
#[allow(dead_code)]
pub(crate) fn type_to_linklabel(media_type: AVMediaType, index: usize) -> Option<String> {
    match media_type {
        AVMediaType::AVMEDIA_TYPE_UNKNOWN => None,
        AVMEDIA_TYPE_VIDEO => Some(format!("{index}:v")),
        AVMEDIA_TYPE_AUDIO => Some(format!("{index}:a")),
        AVMEDIA_TYPE_DATA => Some(format!("{index}:d")),
        AVMEDIA_TYPE_SUBTITLE => Some(format!("{index}:s")),
        AVMEDIA_TYPE_ATTACHMENT => Some(format!("{index}:t")),
        AVMediaType::AVMEDIA_TYPE_NB => None,
    }
}