// ez_ffmpeg/core/context/mod.rs

1use crate::error::AllocFrameError;
2use ffmpeg_sys_next::AVMediaType::{
3    AVMEDIA_TYPE_ATTACHMENT, AVMEDIA_TYPE_AUDIO, AVMEDIA_TYPE_DATA, AVMEDIA_TYPE_SUBTITLE,
4    AVMEDIA_TYPE_VIDEO,
5};
6use ffmpeg_sys_next::{
7    av_freep, avcodec_free_context, avformat_close_input, avformat_free_context, avio_closep,
8    avio_context_free, AVCodecContext, AVCodecParameters, AVFormatContext, AVIOContext,
9    AVMediaType, AVRational, AVStream, AVFMT_NOFILE,
10};
11use std::ffi::c_void;
12use std::ptr::null_mut;
13
14
/// The **ffmpeg_context** module is responsible for assembling FFmpeg’s configuration:
/// inputs, outputs, codecs, filters, and other parameters needed to construct a
/// complete media processing pipeline.
///
/// # Example
/// ```rust,ignore
/// // (marked `ignore`: the example omits the `use` for FfmpegContext, so it
/// // cannot compile as a standalone doctest)
/// // Build an FFmpeg context with one input, some filter settings, and one output
/// let context = FfmpegContext::builder()
///     .input("test.mp4")
///     .filter_desc("hue=s=0")
///     .output("output.mp4")
///     .build()
///     .unwrap();
/// // The context now holds all info needed for an FFmpeg job.
/// ```
pub mod ffmpeg_context;
32
/// The **ffmpeg_context_builder** module defines the builder pattern for creating
/// [`FfmpegContext`](ffmpeg_context::FfmpegContext) objects.
///
/// It exposes the [`FfmpegContextBuilder`](ffmpeg_context_builder::FfmpegContextBuilder) struct, which allows you to:
/// - Configure multiple [`Input`](input::Input) and
///   [`Output`](output::Output) streams.
/// - Attach filter descriptions via [`FilterComplex`](crate::core::context::filter_complex::FilterComplex)
///   or inline strings (e.g., `"scale=1280:720"`, `"hue=s=0"`).
/// - Produce a finished `FfmpegContext` that can then be executed by
///   [`FfmpegScheduler`](crate::core::scheduler::ffmpeg_scheduler::FfmpegScheduler).
///
/// # Examples
///
/// ```rust,ignore
/// // (marked `ignore`: the example omits the `use` for FfmpegContext, so it
/// // cannot compile as a standalone doctest)
/// // 1. Create a builder (usually via FfmpegContext::builder())
/// let builder = FfmpegContext::builder();
///
/// // 2. Add inputs, outputs, and filters
/// let ffmpeg_context = builder
///     .input("input.mp4")
///     .filter_desc("hue=s=0")
///     .output("output.mp4")
///     .build()
///     .expect("Failed to build FfmpegContext");
///
/// // 3. Use `ffmpeg_context` with FfmpegScheduler (e.g., `.start()` and `.wait()`).
/// ```
pub mod ffmpeg_context_builder;
61
/// The **input** module defines the [`Input`](crate::core::context::input::Input) struct,
/// representing an FFmpeg input source. An input can be:
/// - A file path or URL (e.g., `"video.mp4"`, `rtmp://example.com/live/stream`).
/// - A **custom data source** via a `read_callback` (and optionally `seek_callback`) for
///   advanced scenarios like in-memory buffers or network protocols.
///
/// You can also specify **frame pipelines** to apply custom [`FrameFilter`](crate::core::filter::frame_filter::FrameFilter)
/// transformations **after decoding** but **before** the frames move on to the rest of the pipeline.
///
/// # Example
///
/// ```rust
/// use ez_ffmpeg::core::context::input::Input;
///
/// // Basic file or network URL:
/// let file_input: Input = "example.mp4".into();
///
/// // Or a custom read callback:
/// let custom_input = Input::new_by_read_callback(|buf| {
///     // Fill `buf` with data from your source
///     // Return the number of bytes read, or negative for errors
///     0
/// });
/// ```
pub mod input;
87
/// The **output** module defines the [`Output`](crate::core::context::output::Output) struct,
/// representing an FFmpeg output destination. An output may be:
/// - A file path or URL (e.g., `"output.mp4"`, `rtmp://...`).
/// - A **custom write callback** that processes encoded data (e.g., storing it
///   in-memory or sending it over a custom network protocol).
///
/// You can specify additional details such as:
/// - **Container format** (e.g., `"mp4"`, `"flv"`, `"mkv"`).
/// - **Video/Audio/Subtitle codecs** (e.g., `"h264"`, `"aac"`, `"mov_text"`).
/// - **Frame pipelines** to apply [`FrameFilter`](crate::core::filter::frame_filter::FrameFilter)
///   transformations **before encoding**.
///
/// # Example
///
/// ```rust
/// use ez_ffmpeg::core::context::output::Output;
///
/// // Basic file/URL output:
/// let file_output: Output = "output.mp4".into();
///
/// // Or a custom write callback:
/// let custom_output = Output::new_by_write_callback(|encoded_data| {
///     // Write `encoded_data` somewhere
///     encoded_data.len() as i32
/// }).set_format("mp4");
/// ```
pub mod output;
115
/// The **filter_complex** module defines the [`FilterComplex`](crate::core::context::filter_complex::FilterComplex)
/// struct, which encapsulates one or more FFmpeg filter descriptions (e.g., `"scale=1280:720"`,
/// `"hue=s=0"`, etc.). You can use `FilterComplex` to construct more advanced or multi-step
/// filter graphs than simple inline strings allow.
///
/// `FilterComplex` can also associate a particular hardware device (e.g., for GPU-based
/// filtering) via `hw_device`.
///
/// # Example
///
/// ```rust
/// use ez_ffmpeg::core::context::filter_complex::FilterComplex;
///
/// // Build a FilterComplex from a string:
/// let my_filters = FilterComplex::from("scale=1280:720");
///
/// // Optionally specify a hardware device (e.g., "cuda"):
/// // my_filters.set_hw_device("cuda");
/// ```
pub mod filter_complex;
136
137
// Internal building blocks of the processing pipeline; visible to the parent
// module only and not part of the public API.
pub(super) mod decoder_stream;
pub(super) mod demuxer;
pub(super) mod encoder_stream;
pub(super) mod filter_graph;
pub(super) mod input_filter;
pub(super) mod muxer;
pub(super) mod obj_pool;
pub(super) mod output_filter;
146
/// Owning wrapper around a raw FFmpeg `AVCodecContext` pointer.
/// The context is released with `avcodec_free_context` when this is dropped.
pub(crate) struct CodecContext {
    // May be null (see `CodecContext::null`); freed in `Drop`.
    inner: *mut AVCodecContext,
}

// SAFETY: this wrapper has exclusive ownership of the pointer, and an FFmpeg
// codec context is not bound to the thread that allocated it.
// NOTE(review): `Sync` additionally assumes callers never mutate through
// `as_mut_ptr` from multiple threads concurrently — confirm at call sites.
unsafe impl Send for CodecContext {}
unsafe impl Sync for CodecContext {}
153
154impl CodecContext {
155    pub(crate) fn new(avcodec_context: *mut AVCodecContext) -> Self {
156        Self {
157            inner: avcodec_context,
158        }
159    }
160
161    pub(crate) fn replace(&mut self, avcodec_context: *mut AVCodecContext) -> *mut AVCodecContext {
162        let mut tmp = self.inner;
163        if !tmp.is_null() {
164            unsafe {
165                avcodec_free_context(&mut tmp);
166            }
167        }
168        self.inner = avcodec_context;
169        tmp
170    }
171
172    pub(crate) fn null() -> Self {
173        Self { inner: null_mut() }
174    }
175
176    pub(crate) fn as_mut_ptr(&self) -> *mut AVCodecContext {
177        self.inner
178    }
179
180    pub(crate) fn as_ptr(&self) -> *const AVCodecContext {
181        self.inner as *const AVCodecContext
182    }
183}
184
impl Drop for CodecContext {
    fn drop(&mut self) {
        // SAFETY: `inner` is either null (a no-op for avcodec_free_context)
        // or a codec context exclusively owned by this wrapper; the call also
        // resets `inner` to null.
        unsafe {
            avcodec_free_context(&mut self.inner);
        }
    }
}
192
/// Thin `Copy` handle around a raw `AVStream` pointer so a stream reference
/// can be passed between pipeline components.
#[derive(Copy, Clone)]
pub(crate) struct Stream {
    pub(crate) inner: *mut AVStream,
}

// SAFETY: only the pointer value is shared; the pointed-to AVStream is owned
// by its AVFormatContext. NOTE(review): soundness relies on that format
// context outliving every copy of this handle — confirm at call sites.
unsafe impl Send for Stream {}
unsafe impl Sync for Stream {}
200
/// A decoded frame bundled with its per-frame metadata as it travels through
/// the pipeline.
pub(crate) struct FrameBox {
    pub(crate) frame: ffmpeg_next::Frame,
    // stream copy or filtergraph
    pub(crate) frame_data: FrameData,
}

// SAFETY: required because `ffmpeg_next::Frame` wraps a raw AVFrame pointer.
// NOTE(review): assumes a FrameBox is moved between threads as a whole and
// never accessed concurrently — confirm in the scheduler code.
unsafe impl Send for FrameBox {}
unsafe impl Sync for FrameBox {}
209
210pub fn frame_alloc() -> crate::error::Result<ffmpeg_next::Frame> {
211    unsafe {
212        let frame = ffmpeg_next::Frame::empty();
213        if frame.as_ptr().is_null() {
214            return Err(AllocFrameError::OutOfMemory.into());
215        }
216        Ok(frame)
217    }
218}
219
/// Produces a `Frame` wrapping a null pointer.
/// NOTE(review): presumably used as an end-of-stream/flush sentinel — confirm
/// with callers. Consumers must check `as_ptr().is_null()` before touching
/// any frame data; dereferencing this frame is undefined behavior.
pub fn null_frame() -> ffmpeg_next::Frame {
    // SAFETY: wrapping null is sound only as long as the result is treated
    // purely as a sentinel and its data is never accessed.
    unsafe { ffmpeg_next::Frame::wrap(null_mut()) }
}
223
/// Per-frame side information carried alongside a frame in a [`FrameBox`].
#[derive(Clone)]
pub(crate) struct FrameData {
    // Stream frame rate, when known.
    pub(crate) framerate: Option<AVRational>,
    pub(crate) bits_per_raw_sample: i32,
    // Dimensions of the originating input stream.
    pub(crate) input_stream_width: i32,
    pub(crate) input_stream_height: i32,
    // Subtitle extradata header and its length. NOTE(review): the pointer is
    // not freed here, so ownership must lie with the producer — confirm its
    // lifetime covers every clone of this struct.
    pub(crate) subtitle_header_size: i32,
    pub(crate) subtitle_header: *mut u8,

    // Index of the filtergraph input this frame feeds.
    pub(crate) fg_input_index: usize,
}

// SAFETY: required because of the raw `subtitle_header` pointer; soundness
// relies on the producer keeping that buffer alive and immutable while
// copies of this struct circulate between threads — confirm in review.
unsafe impl Send for FrameData {}
unsafe impl Sync for FrameData {}
238
/// An encoded packet bundled with the routing/timing metadata the muxer side
/// needs.
pub(crate) struct PacketBox {
    pub(crate) packet: ffmpeg_next::Packet,
    pub(crate) packet_data: PacketData,
}

// SAFETY: required because `ffmpeg_next::Packet` wraps a raw AVPacket
// pointer. NOTE(review): assumes a PacketBox is handed between threads as a
// whole and never aliased concurrently — confirm in the scheduler code.
unsafe impl Send for PacketBox {}
unsafe impl Sync for PacketBox {}
246
// optionally attached as opaque_ref to decoded AVFrames
/// Routing and timing metadata that travels with each demuxed packet.
#[derive(Clone)]
pub(crate) struct PacketData {
    // demuxer-estimated dts in AV_TIME_BASE_Q,
    // to be used when real dts is missing
    pub(crate) dts_est: i64,
    // Media type of the originating stream (video/audio/subtitle/...).
    pub(crate) codec_type: AVMediaType,
    pub(crate) output_stream_index: i32,
    // NOTE(review): presumably true when the stream is remuxed (stream copy)
    // rather than transcoded — confirm against the demuxer/muxer code.
    pub(crate) is_copy: bool,
    // Codec parameters of the source stream. Not freed here; NOTE(review):
    // assumed owned by the demuxer's AVFormatContext — confirm it outlives
    // every clone of this struct.
    pub(crate) codecpar: *mut AVCodecParameters,
}

// SAFETY: required because of the raw `codecpar` pointer; soundness relies
// on the owning format context outliving the packets in flight.
unsafe impl Send for PacketData {}
unsafe impl Sync for PacketData {}
261
/// Owning wrapper around an `AVFormatContext` that remembers whether it is an
/// input or output context and whether a custom I/O callback was installed,
/// so `Drop` can run the matching teardown path.
pub(crate) struct AVFormatContextBox {
    pub(crate) fmt_ctx: *mut AVFormatContext,
    // true => freed via in_fmt_ctx_free; false => via out_fmt_ctx_free.
    pub(crate) is_input: bool,
    // true => a boxed read/write callback is stored in pb.opaque and must be
    // released together with the context.
    pub(crate) is_set_callback: bool,
}
// SAFETY: this wrapper has exclusive ownership of `fmt_ctx`; the context is
// only freed once, in `Drop`. NOTE(review): `Sync` assumes no concurrent
// mutation through the raw pointer — confirm at call sites.
unsafe impl Send for AVFormatContextBox {}
unsafe impl Sync for AVFormatContextBox {}
269
270impl AVFormatContextBox {
271    pub(crate) fn new(
272        fmt_ctx: *mut AVFormatContext,
273        is_input: bool,
274        is_set_callback: bool,
275    ) -> Self {
276        Self {
277            fmt_ctx,
278            is_input,
279            is_set_callback,
280        }
281    }
282}
283
284impl Drop for AVFormatContextBox {
285    fn drop(&mut self) {
286        if self.fmt_ctx.is_null() {
287            return;
288        }
289        if self.is_input {
290            in_fmt_ctx_free(self.fmt_ctx, self.is_set_callback)
291        } else {
292            out_fmt_ctx_free(self.fmt_ctx, self.is_set_callback)
293        }
294    }
295}
296
297pub(crate) fn out_fmt_ctx_free(out_fmt_ctx: *mut AVFormatContext, is_set_write_callback: bool) {
298    if out_fmt_ctx.is_null() {
299        return;
300    }
301    unsafe {
302        if is_set_write_callback {
303            free_output_opaque((*out_fmt_ctx).pb);
304        } else if (*out_fmt_ctx).flags & AVFMT_NOFILE == 0 {
305            let mut pb = (*out_fmt_ctx).pb;
306            if !pb.is_null() {
307                avio_closep(&mut pb);
308            }
309        }
310        avformat_free_context(out_fmt_ctx);
311    }
312}
313
/// Releases everything attached to a callback-backed output `AVIOContext`:
/// the AVIO buffer, the boxed write-callback stored in `opaque`, and finally
/// the context itself. A null context is a no-op.
///
/// # Safety
/// `avio_ctx` must be null or a context whose `opaque` field holds a pointer
/// produced by `Box::into_raw` of a `Box<dyn FnMut(&[u8]) -> i32>`.
/// NOTE(review): confirm this matches the writer installation site.
unsafe fn free_output_opaque(mut avio_ctx: *mut AVIOContext) {
    if avio_ctx.is_null() {
        return;
    }
    // Free the I/O buffer first; av_freep also nulls the field.
    if !(*avio_ctx).buffer.is_null() {
        av_freep(&mut (*avio_ctx).buffer as *mut _ as *mut c_void);
    }
    // Reconstitute the boxed callback so Rust drops it.
    let opaque_ptr = (*avio_ctx).opaque as *mut Box<dyn FnMut(&[u8]) -> i32>;
    if !opaque_ptr.is_null() {
        let _ = Box::from_raw(opaque_ptr);
    }
    avio_context_free(&mut avio_ctx);
}
327
328pub(crate) fn in_fmt_ctx_free(mut in_fmt_ctx: *mut AVFormatContext, is_set_read_callback: bool) {
329    if in_fmt_ctx.is_null() {
330        return;
331    }
332    if is_set_read_callback {
333        unsafe {
334            free_input_opaque((*in_fmt_ctx).pb);
335        }
336    }
337    unsafe {
338        avformat_close_input(&mut in_fmt_ctx);
339    }
340}
341
/// Releases everything attached to a callback-backed input `AVIOContext`:
/// the boxed read-callback stored in `opaque`, the AVIO buffer, and finally
/// the context itself. A null context is a no-op.
///
/// # Safety
/// `avio_ctx` must be null or a context whose `opaque` field holds a pointer
/// produced by `Box::into_raw` of a `Box<dyn FnMut(&mut [u8]) -> i32>`.
/// NOTE(review): confirm this matches the reader installation site.
unsafe fn free_input_opaque(mut avio_ctx: *mut AVIOContext) {
    if !avio_ctx.is_null() {
        // Reconstitute the boxed callback so Rust drops it.
        let opaque_ptr = (*avio_ctx).opaque as *mut Box<dyn FnMut(&mut [u8]) -> i32>;
        if !opaque_ptr.is_null() {
            let _ = Box::from_raw(opaque_ptr);
        }
        // No null check needed here: av_freep tolerates a null *buffer and
        // nulls the field after freeing.
        av_freep(&mut (*avio_ctx).buffer as *mut _ as *mut c_void);
        avio_context_free(&mut avio_ctx);
    }
}
352
353#[allow(dead_code)]
354pub(crate) fn type_to_linklabel(media_type: AVMediaType, index: usize) -> Option<String> {
355    match media_type {
356        AVMediaType::AVMEDIA_TYPE_UNKNOWN => None,
357        AVMEDIA_TYPE_VIDEO => Some(format!("{index}:v")),
358        AVMEDIA_TYPE_AUDIO => Some(format!("{index}:a")),
359        AVMEDIA_TYPE_DATA => Some(format!("{index}:d")),
360        AVMEDIA_TYPE_SUBTITLE => Some(format!("{index}:s")),
361        AVMEDIA_TYPE_ATTACHMENT => Some(format!("{index}:t")),
362        AVMediaType::AVMEDIA_TYPE_NB => None,
363    }
364}