// ez_ffmpeg/core/context/mod.rs
1use crate::error::AllocFrameError;
2use ffmpeg_sys_next::AVMediaType::{
3 AVMEDIA_TYPE_ATTACHMENT, AVMEDIA_TYPE_AUDIO, AVMEDIA_TYPE_DATA, AVMEDIA_TYPE_SUBTITLE,
4 AVMEDIA_TYPE_VIDEO,
5};
6use ffmpeg_sys_next::{
7 av_freep, avcodec_free_context, avformat_close_input, avformat_free_context, avio_closep,
8 avio_context_free, AVCodecContext, AVCodecParameters, AVFormatContext, AVIOContext,
9 AVMediaType, AVRational, AVStream, AVFMT_NOFILE,
10};
11use std::ffi::c_void;
12use std::ptr::null_mut;
13
14use ffmpeg_context::{InputOpaque, OutputOpaque};
15
16
17/// The **ffmpeg_context** module is responsible for assembling FFmpeg’s configuration:
18/// inputs, outputs, codecs, filters, and other parameters needed to construct a
19/// complete media processing pipeline.
20///
21/// # Example
22/// ```rust,ignore
23///
24/// // Build an FFmpeg context with one input, some filter settings, and one output
25/// let context = FfmpegContext::builder()
26/// .input("test.mp4")
27/// .filter_desc("hue=s=0")
28/// .output("output.mp4")
29/// .build()
30/// .unwrap();
31/// // The context now holds all info needed for an FFmpeg job.
32/// ```
33pub mod ffmpeg_context;
34
35/// The **ffmpeg_context_builder** module defines the builder pattern for creating
36/// [`FfmpegContext`](ffmpeg_context::FfmpegContext) objects.
37///
38/// It exposes the [`FfmpegContextBuilder`](ffmpeg_context_builder::FfmpegContextBuilder) struct, which allows you to:
39/// - Configure multiple [`Input`](input::Input) and
40/// [`Output`](output::Output) streams.
41/// - Attach filter descriptions via [`FilterComplex`](crate::core::context::filter_complex::FilterComplex)
42/// or inline strings (e.g., `"scale=1280:720"`, `"hue=s=0"`).
43/// - Produce a finished `FfmpegContext` that can then be executed by
44/// [`FfmpegScheduler`](crate::core::scheduler::ffmpeg_scheduler::FfmpegScheduler).
45///
46/// # Examples
47///
48/// ```rust,ignore
49/// // 1. Create a builder (usually via FfmpegContext::builder())
50/// let builder = FfmpegContext::builder();
51///
52/// // 2. Add inputs, outputs, and filters
53/// let ffmpeg_context = builder
54/// .input("input.mp4")
55/// .filter_desc("hue=s=0")
56/// .output("output.mp4")
57/// .build()
58/// .expect("Failed to build FfmpegContext");
59///
60/// // 3. Use `ffmpeg_context` with FfmpegScheduler (e.g., `.start()` and `.wait()`).
61/// ```
62pub mod ffmpeg_context_builder;
63
64/// The **input** module defines the [`Input`](crate::core::context::input::Input) struct,
65/// representing an FFmpeg input source. An input can be:
66/// - A file path or URL (e.g., `"video.mp4"`, `rtmp://example.com/live/stream`).
67/// - A **custom data source** via a `read_callback` (and optionally `seek_callback`) for
68/// advanced scenarios like in-memory buffers or network protocols.
69///
70/// You can also specify **frame pipelines** to apply custom [`FrameFilter`](crate::core::filter::frame_filter::FrameFilter)
71/// transformations **after decoding** but **before** the frames move on to the rest of the pipeline.
72///
73/// # Example
74///
75/// ```rust,ignore
76/// use ez_ffmpeg::core::context::input::Input;
77///
78/// // Basic file or network URL:
79/// let file_input: Input = "example.mp4".into();
80///
81/// // Or a custom read callback:
82/// let custom_input = Input::new_by_read_callback(|buf| {
83/// // Fill `buf` with data from your source
84/// // Return the number of bytes read, or negative for errors
85/// 0
86/// });
87/// ```
88pub mod input;
89
90/// The **output** module defines the [`Output`](crate::core::context::output::Output) struct,
91/// representing an FFmpeg output destination. An output may be:
92/// - A file path or URL (e.g., `"output.mp4"`, `rtmp://...`).
93/// - A **custom write callback** that processes encoded data (e.g., storing it
94/// in-memory or sending it over a custom network protocol).
95///
96/// You can specify additional details such as:
97/// - **Container format** (e.g., `"mp4"`, `"flv"`, `"mkv"`).
98/// - **Video/Audio/Subtitle codecs** (e.g., `"h264"`, `"aac"`, `"mov_text"`).
99/// - **Frame pipelines** to apply [`FrameFilter`](crate::core::filter::frame_filter::FrameFilter)
100/// transformations **before encoding**.
101///
102/// # Example
103///
104/// ```rust,ignore
105/// use ez_ffmpeg::core::context::output::Output;
106///
107/// // Basic file/URL output:
108/// let file_output: Output = "output.mp4".into();
109///
110/// // Or a custom write callback:
111/// let custom_output = Output::new_by_write_callback(|encoded_data| {
112/// // Write `encoded_data` somewhere
113/// encoded_data.len() as i32
114/// }).set_format("mp4");
115/// ```
116pub mod output;
117
118/// The **filter_complex** module defines the [`FilterComplex`](crate::core::context::filter_complex::FilterComplex)
119/// struct, which encapsulates one or more FFmpeg filter descriptions (e.g., `"scale=1280:720"`,
120/// `"hue=s=0"`, etc.). You can use `FilterComplex` to construct more advanced or multi-step
121/// filter graphs than simple inline strings allow.
122///
123/// `FilterComplex` can also associate a particular hardware device (e.g., for GPU-based
124/// filtering) via `hw_device`.
125///
126/// # Example
127///
128/// ```rust,ignore
129/// use ez_ffmpeg::core::context::filter_complex::FilterComplex;
130///
131/// // Build a FilterComplex from a string:
132/// let my_filters = FilterComplex::from("scale=1280:720");
133///
134/// // Optionally specify a hardware device (e.g., "cuda"):
135/// // my_filters.set_hw_device("cuda");
136/// ```
137pub mod filter_complex;
138
139
140pub(super) mod decoder_stream;
141pub(super) mod demuxer;
142pub(super) mod encoder_stream;
143pub(super) mod filter_graph;
144pub(super) mod input_filter;
145pub(super) mod muxer;
146pub(super) mod obj_pool;
147pub(super) mod output_filter;
148
149/// The **null_output** module provides a custom null output implementation for FFmpeg
150/// that discards all data while supporting seeking.
151///
/// It exposes the [`create_null_output`](null_output::create_null_output) function, which returns an
/// [`Output`](crate::core::context::output::Output) object configured to:
154/// - Discard all written data, behaving like `/dev/null`.
155/// - Maintain a seekable position state using atomic operations for thread-safe, high-performance access.
156/// - Support scenarios such as testing or processing streaming inputs (e.g., RTMP) where no output file is needed.
157///
158/// # Usage Scenario
159/// This module is useful when processing FFmpeg input streams without generating an output file, such as
160/// when handling RTMP streams that require a seekable output format like MP4, even if the output is discarded.
161///
162/// # Examples
163///
164/// ```rust,ignore
165/// use ez_ffmpeg::Output;
166/// let output: Output = create_null_output();
167/// // Pass `output` to an FFmpeg context for processing
168/// ```
169///
170/// # Performance
171/// - Utilizes `AtomicU64` with `Relaxed` ordering for lock-free position tracking, ensuring efficient concurrent access.
172/// - Write and seek operations are optimized to minimize overhead by avoiding locks.
173///
174/// # Notes
175/// - The default output format is "mp4", but this can be modified using `set_format` as needed.
176/// - Write operations assume individual buffers do not exceed `i32::MAX` bytes, which aligns with typical FFmpeg usage.
177pub mod null_output;
178
/// Owning RAII wrapper around a raw `AVCodecContext` pointer.
///
/// The wrapped pointer (which may be null, see [`CodecContext::null`]) is
/// released with `avcodec_free_context` when this value is dropped.
pub(crate) struct CodecContext {
    // Raw FFmpeg codec context owned by this wrapper; null means "no context".
    inner: *mut AVCodecContext,
}

// SAFETY: CodecContext can be sent to another thread. The raw AVCodecContext pointer
// is only accessed from the thread that owns the CodecContext, and the crate ensures
// single-threaded access to codec operations.
unsafe impl Send for CodecContext {}
// SAFETY: CodecContext can be shared across threads because the crate's architecture
// ensures that codec operations are synchronized at the scheduler level. Direct
// concurrent access to AVCodecContext is prevented by the ownership model.
unsafe impl Sync for CodecContext {}
191
impl CodecContext {
    /// Takes ownership of an already-allocated `AVCodecContext`.
    pub(crate) fn new(avcodec_context: *mut AVCodecContext) -> Self {
        Self {
            inner: avcodec_context,
        }
    }

    /// Frees the currently held context (if any) and stores `avcodec_context`
    /// in its place.
    ///
    /// NOTE(review): `avcodec_free_context` nulls the pointer passed to it, so
    /// whenever a context was previously held the returned value is null — the
    /// return value must never be treated as a live pointer to the old context.
    pub(crate) fn replace(&mut self, avcodec_context: *mut AVCodecContext) -> *mut AVCodecContext {
        let mut tmp = self.inner;
        if !tmp.is_null() {
            unsafe {
                // SAFETY: `tmp` is non-null and owned by this wrapper; it is
                // freed exactly once here, before the replacement is stored.
                avcodec_free_context(&mut tmp);
            }
        }
        self.inner = avcodec_context;
        tmp
    }

    /// Creates a wrapper that holds no context (null pointer); dropping it is a no-op.
    pub(crate) fn null() -> Self {
        Self { inner: null_mut() }
    }

    /// Returns the raw mutable pointer without transferring ownership.
    pub(crate) fn as_mut_ptr(&self) -> *mut AVCodecContext {
        self.inner
    }

    /// Returns the raw const pointer without transferring ownership.
    pub(crate) fn as_ptr(&self) -> *const AVCodecContext {
        self.inner as *const AVCodecContext
    }
}
222
impl Drop for CodecContext {
    fn drop(&mut self) {
        unsafe {
            // SAFETY: `self.inner` is owned by this wrapper and freed exactly
            // once here; avcodec_free_context tolerates a pointer-to-null, so
            // dropping a `CodecContext::null()` is a no-op.
            avcodec_free_context(&mut self.inner);
        }
    }
}
230
/// Thin `Copy` handle to an `AVStream`.
///
/// This wrapper never frees the stream: per the SAFETY notes below, the
/// pointer is owned by the parent `AVFormatContext`, which must outlive
/// every `Stream` copy.
#[derive(Copy, Clone)]
pub(crate) struct Stream {
    // Borrowed pointer into the parent AVFormatContext's stream array.
    pub(crate) inner: *mut AVStream,
}

// SAFETY: Stream can be sent to another thread. The raw AVStream pointer is owned
// by the parent AVFormatContext, and the crate ensures the format context outlives
// all Stream references.
unsafe impl Send for Stream {}
// SAFETY: Stream is Copy and contains only a raw pointer. Concurrent read access to
// AVStream metadata is safe. The crate architecture ensures no concurrent mutations
// to the underlying AVStream occur during stream processing.
unsafe impl Sync for Stream {}
244
/// A frame bundled with the per-frame metadata ([`FrameData`]) it needs as it
/// travels through the pipeline.
pub(crate) struct FrameBox {
    pub(crate) frame: ffmpeg_next::Frame,
    // stream copy or filtergraph
    pub(crate) frame_data: FrameData,
}

// SAFETY: FrameBox can be sent to another thread. It contains an ffmpeg_next::Frame
// (which wraps AVFrame) and FrameData, both of which are only accessed from the owning thread.
unsafe impl Send for FrameBox {}
// SAFETY: FrameBox is Sync because the scheduler ensures frames are processed sequentially
// within their pipeline. No concurrent access occurs to the underlying AVFrame data.
unsafe impl Sync for FrameBox {}
257
258pub fn frame_alloc() -> crate::error::Result<ffmpeg_next::Frame> {
259 unsafe {
260 let frame = ffmpeg_next::Frame::empty();
261 if frame.as_ptr().is_null() {
262 return Err(AllocFrameError::OutOfMemory.into());
263 }
264 Ok(frame)
265 }
266}
267
268pub fn null_frame() -> ffmpeg_next::Frame {
269 unsafe { ffmpeg_next::Frame::wrap(null_mut()) }
270}
271
/// Per-frame side information carried alongside a frame through the pipeline.
#[derive(Clone)]
pub(crate) struct FrameData {
    // Frame rate of the source stream, when known.
    pub(crate) framerate: Option<AVRational>,
    pub(crate) bits_per_raw_sample: i32,
    // Dimensions of the input stream this frame originated from.
    pub(crate) input_stream_width: i32,
    pub(crate) input_stream_height: i32,
    // Subtitle header buffer and its byte length. Per the Send SAFETY note
    // below the pointer is owned here — NOTE(review): `FrameData` derives
    // `Clone`, so clones share this raw pointer; confirm the free side
    // releases it exactly once.
    pub(crate) subtitle_header_size: i32,
    pub(crate) subtitle_header: *mut u8,

    // Index of the filtergraph input this frame is routed to.
    pub(crate) fg_input_index: usize,
}

// SAFETY: FrameData can be sent to another thread. The subtitle_header pointer is owned
// by the FrameData and only accessed from the processing thread.
unsafe impl Send for FrameData {}
// SAFETY: FrameData is Sync because concurrent access is prevented by the scheduler's
// sequential frame processing within each pipeline.
unsafe impl Sync for FrameData {}
290
/// A packet bundled with the per-packet metadata ([`PacketData`]) it needs as
/// it travels through the pipeline.
pub(crate) struct PacketBox {
    pub(crate) packet: ffmpeg_next::Packet,
    pub(crate) packet_data: PacketData,
}

// SAFETY: PacketBox can be sent to another thread. It contains an ffmpeg_next::Packet
// and PacketData, both only accessed from the owning thread.
unsafe impl Send for PacketBox {}
// SAFETY: PacketBox is Sync because the scheduler ensures packets are processed sequentially.
// No concurrent access occurs to the underlying AVPacket data.
unsafe impl Sync for PacketBox {}
302
// optionally attached as opaque_ref to decoded AVFrames
/// Per-packet side information produced by the demuxer.
#[derive(Clone)]
pub(crate) struct PacketData {
    // demuxer-estimated dts in AV_TIME_BASE_Q,
    // to be used when real dts is missing
    pub(crate) dts_est: i64,
    // Media type of the stream this packet belongs to.
    pub(crate) codec_type: AVMediaType,
    // Destination stream index in the output container.
    pub(crate) output_stream_index: i32,
    // true when this packet is stream-copied (bypasses decode/encode).
    pub(crate) is_copy: bool,
    // Borrowed codec parameters; read-only per the SAFETY notes below.
    pub(crate) codecpar: *mut AVCodecParameters,
}

// SAFETY: PacketData can be sent to another thread. The codecpar pointer references
// data owned by the parent stream/context and is only read, not mutated.
unsafe impl Send for PacketData {}
// SAFETY: PacketData is Sync because the codecpar pointer is only used for reading
// codec parameters, and concurrent reads are safe.
unsafe impl Sync for PacketData {}
321
/// Owning wrapper for a raw `AVFormatContext`, released on drop.
pub(crate) struct AVFormatContextBox {
    // Raw context; freed in Drop via `in_fmt_ctx_free` / `out_fmt_ctx_free`.
    pub(crate) fmt_ctx: *mut AVFormatContext,
    // true => input context (closed with avformat_close_input);
    // false => output context (freed with avformat_free_context).
    pub(crate) is_input: bool,
    // Whether a custom read/write callback installed opaque AVIO state that
    // must be torn down together with the context.
    pub(crate) is_set_callback: bool,
}
// SAFETY: AVFormatContextBox can be sent to another thread. The fmt_ctx pointer is only
// accessed from the thread that owns the box, and the crate ensures proper cleanup.
unsafe impl Send for AVFormatContextBox {}
// SAFETY: AVFormatContextBox is Sync because the crate architecture ensures the format
// context is only accessed from its owning demuxer/muxer thread during processing.
unsafe impl Sync for AVFormatContextBox {}
333
334impl AVFormatContextBox {
335 pub(crate) fn new(
336 fmt_ctx: *mut AVFormatContext,
337 is_input: bool,
338 is_set_callback: bool,
339 ) -> Self {
340 Self {
341 fmt_ctx,
342 is_input,
343 is_set_callback,
344 }
345 }
346}
347
348impl Drop for AVFormatContextBox {
349 fn drop(&mut self) {
350 if self.fmt_ctx.is_null() {
351 return;
352 }
353 if self.is_input {
354 in_fmt_ctx_free(self.fmt_ctx, self.is_set_callback)
355 } else {
356 out_fmt_ctx_free(self.fmt_ctx, self.is_set_callback)
357 }
358 }
359}
360
/// Frees an output `AVFormatContext` and its AVIO resources.
///
/// When a custom write callback was installed, the callback's opaque state,
/// AVIO buffer and AVIO context are released; otherwise, for muxers that own
/// a file handle (`AVFMT_NOFILE` not set), the AVIO context is closed with
/// `avio_closep`. The format context itself is freed last in both cases.
pub(crate) fn out_fmt_ctx_free(out_fmt_ctx: *mut AVFormatContext, is_set_write_callback: bool) {
    if out_fmt_ctx.is_null() {
        return;
    }
    unsafe {
        if is_set_write_callback {
            // Custom I/O: tear down the write-callback opaque + buffer + AVIO context.
            free_output_opaque((*out_fmt_ctx).pb);
        } else if (*out_fmt_ctx).flags & AVFMT_NOFILE == 0 {
            // FFmpeg-opened file I/O: close the AVIO context. The copy in
            // `(*out_fmt_ctx).pb` goes stale here, but the context is freed
            // immediately below so it is never read again.
            let mut pb = (*out_fmt_ctx).pb;
            if !pb.is_null() {
                avio_closep(&mut pb);
            }
        }
        avformat_free_context(out_fmt_ctx);
    }
}
377
/// Releases the AVIO context created for a custom write callback: the I/O
/// buffer, the boxed `OutputOpaque` callback state, and the context itself.
///
/// # Safety
/// `avio_ctx` must be null or a valid AVIO context whose `opaque` field is
/// either null or a `Box<OutputOpaque>` leaked at installation time, and it
/// must not be used after this call.
unsafe fn free_output_opaque(mut avio_ctx: *mut AVIOContext) {
    if avio_ctx.is_null() {
        return;
    }
    if !(*avio_ctx).buffer.is_null() {
        // av_freep takes a pointer-to-pointer and nulls it after freeing.
        av_freep(&mut (*avio_ctx).buffer as *mut _ as *mut c_void);
    }
    let opaque_ptr = (*avio_ctx).opaque as *mut OutputOpaque;
    if !opaque_ptr.is_null() {
        // Reclaim the Box leaked when the callback was installed; dropping it
        // runs the opaque state's destructor.
        let _ = Box::from_raw(opaque_ptr);
    }
    avio_context_free(&mut avio_ctx);
}
391
392pub(crate) fn in_fmt_ctx_free(mut in_fmt_ctx: *mut AVFormatContext, is_set_read_callback: bool) {
393 if in_fmt_ctx.is_null() {
394 return;
395 }
396 if is_set_read_callback {
397 unsafe {
398 free_input_opaque((*in_fmt_ctx).pb);
399 }
400 }
401 unsafe {
402 avformat_close_input(&mut in_fmt_ctx);
403 }
404}
405
/// Releases the AVIO context created for a custom read callback: the boxed
/// `InputOpaque` callback state, the I/O buffer, and the context itself.
///
/// # Safety
/// `avio_ctx` must be null or a valid AVIO context whose `opaque` field is
/// either null or a `Box<InputOpaque>` leaked at installation time, and it
/// must not be used after this call.
unsafe fn free_input_opaque(mut avio_ctx: *mut AVIOContext) {
    if !avio_ctx.is_null() {
        let opaque_ptr = (*avio_ctx).opaque as *mut InputOpaque;
        if !opaque_ptr.is_null() {
            // Reclaim the Box leaked when the callback was installed; dropping
            // it runs the opaque state's destructor.
            let _ = Box::from_raw(opaque_ptr);
        }
        // av_freep tolerates a pointer-to-null, so no buffer pre-check is needed.
        av_freep(&mut (*avio_ctx).buffer as *mut _ as *mut c_void);
        avio_context_free(&mut avio_ctx);
    }
}
416
417#[allow(dead_code)]
418pub(crate) fn type_to_linklabel(media_type: AVMediaType, index: usize) -> Option<String> {
419 match media_type {
420 AVMediaType::AVMEDIA_TYPE_UNKNOWN => None,
421 AVMEDIA_TYPE_VIDEO => Some(format!("{index}:v")),
422 AVMEDIA_TYPE_AUDIO => Some(format!("{index}:a")),
423 AVMEDIA_TYPE_DATA => Some(format!("{index}:d")),
424 AVMEDIA_TYPE_SUBTITLE => Some(format!("{index}:s")),
425 AVMEDIA_TYPE_ATTACHMENT => Some(format!("{index}:t")),
426 AVMediaType::AVMEDIA_TYPE_NB => None,
427 }
428}