
ez_ffmpeg/core/context/input.rs

use std::collections::HashMap;
use crate::filter::frame_pipeline::FramePipeline;

// NOTE: `Input` owns boxed `FnMut` callbacks that are not themselves `Send`;
// this impl asserts that moving an `Input` across threads is safe. As the
// callback docs below stress, user-provided callbacks must be thread-safe
// (e.g. wrap shared state such as a `File` in `Arc<Mutex<...>>`).
unsafe impl Send for Input {}

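/// Configuration for a single FFmpeg input source.
///
/// An `Input` can be built from a URL/file path or from custom read/seek
/// callbacks, and then refined with the builder-style setters defined below.
///
/// ### Example (a minimal sketch; the file name is hypothetical):
/// ```rust,ignore
/// let input = Input::from("example.mp4")      // open a local file
///     .set_start_time_us(2_000_000)           // skip the first 2 seconds
///     .set_recording_time_us(5_000_000)       // read at most 5 seconds
///     .set_hwaccel("videotoolbox");           // request HW-accelerated decoding
/// ```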
pub struct Input {
    /// The URL of the input source.
    ///
    /// This specifies the source from which the input stream is obtained. It can be:
    /// - A local file path (e.g., `file:///path/to/video.mp4`).
    /// - A network stream (e.g., `rtmp://example.com/live/stream`).
    /// - Any other URL supported by FFmpeg (e.g., `http://example.com/video.mp4`, `udp://...`).
    ///
    /// The URL must be valid. If the URL is invalid or unsupported,
    /// the library will return an error when attempting to open the input stream.
    pub(crate) url: Option<String>,

    /// A callback function for custom data reading.
    ///
    /// The `read_callback` function allows you to provide custom logic for feeding data into
    /// the input stream. This is useful for scenarios where the input does not come directly
    /// from a standard source (like a file or URL), but instead from a custom data source,
    /// such as an in-memory buffer or a custom network stream.
    ///
    /// ### Parameters:
    /// - `buf: &mut [u8]`: A mutable buffer into which the data should be written.
    ///   The callback should fill this buffer with as much data as possible, up to its length.
    ///
    /// ### Return Value:
    /// - **Positive Value**: The number of bytes successfully read into `buf`.
    /// - **`ffmpeg_sys_next::AVERROR_EOF`**: Indicates the end of the input stream. No more data will be read.
    /// - **Negative Value**: Indicates an error occurred, such as:
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)`: General I/O error.
    ///   - Custom-defined error codes depending on your implementation.
    ///
    /// ### Example:
    /// ```rust,ignore
    /// fn custom_read_callback(buf: &mut [u8]) -> i32 {
    ///     let data = b"example data stream";
    ///     let len = data.len().min(buf.len());
    ///     buf[..len].copy_from_slice(&data[..len]);
    ///     len as i32 // Return the number of bytes written into the buffer
    /// }
    /// ```
    pub(crate) read_callback: Option<Box<dyn FnMut(&mut [u8]) -> i32>>,

    /// A callback function for custom seeking within the input stream.
    ///
    /// The `seek_callback` function allows defining custom seeking behavior.
    /// This is useful for data sources that support seeking, such as files or memory-mapped data.
    /// For non-seekable streams (e.g., live network streams), this function may return an error.
    ///
    /// **FFmpeg may invoke `seek_callback` from multiple threads, so thread safety is required.**
    /// When using a `File` as an input source, **use `Arc<Mutex<File>>` to ensure safe access.**
    ///
    /// ### Parameters:
    /// - `offset: i64`: The target position in the stream for seeking.
    /// - `whence: i32`: The seek mode defining how the `offset` should be interpreted:
    ///   - `ffmpeg_sys_next::SEEK_SET` (0): Seek to an absolute position.
    ///   - `ffmpeg_sys_next::SEEK_CUR` (1): Seek relative to the current position.
    ///   - `ffmpeg_sys_next::SEEK_END` (2): Seek relative to the end of the stream.
    ///   - `ffmpeg_sys_next::SEEK_HOLE` (3): Find the next file hole (sparse file support).
    ///   - `ffmpeg_sys_next::SEEK_DATA` (4): Find the next data block (sparse file support).
    ///   - `ffmpeg_sys_next::AVSEEK_FLAG_BYTE` (2): Seek using **byte offsets** instead of timestamps.
    ///   - `ffmpeg_sys_next::AVSEEK_SIZE` (65536): Query the **total size** of the stream.
    ///   - `ffmpeg_sys_next::AVSEEK_FORCE` (131072): **Force seeking even if normally restricted.**
    ///
    /// ### Return Value:
    /// - **Positive Value**: The new offset position after seeking.
    /// - **Negative Value**: An error occurred. Common errors include:
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE)`: Seek is not supported.
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)`: General I/O error.
    ///
    /// ### Example (Handling multi-threaded access safely with `Arc<Mutex<File>>`):
    /// Since FFmpeg may call `read_callback` and `seek_callback` from different threads,
    /// **`Arc<Mutex<File>>` is used to ensure safe access across threads.**
    ///
    /// ```rust,ignore
    /// use std::fs::File;
    /// use std::io::{Seek, SeekFrom};
    /// use std::sync::{Arc, Mutex};
    ///
    /// let file = Arc::new(Mutex::new(File::open("test.mp4").expect("Failed to open file")));
    ///
    /// let seek_callback = {
    ///     let file = Arc::clone(&file);
    ///     Box::new(move |offset: i64, whence: i32| -> i64 {
    ///         let mut file = file.lock().unwrap(); // Acquire lock
    ///
    ///         // ✅ Handle AVSEEK_SIZE: Return total file size
    ///         if whence == ffmpeg_sys_next::AVSEEK_SIZE {
    ///             if let Ok(size) = file.metadata().map(|m| m.len() as i64) {
    ///                 println!("FFmpeg requested stream size: {}", size);
    ///                 return size;
    ///             }
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64;
    ///         }
    ///
    ///         // ✅ Handle AVSEEK_FORCE: Ignore this flag when processing seek
    ///         let actual_whence = whence & !ffmpeg_sys_next::AVSEEK_FORCE;
    ///
    ///         // ✅ Handle AVSEEK_FLAG_BYTE: Perform byte-based seek
    ///         if actual_whence & ffmpeg_sys_next::AVSEEK_FLAG_BYTE != 0 {
    ///             println!("FFmpeg requested byte-based seeking. Seeking to byte offset: {}", offset);
    ///             if let Ok(new_pos) = file.seek(SeekFrom::Start(offset as u64)) {
    ///                 return new_pos as i64;
    ///             }
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64;
    ///         }
    ///
    ///         // ✅ Handle SEEK_HOLE and SEEK_DATA (Linux only)
    ///         #[cfg(target_os = "linux")]
    ///         if actual_whence == ffmpeg_sys_next::SEEK_HOLE {
    ///             println!("FFmpeg requested SEEK_HOLE, but Rust std::fs does not support it.");
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///         }
    ///         #[cfg(target_os = "linux")]
    ///         if actual_whence == ffmpeg_sys_next::SEEK_DATA {
    ///             println!("FFmpeg requested SEEK_DATA, but Rust std::fs does not support it.");
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///         }
    ///
    ///         // ✅ Standard seek modes
    ///         let seek_result = match actual_whence {
    ///             ffmpeg_sys_next::SEEK_SET => file.seek(SeekFrom::Start(offset as u64)),
    ///             ffmpeg_sys_next::SEEK_CUR => file.seek(SeekFrom::Current(offset)),
    ///             ffmpeg_sys_next::SEEK_END => file.seek(SeekFrom::End(offset)),
    ///             _ => {
    ///                 println!("Unsupported seek mode: {}", whence);
    ///                 return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///             }
    ///         };
    ///
    ///         match seek_result {
    ///             Ok(new_pos) => {
    ///                 println!("Seek successful, new position: {}", new_pos);
    ///                 new_pos as i64
    ///             }
    ///             Err(e) => {
    ///                 println!("Seek failed: {}", e);
    ///                 ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64
    ///             }
    ///         }
    ///     })
    /// };
    /// ```
    pub(crate) seek_callback: Option<Box<dyn FnMut(i64, i32) -> i64>>,

    /// The pipelines that provide custom processing for decoded frames.
    ///
    /// After the input data is decoded into `Frame` objects, these frames
    /// are passed through the `frame_pipelines`. Each frame goes through
    /// a series of `FrameFilter` objects in its pipeline, allowing for
    /// customized processing (e.g., filtering, transformation, etc.).
    ///
    /// If `None`, no processing pipeline is applied to the decoded frames.
    pub(crate) frame_pipelines: Option<Vec<FramePipeline>>,

    /// The input format for the source.
    ///
    /// This field specifies which container or device format FFmpeg should use to read the input.
    /// If `None`, FFmpeg will attempt to automatically detect the format based on the source URL,
    /// file extension, or stream data.
    ///
    /// You might need to specify a format explicitly in cases where automatic detection fails or
    /// when you must force a particular format. For example:
    /// - When capturing from a specific device on macOS (using `avfoundation`).
    /// - When capturing on Windows devices (using `dshow`).
    /// - When dealing with raw streams or unusual data sources.
    pub(crate) format: Option<String>,

    /// The codec to be used for **video** decoding.
    ///
    /// If set, this forces FFmpeg to use the specified video codec for decoding.
    /// Otherwise, FFmpeg will attempt to auto-detect the best available codec.
    pub(crate) video_codec: Option<String>,

    /// The codec to be used for **audio** decoding.
    ///
    /// If set, this forces FFmpeg to use the specified audio codec for decoding.
    /// Otherwise, FFmpeg will attempt to auto-detect the best available codec.
    pub(crate) audio_codec: Option<String>,

    /// The codec to be used for **subtitle** decoding.
    ///
    /// If set, this forces FFmpeg to use the specified subtitle codec for decoding.
    /// Otherwise, FFmpeg will attempt to auto-detect the best available codec.
    pub(crate) subtitle_codec: Option<String>,

    /// If `true`, stop processing when an error is encountered on this input.
    pub(crate) exit_on_error: Option<bool>,

    /// Read the input at the specified rate.
    /// When set to `1.0`, the input is read at its native frame rate.
    pub(crate) readrate: Option<f32>,
    /// Start reading the input at this timestamp (in microseconds).
    pub(crate) start_time_us: Option<i64>,
    /// Read at most this duration (in microseconds) from the input.
    pub(crate) recording_time_us: Option<i64>,
    /// Stop reading the input at this absolute timestamp (in microseconds).
    pub(crate) stop_time_us: Option<i64>,

    /// Number of times the input stream should be looped.
    pub(crate) stream_loop: Option<i32>,

    /// Hardware acceleration name.
    /// Enables hardware-accelerated decoding when set.
    pub(crate) hwaccel: Option<String>,
    /// Selects a device for hardware-accelerated decoding.
    pub(crate) hwaccel_device: Option<String>,
    /// Selects the output format used with hardware-accelerated decoding.
    pub(crate) hwaccel_output_format: Option<String>,

    /// Input options for avformat_open_input.
    ///
    /// This field stores options that are passed to FFmpeg's `avformat_open_input()` function.
    /// These options can affect different layers of the input processing pipeline:
    ///
    /// **Format/Demuxer options:**
    /// - `probesize` - Maximum data to probe for format detection
    /// - `analyzeduration` - Duration to analyze for stream info
    /// - `fflags` - Format flags (e.g., "+genpts")
    ///
    /// **Protocol options:**
    /// - `user_agent` - HTTP User-Agent header
    /// - `timeout` - Network timeout in microseconds
    /// - `headers` - Custom HTTP headers
    ///
    /// **Device options:**
    /// - `framerate` - Input framerate (for avfoundation, dshow, etc.)
    /// - `video_size` - Input video resolution
    /// - `pixel_format` - Input pixel format
    ///
    /// **General input options:**
    /// - `thread_queue_size` - Input thread queue size
    /// - `re` - Read input at native frame rate
    ///
    /// These options allow fine-tuning of input behavior across different components
    /// of the FFmpeg input pipeline.
    pub(crate) input_opts: Option<HashMap<String, String>>,

    /// Automatically rotate video based on display matrix metadata.
    ///
    /// When enabled (default), videos with rotation metadata (common in smartphone
    /// recordings) will be automatically rotated to the correct orientation using
    /// transpose/hflip/vflip filters.
    ///
    /// Set to `false` to disable automatic rotation and preserve the original
    /// video orientation.
    ///
    /// ## FFmpeg CLI equivalent
    /// ```bash
    /// # Disable autorotate
    /// ffmpeg -autorotate 0 -i input.mp4 output.mp4
    ///
    /// # Enable autorotate (default)
    /// ffmpeg -autorotate 1 -i input.mp4 output.mp4
    /// ```
    ///
    /// ## FFmpeg source reference (FFmpeg 7.x)
    /// - Default value: `ffmpeg_demux.c:1319` (`ds->autorotate = 1`)
    /// - Flag setting: `ffmpeg_demux.c:1137` (`IFILTER_FLAG_AUTOROTATE`)
    /// - Filter insertion: `ffmpeg_filter.c:1744-1778`
    pub(crate) autorotate: Option<bool>,

    /// Timestamp scale factor for pts/dts values.
    ///
    /// This multiplier is applied to packet timestamps after ts_offset addition.
    /// Default is 1.0 (no scaling). Values must be positive.
    ///
    /// This is useful for fixing videos with incorrect timestamps or for
    /// special timestamp manipulation scenarios.
    ///
    /// ## FFmpeg CLI equivalent
    /// ```bash
    /// # Scale timestamps by 2x
    /// ffmpeg -itsscale 2.0 -i input.mp4 output.mp4
    ///
    /// # Scale timestamps by 0.5x (half speed effect on timestamps)
    /// ffmpeg -itsscale 0.5 -i input.mp4 output.mp4
    /// ```
    ///
    /// ## FFmpeg source reference (FFmpeg 7.x)
    /// - Default value: `ffmpeg_demux.c:1316` (`ds->ts_scale = 1.0`)
    /// - Application: `ffmpeg_demux.c:420-422` (applied after ts_offset)
    pub(crate) ts_scale: Option<f64>,
}

impl Input {
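    /// Creates a new `Input` from a URL or file path.
    ///
    /// This is a convenience constructor that delegates to the `From<String>`
    /// implementation below.
    ///
    /// ### Example (a minimal sketch; the path is hypothetical):
    /// ```rust,ignore
    /// let input = Input::new("file:///path/to/video.mp4");
    /// ```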
    pub fn new(url: impl Into<String>) -> Self {
        url.into().into()
    }

    /// Creates a new `Input` instance with a custom read callback.
    ///
    /// This method initializes an `Input` object that uses a provided `read_callback` function
    /// to supply data to the input stream. This is particularly useful for custom data sources
    /// such as in-memory buffers, network streams, or other non-standard input mechanisms.
    ///
    /// ### Parameters:
    /// - `read_callback: F` where `F: FnMut(&mut [u8]) -> i32`: A closure that fills the provided
    ///   mutable buffer with data and returns the number of bytes read.
    ///
    /// ### Return Value:
    /// - Returns a new `Input` instance configured with the specified `read_callback`.
    ///
    /// ### Behavior of `read_callback`:
    /// - **Positive Value**: Indicates the number of bytes successfully read.
    /// - **`ffmpeg_sys_next::AVERROR_EOF`**: Indicates the end of the stream. The library will stop requesting data.
    /// - **Negative Value**: Indicates an error occurred. For example:
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)`: Represents an input/output error.
    ///   - Other custom-defined error codes can also be returned to signal specific issues.
    ///
    /// ### Example:
    /// ```rust,ignore
    /// let input = Input::new_by_read_callback(move |buf| {
    ///     let data = b"example custom data source";
    ///     let len = data.len().min(buf.len());
    ///     buf[..len].copy_from_slice(&data[..len]);
    ///     len as i32 // Return the number of bytes written
    /// });
    /// ```
    pub fn new_by_read_callback<F>(read_callback: F) -> Self
    where
        F: FnMut(&mut [u8]) -> i32 + 'static,
    {
        (Box::new(read_callback) as Box<dyn FnMut(&mut [u8]) -> i32>).into()
    }

    /// Sets a custom seek callback for the input stream.
    ///
    /// This function assigns a user-defined function that handles seeking within the input stream.
    /// It is required when using custom data sources that support random access, such as files,
    /// memory-mapped buffers, or seekable network streams.
    ///
    /// **FFmpeg may invoke `seek_callback` from different threads.**
    /// If using a `File` as the data source, **wrap it in `Arc<Mutex<File>>`** to ensure
    /// thread-safe access across multiple threads.
    ///
    /// ### Parameters:
    /// - `seek_callback: FnMut(i64, i32) -> i64`: A function that handles seek operations.
    ///   - `offset: i64`: The target seek position in the stream.
    ///   - `whence: i32`: The seek mode, which determines how `offset` should be interpreted:
    ///     - `ffmpeg_sys_next::SEEK_SET` (0) - Seek to an absolute position.
    ///     - `ffmpeg_sys_next::SEEK_CUR` (1) - Seek relative to the current position.
    ///     - `ffmpeg_sys_next::SEEK_END` (2) - Seek relative to the end of the stream.
    ///     - `ffmpeg_sys_next::SEEK_HOLE` (3) - Find the next hole in a sparse file (Linux only).
    ///     - `ffmpeg_sys_next::SEEK_DATA` (4) - Find the next data block in a sparse file (Linux only).
    ///     - `ffmpeg_sys_next::AVSEEK_FLAG_BYTE` (2) - Seek using byte offset instead of timestamps.
    ///     - `ffmpeg_sys_next::AVSEEK_SIZE` (65536) - Query the total size of the stream.
    ///     - `ffmpeg_sys_next::AVSEEK_FORCE` (131072) - Force seeking, even if normally restricted.
    ///
    /// ### Return Value:
    /// - Returns `Self`, allowing for method chaining.
    ///
    /// ### Behavior of `seek_callback`:
    /// - **Positive Value**: The new offset position after seeking.
    /// - **Negative Value**: An error occurred, such as:
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE)`: Seek is not supported.
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)`: General I/O error.
    ///
    /// ### Example (Thread-safe seek callback using `Arc<Mutex<File>>`):
    /// Since `FFmpeg` may call `read_callback` and `seek_callback` from different threads,
    /// **use `Arc<Mutex<File>>` to ensure safe concurrent access.**
    ///
    /// ```rust,ignore
    /// use std::fs::File;
    /// use std::io::{Read, Seek, SeekFrom};
    /// use std::sync::{Arc, Mutex};
    ///
    /// // ✅ Wrap the file in Arc<Mutex<>> for safe shared access
    /// let file = Arc::new(Mutex::new(File::open("test.mp4").expect("Failed to open file")));
    ///
    /// // ✅ Thread-safe read callback
    /// let read_callback = {
    ///     let file = Arc::clone(&file);
    ///     move |buf: &mut [u8]| -> i32 {
    ///         let mut file = file.lock().unwrap();
    ///         match file.read(buf) {
    ///             Ok(0) => {
    ///                 println!("Read EOF");
    ///                 ffmpeg_sys_next::AVERROR_EOF
    ///             }
    ///             Ok(bytes_read) => bytes_read as i32,
    ///             Err(e) => {
    ///                 println!("Read error: {}", e);
    ///                 ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)
    ///             }
    ///         }
    ///     }
    /// };
    ///
    /// // ✅ Thread-safe seek callback
    /// let seek_callback = {
    ///     let file = Arc::clone(&file);
    ///     Box::new(move |offset: i64, whence: i32| -> i64 {
    ///         let mut file = file.lock().unwrap();
    ///
    ///         // ✅ Handle AVSEEK_SIZE: Return total file size
    ///         if whence == ffmpeg_sys_next::AVSEEK_SIZE {
    ///             if let Ok(size) = file.metadata().map(|m| m.len() as i64) {
    ///                 println!("FFmpeg requested stream size: {}", size);
    ///                 return size;
    ///             }
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64;
    ///         }
    ///
    ///         // ✅ Ignore AVSEEK_FORCE flag
    ///         let actual_whence = whence & !ffmpeg_sys_next::AVSEEK_FORCE;
    ///
    ///         // ✅ Handle AVSEEK_FLAG_BYTE: Perform byte-based seek
    ///         if actual_whence & ffmpeg_sys_next::AVSEEK_FLAG_BYTE != 0 {
    ///             println!("FFmpeg requested byte-based seeking. Seeking to byte offset: {}", offset);
    ///             if let Ok(new_pos) = file.seek(SeekFrom::Start(offset as u64)) {
    ///                 return new_pos as i64;
    ///             }
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64;
    ///         }
    ///
    ///         // ✅ Handle SEEK_HOLE and SEEK_DATA (Linux only)
    ///         #[cfg(target_os = "linux")]
    ///         if actual_whence == ffmpeg_sys_next::SEEK_HOLE {
    ///             println!("FFmpeg requested SEEK_HOLE, but Rust std::fs does not support it.");
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///         }
    ///         #[cfg(target_os = "linux")]
    ///         if actual_whence == ffmpeg_sys_next::SEEK_DATA {
    ///             println!("FFmpeg requested SEEK_DATA, but Rust std::fs does not support it.");
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///         }
    ///
    ///         // ✅ Standard seek modes
    ///         let seek_result = match actual_whence {
    ///             ffmpeg_sys_next::SEEK_SET => file.seek(SeekFrom::Start(offset as u64)),
    ///             ffmpeg_sys_next::SEEK_CUR => file.seek(SeekFrom::Current(offset)),
    ///             ffmpeg_sys_next::SEEK_END => file.seek(SeekFrom::End(offset)),
    ///             _ => {
    ///                 println!("Unsupported seek mode: {}", whence);
    ///                 return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///             }
    ///         };
    ///
    ///         match seek_result {
    ///             Ok(new_pos) => {
    ///                 println!("Seek successful, new position: {}", new_pos);
    ///                 new_pos as i64
    ///             }
    ///             Err(e) => {
    ///                 println!("Seek failed: {}", e);
    ///                 ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64
    ///             }
    ///         }
    ///     })
    /// };
    ///
    /// let input = Input::new_by_read_callback(read_callback).set_seek_callback(seek_callback);
    /// ```
    pub fn set_seek_callback<F>(mut self, seek_callback: F) -> Self
    where
        F: FnMut(i64, i32) -> i64 + 'static,
    {
        self.seek_callback = Some(Box::new(seek_callback) as Box<dyn FnMut(i64, i32) -> i64>);
        self
    }

    /// Replaces the entire frame-processing pipeline with a new sequence
    /// of transformations for **post-decoding** frames on this `Input`.
    ///
    /// This method clears any previously set pipelines and replaces them with the provided list.
    ///
    /// # Parameters
    /// * `frame_pipelines` - A list of [`FramePipeline`] instances defining the
    ///   transformations to apply to decoded frames.
    ///
    /// # Returns
    /// * `Self` - Returns the modified `Input`, enabling method chaining.
    ///
    /// # Example
    /// ```rust,ignore
    /// let input = Input::from("my_video.mp4")
    ///     .set_frame_pipelines(vec![
    ///         FramePipelineBuilder::new(AVMediaType::AVMEDIA_TYPE_VIDEO).filter("opengl", Box::new(my_filter)),
    ///         // Additional pipelines...
    ///     ]);
    /// ```
    pub fn set_frame_pipelines(mut self, frame_pipelines: Vec<impl Into<FramePipeline>>) -> Self {
        self.frame_pipelines = Some(frame_pipelines.into_iter().map(|frame_pipeline| frame_pipeline.into()).collect());
        self
    }

    /// Adds a single [`FramePipeline`] to the existing pipeline list.
    ///
    /// If no pipelines are currently defined, this method creates a new pipeline list.
    /// Otherwise, it appends the provided pipeline to the existing transformations.
    ///
    /// # Parameters
    /// * `frame_pipeline` - A [`FramePipeline`] defining a transformation.
    ///
    /// # Returns
    /// * `Self` - Returns the modified `Input`, enabling method chaining.
    ///
    /// # Example
    /// ```rust,ignore
    /// let input = Input::from("my_video.mp4")
    ///     .add_frame_pipeline(FramePipelineBuilder::new(AVMediaType::AVMEDIA_TYPE_VIDEO).filter("opengl", Box::new(my_filter)).build())
    ///     .add_frame_pipeline(FramePipelineBuilder::new(AVMediaType::AVMEDIA_TYPE_AUDIO).filter("my_custom_filter1", Box::new(...)).filter("my_custom_filter2", Box::new(...)).build());
    /// ```
    pub fn add_frame_pipeline(mut self, frame_pipeline: impl Into<FramePipeline>) -> Self {
        if self.frame_pipelines.is_none() {
            self.frame_pipelines = Some(vec![frame_pipeline.into()]);
        } else {
            self.frame_pipelines
                .as_mut()
                .unwrap()
                .push(frame_pipeline.into());
        }
        self
    }

    /// Sets the input format for the container or device.
    ///
    /// By default, if no format is specified,
    /// FFmpeg will attempt to detect the format automatically. However, certain
    /// use cases require specifying the format explicitly:
    /// - Using device-specific inputs (e.g., `avfoundation` on macOS, `dshow` on Windows).
    /// - Handling raw streams or formats that FFmpeg may not detect automatically.
    ///
    /// ### Parameters:
    /// - `format`: A string specifying the desired input format (e.g., `mp4`, `flv`, `avfoundation`).
    ///
    /// ### Return Value:
    /// - Returns the `Input` instance with the newly set format.
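    ///
    /// ### Example (a minimal sketch; the file name is hypothetical):
    /// ```rust,ignore
    /// // Force the FLV demuxer when the file extension cannot be trusted.
    /// let input = Input::from("capture.bin").set_format("flv");
    /// ```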
    pub fn set_format(mut self, format: impl Into<String>) -> Self {
        self.format = Some(format.into());
        self
    }

    /// Sets the **video codec** to be used for decoding.
    ///
    /// By default, FFmpeg will automatically select an appropriate video codec
    /// based on the input format and available decoders. However, this method
    /// allows you to override that selection and force a specific codec.
    ///
    /// # Common Video Codecs:
    /// | Codec | Description |
    /// |-------|-------------|
    /// | `h264` | H.264 (AVC), widely supported and efficient |
    /// | `hevc` | H.265 (HEVC), better compression at higher complexity |
    /// | `vp9` | VP9, open-source alternative to H.265 |
    /// | `av1` | AV1, newer open-source codec with improved compression |
    /// | `mpeg4` | MPEG-4 Part 2, older but still used in some cases |
    ///
    /// # Arguments
    /// * `video_codec` - A string representing the desired video codec (e.g., `"h264"`, `"hevc"`).
    ///
    /// # Returns
    /// * `Self` - Returns the modified `Input` struct, allowing for method chaining.
    ///
    /// # Example:
    /// ```rust,ignore
    /// let input = Input::from("video.mp4").set_video_codec("h264");
    /// ```
    pub fn set_video_codec(mut self, video_codec: impl Into<String>) -> Self {
        self.video_codec = Some(video_codec.into());
        self
    }

    /// Sets the **audio codec** to be used for decoding.
    ///
    /// By default, FFmpeg will automatically select an appropriate audio codec
    /// based on the input format and available decoders. However, this method
    /// allows you to specify a preferred codec.
    ///
    /// # Common Audio Codecs:
    /// | Codec | Description |
    /// |-------|-------------|
    /// | `aac` | AAC, commonly used for MP4 and streaming |
    /// | `mp3` | MP3, widely supported but lower efficiency |
    /// | `opus` | Opus, high-quality open-source codec |
    /// | `vorbis` | Vorbis, used in Ogg containers |
    /// | `flac` | FLAC, lossless audio format |
    ///
    /// # Arguments
    /// * `audio_codec` - A string representing the desired audio codec (e.g., `"aac"`, `"mp3"`).
    ///
    /// # Returns
    /// * `Self` - Returns the modified `Input` struct, allowing for method chaining.
    ///
    /// # Example:
    /// ```rust,ignore
    /// let input = Input::from("audio.mp3").set_audio_codec("aac");
    /// ```
    pub fn set_audio_codec(mut self, audio_codec: impl Into<String>) -> Self {
        self.audio_codec = Some(audio_codec.into());
        self
    }

    /// Sets the **subtitle codec** to be used for decoding.
    ///
    /// By default, FFmpeg will automatically select an appropriate subtitle codec
    /// based on the input format and available decoders. This method lets you specify
    /// a particular subtitle codec.
    ///
    /// # Common Subtitle Codecs:
    /// | Codec | Description |
    /// |-------|-------------|
    /// | `ass` | Advanced SubStation Alpha (ASS) subtitles |
    /// | `srt` | SubRip Subtitle format (SRT) |
    /// | `mov_text` | Subtitles in MP4 containers |
    /// | `subrip` | Plain-text subtitle format |
    ///
    /// # Arguments
    /// * `subtitle_codec` - A string representing the desired subtitle codec (e.g., `"mov_text"`, `"ass"`, `"srt"`).
    ///
    /// # Returns
    /// * `Self` - Returns the modified `Input` struct, allowing for method chaining.
    ///
    /// # Example:
    /// ```rust,ignore
    /// let input = Input::from("movie.mkv").set_subtitle_codec("ass");
    /// ```
    pub fn set_subtitle_codec(mut self, subtitle_codec: impl Into<String>) -> Self {
        self.subtitle_codec = Some(subtitle_codec.into());
        self
    }

    /// Enables or disables **exit on error** behavior for the input.
    ///
    /// If set to `true`, FFmpeg will exit (stop processing) if it encounters any
    /// decoding or demuxing error on this input. If set to `false` (the default),
    /// FFmpeg may attempt to continue despite errors, skipping damaged portions.
    ///
    /// # Parameters
    /// - `exit_on_error`: `true` to stop on errors, `false` to keep going.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust,ignore
    /// let input = Input::from("test.mp4")
    ///     .set_exit_on_error(true);
    /// ```
    pub fn set_exit_on_error(mut self, exit_on_error: bool) -> Self {
        self.exit_on_error = Some(exit_on_error);
        self
    }

    /// Sets a **read rate** for this input, controlling how quickly frames are read.
    ///
    /// - If set to `1.0`, frames are read at their native frame rate.
    /// - If set to another value (e.g., `0.5` or `2.0`), FFmpeg may attempt to read
    ///   slower or faster, simulating changes in real-time playback speed.
    ///
    /// # Parameters
    /// - `rate`: A floating-point value indicating the read rate multiplier.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust,ignore
    /// let input = Input::from("video.mp4")
    ///     .set_readrate(0.5); // read at half speed
    /// ```
    pub fn set_readrate(mut self, rate: f32) -> Self {
        self.readrate = Some(rate);
        self
    }

    /// Sets the **start time** (in microseconds) from which to begin reading.
    ///
    /// FFmpeg will skip all data before this timestamp. This can be used to
    /// implement “input seeking” or to only process a portion of the input.
    ///
    /// # Parameters
    /// - `start_time_us`: The timestamp (in microseconds) at which to start reading.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust,ignore
    /// let input = Input::from("long_clip.mp4")
    ///     .set_start_time_us(2_000_000); // Start at 2 seconds
    /// ```
    pub fn set_start_time_us(mut self, start_time_us: i64) -> Self {
        self.start_time_us = Some(start_time_us);
        self
    }

    /// Sets the **recording time** (in microseconds) for this input.
    ///
    /// FFmpeg will only read for the specified duration, ignoring data past this
    /// limit. This can be used to trim or limit how much of the input is processed.
    ///
    /// # Parameters
    /// - `recording_time_us`: The number of microseconds to read from the input.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust,ignore
    /// let input = Input::from("long_clip.mp4")
    ///     .set_recording_time_us(5_000_000); // Only read 5 seconds
    /// ```
    pub fn set_recording_time_us(mut self, recording_time_us: i64) -> Self {
        self.recording_time_us = Some(recording_time_us);
        self
    }

    /// Sets a **stop time** (in microseconds) beyond which input data will be ignored.
    ///
    /// This is similar to [`set_recording_time_us`](Self::set_recording_time_us) but
    /// specifically references an absolute timestamp in the stream. Once this timestamp
    /// is reached, FFmpeg stops reading.
    ///
    /// # Parameters
    /// - `stop_time_us`: The absolute timestamp (in microseconds) at which to stop reading.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust,ignore
    /// let input = Input::from("long_clip.mp4")
    ///     .set_stop_time_us(10_000_000); // Stop reading at 10 seconds
    /// ```
    pub fn set_stop_time_us(mut self, stop_time_us: i64) -> Self {
        self.stop_time_us = Some(stop_time_us);
        self
    }

    /// Sets the number of **loops** to perform on this input stream.
    ///
    /// If FFmpeg reaches the end of the input, it can loop back and start from the
    /// beginning, effectively repeating the content `stream_loop` times.
    /// A value of `-1` loops the input indefinitely (mirroring FFmpeg's `-stream_loop` option).
    ///
    /// # Parameters
    /// - `count`: How many additional times to loop (e.g. `2` repeats the input two extra times, `-1` loops indefinitely).
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust,ignore
    /// let input = Input::from("music.mp3")
    ///     .set_stream_loop(2); // play the input 2 extra times
    /// ```
    pub fn set_stream_loop(mut self, count: i32) -> Self {
        self.stream_loop = Some(count);
        self
    }

    /// Specifies a **hardware acceleration** name for decoding this input.
    ///
    /// Common values might include `"cuda"`, `"vaapi"`, `"dxva2"`, `"videotoolbox"`, etc.
    /// Whether it works depends on your FFmpeg build and the hardware you have available.
    ///
    /// # Parameters
    /// - `hwaccel_name`: A string naming the hardware accel to use.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust,ignore
    /// let input = Input::from("video.mp4")
    ///     .set_hwaccel("cuda");
    /// ```
    pub fn set_hwaccel(mut self, hwaccel_name: impl Into<String>) -> Self {
        self.hwaccel = Some(hwaccel_name.into());
        self
    }

    /// Selects a **hardware acceleration device** for decoding.
    ///
    /// For example, if you have multiple GPUs or want to specify a device node (like
    /// `"/dev/dri/renderD128"` on Linux for VAAPI), you can pass it here. This option
    /// must match the hardware accel you set via [`set_hwaccel`](Self::set_hwaccel) if
    /// you expect decoding to succeed.
    ///
    /// # Parameters
    /// - `device`: A string indicating the device path or identifier.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust,ignore
    /// let input = Input::from("video.mp4")
    ///     .set_hwaccel("vaapi")
    ///     .set_hwaccel_device("/dev/dri/renderD128");
    /// ```
    pub fn set_hwaccel_device(mut self, device: impl Into<String>) -> Self {
        self.hwaccel_device = Some(device.into());
        self
    }

    /// Sets the **output pixel format** to be used with hardware-accelerated decoding.
    ///
    /// Certain hardware decoders can produce various output pixel formats. This option
    /// lets you specify which format (e.g., `"nv12"`, `"vaapi"`, etc.) is used during
    /// the decode process.
    /// Must be compatible with the chosen hardware accel and device.
    ///
    /// # Parameters
    /// - `format`: A string naming the desired output pixel format (e.g. `"nv12"`).
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust,ignore
    /// let input = Input::from("video.mp4")
    ///     .set_hwaccel("cuda")
    ///     .set_hwaccel_output_format("cuda");
    /// ```
    pub fn set_hwaccel_output_format(mut self, format: impl Into<String>) -> Self {
        self.hwaccel_output_format = Some(format.into());
        self
    }

    /// Sets a single input option for avformat_open_input.
    ///
    /// This method configures options that will be passed to FFmpeg's `avformat_open_input()`
    /// function. The options can control behavior at different levels including format detection,
    /// protocol handling, device configuration, and general input processing.
    ///
    /// **Example Usage:**
    /// ```rust,ignore
    /// let input = Input::new("avfoundation:0")
    ///     .set_input_opt("framerate", "30")
    ///     .set_input_opt("probesize", "5000000");
    /// ```
    ///
    /// ### Parameters:
    /// - `key`: The option name (e.g., `"framerate"`, `"probesize"`, `"timeout"`).
    /// - `value`: The option value (e.g., `"30"`, `"5000000"`, `"10000000"`).
    ///
    /// ### Return Value:
    /// - Returns the modified `Input` instance for method chaining.
    pub fn set_input_opt(mut self, key: impl Into<String>, value: impl Into<String>) -> Self {
        if let Some(ref mut opts) = self.input_opts {
            opts.insert(key.into(), value.into());
        } else {
            let mut opts = HashMap::new();
            opts.insert(key.into(), value.into());
            self.input_opts = Some(opts);
        }
        self
    }

    /// Sets multiple input options at once for avformat_open_input.
    ///
    /// This method allows setting multiple options in a single call, which will all be
    /// passed to FFmpeg's `avformat_open_input()` function. Each key-value pair will be
    /// inserted into the options map, overwriting any existing keys with the same name.
    ///
    /// **Example Usage:**
    /// ```rust,ignore
    /// let input = Input::new("http://example.com/stream.m3u8")
    ///     .set_input_opts(vec![
    ///         ("user_agent", "MyApp/1.0"),
    ///         ("timeout", "10000000"),
    ///         ("probesize", "5000000"),
    ///     ]);
    /// ```
    ///
    /// ### Parameters:
    /// - `opts`: A vector of key-value pairs representing input options.
    ///
    /// ### Return Value:
    /// - Returns the modified `Input` instance for method chaining.
    pub fn set_input_opts(mut self, opts: Vec<(impl Into<String>, impl Into<String>)>) -> Self {
        if let Some(ref mut input_opts) = self.input_opts {
            for (key, value) in opts {
                input_opts.insert(key.into(), value.into());
            }
        } else {
            let mut input_opts = HashMap::new();
            for (key, value) in opts {
                input_opts.insert(key.into(), value.into());
            }
            self.input_opts = Some(input_opts);
        }
        self
    }

    /// Sets whether to automatically rotate video based on display matrix metadata.
    ///
    /// When enabled (default is `true`), videos with rotation metadata (common in
    /// smartphone recordings) will be automatically rotated to the correct orientation
    /// using transpose/hflip/vflip filters.
    ///
    /// # Parameters
    /// - `autorotate`: `true` to enable automatic rotation (default), `false` to disable.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # FFmpeg CLI equivalent
    /// ```bash
    /// ffmpeg -autorotate 0 -i input.mp4 output.mp4
    /// ```
    ///
    /// # Example
    /// ```rust,ignore
    /// // Disable automatic rotation to preserve original video orientation
    /// let input = Input::from("smartphone_video.mp4")
    ///     .set_autorotate(false);
    /// ```
    pub fn set_autorotate(mut self, autorotate: bool) -> Self {
        self.autorotate = Some(autorotate);
        self
    }

    /// Sets a timestamp scale factor for pts/dts values.
    ///
    /// This multiplier is applied to packet timestamps after ts_offset addition.
    /// Default is `1.0` (no scaling). Values must be positive.
    ///
    /// This is useful for fixing videos with incorrect timestamps or for
    /// special timestamp manipulation scenarios.
    ///
    /// # Parameters
    /// - `scale`: A positive floating-point value for timestamp scaling.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # FFmpeg CLI equivalent
    /// ```bash
    /// ffmpeg -itsscale 2.0 -i input.mp4 output.mp4
    /// ```
    ///
    /// # Example
    /// ```rust,ignore
    /// // Scale timestamps by 2x (double the playback speed effect on timestamps)
    /// let input = Input::from("video.mp4")
    ///     .set_ts_scale(2.0);
    /// ```
    pub fn set_ts_scale(mut self, scale: f64) -> Self {
        self.ts_scale = Some(scale);
        self
    }
}

impl From<Box<dyn FnMut(&mut [u8]) -> i32>> for Input {
    fn from(read_callback: Box<dyn FnMut(&mut [u8]) -> i32>) -> Self {
        Self {
            url: None,
            read_callback: Some(read_callback),
            seek_callback: None,
            frame_pipelines: None,
            format: None,
            video_codec: None,
            audio_codec: None,
            subtitle_codec: None,
            exit_on_error: None,
            readrate: None,
            start_time_us: None,
            recording_time_us: None,
            stop_time_us: None,
            stream_loop: None,
            hwaccel: None,
            hwaccel_device: None,
            hwaccel_output_format: None,
            input_opts: None,
            autorotate: None,
            ts_scale: None,
        }
    }
}

impl From<String> for Input {
    fn from(url: String) -> Self {
        Self {
            url: Some(url),
            read_callback: None,
            seek_callback: None,
            frame_pipelines: None,
            format: None,
            video_codec: None,
            audio_codec: None,
            subtitle_codec: None,
            exit_on_error: None,
            readrate: None,
            start_time_us: None,
            recording_time_us: None,
            stop_time_us: None,
            stream_loop: None,
            hwaccel: None,
            hwaccel_device: None,
            hwaccel_output_format: None,
            input_opts: None,
            autorotate: None,
            ts_scale: None,
        }
    }
}

impl From<&str> for Input {
    fn from(url: &str) -> Self {
        Self::from(String::from(url))
    }
}

#[cfg(test)]
mod tests {
    use crate::core::context::input::Input;

    #[test]
    fn test_new_by_read_callback() {
        let data_source = b"example custom data source".to_vec();
        let _input = Input::new_by_read_callback(move |buf| {
            let len = data_source.len().min(buf.len());
            buf[..len].copy_from_slice(&data_source[..len]);
            len as i32 // Return the number of bytes written
        });

        let data_source2 = b"example custom data source2".to_vec();
        let _input = Input::new_by_read_callback(move |buf2| {
            let len = data_source2.len().min(buf2.len());
            buf2[..len].copy_from_slice(&data_source2[..len]);
            len as i32 // Return the number of bytes written
        });
    }
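
    // A minimal additional test (an assumed sketch, not part of the original
    // suite): it exercises the `From<&str>` conversion and a couple of the
    // builder-style setters defined above.
    #[test]
    fn test_from_str_and_builder_chain() {
        let input = Input::from("video.mp4")
            .set_video_codec("h264")
            .set_start_time_us(2_000_000);

        assert_eq!(input.url.as_deref(), Some("video.mp4"));
        assert_eq!(input.video_codec.as_deref(), Some("h264"));
        assert_eq!(input.start_time_us, Some(2_000_000));
    }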
}