
ez_ffmpeg/core/context/input.rs

1use std::collections::HashMap;
2use crate::filter::frame_pipeline::FramePipeline;
3
4// Note: Input is Send if all callback fields are Send.
5// We require `+ Send` on callback types to ensure this.
6// Input is !Sync because FnMut callbacks require exclusive access.
7
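/// Describes a single input source for an FFmpeg job: where the data comes from
/// (a URL or a custom read callback) plus optional decoding and demuxing settings.
///
/// A minimal sketch of the builder-style usage (file name and settings are illustrative):
/// ```rust,ignore
/// let input = Input::from("video.mp4")
///     .set_start_time_us(2_000_000) // skip the first 2 seconds
///     .set_hwaccel("videotoolbox"); // request hardware-accelerated decoding
/// ```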
8pub struct Input {
9    /// The URL of the input source.
10    ///
11    /// This specifies the source from which the input stream is obtained. It can be:
12    /// - A local file path (e.g., `file:///path/to/video.mp4`).
13    /// - A network stream (e.g., `rtmp://example.com/live/stream`).
14    /// - Any other URL supported by FFmpeg (e.g., `http://example.com/video.mp4`, `udp://...`).
15    ///
16    /// The URL must be valid. If the URL is invalid or unsupported,
17    /// the library will return an error when attempting to open the input stream.
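    ///
    /// A minimal sketch of supplying a URL (paths are illustrative):
    /// ```rust,ignore
    /// let local = Input::from("file:///path/to/video.mp4");
    /// let live = Input::from("rtmp://example.com/live/stream");
    /// ```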
18    pub(crate) url: Option<String>,
19
20    /// A callback function for custom data reading.
21    ///
22    /// The `read_callback` function allows you to provide custom logic for feeding data into
23    /// the input stream. This is useful for scenarios where the input does not come directly
24    /// from a standard source (like a file or URL), but instead from a custom data source,
25    /// such as an in-memory buffer or a custom network stream.
26    ///
27    /// ### Parameters:
28    /// - `buf: &mut [u8]`: A mutable buffer into which the data should be written.
29    ///   The callback should fill this buffer with as much data as possible, up to its length.
30    ///
31    /// ### Return Value:
32    /// - **Positive Value**: The number of bytes successfully read into `buf`.
33    /// - **`ffmpeg_sys_next::AVERROR_EOF`**: Indicates the end of the input stream. No more data will be read.
34    /// - **Negative Value**: Indicates an error occurred, such as:
35    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)`: General I/O error.
36    ///   - Custom-defined error codes depending on your implementation.
37    ///
38    /// ### Example:
39    /// ```rust,ignore
40    /// fn custom_read_callback(buf: &mut [u8]) -> i32 {
41    ///     let data = b"example data stream";
42    ///     let len = data.len().min(buf.len());
43    ///     buf[..len].copy_from_slice(&data[..len]);
44    ///     len as i32 // Return the number of bytes written into the buffer
45    /// }
46    /// ```
47    pub(crate) read_callback: Option<Box<dyn FnMut(&mut [u8]) -> i32 + Send>>,
48
49    /// A callback function for custom seeking within the input stream.
50    ///
51    /// The `seek_callback` function allows defining custom seeking behavior.
52    /// This is useful for data sources that support seeking, such as files or memory-mapped data.
53    /// For non-seekable streams (e.g., live network streams), this function may return an error.
54    ///
55    /// **FFmpeg may invoke `seek_callback` from multiple threads, so thread safety is required.**
56    /// When using a `File` as an input source, **use `Arc<Mutex<File>>` to ensure safe access.**
57    ///
58    /// ### Parameters:
59    /// - `offset: i64`: The target position in the stream for seeking.
60    /// - `whence: i32`: The seek mode defining how the `offset` should be interpreted:
61    ///   - `ffmpeg_sys_next::SEEK_SET` (0): Seek to an absolute position.
62    ///   - `ffmpeg_sys_next::SEEK_CUR` (1): Seek relative to the current position.
63    ///   - `ffmpeg_sys_next::SEEK_END` (2): Seek relative to the end of the stream.
64    ///   - `ffmpeg_sys_next::SEEK_DATA` (3): Find the next data block (sparse file support).
65    ///   - `ffmpeg_sys_next::SEEK_HOLE` (4): Find the next file hole (sparse file support).
66    ///   - `ffmpeg_sys_next::AVSEEK_FLAG_BYTE` (2): Seek using **byte offsets** instead of timestamps.
67    ///   - `ffmpeg_sys_next::AVSEEK_SIZE` (65536): Query the **total size** of the stream.
68    ///   - `ffmpeg_sys_next::AVSEEK_FORCE` (131072): **Force seeking even if normally restricted.**
69    ///
70    /// ### Return Value:
71    /// - **Positive Value**: The new offset position after seeking.
72    /// - **Negative Value**: An error occurred. Common errors include:
73    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE)`: Seek is not supported.
74    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)`: General I/O error.
75    ///
76    /// ### Example (Handling multi-threaded access safely with `Arc<Mutex<File>>`):
77    /// Since FFmpeg may call `read_callback` and `seek_callback` from different threads,
78    /// **`Arc<Mutex<File>>` is used to ensure safe access across threads.**
79    ///
80    /// ```rust,ignore
81    /// use std::fs::File;
82    /// use std::io::{Seek, SeekFrom};
83    /// use std::sync::{Arc, Mutex};
84    ///
85    /// let file = Arc::new(Mutex::new(File::open("test.mp4").expect("Failed to open file")));
86    ///
87    /// let seek_callback = {
88    ///     let file = Arc::clone(&file);
89    ///     Box::new(move |offset: i64, whence: i32| -> i64 {
90    ///         let mut file = file.lock().unwrap(); // Acquire lock
91    ///
92    ///         // ✅ Handle AVSEEK_SIZE: Return total file size
93    ///         if whence == ffmpeg_sys_next::AVSEEK_SIZE {
94    ///             if let Ok(size) = file.metadata().map(|m| m.len() as i64) {
95    ///                 println!("FFmpeg requested stream size: {}", size);
96    ///                 return size;
97    ///             }
98    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64;
99    ///         }
100    ///
101    ///         // ✅ Handle AVSEEK_FORCE: Ignore this flag when processing seek
102    ///         let actual_whence = whence & !ffmpeg_sys_next::AVSEEK_FORCE;
103    ///
104    ///         // ✅ Handle AVSEEK_FLAG_BYTE: Perform byte-based seek
105    ///         if actual_whence & ffmpeg_sys_next::AVSEEK_FLAG_BYTE != 0 {
106    ///             println!("FFmpeg requested byte-based seeking. Seeking to byte offset: {}", offset);
107    ///             if let Ok(new_pos) = file.seek(SeekFrom::Start(offset as u64)) {
108    ///                 return new_pos as i64;
109    ///             }
110    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64;
111    ///         }
112    ///
113    ///         // ✅ Handle SEEK_HOLE and SEEK_DATA (Linux only)
114    ///         #[cfg(target_os = "linux")]
115    ///         if actual_whence == ffmpeg_sys_next::SEEK_HOLE {
116    ///             println!("FFmpeg requested SEEK_HOLE, but Rust std::fs does not support it.");
117    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
118    ///         }
119    ///         #[cfg(target_os = "linux")]
120    ///         if actual_whence == ffmpeg_sys_next::SEEK_DATA {
121    ///             println!("FFmpeg requested SEEK_DATA, but Rust std::fs does not support it.");
122    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
123    ///         }
124    ///
125    ///         // ✅ Standard seek modes
126    ///         let seek_result = match actual_whence {
127    ///             ffmpeg_sys_next::SEEK_SET => file.seek(SeekFrom::Start(offset as u64)),
128    ///             ffmpeg_sys_next::SEEK_CUR => file.seek(SeekFrom::Current(offset)),
129    ///             ffmpeg_sys_next::SEEK_END => file.seek(SeekFrom::End(offset)),
130    ///             _ => {
131    ///                 println!("Unsupported seek mode: {}", whence);
132    ///                 return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
133    ///             }
134    ///         };
135    ///
136    ///         match seek_result {
137    ///             Ok(new_pos) => {
138    ///                 println!("Seek successful, new position: {}", new_pos);
139    ///                 new_pos as i64
140    ///             }
141    ///             Err(e) => {
142    ///                 println!("Seek failed: {}", e);
143    ///                 ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64
144    ///             }
145    ///         }
146    ///     })
147    /// };
148    /// ```
149    pub(crate) seek_callback: Option<Box<dyn FnMut(i64, i32) -> i64 + Send>>,
150
151    /// The pipeline that provides custom processing for decoded frames.
152    ///
153    /// After the input data is decoded into `Frame` objects, these frames
154    /// are passed through the `frame_pipeline`. Each frame goes through
155    /// a series of `FrameFilter` objects in the pipeline, allowing for
156    /// customized processing (e.g., filtering, transformation, etc.).
157    ///
158    /// If `None`, no processing pipeline is applied to the decoded frames.
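    ///
    /// See [`set_frame_pipelines`](Self::set_frame_pipelines) and
    /// [`add_frame_pipeline`](Self::add_frame_pipeline) for usage examples.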
159    pub(crate) frame_pipelines: Option<Vec<FramePipeline>>,
160
161    /// The input format for the source.
162    ///
163    /// This field specifies which container or device format FFmpeg should use to read the input.
164    /// If `None`, FFmpeg will attempt to automatically detect the format based on the source URL,
165    /// file extension, or stream data.
166    ///
167    /// You might need to specify a format explicitly in cases where automatic detection fails or
168    /// when you must force a particular format. For example:
169    /// - When capturing from a specific device on macOS (using `avfoundation`).
170    /// - When capturing on Windows devices (using `dshow`).
171    /// - When dealing with raw streams or unusual data sources.
172    pub(crate) format: Option<String>,
173
174    /// The codec to be used for **video** decoding.
175    ///
176    /// If set, this forces FFmpeg to use the specified video codec for decoding.
177    /// Otherwise, FFmpeg will attempt to auto-detect the best available codec.
178    pub(crate) video_codec: Option<String>,
179
180    /// The codec to be used for **audio** decoding.
181    ///
182    /// If set, this forces FFmpeg to use the specified audio codec for decoding.
183    /// Otherwise, FFmpeg will attempt to auto-detect the best available codec.
184    pub(crate) audio_codec: Option<String>,
185
186    /// The codec to be used for **subtitle** decoding.
187    ///
188    /// If set, this forces FFmpeg to use the specified subtitle codec for decoding.
189    /// Otherwise, FFmpeg will attempt to auto-detect the best available codec.
190    pub(crate) subtitle_codec: Option<String>,
191
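    /// Whether to stop processing when a decoding or demuxing error occurs on this input.
    /// See [`set_exit_on_error`](Self::set_exit_on_error).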
192    pub(crate) exit_on_error: Option<bool>,
193
194    /// Read the input at the specified rate.
195    /// When set to 1, the input is read at its native frame rate.
196    pub(crate) readrate: Option<f32>,
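    /// Timestamp (in microseconds) at which to start reading the input; earlier data is skipped.
    /// See [`set_start_time_us`](Self::set_start_time_us).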
197    pub(crate) start_time_us: Option<i64>,
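    /// Maximum duration (in microseconds) to read from the input.
    /// See [`set_recording_time_us`](Self::set_recording_time_us).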
198    pub(crate) recording_time_us: Option<i64>,
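    /// Absolute timestamp (in microseconds) at which reading stops.
    /// See [`set_stop_time_us`](Self::set_stop_time_us).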
199    pub(crate) stop_time_us: Option<i64>,
200
201    /// Number of times the input stream shall be looped; `-1` loops indefinitely (as with FFmpeg's `-stream_loop -1`).
202    pub(crate) stream_loop: Option<i32>,
203
204    /// Hardware acceleration method name (e.g., `"cuda"`, `"vaapi"`, `"videotoolbox"`).
205    /// When set, hardware-accelerated decoding is used.
206    pub(crate) hwaccel: Option<String>,
207    /// Selects a device for hardware-accelerated decoding.
208    pub(crate) hwaccel_device: Option<String>,
209    /// Selects the output pixel format used with hardware-accelerated decoding.
210    pub(crate) hwaccel_output_format: Option<String>,
211
212    /// Input options for `avformat_open_input()`.
213    ///
214    /// This field stores options that are passed to FFmpeg's `avformat_open_input()` function.
215    /// These options can affect different layers of the input processing pipeline:
216    ///
217    /// **Format/Demuxer options:**
218    /// - `probesize` - Maximum data to probe for format detection
219    /// - `analyzeduration` - Duration to analyze for stream info
220    /// - `fflags` - Format flags (e.g., "+genpts")
221    ///
222    /// **Protocol options:**
223    /// - `user_agent` - HTTP User-Agent header
224    /// - `timeout` - Network timeout in microseconds
225    /// - `headers` - Custom HTTP headers
226    ///
227    /// **Device options:**
228    /// - `framerate` - Input framerate (for avfoundation, dshow, etc.)
229    /// - `video_size` - Input video resolution
230    /// - `pixel_format` - Input pixel format
231    ///
232    /// **General input options:**
233    /// - `thread_queue_size` - Input thread queue size
234    /// - `re` - Read input at native frame rate
235    ///
236    /// These options allow fine-tuning of input behavior across different components
237    /// of the FFmpeg input pipeline.
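    ///
    /// A minimal sketch of supplying such options (keys and values are illustrative):
    /// ```rust,ignore
    /// let input = Input::from("http://example.com/video.mp4")
    ///     .set_input_opt("probesize", "5000000")
    ///     .set_input_opt("user_agent", "MyApp/1.0");
    /// ```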
238    pub(crate) input_opts: Option<HashMap<String, String>>,
239
240    /// Automatically rotate video based on display matrix metadata.
241    ///
242    /// When enabled (default), videos with rotation metadata (common in smartphone
243    /// recordings) will be automatically rotated to the correct orientation using
244    /// transpose/hflip/vflip filters.
245    ///
246    /// Set to `false` to disable automatic rotation and preserve the original
247    /// video orientation.
248    ///
249    /// ## FFmpeg CLI equivalent
250    /// ```bash
251    /// # Disable autorotate
252    /// ffmpeg -autorotate 0 -i input.mp4 output.mp4
253    ///
254    /// # Enable autorotate (default)
255    /// ffmpeg -autorotate 1 -i input.mp4 output.mp4
256    /// ```
257    ///
258    /// ## FFmpeg source reference (FFmpeg 7.x)
259    /// - Default value: `ffmpeg_demux.c:1319` (`ds->autorotate = 1`)
260    /// - Flag setting: `ffmpeg_demux.c:1137` (`IFILTER_FLAG_AUTOROTATE`)
261    /// - Filter insertion: `ffmpeg_filter.c:1744-1778`
262    pub(crate) autorotate: Option<bool>,
263
264    /// Timestamp scale factor for pts/dts values.
265    ///
266    /// This multiplier is applied to packet timestamps after ts_offset addition.
267    /// Default is 1.0 (no scaling). Values must be positive.
268    ///
269    /// This is useful for fixing videos with incorrect timestamps or for
270    /// special timestamp manipulation scenarios.
271    ///
272    /// ## FFmpeg CLI equivalent
273    /// ```bash
274    /// # Scale timestamps by 2x
275    /// ffmpeg -itsscale 2.0 -i input.mp4 output.mp4
276    ///
277    /// # Scale timestamps by 0.5x (half speed effect on timestamps)
278    /// ffmpeg -itsscale 0.5 -i input.mp4 output.mp4
279    /// ```
280    ///
281    /// ## FFmpeg source reference (FFmpeg 7.x)
282    /// - Default value: `ffmpeg_demux.c:1316` (`ds->ts_scale = 1.0`)
283    /// - Application: `ffmpeg_demux.c:420-422` (applied after ts_offset)
284    pub(crate) ts_scale: Option<f64>,
285
286    /// Forced framerate for the input video stream.
287    ///
288    /// When set, this overrides the DTS estimation logic to use the specified
289    /// framerate for computing `next_dts` in the video stream. By default (None),
290    /// the actual packet duration is used for DTS estimation, matching FFmpeg CLI
291    /// behavior when `-r` is not specified.
292    ///
293    /// This affects all video DTS estimation, including recording_time cutoff
294    /// decisions during stream copy and the output stream time_base when set via
295    /// `streamcopy_init`.
296    ///
297    /// ## FFmpeg CLI equivalent
298    /// ```bash
299    /// # Force input framerate to 30fps
300    /// ffmpeg -r 30 -i input.mp4 output.mp4
301    /// ```
302    ///
303    /// ## FFmpeg source reference (FFmpeg 7.x)
304    /// - Field: `ffmpeg.h:452` (`ist->framerate`, only set with `-r`)
305    /// - Application: `ffmpeg_demux.c:329-333` (used in `ist_dts_update`)
306    pub(crate) framerate: Option<(i32, i32)>,
307}
308
309impl Input {
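    /// Creates a new `Input` that reads from the given URL.
    ///
    /// This is equivalent to converting the URL with `Input::from`.
    ///
    /// ### Example:
    /// ```rust,ignore
    /// let input = Input::new("file:///path/to/video.mp4");
    /// ```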
310    pub fn new(url: impl Into<String>) -> Self {
311        url.into().into()
312    }
313
314    /// Creates a new `Input` instance with a custom read callback.
315    ///
316    /// This method initializes an `Input` object that uses a provided `read_callback` function
317    /// to supply data to the input stream. This is particularly useful for custom data sources
318    /// such as in-memory buffers, network streams, or other non-standard input mechanisms.
319    ///
320    /// ### Parameters:
321    /// - `read_callback`: A closure implementing `FnMut(&mut [u8]) -> i32` that fills the provided
322    ///   mutable buffer with data and returns the number of bytes read.
323    ///
324    /// ### Return Value:
325    /// - Returns a new `Input` instance configured with the specified `read_callback`.
326    ///
327    /// ### Behavior of `read_callback`:
328    /// - **Positive Value**: Indicates the number of bytes successfully read.
329    /// - **`ffmpeg_sys_next::AVERROR_EOF`**: Indicates the end of the stream. The library will stop requesting data.
330    /// - **Negative Value**: Indicates an error occurred. For example:
331    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)`: Represents an input/output error.
332    ///   - Other custom-defined error codes can also be returned to signal specific issues.
333    ///
334    /// ### Example:
335    /// ```rust,ignore
336    /// let input = Input::new_by_read_callback(move |buf| {
337    ///     let data = b"example custom data source";
338    ///     let len = data.len().min(buf.len());
339    ///     buf[..len].copy_from_slice(&data[..len]);
340    ///     len as i32 // Return the number of bytes written
341    /// });
342    /// ```
343    pub fn new_by_read_callback<F>(read_callback: F) -> Self
344    where
345        F: FnMut(&mut [u8]) -> i32 + Send + 'static,
346    {
347        (Box::new(read_callback) as Box<dyn FnMut(&mut [u8]) -> i32 + Send>).into()
348    }
349
350    /// Sets a custom seek callback for the input stream.
351    ///
352    /// This function assigns a user-defined function that handles seeking within the input stream.
353    /// It is required when using custom data sources that support random access, such as files,
354    /// memory-mapped buffers, or seekable network streams.
355    ///
356    /// **FFmpeg may invoke `seek_callback` from different threads.**
357    /// If using a `File` as the data source, **wrap it in `Arc<Mutex<File>>`** to ensure
358    /// thread-safe access across multiple threads.
359    ///
360    /// ### Parameters:
361    /// - `seek_callback: FnMut(i64, i32) -> i64`: A function that handles seek operations.
362    ///   - `offset: i64`: The target seek position in the stream.
363    ///   - `whence: i32`: The seek mode, which determines how `offset` should be interpreted:
364    ///     - `ffmpeg_sys_next::SEEK_SET` (0) - Seek to an absolute position.
365    ///     - `ffmpeg_sys_next::SEEK_CUR` (1) - Seek relative to the current position.
366    ///     - `ffmpeg_sys_next::SEEK_END` (2) - Seek relative to the end of the stream.
367    ///     - `ffmpeg_sys_next::SEEK_DATA` (3) - Find the next data block in a sparse file (Linux only).
368    ///     - `ffmpeg_sys_next::SEEK_HOLE` (4) - Find the next hole in a sparse file (Linux only).
369    ///     - `ffmpeg_sys_next::AVSEEK_FLAG_BYTE` (2) - Seek using byte offset instead of timestamps.
370    ///     - `ffmpeg_sys_next::AVSEEK_SIZE` (65536) - Query the total size of the stream.
371    ///     - `ffmpeg_sys_next::AVSEEK_FORCE` (131072) - Force seeking, even if normally restricted.
372    ///
373    /// ### Return Value:
374    /// - Returns `Self`, allowing for method chaining.
375    ///
376    /// ### Behavior of `seek_callback`:
377    /// - **Positive Value**: The new offset position after seeking.
378    /// - **Negative Value**: An error occurred, such as:
379    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE)`: Seek is not supported.
380    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)`: General I/O error.
381    ///
382    /// ### Example (Thread-safe seek callback using `Arc<Mutex<File>>`):
383    /// Since `FFmpeg` may call `read_callback` and `seek_callback` from different threads,
384    /// **use `Arc<Mutex<File>>` to ensure safe concurrent access.**
385    ///
386    /// ```rust,ignore
387    /// use std::fs::File;
388    /// use std::io::{Read, Seek, SeekFrom};
389    /// use std::sync::{Arc, Mutex};
390    ///
391    /// // ✅ Wrap the file in Arc<Mutex<>> for safe shared access
392    /// let file = Arc::new(Mutex::new(File::open("test.mp4").expect("Failed to open file")));
393    ///
394    /// // ✅ Thread-safe read callback
395    /// let read_callback = {
396    ///     let file = Arc::clone(&file);
397    ///     move |buf: &mut [u8]| -> i32 {
398    ///         let mut file = file.lock().unwrap();
399    ///         match file.read(buf) {
400    ///             Ok(0) => {
401    ///                 println!("Read EOF");
402    ///                 ffmpeg_sys_next::AVERROR_EOF
403    ///             }
404    ///             Ok(bytes_read) => bytes_read as i32,
405    ///             Err(e) => {
406    ///                 println!("Read error: {}", e);
407    ///                 ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)
408    ///             }
409    ///         }
410    ///     }
411    /// };
412    ///
413    /// // ✅ Thread-safe seek callback
414    /// let seek_callback = {
415    ///     let file = Arc::clone(&file);
416    ///     Box::new(move |offset: i64, whence: i32| -> i64 {
417    ///         let mut file = file.lock().unwrap();
418    ///
419    ///         // ✅ Handle AVSEEK_SIZE: Return total file size
420    ///         if whence == ffmpeg_sys_next::AVSEEK_SIZE {
421    ///             if let Ok(size) = file.metadata().map(|m| m.len() as i64) {
422    ///                 println!("FFmpeg requested stream size: {}", size);
423    ///                 return size;
424    ///             }
425    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64;
426    ///         }
427    ///
428    ///         // ✅ Ignore AVSEEK_FORCE flag
429    ///         let actual_whence = whence & !ffmpeg_sys_next::AVSEEK_FORCE;
430    ///
431    ///         // ✅ Handle AVSEEK_FLAG_BYTE: Perform byte-based seek
432    ///         if actual_whence & ffmpeg_sys_next::AVSEEK_FLAG_BYTE != 0 {
433    ///             println!("FFmpeg requested byte-based seeking. Seeking to byte offset: {}", offset);
434    ///             if let Ok(new_pos) = file.seek(SeekFrom::Start(offset as u64)) {
435    ///                 return new_pos as i64;
436    ///             }
437    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64;
438    ///         }
439    ///
440    ///         // ✅ Handle SEEK_HOLE and SEEK_DATA (Linux only)
441    ///         #[cfg(target_os = "linux")]
442    ///         if actual_whence == ffmpeg_sys_next::SEEK_HOLE {
443    ///             println!("FFmpeg requested SEEK_HOLE, but Rust std::fs does not support it.");
444    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
445    ///         }
446    ///         #[cfg(target_os = "linux")]
447    ///         if actual_whence == ffmpeg_sys_next::SEEK_DATA {
448    ///             println!("FFmpeg requested SEEK_DATA, but Rust std::fs does not support it.");
449    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
450    ///         }
451    ///
452    ///         // ✅ Standard seek modes
453    ///         let seek_result = match actual_whence {
454    ///             ffmpeg_sys_next::SEEK_SET => file.seek(SeekFrom::Start(offset as u64)),
455    ///             ffmpeg_sys_next::SEEK_CUR => file.seek(SeekFrom::Current(offset)),
456    ///             ffmpeg_sys_next::SEEK_END => file.seek(SeekFrom::End(offset)),
457    ///             _ => {
458    ///                 println!("Unsupported seek mode: {}", whence);
459    ///                 return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
460    ///             }
461    ///         };
462    ///
463    ///         match seek_result {
464    ///             Ok(new_pos) => {
465    ///                 println!("Seek successful, new position: {}", new_pos);
466    ///                 new_pos as i64
467    ///             }
468    ///             Err(e) => {
469    ///                 println!("Seek failed: {}", e);
470    ///                 ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64
471    ///             }
472    ///         }
473    ///     })
474    /// };
475    ///
476    /// let input = Input::new_by_read_callback(read_callback).set_seek_callback(seek_callback);
477    /// ```
478    pub fn set_seek_callback<F>(mut self, seek_callback: F) -> Self
479    where
480        F: FnMut(i64, i32) -> i64 + Send + 'static,
481    {
482        self.seek_callback = Some(Box::new(seek_callback) as Box<dyn FnMut(i64, i32) -> i64 + Send>);
483        self
484    }
485
486    /// Replaces the entire frame-processing pipeline with a new sequence
487    /// of transformations for **post-decoding** frames on this `Input`.
488    ///
489    /// This method clears any previously set pipelines and replaces them with the provided list.
490    ///
491    /// # Parameters
492    /// * `frame_pipelines` - A list of [`FramePipeline`] instances defining the
493    ///   transformations to apply to decoded frames.
494    ///
495    /// # Returns
496    /// * `Self` - Returns the modified `Input`, enabling method chaining.
497    ///
498    /// # Example
499    /// ```rust,ignore
500    /// let input = Input::from("my_video.mp4")
501    ///     .set_frame_pipelines(vec![
502    ///         FramePipelineBuilder::new(AVMediaType::AVMEDIA_TYPE_VIDEO).filter("opengl", Box::new(my_filter)),
503    ///         // Additional pipelines...
504    ///     ]);
505    /// ```
506    pub fn set_frame_pipelines(mut self, frame_pipelines: Vec<impl Into<FramePipeline>>) -> Self {
507        self.frame_pipelines = Some(frame_pipelines.into_iter().map(|frame_pipeline| frame_pipeline.into()).collect());
508        self
509    }
510
511    /// Adds a single [`FramePipeline`] to the existing pipeline list.
512    ///
513    /// If no pipelines are currently defined, this method creates a new pipeline list.
514    /// Otherwise, it appends the provided pipeline to the existing transformations.
515    ///
516    /// # Parameters
517    /// * `frame_pipeline` - A [`FramePipeline`] defining a transformation.
518    ///
519    /// # Returns
520    /// * `Self` - Returns the modified `Input`, enabling method chaining.
521    ///
522    /// # Example
523    /// ```rust,ignore
524    /// let input = Input::from("my_video.mp4")
525    ///     .add_frame_pipeline(FramePipelineBuilder::new(AVMediaType::AVMEDIA_TYPE_VIDEO).filter("opengl", Box::new(my_filter)).build())
526    ///     .add_frame_pipeline(FramePipelineBuilder::new(AVMediaType::AVMEDIA_TYPE_AUDIO).filter("my_custom_filter1", Box::new(...)).filter("my_custom_filter2", Box::new(...)).build());
527    /// ```
528    pub fn add_frame_pipeline(mut self, frame_pipeline: impl Into<FramePipeline>) -> Self {
529        if self.frame_pipelines.is_none() {
530            self.frame_pipelines = Some(vec![frame_pipeline.into()]);
531        } else {
532            self.frame_pipelines
533                .as_mut()
534                .unwrap()
535                .push(frame_pipeline.into());
536        }
537        self
538    }
539
540    /// Sets the input format for the container or device.
541    ///
542    /// By default, if no format is specified,
543    /// FFmpeg will attempt to detect the format automatically. However, certain
544    /// use cases require specifying the format explicitly:
545    /// - Using device-specific inputs (e.g., `avfoundation` on macOS, `dshow` on Windows).
546    /// - Handling raw streams or formats that FFmpeg may not detect automatically.
547    ///
548    /// ### Parameters:
549    /// - `format`: A string specifying the desired input format (e.g., `mp4`, `flv`, `avfoundation`).
550    ///
551    /// ### Return Value:
552    /// - Returns the `Input` instance with the newly set format.
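    ///
    /// ### Example:
    /// A minimal sketch (the URL is illustrative):
    /// ```rust,ignore
    /// // Force the MPEG-TS demuxer for a raw UDP stream
    /// let input = Input::from("udp://127.0.0.1:1234").set_format("mpegts");
    /// ```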
553    pub fn set_format(mut self, format: impl Into<String>) -> Self {
554        self.format = Some(format.into());
555        self
556    }
557
558    /// Sets the **video codec** to be used for decoding.
559    ///
560    /// By default, FFmpeg will automatically select an appropriate video codec
561    /// based on the input format and available decoders. However, this method
562    /// allows you to override that selection and force a specific codec.
563    ///
564    /// # Common Video Codecs:
565    /// | Codec | Description |
566    /// |-------|-------------|
567    /// | `h264` | H.264 (AVC), widely supported and efficient |
568    /// | `hevc` | H.265 (HEVC), better compression at higher complexity |
569    /// | `vp9` | VP9, open-source alternative to H.265 |
570    /// | `av1` | AV1, newer open-source codec with improved compression |
571    /// | `mpeg4` | MPEG-4 Part 2, older but still used in some cases |
572    ///
573    /// # Arguments
574    /// * `video_codec` - A string representing the desired video codec (e.g., `"h264"`, `"hevc"`).
575    ///
576    /// # Returns
577    /// * `Self` - Returns the modified `Input` struct, allowing for method chaining.
578    ///
579    /// # Example:
580    /// ```rust,ignore
581    /// let input = Input::from("video.mp4").set_video_codec("h264");
582    /// ```
583    pub fn set_video_codec(mut self, video_codec: impl Into<String>) -> Self {
584        self.video_codec = Some(video_codec.into());
585        self
586    }
587
588    /// Sets the **audio codec** to be used for decoding.
589    ///
590    /// By default, FFmpeg will automatically select an appropriate audio codec
591    /// based on the input format and available decoders. However, this method
592    /// allows you to specify a preferred codec.
593    ///
594    /// # Common Audio Codecs:
595    /// | Codec | Description |
596    /// |-------|-------------|
597    /// | `aac` | AAC, commonly used for MP4 and streaming |
598    /// | `mp3` | MP3, widely supported but lower efficiency |
599    /// | `opus` | Opus, high-quality open-source codec |
600    /// | `vorbis` | Vorbis, used in Ogg containers |
601    /// | `flac` | FLAC, lossless audio format |
602    ///
603    /// # Arguments
604    /// * `audio_codec` - A string representing the desired audio codec (e.g., `"aac"`, `"mp3"`).
605    ///
606    /// # Returns
607    /// * `Self` - Returns the modified `Input` struct, allowing for method chaining.
608    ///
609    /// # Example:
610    /// ```rust,ignore
611    /// let input = Input::from("audio.mp3").set_audio_codec("aac");
612    /// ```
613    pub fn set_audio_codec(mut self, audio_codec: impl Into<String>) -> Self {
614        self.audio_codec = Some(audio_codec.into());
615        self
616    }
617
618    /// Sets the **subtitle codec** to be used for decoding.
619    ///
620    /// By default, FFmpeg will automatically select an appropriate subtitle codec
621    /// based on the input format and available decoders. This method lets you specify
622    /// a particular subtitle codec.
623    ///
624    /// # Common Subtitle Codecs:
625    /// | Codec | Description |
626    /// |-------|-------------|
627    /// | `ass` | Advanced SubStation Alpha (ASS) subtitles |
628    /// | `srt` | SubRip Subtitle format (SRT) |
629    /// | `mov_text` | Subtitles in MP4 containers |
630    /// | `subrip` | SubRip subtitles (the plain-text `.srt` format) |
631    ///
632    /// # Arguments
633    /// * `subtitle_codec` - A string representing the desired subtitle codec (e.g., `"mov_text"`, `"ass"`, `"srt"`).
634    ///
635    /// # Returns
636    /// * `Self` - Returns the modified `Input` struct, allowing for method chaining.
637    ///
638    /// # Example:
639    /// ```rust,ignore
640    /// let input = Input::from("movie.mkv").set_subtitle_codec("ass");
641    /// ```
642    pub fn set_subtitle_codec(mut self, subtitle_codec: impl Into<String>) -> Self {
643        self.subtitle_codec = Some(subtitle_codec.into());
644        self
645    }
646
647    /// Enables or disables **exit on error** behavior for the input.
648    ///
649    /// If set to `true`, FFmpeg will exit (stop processing) if it encounters any
650    /// decoding or demuxing error on this input. If set to `false` (the default),
651    /// FFmpeg may attempt to continue despite errors, skipping damaged portions.
652    ///
653    /// # Parameters
654    /// - `exit_on_error`: `true` to stop on errors, `false` to keep going.
655    ///
656    /// # Returns
657    /// * `Self` - allowing method chaining.
658    ///
659    /// # Example
660    /// ```rust,ignore
661    /// let input = Input::from("test.mp4")
662    ///     .set_exit_on_error(true);
663    /// ```
664    pub fn set_exit_on_error(mut self, exit_on_error: bool) -> Self {
665        self.exit_on_error = Some(exit_on_error);
666        self
667    }
668
669    /// Sets a **read rate** for this input, controlling how quickly frames are read.
670    ///
671    /// - If set to `1.0`, frames are read at their native frame rate.
672    /// - If set to another value (e.g., `0.5` or `2.0`), FFmpeg may attempt to read
673    ///   slower or faster, simulating changes in real-time playback speed.
674    ///
675    /// # Parameters
676    /// - `rate`: A floating-point value indicating the read rate multiplier.
677    ///
678    /// # Returns
679    /// * `Self` - allowing method chaining.
680    ///
681    /// # Example
682    /// ```rust,ignore
683    /// let input = Input::from("video.mp4")
684    ///     .set_readrate(0.5); // read at half speed
685    /// ```
686    pub fn set_readrate(mut self, rate: f32) -> Self {
687        self.readrate = Some(rate);
688        self
689    }
690
691    /// Sets the **start time** (in microseconds) from which to begin reading.
692    ///
693    /// FFmpeg will skip all data before this timestamp. This can be used to
694    /// implement “input seeking” or to only process a portion of the input.
695    ///
696    /// # Parameters
697    /// - `start_time_us`: The timestamp (in microseconds) at which to start reading.
698    ///
699    /// # Returns
700    /// * `Self` - allowing method chaining.
701    ///
702    /// # Example
703    /// ```rust,ignore
704    /// let input = Input::from("long_clip.mp4")
705    ///     .set_start_time_us(2_000_000); // Start at 2 seconds
706    /// ```
707    pub fn set_start_time_us(mut self, start_time_us: i64) -> Self {
708        self.start_time_us = Some(start_time_us);
709        self
710    }
711
712    /// Sets the **recording time** (in microseconds) for this input.
713    ///
714    /// FFmpeg will only read for the specified duration, ignoring data past this
715    /// limit. This can be used to trim or limit how much of the input is processed.
716    ///
717    /// # Parameters
718    /// - `recording_time_us`: The number of microseconds to read from the input.
719    ///
720    /// # Returns
721    /// * `Self` - allowing method chaining.
722    ///
723    /// # Example
724    /// ```rust,ignore
725    /// let input = Input::from("long_clip.mp4")
726    ///     .set_recording_time_us(5_000_000); // Only read 5 seconds
727    /// ```
728    pub fn set_recording_time_us(mut self, recording_time_us: i64) -> Self {
729        self.recording_time_us = Some(recording_time_us);
730        self
731    }
732
733    /// Sets a **stop time** (in microseconds) beyond which input data will be ignored.
734    ///
735    /// This is similar to [`set_recording_time_us`](Self::set_recording_time_us) but
736    /// specifically references an absolute timestamp in the stream. Once this timestamp
737    /// is reached, FFmpeg stops reading.
738    ///
739    /// # Parameters
740    /// - `stop_time_us`: The absolute timestamp (in microseconds) at which to stop reading.
741    ///
742    /// # Returns
743    /// * `Self` - allowing method chaining.
744    ///
745    /// # Example
746    /// ```rust,ignore
747    /// let input = Input::from("long_clip.mp4")
748    ///     .set_stop_time_us(10_000_000); // Stop reading at 10 seconds
749    /// ```
750    pub fn set_stop_time_us(mut self, stop_time_us: i64) -> Self {
751        self.stop_time_us = Some(stop_time_us);
752        self
753    }
754
755    /// Sets the number of **loops** to perform on this input stream.
756    ///
757    /// If FFmpeg reaches the end of the input, it can loop back and start from the
758    /// beginning, repeating the content `stream_loop` additional times.
759    /// A value of `-1` means infinite looping, matching FFmpeg's `-stream_loop -1`.
760    ///
761    /// # Parameters
762    /// - `count`: How many extra times to loop (e.g. `2` repeats the input twice more, `-1` loops indefinitely).
763    ///
764    /// # Returns
765    /// * `Self` - allowing method chaining.
766    ///
767    /// # Example
768    /// ```rust,ignore
769    /// let input = Input::from("music.mp3")
770    ///     .set_stream_loop(2); // play the input 2 extra times
771    /// ```
772    pub fn set_stream_loop(mut self, count: i32) -> Self {
773        self.stream_loop = Some(count);
774        self
775    }
776
777    /// Specifies a **hardware acceleration** name for decoding this input.
778    ///
779    /// Common values might include `"cuda"`, `"vaapi"`, `"dxva2"`, `"videotoolbox"`, etc.
780    /// Whether it works depends on your FFmpeg build and the hardware you have available.
781    ///
782    /// # Parameters
783    /// - `hwaccel_name`: A string naming the hardware accel to use.
784    ///
785    /// # Returns
786    /// * `Self` - allowing method chaining.
787    ///
788    /// # Example
789    /// ```rust,ignore
790    /// let input = Input::from("video.mp4")
791    ///     .set_hwaccel("cuda");
792    /// ```
793    pub fn set_hwaccel(mut self, hwaccel_name: impl Into<String>) -> Self {
794        self.hwaccel = Some(hwaccel_name.into());
795        self
796    }
797
798    /// Selects a **hardware acceleration device** for decoding.
799    ///
800    /// For example, if you have multiple GPUs or want to specify a device node (like
801    /// `"/dev/dri/renderD128"` on Linux for VAAPI), you can pass it here. This option
802    /// must match the hardware accel you set via [`set_hwaccel`](Self::set_hwaccel) if
803    /// you expect decoding to succeed.
804    ///
805    /// # Parameters
806    /// - `device`: A string indicating the device path or identifier.
807    ///
808    /// # Returns
809    /// * `Self` - allowing method chaining.
810    ///
811    /// # Example
812    /// ```rust,ignore
813    /// let input = Input::from("video.mp4")
814    ///     .set_hwaccel("vaapi")
815    ///     .set_hwaccel_device("/dev/dri/renderD128");
816    /// ```
817    pub fn set_hwaccel_device(mut self, device: impl Into<String>) -> Self {
818        self.hwaccel_device = Some(device.into());
819        self
820    }
821
822    /// Sets the **output pixel format** to be used with hardware-accelerated decoding.
823    ///
824    /// Certain hardware decoders can produce various output pixel formats. This option
825    /// lets you specify which format (e.g., `"nv12"`, `"vaapi"`, etc.) is used during
826    /// the decode process.
827    /// Must be compatible with the chosen hardware accel and device.
828    ///
829    /// # Parameters
830    /// - `format`: A string naming the desired output pixel format (e.g. `"nv12"`).
831    ///
832    /// # Returns
833    /// * `Self` - allowing method chaining.
834    ///
835    /// # Example
836    /// ```rust,ignore
837    /// let input = Input::from("video.mp4")
838    ///     .set_hwaccel("cuda")
839    ///     .set_hwaccel_output_format("cuda");
840    /// ```
841    pub fn set_hwaccel_output_format(mut self, format: impl Into<String>) -> Self {
842        self.hwaccel_output_format = Some(format.into());
843        self
844    }
845
846    /// Sets a single input option for `avformat_open_input()`.
847    ///
848    /// This method configures options that will be passed to FFmpeg's `avformat_open_input()`
849    /// function. The options can control behavior at different levels including format detection,
850    /// protocol handling, device configuration, and general input processing.
851    ///
852    /// **Example Usage:**
853    /// ```rust,ignore
854    /// let input = Input::new("avfoundation:0")
855    ///     .set_input_opt("framerate", "30")
856    ///     .set_input_opt("probesize", "5000000");
857    /// ```
858    ///
859    /// ### Parameters:
860    /// - `key`: The option name (e.g., `"framerate"`, `"probesize"`, `"timeout"`).
861    /// - `value`: The option value (e.g., `"30"`, `"5000000"`, `"10000000"`).
862    ///
863    /// ### Return Value:
864    /// - Returns the modified `Input` instance for method chaining.
865    pub fn set_input_opt(mut self, key: impl Into<String>, value: impl Into<String>) -> Self {
866        if let Some(ref mut opts) = self.input_opts {
867            opts.insert(key.into(), value.into());
868        } else {
869            let mut opts = HashMap::new();
870            opts.insert(key.into(), value.into());
871            self.input_opts = Some(opts);
872        }
873        self
874    }
875
876    /// Sets multiple input options at once for `avformat_open_input()`.
877    ///
878    /// This method allows setting multiple options in a single call, which will all be
879    /// passed to FFmpeg's `avformat_open_input()` function. Each key-value pair will be
880    /// inserted into the options map, overwriting any existing keys with the same name.
881    ///
882    /// **Example Usage:**
883    /// ```rust,ignore
884    /// let input = Input::new("http://example.com/stream.m3u8")
885    ///     .set_input_opts(vec![
886    ///         ("user_agent", "MyApp/1.0"),
887    ///         ("timeout", "10000000"),
888    ///         ("probesize", "5000000"),
889    ///     ]);
890    /// ```
891    ///
892    /// ### Parameters:
893    /// - `opts`: A vector of key-value pairs representing input options.
894    ///
895    /// ### Return Value:
896    /// - Returns the modified `Input` instance for method chaining.
897    pub fn set_input_opts(mut self, opts: Vec<(impl Into<String>, impl Into<String>)>) -> Self {
898        if let Some(ref mut input_opts) = self.input_opts {
899            for (key, value) in opts {
900                input_opts.insert(key.into(), value.into());
901            }
902        } else {
903            let mut input_opts = HashMap::new();
904            for (key, value) in opts {
905                input_opts.insert(key.into(), value.into());
906            }
907            self.input_opts = Some(input_opts);
908        }
909        self
910    }
911
912    /// Sets whether to automatically rotate video based on display matrix metadata.
913    ///
914    /// When enabled (default is `true`), videos with rotation metadata (common in
915    /// smartphone recordings) will be automatically rotated to the correct orientation
916    /// using transpose/hflip/vflip filters.
917    ///
918    /// # Parameters
919    /// - `autorotate`: `true` to enable automatic rotation (default), `false` to disable.
920    ///
921    /// # Returns
922    /// * `Self` - allowing method chaining.
923    ///
924    /// # FFmpeg CLI equivalent
925    /// ```bash
926    /// ffmpeg -autorotate 0 -i input.mp4 output.mp4
927    /// ```
928    ///
929    /// # Example
930    /// ```rust,ignore
931    /// // Disable automatic rotation to preserve original video orientation
932    /// let input = Input::from("smartphone_video.mp4")
933    ///     .set_autorotate(false);
934    /// ```
935    pub fn set_autorotate(mut self, autorotate: bool) -> Self {
936        self.autorotate = Some(autorotate);
937        self
938    }
939
940    /// Sets a timestamp scale factor for pts/dts values.
941    ///
942    /// This multiplier is applied to packet timestamps after ts_offset addition.
943    /// Default is `1.0` (no scaling). Values must be positive.
944    ///
945    /// This is useful for fixing videos with incorrect timestamps or for
946    /// special timestamp manipulation scenarios.
947    ///
948    /// # Parameters
949    /// - `scale`: A positive floating-point value for timestamp scaling.
950    ///
951    /// # Returns
952    /// * `Self` - allowing method chaining.
953    ///
954    /// # FFmpeg CLI equivalent
955    /// ```bash
956    /// ffmpeg -itsscale 2.0 -i input.mp4 output.mp4
957    /// ```
958    ///
959    /// # Example
960    /// ```rust,ignore
961    /// // Scale timestamps by 2x (double the playback speed effect on timestamps)
962    /// let input = Input::from("video.mp4")
963    ///     .set_ts_scale(2.0);
964    /// ```
965    pub fn set_ts_scale(mut self, scale: f64) -> Self {
966        assert!(scale.is_finite(), "ts_scale must be finite, got {scale}");
967        assert!(scale > 0.0, "ts_scale must be positive, got {scale}");
968        self.ts_scale = Some(scale);
969        self
970    }
971
972    /// Sets a forced framerate for the input video stream.
973    ///
974    /// When set, this overrides the default DTS estimation behavior. By default,
975    /// ez-ffmpeg uses the actual packet duration for DTS estimation (matching FFmpeg
976    /// CLI behavior without `-r`). Setting a framerate forces DTS estimation to use
977    /// the specified rate instead, which snaps timestamps to a fixed frame grid.
978    ///
979    /// # Parameters
980    /// - `num`: Framerate numerator (e.g., 30 for 30fps, 24000 for 23.976fps)
981    /// - `den`: Framerate denominator (e.g., 1 for 30fps, 1001 for 23.976fps)
982    ///
983    /// # Returns
984    /// * `Self` - allowing method chaining.
985    ///
986    /// # FFmpeg CLI equivalent
987    /// ```bash
988    /// ffmpeg -r 30 -i input.mp4 output.mp4
989    /// ffmpeg -r 24000/1001 -i input.mp4 output.mp4
990    /// ```
991    ///
992    /// # Example
993    /// ```rust,ignore
994    /// // Force 30fps framerate for DTS estimation
995    /// let input = Input::from("video.mp4")
996    ///     .set_framerate(30, 1);
997    ///
998    /// // Force 23.976fps framerate
999    /// let input = Input::from("video.mp4")
1000    ///     .set_framerate(24000, 1001);
1001    /// ```
1002    pub fn set_framerate(mut self, num: i32, den: i32) -> Self {
1003        assert!(num > 0, "framerate numerator must be positive, got {num}");
1004        assert!(den > 0, "framerate denominator must be positive, got {den}");
1005        self.framerate = Some((num, den));
1006        self
1007    }
1008}
1009
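/// Builds an `Input` driven by a custom read callback; all other fields start unset.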
1010impl From<Box<dyn FnMut(&mut [u8]) -> i32 + Send>> for Input {
1011    fn from(read_callback: Box<dyn FnMut(&mut [u8]) -> i32 + Send>) -> Self {
1012        Self {
1013            url: None,
1014            read_callback: Some(read_callback),
1015            seek_callback: None,
1016            frame_pipelines: None,
1017            format: None,
1018            video_codec: None,
1019            audio_codec: None,
1020            subtitle_codec: None,
1021            exit_on_error: None,
1022            readrate: None,
1023            start_time_us: None,
1024            recording_time_us: None,
1025            stop_time_us: None,
1026            stream_loop: None,
1027            hwaccel: None,
1028            hwaccel_device: None,
1029            hwaccel_output_format: None,
1030            input_opts: None,
1031            autorotate: None,
1032            ts_scale: None,
1033            framerate: None,
1034        }
1035    }
1036}
1037
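/// Builds an `Input` that reads from the given URL; all other fields start unset.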
1038impl From<String> for Input {
1039    fn from(url: String) -> Self {
1040        Self {
1041            url: Some(url),
1042            read_callback: None,
1043            seek_callback: None,
1044            frame_pipelines: None,
1045            format: None,
1046            video_codec: None,
1047            audio_codec: None,
1048            subtitle_codec: None,
1049            exit_on_error: None,
1050            readrate: None,
1051            start_time_us: None,
1052            recording_time_us: None,
1053            stop_time_us: None,
1054            stream_loop: None,
1055            hwaccel: None,
1056            hwaccel_device: None,
1057            hwaccel_output_format: None,
1058            input_opts: None,
1059            autorotate: None,
1060            ts_scale: None,
1061            framerate: None,
1062        }
1063    }
1064}
1065
1066impl From<&str> for Input {
1067    fn from(url: &str) -> Self {
1068        Self::from(String::from(url))
1069    }
1070}
1071
1072
1073#[cfg(test)]
1074mod tests {
1075    use crate::core::context::input::Input;
1076
1077    #[test]
1078    fn set_framerate_valid() {
1079        let input = Input::from("test.mp4").set_framerate(24000, 1001);
1080        assert_eq!(input.framerate, Some((24000, 1001)));
1081    }
1082
1083    #[test]
1084    fn set_framerate_simple() {
1085        let input = Input::from("test.mp4").set_framerate(30, 1);
1086        assert_eq!(input.framerate, Some((30, 1)));
1087    }
1088
1089    #[test]
1090    #[should_panic(expected = "framerate numerator must be positive")]
1091    fn set_framerate_zero_num() {
1092        Input::from("test.mp4").set_framerate(0, 1);
1093    }
1094
1095    #[test]
1096    #[should_panic(expected = "framerate denominator must be positive")]
1097    fn set_framerate_zero_den() {
1098        Input::from("test.mp4").set_framerate(24, 0);
1099    }
1100
1101    #[test]
1102    #[should_panic(expected = "framerate numerator must be positive")]
1103    fn set_framerate_negative_num() {
1104        Input::from("test.mp4").set_framerate(-1, 1);
1105    }
1106
1107    #[test]
1108    #[should_panic(expected = "framerate denominator must be positive")]
1109    fn set_framerate_negative_den() {
1110        Input::from("test.mp4").set_framerate(24, -1);
1111    }
1112
1113    #[test]
1114    fn set_ts_scale_valid() {
1115        let input = Input::from("test.mp4").set_ts_scale(2.0);
1116        assert_eq!(input.ts_scale, Some(2.0));
1117    }
1118
1119    #[test]
1120    fn set_ts_scale_fractional() {
1121        let input = Input::from("test.mp4").set_ts_scale(0.5);
1122        assert_eq!(input.ts_scale, Some(0.5));
1123    }
1124
1125    #[test]
1126    #[should_panic(expected = "ts_scale must be finite")]
1127    fn set_ts_scale_nan() {
1128        Input::from("test.mp4").set_ts_scale(f64::NAN);
1129    }
1130
1131    #[test]
1132    #[should_panic(expected = "ts_scale must be finite")]
1133    fn set_ts_scale_infinity() {
1134        Input::from("test.mp4").set_ts_scale(f64::INFINITY);
1135    }
1136
1137    #[test]
1138    #[should_panic(expected = "ts_scale must be finite")]
1139    fn set_ts_scale_neg_infinity() {
1140        Input::from("test.mp4").set_ts_scale(f64::NEG_INFINITY);
1141    }
1142
1143    #[test]
1144    #[should_panic(expected = "ts_scale must be positive")]
1145    fn set_ts_scale_zero() {
1146        Input::from("test.mp4").set_ts_scale(0.0);
1147    }
1148
1149    #[test]
1150    #[should_panic(expected = "ts_scale must be positive")]
1151    fn set_ts_scale_negative() {
1152        Input::from("test.mp4").set_ts_scale(-1.0);
1153    }
1154
1155    #[test]
1156    fn test_new_by_read_callback() {
1157        let data_source = b"example custom data source".to_vec();
1158        let _input = Input::new_by_read_callback(move |buf| {
1159            let len = data_source.len().min(buf.len());
1160            buf[..len].copy_from_slice(&data_source[..len]);
1161            len as i32 // Return the number of bytes written
1162        });
1163
1164        let data_source2 = b"example custom data source2".to_vec();
1165        let _input = Input::new_by_read_callback(move |buf2| {
1166            let len = data_source2.len().min(buf2.len());
1167            buf2[..len].copy_from_slice(&data_source2[..len]);
1168            len as i32 // Return the number of bytes written
1169        });
1170    }
1171}