ez_ffmpeg/core/context/input.rs
1use std::collections::HashMap;
2use crate::filter::frame_pipeline::FramePipeline;
3
// SAFETY: `Input` contains non-`Send` trait objects (`read_callback` and
// `seek_callback` are `Box<dyn FnMut ...>` without a `Send` bound), so this
// impl manually asserts the struct may be moved across threads.
// NOTE(review): the compiler cannot verify this — soundness relies on callers
// only capturing thread-safe state in their callbacks (e.g. `Arc<Mutex<...>>`
// as the docs below recommend). Confirm no `!Send` state is ever captured.
unsafe impl Send for Input {}
5
pub struct Input {
    /// The URL of the input source.
    ///
    /// This specifies the source from which the input stream is obtained. It can be:
    /// - A local file path (e.g., `file:///path/to/video.mp4`).
    /// - A network stream (e.g., `rtmp://example.com/live/stream`).
    /// - Any other URL supported by FFmpeg (e.g., `http://example.com/video.mp4`, `udp://...`).
    ///
    /// The URL must be valid. If the URL is invalid or unsupported,
    /// the library will return an error when attempting to open the input stream.
    pub(crate) url: Option<String>,

    /// A callback function for custom data reading.
    ///
    /// The `read_callback` function allows you to provide custom logic for feeding data into
    /// the input stream. This is useful for scenarios where the input does not come directly
    /// from a standard source (like a file or URL), but instead from a custom data source,
    /// such as an in-memory buffer or a custom network stream.
    ///
    /// ### Parameters:
    /// - `buf: &mut [u8]`: A mutable buffer into which the data should be written.
    ///   The callback should fill this buffer with as much data as possible, up to its length.
    ///
    /// ### Return Value:
    /// - **Positive Value**: The number of bytes successfully read into `buf`.
    /// - **`ffmpeg_sys_next::AVERROR_EOF`**: Indicates the end of the input stream. No more data will be read.
    /// - **Negative Value**: Indicates an error occurred, such as:
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)`: General I/O error.
    ///   - Custom-defined error codes depending on your implementation.
    ///
    /// ### Example:
    /// ```rust,ignore
    /// fn custom_read_callback(buf: &mut [u8]) -> i32 {
    ///     let data = b"example data stream";
    ///     let len = data.len().min(buf.len());
    ///     buf[..len].copy_from_slice(&data[..len]);
    ///     len as i32 // Return the number of bytes written into the buffer
    /// }
    /// ```
    pub(crate) read_callback: Option<Box<dyn FnMut(&mut [u8]) -> i32>>,

    /// A callback function for custom seeking within the input stream.
    ///
    /// The `seek_callback` function allows defining custom seeking behavior.
    /// This is useful for data sources that support seeking, such as files or memory-mapped data.
    /// For non-seekable streams (e.g., live network streams), this function may return an error.
    ///
    /// **FFmpeg may invoke `seek_callback` from multiple threads, so thread safety is required.**
    /// When using a `File` as an input source, **use `Arc<Mutex<File>>` to ensure safe access.**
    ///
    /// ### Parameters:
    /// - `offset: i64`: The target position in the stream for seeking.
    /// - `whence: i32`: The seek mode defining how the `offset` should be interpreted:
    ///   - `ffmpeg_sys_next::SEEK_SET` (0): Seek to an absolute position.
    ///   - `ffmpeg_sys_next::SEEK_CUR` (1): Seek relative to the current position.
    ///   - `ffmpeg_sys_next::SEEK_END` (2): Seek relative to the end of the stream.
    ///   - `ffmpeg_sys_next::SEEK_HOLE` (3): Find the next file hole (sparse file support).
    ///   - `ffmpeg_sys_next::SEEK_DATA` (4): Find the next data block (sparse file support).
    ///   - `ffmpeg_sys_next::AVSEEK_FLAG_BYTE` (2): Seek using **byte offsets** instead of timestamps.
    ///     NOTE(review): `AVSEEK_FLAG_BYTE` shares the value `2` with `SEEK_END`, so a
    ///     bitwise test like `whence & AVSEEK_FLAG_BYTE != 0` (as in the example below)
    ///     also matches `SEEK_END` — confirm against the FFmpeg avio documentation
    ///     whether this flag is ever passed to an AVIO seek callback before relying on it.
    ///   - `ffmpeg_sys_next::AVSEEK_SIZE` (65536): Query the **total size** of the stream.
    ///   - `ffmpeg_sys_next::AVSEEK_FORCE` (131072): **Force seeking even if normally restricted.**
    ///
    /// ### Return Value:
    /// - **Positive Value**: The new offset position after seeking.
    /// - **Negative Value**: An error occurred. Common errors include:
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE)`: Seek is not supported.
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)`: General I/O error.
    ///
    /// ### Example (Handling multi-threaded access safely with `Arc<Mutex<File>>`):
    /// Since FFmpeg may call `read_callback` and `seek_callback` from different threads,
    /// **`Arc<Mutex<File>>` is used to ensure safe access across threads.**
    ///
    /// ```rust,ignore
    /// use std::fs::File;
    /// use std::io::{Seek, SeekFrom};
    /// use std::sync::{Arc, Mutex};
    ///
    /// let file = Arc::new(Mutex::new(File::open("test.mp4").expect("Failed to open file")));
    ///
    /// let seek_callback = {
    ///     let file = Arc::clone(&file);
    ///     Box::new(move |offset: i64, whence: i32| -> i64 {
    ///         let mut file = file.lock().unwrap(); // Acquire lock
    ///
    ///         // Handle AVSEEK_SIZE: Return total file size
    ///         if whence == ffmpeg_sys_next::AVSEEK_SIZE {
    ///             if let Ok(size) = file.metadata().map(|m| m.len() as i64) {
    ///                 println!("FFmpeg requested stream size: {}", size);
    ///                 return size;
    ///             }
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64;
    ///         }
    ///
    ///         // Handle AVSEEK_FORCE: Ignore this flag when processing seek
    ///         let actual_whence = whence & !ffmpeg_sys_next::AVSEEK_FORCE;
    ///
    ///         // Handle AVSEEK_FLAG_BYTE: Perform byte-based seek
    ///         // NOTE(review): this bit test also matches SEEK_END (both are 2) — see
    ///         // the caution in the `whence` parameter list above.
    ///         if actual_whence & ffmpeg_sys_next::AVSEEK_FLAG_BYTE != 0 {
    ///             println!("FFmpeg requested byte-based seeking. Seeking to byte offset: {}", offset);
    ///             if let Ok(new_pos) = file.seek(SeekFrom::Start(offset as u64)) {
    ///                 return new_pos as i64;
    ///             }
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64;
    ///         }
    ///
    ///         // Handle SEEK_HOLE and SEEK_DATA (Linux only)
    ///         #[cfg(target_os = "linux")]
    ///         if actual_whence == ffmpeg_sys_next::SEEK_HOLE {
    ///             println!("FFmpeg requested SEEK_HOLE, but Rust std::fs does not support it.");
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///         }
    ///         #[cfg(target_os = "linux")]
    ///         if actual_whence == ffmpeg_sys_next::SEEK_DATA {
    ///             println!("FFmpeg requested SEEK_DATA, but Rust std::fs does not support it.");
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///         }
    ///
    ///         // Standard seek modes
    ///         let seek_result = match actual_whence {
    ///             ffmpeg_sys_next::SEEK_SET => file.seek(SeekFrom::Start(offset as u64)),
    ///             ffmpeg_sys_next::SEEK_CUR => file.seek(SeekFrom::Current(offset)),
    ///             ffmpeg_sys_next::SEEK_END => file.seek(SeekFrom::End(offset)),
    ///             _ => {
    ///                 println!("Unsupported seek mode: {}", whence);
    ///                 return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///             }
    ///         };
    ///
    ///         match seek_result {
    ///             Ok(new_pos) => {
    ///                 println!("Seek successful, new position: {}", new_pos);
    ///                 new_pos as i64
    ///             }
    ///             Err(e) => {
    ///                 println!("Seek failed: {}", e);
    ///                 ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64
    ///             }
    ///         }
    ///     })
    /// };
    /// ```
    pub(crate) seek_callback: Option<Box<dyn FnMut(i64, i32) -> i64>>,

    /// The pipeline that provides custom processing for decoded frames.
    ///
    /// After the input data is decoded into `Frame` objects, these frames
    /// are passed through the `frame_pipeline`. Each frame goes through
    /// a series of `FrameFilter` objects in the pipeline, allowing for
    /// customized processing (e.g., filtering, transformation, etc.).
    ///
    /// If `None`, no processing pipeline is applied to the decoded frames.
    pub(crate) frame_pipelines: Option<Vec<FramePipeline>>,

    /// The input format for the source.
    ///
    /// This field specifies which container or device format FFmpeg should use to read the input.
    /// If `None`, FFmpeg will attempt to automatically detect the format based on the source URL,
    /// file extension, or stream data.
    ///
    /// You might need to specify a format explicitly in cases where automatic detection fails or
    /// when you must force a particular format. For example:
    /// - When capturing from a specific device on macOS (using `avfoundation`).
    /// - When capturing on Windows devices (using `dshow`).
    /// - When dealing with raw streams or unusual data sources.
    pub(crate) format: Option<String>,

    /// The codec to be used for **video** decoding.
    ///
    /// If set, this forces FFmpeg to use the specified video codec for decoding.
    /// Otherwise, FFmpeg will attempt to auto-detect the best available codec.
    pub(crate) video_codec: Option<String>,

    /// The codec to be used for **audio** decoding.
    ///
    /// If set, this forces FFmpeg to use the specified audio codec for decoding.
    /// Otherwise, FFmpeg will attempt to auto-detect the best available codec.
    pub(crate) audio_codec: Option<String>,

    /// The codec to be used for **subtitle** decoding.
    ///
    /// If set, this forces FFmpeg to use the specified subtitle codec for decoding.
    /// Otherwise, FFmpeg will attempt to auto-detect the best available codec.
    pub(crate) subtitle_codec: Option<String>,

    /// When `Some(true)`, processing stops on decode/demux errors for this
    /// input instead of attempting to continue. See `set_exit_on_error`.
    pub(crate) exit_on_error: Option<bool>,

    /// Read input at the specified rate.
    /// When set to `1.0`, reads input at its native frame rate.
    pub(crate) readrate: Option<f32>,
    /// Timestamp (microseconds) at which reading starts; earlier data is skipped.
    pub(crate) start_time_us: Option<i64>,
    /// Maximum duration (microseconds) of input to read.
    pub(crate) recording_time_us: Option<i64>,
    /// Absolute timestamp (microseconds) past which reading stops.
    pub(crate) stop_time_us: Option<i64>,

    /// Set number of times input stream shall be looped.
    pub(crate) stream_loop: Option<i32>,

    /// Hardware acceleration name.
    /// Use hardware-accelerated decoding (e.g. `"cuda"`, `"vaapi"`).
    pub(crate) hwaccel: Option<String>,
    /// Select a device for HW acceleration (e.g. a GPU index or device node path).
    pub(crate) hwaccel_device: Option<String>,
    /// Select output format used with HW accelerated decoding.
    pub(crate) hwaccel_output_format: Option<String>,

    /// Input options for avformat_open_input.
    ///
    /// This field stores options that are passed to FFmpeg's `avformat_open_input()` function.
    /// These options can affect different layers of the input processing pipeline:
    ///
    /// **Format/Demuxer options:**
    /// - `probesize` - Maximum data to probe for format detection
    /// - `analyzeduration` - Duration to analyze for stream info
    /// - `fflags` - Format flags (e.g., "+genpts")
    ///
    /// **Protocol options:**
    /// - `user_agent` - HTTP User-Agent header
    /// - `timeout` - Network timeout in microseconds
    /// - `headers` - Custom HTTP headers
    ///
    /// **Device options:**
    /// - `framerate` - Input framerate (for avfoundation, dshow, etc.)
    /// - `video_size` - Input video resolution
    /// - `pixel_format` - Input pixel format
    ///
    /// **General input options:**
    /// - `thread_queue_size` - Input thread queue size
    /// - `re` - Read input at native frame rate
    ///
    /// These options allow fine-tuning of input behavior across different components
    /// of the FFmpeg input pipeline.
    pub(crate) input_opts: Option<HashMap<String, String>>,

    /// Automatically rotate video based on display matrix metadata.
    ///
    /// When enabled (default), videos with rotation metadata (common in smartphone
    /// recordings) will be automatically rotated to the correct orientation using
    /// transpose/hflip/vflip filters.
    ///
    /// Set to `false` to disable automatic rotation and preserve the original
    /// video orientation.
    ///
    /// ## FFmpeg CLI equivalent
    /// ```bash
    /// # Disable autorotate
    /// ffmpeg -autorotate 0 -i input.mp4 output.mp4
    ///
    /// # Enable autorotate (default)
    /// ffmpeg -autorotate 1 -i input.mp4 output.mp4
    /// ```
    ///
    /// ## FFmpeg source reference (FFmpeg 7.x)
    /// - Default value: `ffmpeg_demux.c:1319` (`ds->autorotate = 1`)
    /// - Flag setting: `ffmpeg_demux.c:1137` (`IFILTER_FLAG_AUTOROTATE`)
    /// - Filter insertion: `ffmpeg_filter.c:1744-1778`
    pub(crate) autorotate: Option<bool>,

    /// Timestamp scale factor for pts/dts values.
    ///
    /// This multiplier is applied to packet timestamps after ts_offset addition.
    /// Default is 1.0 (no scaling). Values must be positive.
    ///
    /// This is useful for fixing videos with incorrect timestamps or for
    /// special timestamp manipulation scenarios.
    ///
    /// ## FFmpeg CLI equivalent
    /// ```bash
    /// # Scale timestamps by 2x
    /// ffmpeg -itsscale 2.0 -i input.mp4 output.mp4
    ///
    /// # Scale timestamps by 0.5x (half speed effect on timestamps)
    /// ffmpeg -itsscale 0.5 -i input.mp4 output.mp4
    /// ```
    ///
    /// ## FFmpeg source reference (FFmpeg 7.x)
    /// - Default value: `ffmpeg_demux.c:1316` (`ds->ts_scale = 1.0`)
    /// - Application: `ffmpeg_demux.c:420-422` (applied after ts_offset)
    pub(crate) ts_scale: Option<f64>,

    /// Forced framerate for the input video stream.
    ///
    /// When set, this overrides the DTS estimation logic to use the specified
    /// framerate for computing `next_dts` in the video stream. By default (None),
    /// the actual packet duration is used for DTS estimation, matching FFmpeg CLI
    /// behavior when `-r` is not specified.
    ///
    /// This affects all video DTS estimation, including recording_time cutoff
    /// decisions during stream copy and the output stream time_base when set via
    /// `streamcopy_init`.
    ///
    /// ## FFmpeg CLI equivalent
    /// ```bash
    /// # Force input framerate to 30fps
    /// ffmpeg -r 30 -i input.mp4 output.mp4
    /// ```
    ///
    /// ## FFmpeg source reference (FFmpeg 7.x)
    /// - Field: `ffmpeg.h:452` (`ist->framerate`, only set with `-r`)
    /// - Application: `ffmpeg_demux.c:329-333` (used in `ist_dts_update`)
    pub(crate) framerate: Option<(i32, i32)>,
}
306
307impl Input {
308 pub fn new(url: impl Into<String>) -> Self {
309 url.into().into()
310 }
311
312 /// Creates a new `Input` instance with a custom read callback.
313 ///
314 /// This method initializes an `Input` object that uses a provided `read_callback` function
315 /// to supply data to the input stream. This is particularly useful for custom data sources
316 /// such as in-memory buffers, network streams, or other non-standard input mechanisms.
317 ///
318 /// ### Parameters:
319 /// - `read_callback: fn(buf: &mut [u8]) -> i32`: A function pointer that fills the provided
320 /// mutable buffer with data and returns the number of bytes read.
321 ///
322 /// ### Return Value:
323 /// - Returns a new `Input` instance configured with the specified `read_callback`.
324 ///
325 /// ### Behavior of `read_callback`:
326 /// - **Positive Value**: Indicates the number of bytes successfully read.
327 /// - **`ffmpeg_sys_next::AVERROR_EOF`**: Indicates the end of the stream. The library will stop requesting data.
328 /// - **Negative Value**: Indicates an error occurred. For example:
329 /// - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)`: Represents an input/output error.
330 /// - Other custom-defined error codes can also be returned to signal specific issues.
331 ///
332 /// ### Example:
333 /// ```rust,ignore
334 /// let input = Input::new_by_read_callback(move |buf| {
335 /// let data = b"example custom data source";
336 /// let len = data.len().min(buf.len());
337 /// buf[..len].copy_from_slice(&data[..len]);
338 /// len as i32 // Return the number of bytes written
339 /// });
340 /// ```
341 pub fn new_by_read_callback<F>(read_callback: F) -> Self
342 where
343 F: FnMut(&mut [u8]) -> i32 + 'static,
344 {
345 (Box::new(read_callback) as Box<dyn FnMut(&mut [u8]) -> i32>).into()
346 }
347
348 /// Sets a custom seek callback for the input stream.
349 ///
350 /// This function assigns a user-defined function that handles seeking within the input stream.
351 /// It is required when using custom data sources that support random access, such as files,
352 /// memory-mapped buffers, or seekable network streams.
353 ///
354 /// **FFmpeg may invoke `seek_callback` from different threads.**
355 /// If using a `File` as the data source, **wrap it in `Arc<Mutex<File>>`** to ensure
356 /// thread-safe access across multiple threads.
357 ///
358 /// ### Parameters:
359 /// - `seek_callback: FnMut(i64, i32) -> i64`: A function that handles seek operations.
360 /// - `offset: i64`: The target seek position in the stream.
361 /// - `whence: i32`: The seek mode, which determines how `offset` should be interpreted:
362 /// - `ffmpeg_sys_next::SEEK_SET` (0) - Seek to an absolute position.
363 /// - `ffmpeg_sys_next::SEEK_CUR` (1) - Seek relative to the current position.
364 /// - `ffmpeg_sys_next::SEEK_END` (2) - Seek relative to the end of the stream.
365 /// - `ffmpeg_sys_next::SEEK_HOLE` (3) - Find the next hole in a sparse file (Linux only).
366 /// - `ffmpeg_sys_next::SEEK_DATA` (4) - Find the next data block in a sparse file (Linux only).
367 /// - `ffmpeg_sys_next::AVSEEK_FLAG_BYTE` (2) - Seek using byte offset instead of timestamps.
368 /// - `ffmpeg_sys_next::AVSEEK_SIZE` (65536) - Query the total size of the stream.
369 /// - `ffmpeg_sys_next::AVSEEK_FORCE` (131072) - Force seeking, even if normally restricted.
370 ///
371 /// ### Return Value:
372 /// - Returns `Self`, allowing for method chaining.
373 ///
374 /// ### Behavior of `seek_callback`:
375 /// - **Positive Value**: The new offset position after seeking.
376 /// - **Negative Value**: An error occurred, such as:
377 /// - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE)`: Seek is not supported.
378 /// - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)`: General I/O error.
379 ///
380 /// ### Example (Thread-safe seek callback using `Arc<Mutex<File>>`):
381 /// Since `FFmpeg` may call `read_callback` and `seek_callback` from different threads,
382 /// **use `Arc<Mutex<File>>` to ensure safe concurrent access.**
383 ///
384 /// ```rust,ignore
385 /// use std::fs::File;
386 /// use std::io::{Read, Seek, SeekFrom};
387 /// use std::sync::{Arc, Mutex};
388 ///
389 /// // ✅ Wrap the file in Arc<Mutex<>> for safe shared access
390 /// let file = Arc::new(Mutex::new(File::open("test.mp4").expect("Failed to open file")));
391 ///
392 /// // ✅ Thread-safe read callback
393 /// let read_callback = {
394 /// let file = Arc::clone(&file);
395 /// move |buf: &mut [u8]| -> i32 {
396 /// let mut file = file.lock().unwrap();
397 /// match file.read(buf) {
398 /// Ok(0) => {
399 /// println!("Read EOF");
400 /// ffmpeg_sys_next::AVERROR_EOF
401 /// }
402 /// Ok(bytes_read) => bytes_read as i32,
403 /// Err(e) => {
404 /// println!("Read error: {}", e);
405 /// ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)
406 /// }
407 /// }
408 /// }
409 /// };
410 ///
411 /// // ✅ Thread-safe seek callback
412 /// let seek_callback = {
413 /// let file = Arc::clone(&file);
414 /// Box::new(move |offset: i64, whence: i32| -> i64 {
415 /// let mut file = file.lock().unwrap();
416 ///
417 /// // ✅ Handle AVSEEK_SIZE: Return total file size
418 /// if whence == ffmpeg_sys_next::AVSEEK_SIZE {
419 /// if let Ok(size) = file.metadata().map(|m| m.len() as i64) {
420 /// println!("FFmpeg requested stream size: {}", size);
421 /// return size;
422 /// }
423 /// return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64;
424 /// }
425 ///
426 /// // ✅ Ignore AVSEEK_FORCE flag
427 /// let actual_whence = whence & !ffmpeg_sys_next::AVSEEK_FORCE;
428 ///
429 /// // ✅ Handle AVSEEK_FLAG_BYTE: Perform byte-based seek
430 /// if actual_whence & ffmpeg_sys_next::AVSEEK_FLAG_BYTE != 0 {
431 /// println!("FFmpeg requested byte-based seeking. Seeking to byte offset: {}", offset);
432 /// if let Ok(new_pos) = file.seek(SeekFrom::Start(offset as u64)) {
433 /// return new_pos as i64;
434 /// }
435 /// return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64;
436 /// }
437 ///
438 /// // ✅ Handle SEEK_HOLE and SEEK_DATA (Linux only)
439 /// #[cfg(target_os = "linux")]
440 /// if actual_whence == ffmpeg_sys_next::SEEK_HOLE {
441 /// println!("FFmpeg requested SEEK_HOLE, but Rust std::fs does not support it.");
442 /// return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
443 /// }
444 /// #[cfg(target_os = "linux")]
445 /// if actual_whence == ffmpeg_sys_next::SEEK_DATA {
446 /// println!("FFmpeg requested SEEK_DATA, but Rust std::fs does not support it.");
447 /// return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
448 /// }
449 ///
450 /// // ✅ Standard seek modes
451 /// let seek_result = match actual_whence {
452 /// ffmpeg_sys_next::SEEK_SET => file.seek(SeekFrom::Start(offset as u64)),
453 /// ffmpeg_sys_next::SEEK_CUR => file.seek(SeekFrom::Current(offset)),
454 /// ffmpeg_sys_next::SEEK_END => file.seek(SeekFrom::End(offset)),
455 /// _ => {
456 /// println!("Unsupported seek mode: {}", whence);
457 /// return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
458 /// }
459 /// };
460 ///
461 /// match seek_result {
462 /// Ok(new_pos) => {
463 /// println!("Seek successful, new position: {}", new_pos);
464 /// new_pos as i64
465 /// }
466 /// Err(e) => {
467 /// println!("Seek failed: {}", e);
468 /// ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64
469 /// }
470 /// }
471 /// })
472 /// };
473 ///
474 /// let input = Input::new_by_read_callback(read_callback).set_seek_callback(seek_callback);
475 /// ```
476 pub fn set_seek_callback<F>(mut self, seek_callback: F) -> Self
477 where
478 F: FnMut(i64, i32) -> i64 + 'static,
479 {
480 self.seek_callback = Some(Box::new(seek_callback) as Box<dyn FnMut(i64, i32) -> i64>);
481 self
482 }
483
484 /// Replaces the entire frame-processing pipeline with a new sequence
485 /// of transformations for **post-decoding** frames on this `Input`.
486 ///
487 /// This method clears any previously set pipelines and replaces them with the provided list.
488 ///
489 /// # Parameters
490 /// * `frame_pipelines` - A list of [`FramePipeline`] instances defining the
491 /// transformations to apply to decoded frames.
492 ///
493 /// # Returns
494 /// * `Self` - Returns the modified `Input`, enabling method chaining.
495 ///
496 /// # Example
497 /// ```rust,ignore
498 /// let input = Input::from("my_video.mp4")
499 /// .set_frame_pipelines(vec![
500 /// FramePipelineBuilder::new(AVMediaType::AVMEDIA_TYPE_VIDEO).filter("opengl", Box::new(my_filter)),
501 /// // Additional pipelines...
502 /// ]);
503 /// ```
504 pub fn set_frame_pipelines(mut self, frame_pipelines: Vec<impl Into<FramePipeline>>) -> Self {
505 self.frame_pipelines = Some(frame_pipelines.into_iter().map(|frame_pipeline| frame_pipeline.into()).collect());
506 self
507 }
508
509 /// Adds a single [`FramePipeline`] to the existing pipeline list.
510 ///
511 /// If no pipelines are currently defined, this method creates a new pipeline list.
512 /// Otherwise, it appends the provided pipeline to the existing transformations.
513 ///
514 /// # Parameters
515 /// * `frame_pipeline` - A [`FramePipeline`] defining a transformation.
516 ///
517 /// # Returns
518 /// * `Self` - Returns the modified `Input`, enabling method chaining.
519 ///
520 /// # Example
521 /// ```rust,ignore
522 /// let input = Input::from("my_video.mp4")
523 /// .add_frame_pipeline(FramePipelineBuilder::new(AVMediaType::AVMEDIA_TYPE_VIDEO).filter("opengl", Box::new(my_filter)).build())
524 /// .add_frame_pipeline(FramePipelineBuilder::new(AVMediaType::AVMEDIA_TYPE_AUDIO).filter("my_custom_filter1", Box::new(...)).filter("my_custom_filter2", Box::new(...)).build());
525 /// ```
526 pub fn add_frame_pipeline(mut self, frame_pipeline: impl Into<FramePipeline>) -> Self {
527 if self.frame_pipelines.is_none() {
528 self.frame_pipelines = Some(vec![frame_pipeline.into()]);
529 } else {
530 self.frame_pipelines
531 .as_mut()
532 .unwrap()
533 .push(frame_pipeline.into());
534 }
535 self
536 }
537
538 /// Sets the input format for the container or device.
539 ///
540 /// By default, if no format is specified,
541 /// FFmpeg will attempt to detect the format automatically. However, certain
542 /// use cases require specifying the format explicitly:
543 /// - Using device-specific inputs (e.g., `avfoundation` on macOS, `dshow` on Windows).
544 /// - Handling raw streams or formats that FFmpeg may not detect automatically.
545 ///
546 /// ### Parameters:
547 /// - `format`: A string specifying the desired input format (e.g., `mp4`, `flv`, `avfoundation`).
548 ///
549 /// ### Return Value:
550 /// - Returns the `Input` instance with the newly set format.
551 pub fn set_format(mut self, format: impl Into<String>) -> Self {
552 self.format = Some(format.into());
553 self
554 }
555
556 /// Sets the **video codec** to be used for decoding.
557 ///
558 /// By default, FFmpeg will automatically select an appropriate video codec
559 /// based on the input format and available decoders. However, this method
560 /// allows you to override that selection and force a specific codec.
561 ///
562 /// # Common Video Codecs:
563 /// | Codec | Description |
564 /// |-------|-------------|
565 /// | `h264` | H.264 (AVC), widely supported and efficient |
566 /// | `hevc` | H.265 (HEVC), better compression at higher complexity |
567 /// | `vp9` | VP9, open-source alternative to H.265 |
568 /// | `av1` | AV1, newer open-source codec with improved compression |
569 /// | `mpeg4` | MPEG-4 Part 2, older but still used in some cases |
570 ///
571 /// # Arguments
572 /// * `video_codec` - A string representing the desired video codec (e.g., `"h264"`, `"hevc"`).
573 ///
574 /// # Returns
575 /// * `Self` - Returns the modified `Input` struct, allowing for method chaining.
576 ///
577 /// # Example:
578 /// ```rust,ignore
579 /// let input = Input::from("video.mp4").set_video_codec("h264");
580 /// ```
581 pub fn set_video_codec(mut self, video_codec: impl Into<String>) -> Self {
582 self.video_codec = Some(video_codec.into());
583 self
584 }
585
586 /// Sets the **audio codec** to be used for decoding.
587 ///
588 /// By default, FFmpeg will automatically select an appropriate audio codec
589 /// based on the input format and available decoders. However, this method
590 /// allows you to specify a preferred codec.
591 ///
592 /// # Common Audio Codecs:
593 /// | Codec | Description |
594 /// |-------|-------------|
595 /// | `aac` | AAC, commonly used for MP4 and streaming |
596 /// | `mp3` | MP3, widely supported but lower efficiency |
597 /// | `opus` | Opus, high-quality open-source codec |
598 /// | `vorbis` | Vorbis, used in Ogg containers |
599 /// | `flac` | FLAC, lossless audio format |
600 ///
601 /// # Arguments
602 /// * `audio_codec` - A string representing the desired audio codec (e.g., `"aac"`, `"mp3"`).
603 ///
604 /// # Returns
605 /// * `Self` - Returns the modified `Input` struct, allowing for method chaining.
606 ///
607 /// # Example:
608 /// ```rust,ignore
609 /// let input = Input::from("audio.mp3").set_audio_codec("aac");
610 /// ```
611 pub fn set_audio_codec(mut self, audio_codec: impl Into<String>) -> Self {
612 self.audio_codec = Some(audio_codec.into());
613 self
614 }
615
616 /// Sets the **subtitle codec** to be used for decoding.
617 ///
618 /// By default, FFmpeg will automatically select an appropriate subtitle codec
619 /// based on the input format and available decoders. This method lets you specify
620 /// a particular subtitle codec.
621 ///
622 /// # Common Subtitle Codecs:
623 /// | Codec | Description |
624 /// |-------|-------------|
625 /// | `ass` | Advanced SubStation Alpha (ASS) subtitles |
626 /// | `srt` | SubRip Subtitle format (SRT) |
627 /// | `mov_text` | Subtitles in MP4 containers |
628 /// | `subrip` | Plain-text subtitle format |
629 ///
630 /// # Arguments
631 /// * `subtitle_codec` - A string representing the desired subtitle codec (e.g., `"mov_text"`, `"ass"`, `"srt"`).
632 ///
633 /// # Returns
634 /// * `Self` - Returns the modified `Input` struct, allowing for method chaining.
635 ///
636 /// # Example:
637 /// ```rust,ignore
638 /// let input = Input::from("movie.mkv").set_subtitle_codec("ass");
639 /// ```
640 pub fn set_subtitle_codec(mut self, subtitle_codec: impl Into<String>) -> Self {
641 self.subtitle_codec = Some(subtitle_codec.into());
642 self
643 }
644
645 /// Enables or disables **exit on error** behavior for the input.
646 ///
647 /// If set to `true`, FFmpeg will exit (stop processing) if it encounters any
648 /// decoding or demuxing error on this input. If set to `false` (the default),
649 /// FFmpeg may attempt to continue despite errors, skipping damaged portions.
650 ///
651 /// # Parameters
652 /// - `exit_on_error`: `true` to stop on errors, `false` to keep going.
653 ///
654 /// # Returns
655 /// * `Self` - allowing method chaining.
656 ///
657 /// # Example
658 /// ```rust,ignore
659 /// let input = Input::from("test.mp4")
660 /// .set_exit_on_error(true);
661 /// ```
662 pub fn set_exit_on_error(mut self, exit_on_error: bool) -> Self {
663 self.exit_on_error = Some(exit_on_error);
664 self
665 }
666
667 /// Sets a **read rate** for this input, controlling how quickly frames are read.
668 ///
669 /// - If set to `1.0`, frames are read at their native frame rate.
670 /// - If set to another value (e.g., `0.5` or `2.0`), FFmpeg may attempt to read
671 /// slower or faster, simulating changes in real-time playback speed.
672 ///
673 /// # Parameters
674 /// - `rate`: A floating-point value indicating the read rate multiplier.
675 ///
676 /// # Returns
677 /// * `Self` - allowing method chaining.
678 ///
679 /// # Example
680 /// ```rust,ignore
681 /// let input = Input::from("video.mp4")
682 /// .set_readrate(0.5); // read at half speed
683 /// ```
684 pub fn set_readrate(mut self, rate: f32) -> Self {
685 self.readrate = Some(rate);
686 self
687 }
688
689 /// Sets the **start time** (in microseconds) from which to begin reading.
690 ///
691 /// FFmpeg will skip all data before this timestamp. This can be used to
692 /// implement “input seeking” or to only process a portion of the input.
693 ///
694 /// # Parameters
695 /// - `start_time_us`: The timestamp (in microseconds) at which to start reading.
696 ///
697 /// # Returns
698 /// * `Self` - allowing method chaining.
699 ///
700 /// # Example
701 /// ```rust,ignore
702 /// let input = Input::from("long_clip.mp4")
703 /// .set_start_time_us(2_000_000); // Start at 2 seconds
704 /// ```
705 pub fn set_start_time_us(mut self, start_time_us: i64) -> Self {
706 self.start_time_us = Some(start_time_us);
707 self
708 }
709
710 /// Sets the **recording time** (in microseconds) for this input.
711 ///
712 /// FFmpeg will only read for the specified duration, ignoring data past this
713 /// limit. This can be used to trim or limit how much of the input is processed.
714 ///
715 /// # Parameters
716 /// - `recording_time_us`: The number of microseconds to read from the input.
717 ///
718 /// # Returns
719 /// * `Self` - allowing method chaining.
720 ///
721 /// # Example
722 /// ```rust,ignore
723 /// let input = Input::from("long_clip.mp4")
724 /// .set_recording_time_us(5_000_000); // Only read 5 seconds
725 /// ```
726 pub fn set_recording_time_us(mut self, recording_time_us: i64) -> Self {
727 self.recording_time_us = Some(recording_time_us);
728 self
729 }
730
731 /// Sets a **stop time** (in microseconds) beyond which input data will be ignored.
732 ///
733 /// This is similar to [`set_recording_time_us`](Self::set_recording_time_us) but
734 /// specifically references an absolute timestamp in the stream. Once this timestamp
735 /// is reached, FFmpeg stops reading.
736 ///
737 /// # Parameters
738 /// - `stop_time_us`: The absolute timestamp (in microseconds) at which to stop reading.
739 ///
740 /// # Returns
741 /// * `Self` - allowing method chaining.
742 ///
743 /// # Example
744 /// ```rust,ignore
745 /// let input = Input::from("long_clip.mp4")
746 /// .set_stop_time_us(10_000_000); // Stop reading at 10 seconds
747 /// ```
748 pub fn set_stop_time_us(mut self, stop_time_us: i64) -> Self {
749 self.stop_time_us = Some(stop_time_us);
750 self
751 }
752
753 /// Sets the number of **loops** to perform on this input stream.
754 ///
755 /// If FFmpeg reaches the end of the input, it can loop back and start from the
756 /// beginning, effectively repeating the content `stream_loop` times.
757 /// A negative value may indicate infinite looping (depending on FFmpeg’s actual behavior).
758 ///
759 /// # Parameters
760 /// - `count`: How many times to loop (e.g. `1` means one loop, `-1` might mean infinite).
761 ///
762 /// # Returns
763 /// * `Self` - allowing method chaining.
764 ///
765 /// # Example
766 /// ```rust,ignore
767 /// let input = Input::from("music.mp3")
768 /// .set_stream_loop(2); // play the input 2 extra times
769 /// ```
770 pub fn set_stream_loop(mut self, count: i32) -> Self {
771 self.stream_loop = Some(count);
772 self
773 }
774
775 /// Specifies a **hardware acceleration** name for decoding this input.
776 ///
777 /// Common values might include `"cuda"`, `"vaapi"`, `"dxva2"`, `"videotoolbox"`, etc.
778 /// Whether it works depends on your FFmpeg build and the hardware you have available.
779 ///
780 /// # Parameters
781 /// - `hwaccel_name`: A string naming the hardware accel to use.
782 ///
783 /// # Returns
784 /// * `Self` - allowing method chaining.
785 ///
786 /// # Example
787 /// ```rust,ignore
788 /// let input = Input::from("video.mp4")
789 /// .set_hwaccel("cuda");
790 /// ```
791 pub fn set_hwaccel(mut self, hwaccel_name: impl Into<String>) -> Self {
792 self.hwaccel = Some(hwaccel_name.into());
793 self
794 }
795
796 /// Selects a **hardware acceleration device** for decoding.
797 ///
798 /// For example, if you have multiple GPUs or want to specify a device node (like
799 /// `"/dev/dri/renderD128"` on Linux for VAAPI), you can pass it here. This option
800 /// must match the hardware accel you set via [`set_hwaccel`](Self::set_hwaccel) if
801 /// you expect decoding to succeed.
802 ///
803 /// # Parameters
804 /// - `device`: A string indicating the device path or identifier.
805 ///
806 /// # Returns
807 /// * `Self` - allowing method chaining.
808 ///
809 /// # Example
810 /// ```rust,ignore
811 /// let input = Input::from("video.mp4")
812 /// .set_hwaccel("vaapi")
813 /// .set_hwaccel_device("/dev/dri/renderD128");
814 /// ```
815 pub fn set_hwaccel_device(mut self, device: impl Into<String>) -> Self {
816 self.hwaccel_device = Some(device.into());
817 self
818 }
819
820 /// Sets the **output pixel format** to be used with hardware-accelerated decoding.
821 ///
822 /// Certain hardware decoders can produce various output pixel formats. This option
823 /// lets you specify which format (e.g., `"nv12"`, `"vaapi"`, etc.) is used during
824 /// the decode process.
825 /// Must be compatible with the chosen hardware accel and device.
826 ///
827 /// # Parameters
828 /// - `format`: A string naming the desired output pixel format (e.g. `"nv12"`).
829 ///
830 /// # Returns
831 /// * `Self` - allowing method chaining.
832 ///
833 /// # Example
834 /// ```rust,ignore
835 /// let input = Input::from("video.mp4")
836 /// .set_hwaccel("cuda")
837 /// .set_hwaccel_output_format("cuda");
838 /// ```
839 pub fn set_hwaccel_output_format(mut self, format: impl Into<String>) -> Self {
840 self.hwaccel_output_format = Some(format.into());
841 self
842 }
843
844 /// Sets a single input option for avformat_open_input.
845 ///
846 /// This method configures options that will be passed to FFmpeg's `avformat_open_input()`
847 /// function. The options can control behavior at different levels including format detection,
848 /// protocol handling, device configuration, and general input processing.
849 ///
850 /// **Example Usage:**
851 /// ```rust,ignore
852 /// let input = Input::new("avfoundation:0")
853 /// .set_input_opt("framerate", "30")
854 /// .set_input_opt("probesize", "5000000");
855 /// ```
856 ///
857 /// ### Parameters:
858 /// - `key`: The option name (e.g., `"framerate"`, `"probesize"`, `"timeout"`).
859 /// - `value`: The option value (e.g., `"30"`, `"5000000"`, `"10000000"`).
860 ///
861 /// ### Return Value:
862 /// - Returns the modified `Input` instance for method chaining.
863 pub fn set_input_opt(mut self, key: impl Into<String>, value: impl Into<String>) -> Self {
864 if let Some(ref mut opts) = self.input_opts {
865 opts.insert(key.into(), value.into());
866 } else {
867 let mut opts = HashMap::new();
868 opts.insert(key.into(), value.into());
869 self.input_opts = Some(opts);
870 }
871 self
872 }
873
874 /// Sets multiple input options at once for avformat_open_input.
875 ///
876 /// This method allows setting multiple options in a single call, which will all be
877 /// passed to FFmpeg's `avformat_open_input()` function. Each key-value pair will be
878 /// inserted into the options map, overwriting any existing keys with the same name.
879 ///
880 /// **Example Usage:**
881 /// ```rust,ignore
882 /// let input = Input::new("http://example.com/stream.m3u8")
883 /// .set_input_opts(vec![
884 /// ("user_agent", "MyApp/1.0"),
885 /// ("timeout", "10000000"),
886 /// ("probesize", "5000000"),
887 /// ]);
888 /// ```
889 ///
890 /// ### Parameters:
891 /// - `opts`: A vector of key-value pairs representing input options.
892 ///
893 /// ### Return Value:
894 /// - Returns the modified `Input` instance for method chaining.
895 pub fn set_input_opts(mut self, opts: Vec<(impl Into<String>, impl Into<String>)>) -> Self {
896 if let Some(ref mut input_opts) = self.input_opts {
897 for (key, value) in opts {
898 input_opts.insert(key.into(), value.into());
899 }
900 } else {
901 let mut input_opts = HashMap::new();
902 for (key, value) in opts {
903 input_opts.insert(key.into(), value.into());
904 }
905 self.input_opts = Some(input_opts);
906 }
907 self
908 }
909
910 /// Sets whether to automatically rotate video based on display matrix metadata.
911 ///
912 /// When enabled (default is `true`), videos with rotation metadata (common in
913 /// smartphone recordings) will be automatically rotated to the correct orientation
914 /// using transpose/hflip/vflip filters.
915 ///
916 /// # Parameters
917 /// - `autorotate`: `true` to enable automatic rotation (default), `false` to disable.
918 ///
919 /// # Returns
920 /// * `Self` - allowing method chaining.
921 ///
922 /// # FFmpeg CLI equivalent
923 /// ```bash
924 /// ffmpeg -autorotate 0 -i input.mp4 output.mp4
925 /// ```
926 ///
927 /// # Example
928 /// ```rust,ignore
929 /// // Disable automatic rotation to preserve original video orientation
930 /// let input = Input::from("smartphone_video.mp4")
931 /// .set_autorotate(false);
932 /// ```
933 pub fn set_autorotate(mut self, autorotate: bool) -> Self {
934 self.autorotate = Some(autorotate);
935 self
936 }
937
938 /// Sets a timestamp scale factor for pts/dts values.
939 ///
940 /// This multiplier is applied to packet timestamps after ts_offset addition.
941 /// Default is `1.0` (no scaling). Values must be positive.
942 ///
943 /// This is useful for fixing videos with incorrect timestamps or for
944 /// special timestamp manipulation scenarios.
945 ///
946 /// # Parameters
947 /// - `scale`: A positive floating-point value for timestamp scaling.
948 ///
949 /// # Returns
950 /// * `Self` - allowing method chaining.
951 ///
952 /// # FFmpeg CLI equivalent
953 /// ```bash
954 /// ffmpeg -itsscale 2.0 -i input.mp4 output.mp4
955 /// ```
956 ///
957 /// # Example
958 /// ```rust,ignore
959 /// // Scale timestamps by 2x (double the playback speed effect on timestamps)
960 /// let input = Input::from("video.mp4")
961 /// .set_ts_scale(2.0);
962 /// ```
963 pub fn set_ts_scale(mut self, scale: f64) -> Self {
964 assert!(scale.is_finite(), "ts_scale must be finite, got {scale}");
965 assert!(scale > 0.0, "ts_scale must be positive, got {scale}");
966 self.ts_scale = Some(scale);
967 self
968 }
969
970 /// Sets a forced framerate for the input video stream.
971 ///
972 /// When set, this overrides the default DTS estimation behavior. By default,
973 /// ez-ffmpeg uses the actual packet duration for DTS estimation (matching FFmpeg
974 /// CLI behavior without `-r`). Setting a framerate forces DTS estimation to use
975 /// the specified rate instead, which snaps timestamps to a fixed frame grid.
976 ///
977 /// # Parameters
978 /// - `num`: Framerate numerator (e.g., 30 for 30fps, 24000 for 23.976fps)
979 /// - `den`: Framerate denominator (e.g., 1 for 30fps, 1001 for 23.976fps)
980 ///
981 /// # Returns
982 /// * `Self` - allowing method chaining.
983 ///
984 /// # FFmpeg CLI equivalent
985 /// ```bash
986 /// ffmpeg -r 30 -i input.mp4 output.mp4
987 /// ffmpeg -r 24000/1001 -i input.mp4 output.mp4
988 /// ```
989 ///
990 /// # Example
991 /// ```rust,ignore
992 /// // Force 30fps framerate for DTS estimation
993 /// let input = Input::from("video.mp4")
994 /// .set_framerate(30, 1);
995 ///
996 /// // Force 23.976fps framerate
997 /// let input = Input::from("video.mp4")
998 /// .set_framerate(24000, 1001);
999 /// ```
1000 pub fn set_framerate(mut self, num: i32, den: i32) -> Self {
1001 assert!(num > 0, "framerate numerator must be positive, got {num}");
1002 assert!(den > 0, "framerate denominator must be positive, got {den}");
1003 self.framerate = Some((num, den));
1004 self
1005 }
1006}
1007
impl From<Box<dyn FnMut(&mut [u8]) -> i32>> for Input {
    /// Creates an `Input` fed by a custom read callback instead of a URL.
    ///
    /// All other options start out as `None` (unset); configure them through
    /// the builder-style `set_*` methods.
    fn from(read_callback: Box<dyn FnMut(&mut [u8]) -> i32>) -> Self {
        Self {
            url: None, // no URL: all data comes from the callback
            read_callback: Some(read_callback),
            seek_callback: None,
            frame_pipelines: None,
            format: None,
            video_codec: None,
            audio_codec: None,
            subtitle_codec: None,
            exit_on_error: None,
            readrate: None,
            // Timing controls, all unset by default.
            start_time_us: None,
            recording_time_us: None,
            stop_time_us: None,
            stream_loop: None,
            // Hardware acceleration, unset by default.
            hwaccel: None,
            hwaccel_device: None,
            hwaccel_output_format: None,
            input_opts: None,
            autorotate: None,
            ts_scale: None,
            framerate: None,
        }
    }
}
1035
impl From<String> for Input {
    /// Creates an `Input` that reads from the given URL (file path, network
    /// stream, or any other URL FFmpeg supports).
    ///
    /// All other options start out as `None` (unset); configure them through
    /// the builder-style `set_*` methods.
    fn from(url: String) -> Self {
        Self {
            url: Some(url),
            // No custom I/O callbacks: data is read directly from the URL.
            read_callback: None,
            seek_callback: None,
            frame_pipelines: None,
            format: None,
            video_codec: None,
            audio_codec: None,
            subtitle_codec: None,
            exit_on_error: None,
            readrate: None,
            // Timing controls, all unset by default.
            start_time_us: None,
            recording_time_us: None,
            stop_time_us: None,
            stream_loop: None,
            // Hardware acceleration, unset by default.
            hwaccel: None,
            hwaccel_device: None,
            hwaccel_output_format: None,
            input_opts: None,
            autorotate: None,
            ts_scale: None,
            framerate: None,
        }
    }
}
1063
1064impl From<&str> for Input {
1065 fn from(url: &str) -> Self {
1066 Self::from(String::from(url))
1067 }
1068}
1069
1070
#[cfg(test)]
mod tests {
    use crate::core::context::input::Input;

    /// Shorthand for a builder over the dummy path used by every test below.
    fn sample() -> Input {
        Input::from("test.mp4")
    }

    #[test]
    fn set_framerate_valid() {
        assert_eq!(
            sample().set_framerate(24000, 1001).framerate,
            Some((24000, 1001))
        );
    }

    #[test]
    fn set_framerate_simple() {
        assert_eq!(sample().set_framerate(30, 1).framerate, Some((30, 1)));
    }

    #[test]
    #[should_panic(expected = "framerate numerator must be positive")]
    fn set_framerate_zero_num() {
        sample().set_framerate(0, 1);
    }

    #[test]
    #[should_panic(expected = "framerate denominator must be positive")]
    fn set_framerate_zero_den() {
        sample().set_framerate(24, 0);
    }

    #[test]
    #[should_panic(expected = "framerate numerator must be positive")]
    fn set_framerate_negative_num() {
        sample().set_framerate(-1, 1);
    }

    #[test]
    #[should_panic(expected = "framerate denominator must be positive")]
    fn set_framerate_negative_den() {
        sample().set_framerate(24, -1);
    }

    #[test]
    fn set_ts_scale_valid() {
        assert_eq!(sample().set_ts_scale(2.0).ts_scale, Some(2.0));
    }

    #[test]
    fn set_ts_scale_fractional() {
        assert_eq!(sample().set_ts_scale(0.5).ts_scale, Some(0.5));
    }

    #[test]
    #[should_panic(expected = "ts_scale must be finite")]
    fn set_ts_scale_nan() {
        sample().set_ts_scale(f64::NAN);
    }

    #[test]
    #[should_panic(expected = "ts_scale must be finite")]
    fn set_ts_scale_infinity() {
        sample().set_ts_scale(f64::INFINITY);
    }

    #[test]
    #[should_panic(expected = "ts_scale must be finite")]
    fn set_ts_scale_neg_infinity() {
        sample().set_ts_scale(f64::NEG_INFINITY);
    }

    #[test]
    #[should_panic(expected = "ts_scale must be positive")]
    fn set_ts_scale_zero() {
        sample().set_ts_scale(0.0);
    }

    #[test]
    #[should_panic(expected = "ts_scale must be positive")]
    fn set_ts_scale_negative() {
        sample().set_ts_scale(-1.0);
    }

    #[test]
    fn test_new_by_read_callback() {
        // Each callback owns its backing buffer and copies as much as fits.
        let first_source = b"example custom data source".to_vec();
        let _input = Input::new_by_read_callback(move |dst| {
            let n = first_source.len().min(dst.len());
            dst[..n].copy_from_slice(&first_source[..n]);
            n as i32 // Return the number of bytes written
        });

        let second_source = b"example custom data source2".to_vec();
        let _input = Input::new_by_read_callback(move |dst| {
            let n = second_source.len().min(dst.len());
            dst[..n].copy_from_slice(&second_source[..n]);
            n as i32 // Return the number of bytes written
        });
    }
}
1169}