ez_ffmpeg/core/context/input.rs
use std::collections::HashMap;
use crate::filter::frame_pipeline::FramePipeline;

unsafe impl Send for Input {}

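/// Configuration for a single FFmpeg input source.
///
/// An `Input` describes where the data comes from (a URL or a custom
/// `read_callback`), how it should be demuxed and decoded (format, codecs,
/// hardware acceleration), and how much of it is read (start/stop times,
/// read rate, looping). Instances are built with the builder-style setters
/// below and consumed when the input stream is opened.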
pub struct Input {
    /// The URL of the input source.
    ///
    /// This specifies the source from which the input stream is obtained. It can be:
    /// - A local file path (e.g., `file:///path/to/video.mp4`).
    /// - A network stream (e.g., `rtmp://example.com/live/stream`).
    /// - Any other URL supported by FFmpeg (e.g., `http://example.com/video.mp4`, `udp://...`).
    ///
    /// If the URL is invalid or unsupported, the library will return an error
    /// when attempting to open the input stream.
    pub(crate) url: Option<String>,

    /// A callback function for custom data reading.
    ///
    /// The `read_callback` function allows you to provide custom logic for feeding data into
    /// the input stream. This is useful for scenarios where the input does not come directly
    /// from a standard source (like a file or URL), but instead from a custom data source,
    /// such as an in-memory buffer or a custom network stream.
    ///
    /// ### Parameters:
    /// - `buf: &mut [u8]`: A mutable buffer into which the data should be written.
    ///   The callback should fill this buffer with as much data as possible, up to its length.
    ///
    /// ### Return Value:
    /// - **Positive Value**: The number of bytes successfully read into `buf`.
    /// - **`ffmpeg_sys_next::AVERROR_EOF`**: Indicates the end of the input stream. No more data will be read.
    /// - **Negative Value**: Indicates an error occurred, such as:
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)`: General I/O error.
    ///   - Custom-defined error codes depending on your implementation.
    ///
    /// ### Example:
    /// ```rust
    /// fn custom_read_callback(buf: &mut [u8]) -> i32 {
    ///     let data = b"example data stream";
    ///     let len = data.len().min(buf.len());
    ///     buf[..len].copy_from_slice(&data[..len]);
    ///     len as i32 // Return the number of bytes written into the buffer
    /// }
    /// ```
    pub(crate) read_callback: Option<Box<dyn FnMut(&mut [u8]) -> i32>>,

    /// A callback function for custom seeking within the input stream.
    ///
    /// The `seek_callback` function allows defining custom seeking behavior.
    /// This is useful for data sources that support seeking, such as files or memory-mapped data.
    /// For non-seekable streams (e.g., live network streams), this function may return an error.
    ///
    /// **FFmpeg may invoke `seek_callback` from multiple threads, so thread safety is required.**
    /// When using a `File` as an input source, **use `Arc<Mutex<File>>` to ensure safe access.**
    ///
    /// ### Parameters:
    /// - `offset: i64`: The target position in the stream for seeking.
    /// - `whence: i32`: The seek mode defining how the `offset` should be interpreted:
    ///   - `ffmpeg_sys_next::SEEK_SET` (0): Seek to an absolute position.
    ///   - `ffmpeg_sys_next::SEEK_CUR` (1): Seek relative to the current position.
    ///   - `ffmpeg_sys_next::SEEK_END` (2): Seek relative to the end of the stream.
    ///   - `ffmpeg_sys_next::SEEK_HOLE` (3): Find the next file hole (sparse file support).
    ///   - `ffmpeg_sys_next::SEEK_DATA` (4): Find the next data block (sparse file support).
    ///   - `ffmpeg_sys_next::AVSEEK_FLAG_BYTE` (2): Seek using **byte offsets** instead of timestamps.
    ///   - `ffmpeg_sys_next::AVSEEK_SIZE` (65536): Query the **total size** of the stream.
    ///   - `ffmpeg_sys_next::AVSEEK_FORCE` (131072): **Force seeking even if normally restricted.**
    ///
    /// ### Return Value:
    /// - **Non-negative Value**: The new offset position after seeking.
    /// - **Negative Value**: An error occurred. Common errors include:
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE)`: Seek is not supported.
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)`: General I/O error.
    ///
    /// ### Example (Handling multi-threaded access safely with `Arc<Mutex<File>>`):
    /// Since FFmpeg may call `read_callback` and `seek_callback` from different threads,
    /// **`Arc<Mutex<File>>` is used to ensure safe access across threads.**
    ///
    /// ```rust
    /// use std::fs::File;
    /// use std::io::{Seek, SeekFrom};
    /// use std::sync::{Arc, Mutex};
    ///
    /// let file = Arc::new(Mutex::new(File::open("test.mp4").expect("Failed to open file")));
    ///
    /// let seek_callback = {
    ///     let file = Arc::clone(&file);
    ///     Box::new(move |offset: i64, whence: i32| -> i64 {
    ///         let mut file = file.lock().unwrap(); // Acquire lock
    ///
    ///         // ✅ Handle AVSEEK_SIZE: Return total file size
    ///         if whence == ffmpeg_sys_next::AVSEEK_SIZE {
    ///             if let Ok(size) = file.metadata().map(|m| m.len() as i64) {
    ///                 println!("FFmpeg requested stream size: {}", size);
    ///                 return size;
    ///             }
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64;
    ///         }
    ///
    ///         // ✅ Handle AVSEEK_FORCE: Ignore this flag when processing seek
    ///         let actual_whence = whence & !ffmpeg_sys_next::AVSEEK_FORCE;
    ///
    ///         // ✅ Handle AVSEEK_FLAG_BYTE: Perform byte-based seek
    ///         if actual_whence & ffmpeg_sys_next::AVSEEK_FLAG_BYTE != 0 {
    ///             println!("FFmpeg requested byte-based seeking. Seeking to byte offset: {}", offset);
    ///             if let Ok(new_pos) = file.seek(SeekFrom::Start(offset as u64)) {
    ///                 return new_pos as i64;
    ///             }
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64;
    ///         }
    ///
    ///         // ✅ Handle SEEK_HOLE and SEEK_DATA (Linux only)
    ///         #[cfg(target_os = "linux")]
    ///         if actual_whence == ffmpeg_sys_next::SEEK_HOLE {
    ///             println!("FFmpeg requested SEEK_HOLE, but Rust std::fs does not support it.");
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///         }
    ///         #[cfg(target_os = "linux")]
    ///         if actual_whence == ffmpeg_sys_next::SEEK_DATA {
    ///             println!("FFmpeg requested SEEK_DATA, but Rust std::fs does not support it.");
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///         }
    ///
    ///         // ✅ Standard seek modes
    ///         let seek_result = match actual_whence {
    ///             ffmpeg_sys_next::SEEK_SET => file.seek(SeekFrom::Start(offset as u64)),
    ///             ffmpeg_sys_next::SEEK_CUR => file.seek(SeekFrom::Current(offset)),
    ///             ffmpeg_sys_next::SEEK_END => file.seek(SeekFrom::End(offset)),
    ///             _ => {
    ///                 println!("Unsupported seek mode: {}", whence);
    ///                 return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///             }
    ///         };
    ///
    ///         match seek_result {
    ///             Ok(new_pos) => {
    ///                 println!("Seek successful, new position: {}", new_pos);
    ///                 new_pos as i64
    ///             }
    ///             Err(e) => {
    ///                 println!("Seek failed: {}", e);
    ///                 ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64
    ///             }
    ///         }
    ///     })
    /// };
    /// ```
    pub(crate) seek_callback: Option<Box<dyn FnMut(i64, i32) -> i64>>,

    /// Pipelines that provide custom processing for decoded frames.
    ///
    /// After the input data is decoded into `Frame` objects, these frames
    /// are passed through each `FramePipeline`. Within a pipeline, every frame
    /// goes through a series of `FrameFilter` objects, allowing for customized
    /// processing (e.g., filtering, transformation, etc.).
    ///
    /// If `None`, no processing pipeline is applied to the decoded frames.
    pub(crate) frame_pipelines: Option<Vec<FramePipeline>>,

    /// The input format for the source.
    ///
    /// This field specifies which container or device format FFmpeg should use to read the input.
    /// If `None`, FFmpeg will attempt to automatically detect the format based on the source URL,
    /// file extension, or stream data.
    ///
    /// You might need to specify a format explicitly in cases where automatic detection fails or
    /// when you must force a particular format. For example:
    /// - When capturing from a specific device on macOS (using `avfoundation`).
    /// - When capturing on Windows devices (using `dshow`).
    /// - When dealing with raw streams or unusual data sources.
    pub(crate) format: Option<String>,

    /// The codec to be used for **video** decoding.
    ///
    /// If set, this forces FFmpeg to use the specified video codec for decoding.
    /// Otherwise, FFmpeg will attempt to auto-detect the best available codec.
    pub(crate) video_codec: Option<String>,

    /// The codec to be used for **audio** decoding.
    ///
    /// If set, this forces FFmpeg to use the specified audio codec for decoding.
    /// Otherwise, FFmpeg will attempt to auto-detect the best available codec.
    pub(crate) audio_codec: Option<String>,

    /// The codec to be used for **subtitle** decoding.
    ///
    /// If set, this forces FFmpeg to use the specified subtitle codec for decoding.
    /// Otherwise, FFmpeg will attempt to auto-detect the best available codec.
    pub(crate) subtitle_codec: Option<String>,

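    /// Whether to stop processing when an error occurs on this input.
    ///
    /// See [`set_exit_on_error`](Self::set_exit_on_error). If `None`, the default
    /// behavior is to continue despite errors.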
    pub(crate) exit_on_error: Option<bool>,

    /// Read the input at the specified rate.
    ///
    /// When set to `1`, the input is read at its native frame rate.
    pub(crate) readrate: Option<f32>,
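    /// The timestamp (in microseconds) at which to start reading this input.
    /// See [`set_start_time_us`](Self::set_start_time_us).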
    pub(crate) start_time_us: Option<i64>,
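    /// The maximum duration (in microseconds) to read from this input.
    /// See [`set_recording_time_us`](Self::set_recording_time_us).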
    pub(crate) recording_time_us: Option<i64>,
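    /// The absolute timestamp (in microseconds) at which to stop reading.
    /// See [`set_stop_time_us`](Self::set_stop_time_us).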
    pub(crate) stop_time_us: Option<i64>,

    /// The number of times the input stream should be looped.
    pub(crate) stream_loop: Option<i32>,

    /// The hardware acceleration method to use for decoding (e.g., `cuda`, `vaapi`, `videotoolbox`).
    pub(crate) hwaccel: Option<String>,
    /// The device to use for hardware-accelerated decoding.
    pub(crate) hwaccel_device: Option<String>,
    /// The output pixel format to use with hardware-accelerated decoding.
    pub(crate) hwaccel_output_format: Option<String>,

    /// Input options for `avformat_open_input`.
    ///
    /// This field stores options that are passed to FFmpeg's `avformat_open_input()` function.
    /// These options can affect different layers of the input processing pipeline:
    ///
    /// **Format/Demuxer options:**
    /// - `probesize` - Maximum data to probe for format detection
    /// - `analyzeduration` - Duration to analyze for stream info
    /// - `fflags` - Format flags (e.g., "+genpts")
    ///
    /// **Protocol options:**
    /// - `user_agent` - HTTP User-Agent header
    /// - `timeout` - Network timeout in microseconds
    /// - `headers` - Custom HTTP headers
    ///
    /// **Device options:**
    /// - `framerate` - Input framerate (for avfoundation, dshow, etc.)
    /// - `video_size` - Input video resolution
    /// - `pixel_format` - Input pixel format
    ///
    /// **General input options:**
    /// - `thread_queue_size` - Input thread queue size
    /// - `re` - Read input at native frame rate
    ///
    /// These options allow fine-tuning of input behavior across different components
    /// of the FFmpeg input pipeline.
    pub(crate) input_opts: Option<HashMap<String, String>>,
}

impl Input {
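    /// Creates a new `Input` that reads from the given URL.
    ///
    /// This is a convenience constructor equivalent to `Input::from(url.into())`;
    /// all other settings start out unset and can be configured with the
    /// builder-style methods.
    ///
    /// ### Example:
    /// ```rust
    /// let input = Input::new("video.mp4");
    /// ```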
    pub fn new(url: impl Into<String>) -> Self {
        url.into().into()
    }

    /// Creates a new `Input` instance with a custom read callback.
    ///
    /// This method initializes an `Input` object that uses a provided `read_callback` function
    /// to supply data to the input stream. This is particularly useful for custom data sources
    /// such as in-memory buffers, network streams, or other non-standard input mechanisms.
    ///
    /// ### Parameters:
    /// - `read_callback`: A closure or function implementing `FnMut(&mut [u8]) -> i32` that fills
    ///   the provided mutable buffer with data and returns the number of bytes read.
    ///
    /// ### Return Value:
    /// - Returns a new `Input` instance configured with the specified `read_callback`.
    ///
    /// ### Behavior of `read_callback`:
    /// - **Positive Value**: Indicates the number of bytes successfully read.
    /// - **`ffmpeg_sys_next::AVERROR_EOF`**: Indicates the end of the stream. The library will stop requesting data.
    /// - **Negative Value**: Indicates an error occurred. For example:
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)`: Represents an input/output error.
    ///   - Other custom-defined error codes can also be returned to signal specific issues.
    ///
    /// ### Example:
    /// ```rust
    /// let input = Input::new_by_read_callback(move |buf| {
    ///     let data = b"example custom data source";
    ///     let len = data.len().min(buf.len());
    ///     buf[..len].copy_from_slice(&data[..len]);
    ///     len as i32 // Return the number of bytes written
    /// });
    /// ```
    pub fn new_by_read_callback<F>(read_callback: F) -> Self
    where
        F: FnMut(&mut [u8]) -> i32 + 'static,
    {
        (Box::new(read_callback) as Box<dyn FnMut(&mut [u8]) -> i32>).into()
    }

    /// Sets a custom seek callback for the input stream.
    ///
    /// This function assigns a user-defined function that handles seeking within the input stream.
    /// It is required when using custom data sources that support random access, such as files,
    /// memory-mapped buffers, or seekable network streams.
    ///
    /// **FFmpeg may invoke `seek_callback` from different threads.**
    /// If using a `File` as the data source, **wrap it in `Arc<Mutex<File>>`** to ensure
    /// thread-safe access across multiple threads.
    ///
    /// ### Parameters:
    /// - `seek_callback: FnMut(i64, i32) -> i64`: A function that handles seek operations.
    ///   - `offset: i64`: The target seek position in the stream.
    ///   - `whence: i32`: The seek mode, which determines how `offset` should be interpreted:
    ///     - `ffmpeg_sys_next::SEEK_SET` (0) - Seek to an absolute position.
    ///     - `ffmpeg_sys_next::SEEK_CUR` (1) - Seek relative to the current position.
    ///     - `ffmpeg_sys_next::SEEK_END` (2) - Seek relative to the end of the stream.
    ///     - `ffmpeg_sys_next::SEEK_HOLE` (3) - Find the next hole in a sparse file (Linux only).
    ///     - `ffmpeg_sys_next::SEEK_DATA` (4) - Find the next data block in a sparse file (Linux only).
    ///     - `ffmpeg_sys_next::AVSEEK_FLAG_BYTE` (2) - Seek using byte offset instead of timestamps.
    ///     - `ffmpeg_sys_next::AVSEEK_SIZE` (65536) - Query the total size of the stream.
    ///     - `ffmpeg_sys_next::AVSEEK_FORCE` (131072) - Force seeking, even if normally restricted.
    ///
    /// ### Return Value:
    /// - Returns `Self`, allowing for method chaining.
    ///
    /// ### Behavior of `seek_callback`:
    /// - **Non-negative Value**: The new offset position after seeking.
    /// - **Negative Value**: An error occurred, such as:
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE)`: Seek is not supported.
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)`: General I/O error.
    ///
    /// ### Example (Thread-safe seek callback using `Arc<Mutex<File>>`):
    /// Since `FFmpeg` may call `read_callback` and `seek_callback` from different threads,
    /// **use `Arc<Mutex<File>>` to ensure safe concurrent access.**
    ///
    /// ```rust
    /// use std::fs::File;
    /// use std::io::{Read, Seek, SeekFrom};
    /// use std::sync::{Arc, Mutex};
    ///
    /// // ✅ Wrap the file in Arc<Mutex<>> for safe shared access
    /// let file = Arc::new(Mutex::new(File::open("test.mp4").expect("Failed to open file")));
    ///
    /// // ✅ Thread-safe read callback
    /// let read_callback = {
    ///     let file = Arc::clone(&file);
    ///     move |buf: &mut [u8]| -> i32 {
    ///         let mut file = file.lock().unwrap();
    ///         match file.read(buf) {
    ///             Ok(0) => {
    ///                 println!("Read EOF");
    ///                 ffmpeg_sys_next::AVERROR_EOF
    ///             }
    ///             Ok(bytes_read) => bytes_read as i32,
    ///             Err(e) => {
    ///                 println!("Read error: {}", e);
    ///                 ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)
    ///             }
    ///         }
    ///     }
    /// };
    ///
    /// // ✅ Thread-safe seek callback
    /// let seek_callback = {
    ///     let file = Arc::clone(&file);
    ///     Box::new(move |offset: i64, whence: i32| -> i64 {
    ///         let mut file = file.lock().unwrap();
    ///
    ///         // ✅ Handle AVSEEK_SIZE: Return total file size
    ///         if whence == ffmpeg_sys_next::AVSEEK_SIZE {
    ///             if let Ok(size) = file.metadata().map(|m| m.len() as i64) {
    ///                 println!("FFmpeg requested stream size: {}", size);
    ///                 return size;
    ///             }
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64;
    ///         }
    ///
    ///         // ✅ Ignore AVSEEK_FORCE flag
    ///         let actual_whence = whence & !ffmpeg_sys_next::AVSEEK_FORCE;
    ///
    ///         // ✅ Handle AVSEEK_FLAG_BYTE: Perform byte-based seek
    ///         if actual_whence & ffmpeg_sys_next::AVSEEK_FLAG_BYTE != 0 {
    ///             println!("FFmpeg requested byte-based seeking. Seeking to byte offset: {}", offset);
    ///             if let Ok(new_pos) = file.seek(SeekFrom::Start(offset as u64)) {
    ///                 return new_pos as i64;
    ///             }
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64;
    ///         }
    ///
    ///         // ✅ Handle SEEK_HOLE and SEEK_DATA (Linux only)
    ///         #[cfg(target_os = "linux")]
    ///         if actual_whence == ffmpeg_sys_next::SEEK_HOLE {
    ///             println!("FFmpeg requested SEEK_HOLE, but Rust std::fs does not support it.");
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///         }
    ///         #[cfg(target_os = "linux")]
    ///         if actual_whence == ffmpeg_sys_next::SEEK_DATA {
    ///             println!("FFmpeg requested SEEK_DATA, but Rust std::fs does not support it.");
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///         }
    ///
    ///         // ✅ Standard seek modes
    ///         let seek_result = match actual_whence {
    ///             ffmpeg_sys_next::SEEK_SET => file.seek(SeekFrom::Start(offset as u64)),
    ///             ffmpeg_sys_next::SEEK_CUR => file.seek(SeekFrom::Current(offset)),
    ///             ffmpeg_sys_next::SEEK_END => file.seek(SeekFrom::End(offset)),
    ///             _ => {
    ///                 println!("Unsupported seek mode: {}", whence);
    ///                 return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///             }
    ///         };
    ///
    ///         match seek_result {
    ///             Ok(new_pos) => {
    ///                 println!("Seek successful, new position: {}", new_pos);
    ///                 new_pos as i64
    ///             }
    ///             Err(e) => {
    ///                 println!("Seek failed: {}", e);
    ///                 ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64
    ///             }
    ///         }
    ///     })
    /// };
    ///
    /// let input = Input::new_by_read_callback(read_callback).set_seek_callback(seek_callback);
    /// ```
    pub fn set_seek_callback<F>(mut self, seek_callback: F) -> Self
    where
        F: FnMut(i64, i32) -> i64 + 'static,
    {
        self.seek_callback = Some(Box::new(seek_callback) as Box<dyn FnMut(i64, i32) -> i64>);
        self
    }

    /// Replaces the entire frame-processing pipeline with a new sequence
    /// of transformations for **post-decoding** frames on this `Input`.
    ///
    /// This method clears any previously set pipelines and replaces them with the provided list.
    ///
    /// # Parameters
    /// * `frame_pipelines` - A list of [`FramePipeline`] instances defining the
    ///   transformations to apply to decoded frames.
    ///
    /// # Returns
    /// * `Self` - Returns the modified `Input`, enabling method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("my_video.mp4")
    ///     .set_frame_pipelines(vec![
    ///         FramePipelineBuilder::new(AVMediaType::AVMEDIA_TYPE_VIDEO).filter("opengl", Box::new(my_filter)),
    ///         // Additional pipelines...
    ///     ]);
    /// ```
    pub fn set_frame_pipelines(mut self, frame_pipelines: Vec<impl Into<FramePipeline>>) -> Self {
        self.frame_pipelines = Some(
            frame_pipelines
                .into_iter()
                .map(|frame_pipeline| frame_pipeline.into())
                .collect(),
        );
        self
    }

    /// Adds a single [`FramePipeline`] to the existing pipeline list.
    ///
    /// If no pipelines are currently defined, this method creates a new pipeline list.
    /// Otherwise, it appends the provided pipeline to the existing transformations.
    ///
    /// # Parameters
    /// * `frame_pipeline` - A [`FramePipeline`] defining a transformation.
    ///
    /// # Returns
    /// * `Self` - Returns the modified `Input`, enabling method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("my_video.mp4")
    ///     .add_frame_pipeline(FramePipelineBuilder::new(AVMediaType::AVMEDIA_TYPE_VIDEO).filter("opengl", Box::new(my_filter)).build())
    ///     .add_frame_pipeline(FramePipelineBuilder::new(AVMediaType::AVMEDIA_TYPE_AUDIO).filter("my_custom_filter1", Box::new(...)).filter("my_custom_filter2", Box::new(...)).build());
    /// ```
    pub fn add_frame_pipeline(mut self, frame_pipeline: impl Into<FramePipeline>) -> Self {
        if self.frame_pipelines.is_none() {
            self.frame_pipelines = Some(vec![frame_pipeline.into()]);
        } else {
            self.frame_pipelines
                .as_mut()
                .unwrap()
                .push(frame_pipeline.into());
        }
        self
    }

    /// Sets the input format for the container or device.
    ///
    /// If no format is specified, FFmpeg will attempt to detect it automatically.
    /// However, certain use cases require specifying the format explicitly:
    /// - Using device-specific inputs (e.g., `avfoundation` on macOS, `dshow` on Windows).
    /// - Handling raw streams or formats that FFmpeg may not detect automatically.
    ///
    /// ### Parameters:
    /// - `format`: A string specifying the desired input format (e.g., `mp4`, `flv`, `avfoundation`).
    ///
    /// ### Return Value:
    /// - Returns the `Input` instance with the newly set format.
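    ///
    /// ### Example:
    /// A minimal sketch (the format name must match one of the demuxers or devices available in your FFmpeg build):
    /// ```rust
    /// // Force the FLV demuxer for a stream whose file extension FFmpeg may not recognize.
    /// let input = Input::from("capture.bin").set_format("flv");
    /// ```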
    pub fn set_format(mut self, format: impl Into<String>) -> Self {
        self.format = Some(format.into());
        self
    }

    /// Sets the **video codec** to be used for decoding.
    ///
    /// By default, FFmpeg will automatically select an appropriate video codec
    /// based on the input format and available decoders. However, this method
    /// allows you to override that selection and force a specific codec.
    ///
    /// # Common Video Codecs:
    /// | Codec | Description |
    /// |-------|-------------|
    /// | `h264` | H.264 (AVC), widely supported and efficient |
    /// | `hevc` | H.265 (HEVC), better compression at higher complexity |
    /// | `vp9` | VP9, open-source alternative to H.265 |
    /// | `av1` | AV1, newer open-source codec with improved compression |
    /// | `mpeg4` | MPEG-4 Part 2, older but still used in some cases |
    ///
    /// # Arguments
    /// * `video_codec` - A string representing the desired video codec (e.g., `"h264"`, `"hevc"`).
    ///
    /// # Returns
    /// * `Self` - Returns the modified `Input` struct, allowing for method chaining.
    ///
    /// # Example:
    /// ```rust
    /// let input = Input::from("video.mp4").set_video_codec("h264");
    /// ```
    pub fn set_video_codec(mut self, video_codec: impl Into<String>) -> Self {
        self.video_codec = Some(video_codec.into());
        self
    }

    /// Sets the **audio codec** to be used for decoding.
    ///
    /// By default, FFmpeg will automatically select an appropriate audio codec
    /// based on the input format and available decoders. However, this method
    /// allows you to specify a preferred codec.
    ///
    /// # Common Audio Codecs:
    /// | Codec | Description |
    /// |-------|-------------|
    /// | `aac` | AAC, commonly used for MP4 and streaming |
    /// | `mp3` | MP3, widely supported but lower efficiency |
    /// | `opus` | Opus, high-quality open-source codec |
    /// | `vorbis` | Vorbis, used in Ogg containers |
    /// | `flac` | FLAC, lossless audio format |
    ///
    /// # Arguments
    /// * `audio_codec` - A string representing the desired audio codec (e.g., `"aac"`, `"mp3"`).
    ///
    /// # Returns
    /// * `Self` - Returns the modified `Input` struct, allowing for method chaining.
    ///
    /// # Example:
    /// ```rust
    /// let input = Input::from("audio.aac").set_audio_codec("aac");
    /// ```
    pub fn set_audio_codec(mut self, audio_codec: impl Into<String>) -> Self {
        self.audio_codec = Some(audio_codec.into());
        self
    }

    /// Sets the **subtitle codec** to be used for decoding.
    ///
    /// By default, FFmpeg will automatically select an appropriate subtitle codec
    /// based on the input format and available decoders. This method lets you specify
    /// a particular subtitle codec.
    ///
    /// # Common Subtitle Codecs:
    /// | Codec | Description |
    /// |-------|-------------|
    /// | `ass` | Advanced SubStation Alpha (ASS) subtitles |
    /// | `srt` | SubRip Subtitle format (SRT) |
    /// | `mov_text` | Subtitles in MP4 containers |
    /// | `subrip` | Plain-text subtitle format |
    ///
    /// # Arguments
    /// * `subtitle_codec` - A string representing the desired subtitle codec (e.g., `"mov_text"`, `"ass"`, `"srt"`).
    ///
    /// # Returns
    /// * `Self` - Returns the modified `Input` struct, allowing for method chaining.
    ///
    /// # Example:
    /// ```rust
    /// let input = Input::from("movie.mkv").set_subtitle_codec("ass");
    /// ```
    pub fn set_subtitle_codec(mut self, subtitle_codec: impl Into<String>) -> Self {
        self.subtitle_codec = Some(subtitle_codec.into());
        self
    }

    /// Enables or disables **exit on error** behavior for the input.
    ///
    /// If set to `true`, FFmpeg will exit (stop processing) if it encounters any
    /// decoding or demuxing error on this input. If set to `false` (the default),
    /// FFmpeg may attempt to continue despite errors, skipping damaged portions.
    ///
    /// # Parameters
    /// - `exit_on_error`: `true` to stop on errors, `false` to keep going.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("test.mp4")
    ///     .set_exit_on_error(true);
    /// ```
    pub fn set_exit_on_error(mut self, exit_on_error: bool) -> Self {
        self.exit_on_error = Some(exit_on_error);
        self
    }

    /// Sets a **read rate** for this input, controlling how quickly frames are read.
    ///
    /// - If set to `1.0`, frames are read at their native frame rate.
    /// - If set to another value (e.g., `0.5` or `2.0`), FFmpeg may attempt to read
    ///   slower or faster, simulating changes in real-time playback speed.
    ///
    /// # Parameters
    /// - `rate`: A floating-point value indicating the read rate multiplier.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("video.mp4")
    ///     .set_readrate(0.5); // read at half speed
    /// ```
    pub fn set_readrate(mut self, rate: f32) -> Self {
        self.readrate = Some(rate);
        self
    }

    /// Sets the **start time** (in microseconds) from which to begin reading.
    ///
    /// FFmpeg will skip all data before this timestamp. This can be used to
    /// implement “input seeking” or to only process a portion of the input.
    ///
    /// # Parameters
    /// - `start_time_us`: The timestamp (in microseconds) at which to start reading.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("long_clip.mp4")
    ///     .set_start_time_us(2_000_000); // Start at 2 seconds
    /// ```
    pub fn set_start_time_us(mut self, start_time_us: i64) -> Self {
        self.start_time_us = Some(start_time_us);
        self
    }

    /// Sets the **recording time** (in microseconds) for this input.
    ///
    /// FFmpeg will only read for the specified duration, ignoring data past this
    /// limit. This can be used to trim or limit how much of the input is processed.
    ///
    /// # Parameters
    /// - `recording_time_us`: The number of microseconds to read from the input.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("long_clip.mp4")
    ///     .set_recording_time_us(5_000_000); // Only read 5 seconds
    /// ```
    pub fn set_recording_time_us(mut self, recording_time_us: i64) -> Self {
        self.recording_time_us = Some(recording_time_us);
        self
    }

    /// Sets a **stop time** (in microseconds) beyond which input data will be ignored.
    ///
    /// This is similar to [`set_recording_time_us`](Self::set_recording_time_us) but
    /// specifically references an absolute timestamp in the stream. Once this timestamp
    /// is reached, FFmpeg stops reading.
    ///
    /// # Parameters
    /// - `stop_time_us`: The absolute timestamp (in microseconds) at which to stop reading.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("long_clip.mp4")
    ///     .set_stop_time_us(10_000_000); // Stop reading at 10 seconds
    /// ```
    pub fn set_stop_time_us(mut self, stop_time_us: i64) -> Self {
        self.stop_time_us = Some(stop_time_us);
        self
    }

    /// Sets the number of **loops** to perform on this input stream.
    ///
    /// If FFmpeg reaches the end of the input, it can loop back and start from the
    /// beginning, repeating the content `stream_loop` additional times.
    /// A negative value (typically `-1`) indicates infinite looping, matching the
    /// behavior of FFmpeg's `-stream_loop` option.
    ///
    /// # Parameters
    /// - `count`: How many extra times to loop (e.g. `1` means play the input one
    ///   extra time, `-1` means loop indefinitely).
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("music.mp3")
    ///     .set_stream_loop(2); // play the input 2 extra times
    /// ```
    pub fn set_stream_loop(mut self, count: i32) -> Self {
        self.stream_loop = Some(count);
        self
    }

    /// Specifies a **hardware acceleration** name for decoding this input.
    ///
    /// Common values might include `"cuda"`, `"vaapi"`, `"dxva2"`, `"videotoolbox"`, etc.
    /// Whether it works depends on your FFmpeg build and the hardware you have available.
    ///
    /// # Parameters
    /// - `hwaccel_name`: A string naming the hardware accel to use.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("video.mp4")
    ///     .set_hwaccel("cuda");
    /// ```
    pub fn set_hwaccel(mut self, hwaccel_name: impl Into<String>) -> Self {
        self.hwaccel = Some(hwaccel_name.into());
        self
    }

    /// Selects a **hardware acceleration device** for decoding.
    ///
    /// For example, if you have multiple GPUs or want to specify a device node (like
    /// `"/dev/dri/renderD128"` on Linux for VAAPI), you can pass it here. This option
    /// must match the hardware accel you set via [`set_hwaccel`](Self::set_hwaccel) if
    /// you expect decoding to succeed.
    ///
    /// # Parameters
    /// - `device`: A string indicating the device path or identifier.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("video.mp4")
    ///     .set_hwaccel("vaapi")
    ///     .set_hwaccel_device("/dev/dri/renderD128");
    /// ```
    pub fn set_hwaccel_device(mut self, device: impl Into<String>) -> Self {
        self.hwaccel_device = Some(device.into());
        self
    }

    /// Sets the **output pixel format** to be used with hardware-accelerated decoding.
    ///
    /// Certain hardware decoders can produce various output pixel formats. This option
    /// lets you specify which format (e.g., `"nv12"`, `"vaapi"`, etc.) is used during
    /// the decode process.
    /// Must be compatible with the chosen hardware accel and device.
    ///
    /// # Parameters
    /// - `format`: A string naming the desired output pixel format (e.g. `"nv12"`).
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("video.mp4")
    ///     .set_hwaccel("cuda")
    ///     .set_hwaccel_output_format("cuda");
    /// ```
    pub fn set_hwaccel_output_format(mut self, format: impl Into<String>) -> Self {
        self.hwaccel_output_format = Some(format.into());
        self
    }

    /// Sets a single input option for `avformat_open_input`.
    ///
    /// This method configures options that will be passed to FFmpeg's `avformat_open_input()`
    /// function. The options can control behavior at different levels including format detection,
    /// protocol handling, device configuration, and general input processing.
    ///
    /// **Example Usage:**
    /// ```rust
    /// let input = Input::new("avfoundation:0")
    ///     .set_input_opt("framerate", "30")
    ///     .set_input_opt("probesize", "5000000");
    /// ```
    ///
    /// ### Parameters:
    /// - `key`: The option name (e.g., `"framerate"`, `"probesize"`, `"timeout"`).
    /// - `value`: The option value (e.g., `"30"`, `"5000000"`, `"10000000"`).
    ///
    /// ### Return Value:
    /// - Returns the modified `Input` instance for method chaining.
    pub fn set_input_opt(mut self, key: impl Into<String>, value: impl Into<String>) -> Self {
        if let Some(ref mut opts) = self.input_opts {
            opts.insert(key.into(), value.into());
        } else {
            let mut opts = HashMap::new();
            opts.insert(key.into(), value.into());
            self.input_opts = Some(opts);
        }
        self
    }

    /// Sets multiple input options at once for `avformat_open_input`.
    ///
    /// This method allows setting multiple options in a single call, which will all be
    /// passed to FFmpeg's `avformat_open_input()` function. Each key-value pair will be
    /// inserted into the options map, overwriting any existing keys with the same name.
    ///
    /// **Example Usage:**
    /// ```rust
    /// let input = Input::new("http://example.com/stream.m3u8")
    ///     .set_input_opts(vec![
    ///         ("user_agent", "MyApp/1.0"),
    ///         ("timeout", "10000000"),
    ///         ("probesize", "5000000"),
    ///     ]);
    /// ```
    ///
    /// ### Parameters:
    /// - `opts`: A vector of key-value pairs representing input options.
    ///
    /// ### Return Value:
    /// - Returns the modified `Input` instance for method chaining.
    pub fn set_input_opts(mut self, opts: Vec<(impl Into<String>, impl Into<String>)>) -> Self {
        if let Some(ref mut input_opts) = self.input_opts {
            for (key, value) in opts {
                input_opts.insert(key.into(), value.into());
            }
        } else {
            let mut input_opts = HashMap::new();
            for (key, value) in opts {
                input_opts.insert(key.into(), value.into());
            }
            self.input_opts = Some(input_opts);
        }
        self
    }
}

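/// Builds an `Input` that is fed entirely from the given read callback,
/// leaving every other setting unset.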
impl From<Box<dyn FnMut(&mut [u8]) -> i32>> for Input {
    fn from(read_callback: Box<dyn FnMut(&mut [u8]) -> i32>) -> Self {
        Self {
            url: None,
            read_callback: Some(read_callback),
            seek_callback: None,
            frame_pipelines: None,
            format: None,
            video_codec: None,
            audio_codec: None,
            subtitle_codec: None,
            exit_on_error: None,
            readrate: None,
            start_time_us: None,
            recording_time_us: None,
            stop_time_us: None,
            stream_loop: None,
            hwaccel: None,
            hwaccel_device: None,
            hwaccel_output_format: None,
            input_opts: None,
        }
    }
}

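/// Builds an `Input` that reads from the given URL, leaving every other setting unset.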
impl From<String> for Input {
    fn from(url: String) -> Self {
        Self {
            url: Some(url),
            read_callback: None,
            seek_callback: None,
            frame_pipelines: None,
            format: None,
            video_codec: None,
            audio_codec: None,
            subtitle_codec: None,
            exit_on_error: None,
            readrate: None,
            start_time_us: None,
            recording_time_us: None,
            stop_time_us: None,
            stream_loop: None,
            hwaccel: None,
            hwaccel_device: None,
            hwaccel_output_format: None,
            input_opts: None,
        }
    }
}

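/// Convenience conversion that delegates to the `From<String>` implementation.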
impl From<&str> for Input {
    fn from(url: &str) -> Self {
        Self::from(String::from(url))
    }
}

#[cfg(test)]
mod tests {
    use crate::core::context::input::Input;

    #[test]
    fn test_new_by_read_callback() {
        let data_source = b"example custom data source".to_vec();
        let _input = Input::new_by_read_callback(move |buf| {
            let len = data_source.len().min(buf.len());
            buf[..len].copy_from_slice(&data_source[..len]);
            len as i32 // Return the number of bytes written
        });

        let data_source2 = b"example custom data source2".to_vec();
        let _input = Input::new_by_read_callback(move |buf2| {
            let len = data_source2.len().min(buf2.len());
            buf2[..len].copy_from_slice(&data_source2[..len]);
            len as i32 // Return the number of bytes written
        });
    }
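
    #[test]
    fn test_builder_chaining() {
        // A minimal sketch verifying that the builder-style setters store the values they are given.
        let input = Input::from("test.mp4")
            .set_format("mp4")
            .set_video_codec("h264")
            .set_readrate(1.0)
            .set_stream_loop(2);

        assert_eq!(input.url.as_deref(), Some("test.mp4"));
        assert_eq!(input.format.as_deref(), Some("mp4"));
        assert_eq!(input.video_codec.as_deref(), Some("h264"));
        assert_eq!(input.readrate, Some(1.0));
        assert_eq!(input.stream_loop, Some(2));
    }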
}