ez_ffmpeg/core/context/input.rs
use std::collections::HashMap;
use crate::filter::frame_pipeline::FramePipeline;

unsafe impl Send for Input {}

pub struct Input {
    /// The URL of the input source.
    ///
    /// This specifies the source from which the input stream is obtained. It can be:
    /// - A local file path (e.g., `file:///path/to/video.mp4`).
    /// - A network stream (e.g., `rtmp://example.com/live/stream`).
    /// - Any other URL supported by FFmpeg (e.g., `http://example.com/video.mp4`, `udp://...`).
    ///
    /// The URL must be valid. If the URL is invalid or unsupported,
    /// the library will return an error when attempting to open the input stream.
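    ///
    /// ### Example (the paths and URLs below are purely illustrative):
    /// ```rust
    /// let file_input = Input::from("file:///path/to/video.mp4");
    /// let live_input = Input::from("rtmp://example.com/live/stream");
    /// ```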
    pub(crate) url: Option<String>,

    /// A callback function for custom data reading.
    ///
    /// The `read_callback` function allows you to provide custom logic for feeding data into
    /// the input stream. This is useful for scenarios where the input does not come directly
    /// from a standard source (like a file or URL), but instead from a custom data source,
    /// such as an in-memory buffer or a custom network stream.
    ///
    /// ### Parameters:
    /// - `buf: &mut [u8]`: A mutable buffer into which the data should be written.
    ///   The callback should fill this buffer with as much data as possible, up to its length.
    ///
    /// ### Return Value:
    /// - **Positive Value**: The number of bytes successfully read into `buf`.
    /// - **`ffmpeg_sys_next::AVERROR_EOF`**: Indicates the end of the input stream. No more data will be read.
    /// - **Negative Value**: Indicates an error occurred, such as:
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)`: General I/O error.
    ///   - Custom-defined error codes depending on your implementation.
    ///
    /// ### Example:
    /// ```rust
    /// fn custom_read_callback(buf: &mut [u8]) -> i32 {
    ///     let data = b"example data stream";
    ///     let len = data.len().min(buf.len());
    ///     buf[..len].copy_from_slice(&data[..len]);
    ///     len as i32 // Return the number of bytes written into the buffer
    /// }
    /// ```
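    ///
    /// A stateful variant (purely illustrative) that returns `AVERROR_EOF` once an
    /// in-memory source is exhausted:
    /// ```rust
    /// let data = b"example data stream".to_vec();
    /// let mut pos = 0usize;
    /// let read_callback = move |buf: &mut [u8]| -> i32 {
    ///     if pos >= data.len() {
    ///         return ffmpeg_sys_next::AVERROR_EOF; // signal end of stream
    ///     }
    ///     let len = (data.len() - pos).min(buf.len());
    ///     buf[..len].copy_from_slice(&data[pos..pos + len]);
    ///     pos += len;
    ///     len as i32
    /// };
    /// ```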
    pub(crate) read_callback: Option<Box<dyn FnMut(&mut [u8]) -> i32>>,

    /// A callback function for custom seeking within the input stream.
    ///
    /// The `seek_callback` function allows defining custom seeking behavior.
    /// This is useful for data sources that support seeking, such as files or memory-mapped data.
    /// For non-seekable streams (e.g., live network streams), this function may return an error.
    ///
    /// **FFmpeg may invoke `seek_callback` from multiple threads, so thread safety is required.**
    /// When using a `File` as an input source, **use `Arc<Mutex<File>>` to ensure safe access.**
    ///
    /// ### Parameters:
    /// - `offset: i64`: The target position in the stream for seeking.
    /// - `whence: i32`: The seek mode defining how the `offset` should be interpreted:
    ///   - `ffmpeg_sys_next::SEEK_SET` (0): Seek to an absolute position.
    ///   - `ffmpeg_sys_next::SEEK_CUR` (1): Seek relative to the current position.
    ///   - `ffmpeg_sys_next::SEEK_END` (2): Seek relative to the end of the stream.
    ///   - `ffmpeg_sys_next::SEEK_HOLE` (3): Find the next file hole (sparse file support).
    ///   - `ffmpeg_sys_next::SEEK_DATA` (4): Find the next data block (sparse file support).
    ///   - `ffmpeg_sys_next::AVSEEK_FLAG_BYTE` (2): Seek using **byte offsets** instead of timestamps.
    ///   - `ffmpeg_sys_next::AVSEEK_SIZE` (65536): Query the **total size** of the stream.
    ///   - `ffmpeg_sys_next::AVSEEK_FORCE` (131072): **Force seeking even if normally restricted.**
    ///
    /// ### Return Value:
    /// - **Non-negative Value**: The new offset position after seeking.
    /// - **Negative Value**: An error occurred. Common errors include:
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE)`: Seek is not supported.
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)`: General I/O error.
    ///
    /// ### Example (Handling multi-threaded access safely with `Arc<Mutex<File>>`):
    /// Since FFmpeg may call `read_callback` and `seek_callback` from different threads,
    /// **`Arc<Mutex<File>>` is used to ensure safe access across threads.**
    ///
    /// ```rust
    /// use std::fs::File;
    /// use std::io::{Seek, SeekFrom};
    /// use std::sync::{Arc, Mutex};
    ///
    /// let file = Arc::new(Mutex::new(File::open("test.mp4").expect("Failed to open file")));
    ///
    /// let seek_callback = {
    ///     let file = Arc::clone(&file);
    ///     Box::new(move |offset: i64, whence: i32| -> i64 {
    ///         let mut file = file.lock().unwrap(); // Acquire lock
    ///
    ///         // ✅ Handle AVSEEK_SIZE: Return total file size
    ///         if whence == ffmpeg_sys_next::AVSEEK_SIZE {
    ///             if let Ok(size) = file.metadata().map(|m| m.len() as i64) {
    ///                 println!("FFmpeg requested stream size: {}", size);
    ///                 return size;
    ///             }
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64;
    ///         }
    ///
    ///         // ✅ Handle AVSEEK_FORCE: Ignore this flag when processing seek
    ///         let actual_whence = whence & !ffmpeg_sys_next::AVSEEK_FORCE;
    ///
    ///         // ✅ Handle AVSEEK_FLAG_BYTE: Perform byte-based seek
    ///         if actual_whence & ffmpeg_sys_next::AVSEEK_FLAG_BYTE != 0 {
    ///             println!("FFmpeg requested byte-based seeking. Seeking to byte offset: {}", offset);
    ///             if let Ok(new_pos) = file.seek(SeekFrom::Start(offset as u64)) {
    ///                 return new_pos as i64;
    ///             }
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64;
    ///         }
    ///
    ///         // ✅ Handle SEEK_HOLE and SEEK_DATA (Linux only)
    ///         #[cfg(target_os = "linux")]
    ///         if actual_whence == ffmpeg_sys_next::SEEK_HOLE {
    ///             println!("FFmpeg requested SEEK_HOLE, but Rust std::fs does not support it.");
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///         }
    ///         #[cfg(target_os = "linux")]
    ///         if actual_whence == ffmpeg_sys_next::SEEK_DATA {
    ///             println!("FFmpeg requested SEEK_DATA, but Rust std::fs does not support it.");
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///         }
    ///
    ///         // ✅ Standard seek modes
    ///         let seek_result = match actual_whence {
    ///             ffmpeg_sys_next::SEEK_SET => file.seek(SeekFrom::Start(offset as u64)),
    ///             ffmpeg_sys_next::SEEK_CUR => file.seek(SeekFrom::Current(offset)),
    ///             ffmpeg_sys_next::SEEK_END => file.seek(SeekFrom::End(offset)),
    ///             _ => {
    ///                 println!("Unsupported seek mode: {}", whence);
    ///                 return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///             }
    ///         };
    ///
    ///         match seek_result {
    ///             Ok(new_pos) => {
    ///                 println!("Seek successful, new position: {}", new_pos);
    ///                 new_pos as i64
    ///             }
    ///             Err(e) => {
    ///                 println!("Seek failed: {}", e);
    ///                 ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64
    ///             }
    ///         }
    ///     })
    /// };
    /// ```
    pub(crate) seek_callback: Option<Box<dyn FnMut(i64, i32) -> i64>>,

    /// The pipeline that provides custom processing for decoded frames.
    ///
    /// After the input data is decoded into `Frame` objects, these frames
    /// are passed through the `frame_pipeline`. Each frame goes through
    /// a series of `FrameFilter` objects in the pipeline, allowing for
    /// customized processing (e.g., filtering, transformation, etc.).
    ///
    /// If `None`, no processing pipeline is applied to the decoded frames.
    pub(crate) frame_pipelines: Option<Vec<FramePipeline>>,

    /// The input format for the source.
    ///
    /// This field specifies which container or device format FFmpeg should use to read the input.
    /// If `None`, FFmpeg will attempt to automatically detect the format based on the source URL,
    /// file extension, or stream data.
    ///
    /// You might need to specify a format explicitly in cases where automatic detection fails or
    /// when you must force a particular format. For example:
    /// - When capturing from a specific device on macOS (using `avfoundation`).
    /// - When capturing on Windows devices (using `dshow`).
    /// - When dealing with raw streams or unusual data sources.
    pub(crate) format: Option<String>,

    /// The codec to be used for **video** decoding.
    ///
    /// If set, this forces FFmpeg to use the specified video codec for decoding.
    /// Otherwise, FFmpeg will attempt to auto-detect the best available codec.
    pub(crate) video_codec: Option<String>,

    /// The codec to be used for **audio** decoding.
    ///
    /// If set, this forces FFmpeg to use the specified audio codec for decoding.
    /// Otherwise, FFmpeg will attempt to auto-detect the best available codec.
    pub(crate) audio_codec: Option<String>,

    /// The codec to be used for **subtitle** decoding.
    ///
    /// If set, this forces FFmpeg to use the specified subtitle codec for decoding.
    /// Otherwise, FFmpeg will attempt to auto-detect the best available codec.
    pub(crate) subtitle_codec: Option<String>,

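    /// Whether to stop processing when an error is encountered on this input.
    ///
    /// See [`set_exit_on_error`](Self::set_exit_on_error) for details.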
    pub(crate) exit_on_error: Option<bool>,

    /// Read input at the specified rate.
    /// When set to `1.0`, input is read at its native frame rate.
    pub(crate) readrate: Option<f32>,
    /// Start reading the input at this timestamp (in microseconds).
    pub(crate) start_time_us: Option<i64>,
    /// Limit how long (in microseconds) the input is read.
    pub(crate) recording_time_us: Option<i64>,
    /// Stop reading the input at this absolute timestamp (in microseconds).
    pub(crate) stop_time_us: Option<i64>,

    /// The number of times the input stream shall be looped.
    pub(crate) stream_loop: Option<i32>,

    /// The name of the hardware acceleration method to use for decoding
    /// (e.g., `cuda`, `vaapi`, `videotoolbox`).
    pub(crate) hwaccel: Option<String>,
    /// The device to use for hardware-accelerated decoding.
    pub(crate) hwaccel_device: Option<String>,
    /// The output pixel format to use with hardware-accelerated decoding.
    pub(crate) hwaccel_output_format: Option<String>,

    /// The input format options for the demuxer.
    ///
    /// This field stores additional format-specific options that are passed to the FFmpeg demuxer.
    /// It is a collection of key-value pairs that can modify the behavior of the input format.
    ///
    /// **Common examples** might include:
    /// - `framerate=30` (for device inputs like `avfoundation`).
    /// - `probesize` or `analyzeduration` (for adjusting how FFmpeg probes input data).
    ///
    /// These options are used when initializing the FFmpeg input format, allowing you to
    /// fine-tune or override default demuxer behavior.
    pub(crate) format_opts: Option<HashMap<String, String>>,
}

impl Input {
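    /// Creates a new `Input` from a URL or file path.
    ///
    /// This is a thin convenience constructor; it is equivalent to `Input::from(url.into())`.
    ///
    /// ### Example (the path is purely illustrative):
    /// ```rust
    /// let input = Input::new("file:///path/to/video.mp4");
    /// ```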
    pub fn new(url: impl Into<String>) -> Self {
        url.into().into()
    }

    /// Creates a new `Input` instance with a custom read callback.
    ///
    /// This method initializes an `Input` object that uses a provided `read_callback` function
    /// to supply data to the input stream. This is particularly useful for custom data sources
    /// such as in-memory buffers, network streams, or other non-standard input mechanisms.
    ///
    /// ### Parameters:
    /// - `read_callback: FnMut(&mut [u8]) -> i32`: A closure (or function) that fills the provided
    ///   mutable buffer with data and returns the number of bytes read.
    ///
    /// ### Return Value:
    /// - Returns a new `Input` instance configured with the specified `read_callback`.
    ///
    /// ### Behavior of `read_callback`:
    /// - **Positive Value**: Indicates the number of bytes successfully read.
    /// - **`ffmpeg_sys_next::AVERROR_EOF`**: Indicates the end of the stream. The library will stop requesting data.
    /// - **Negative Value**: Indicates an error occurred. For example:
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)`: Represents an input/output error.
    ///   - Other custom-defined error codes can also be returned to signal specific issues.
    ///
    /// ### Example:
    /// ```rust
    /// let input = Input::new_by_read_callback(move |buf| {
    ///     let data = b"example custom data source";
    ///     let len = data.len().min(buf.len());
    ///     buf[..len].copy_from_slice(&data[..len]);
    ///     len as i32 // Return the number of bytes written
    /// });
    /// ```
    pub fn new_by_read_callback<F>(read_callback: F) -> Self
    where
        F: FnMut(&mut [u8]) -> i32 + 'static,
    {
        (Box::new(read_callback) as Box<dyn FnMut(&mut [u8]) -> i32>).into()
    }

    /// Sets a custom seek callback for the input stream.
    ///
    /// This function assigns a user-defined function that handles seeking within the input stream.
    /// It is required when using custom data sources that support random access, such as files,
    /// memory-mapped buffers, or seekable network streams.
    ///
    /// **FFmpeg may invoke `seek_callback` from different threads.**
    /// If using a `File` as the data source, **wrap it in `Arc<Mutex<File>>`** to ensure
    /// thread-safe access across multiple threads.
    ///
    /// ### Parameters:
    /// - `seek_callback: FnMut(i64, i32) -> i64`: A function that handles seek operations.
    ///   - `offset: i64`: The target seek position in the stream.
    ///   - `whence: i32`: The seek mode, which determines how `offset` should be interpreted:
    ///     - `ffmpeg_sys_next::SEEK_SET` (0) - Seek to an absolute position.
    ///     - `ffmpeg_sys_next::SEEK_CUR` (1) - Seek relative to the current position.
    ///     - `ffmpeg_sys_next::SEEK_END` (2) - Seek relative to the end of the stream.
    ///     - `ffmpeg_sys_next::SEEK_HOLE` (3) - Find the next hole in a sparse file (Linux only).
    ///     - `ffmpeg_sys_next::SEEK_DATA` (4) - Find the next data block in a sparse file (Linux only).
    ///     - `ffmpeg_sys_next::AVSEEK_FLAG_BYTE` (2) - Seek using byte offsets instead of timestamps.
    ///     - `ffmpeg_sys_next::AVSEEK_SIZE` (65536) - Query the total size of the stream.
    ///     - `ffmpeg_sys_next::AVSEEK_FORCE` (131072) - Force seeking, even if normally restricted.
    ///
    /// ### Return Value:
    /// - Returns `Self`, allowing for method chaining.
    ///
    /// ### Behavior of `seek_callback`:
    /// - **Non-negative Value**: The new offset position after seeking.
    /// - **Negative Value**: An error occurred, such as:
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE)`: Seek is not supported.
    ///   - `ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)`: General I/O error.
    ///
    /// ### Example (Thread-safe seek callback using `Arc<Mutex<File>>`):
    /// Since FFmpeg may call `read_callback` and `seek_callback` from different threads,
    /// **use `Arc<Mutex<File>>` to ensure safe concurrent access.**
    ///
    /// ```rust
    /// use std::fs::File;
    /// use std::io::{Read, Seek, SeekFrom};
    /// use std::sync::{Arc, Mutex};
    ///
    /// // ✅ Wrap the file in Arc<Mutex<>> for safe shared access
    /// let file = Arc::new(Mutex::new(File::open("test.mp4").expect("Failed to open file")));
    ///
    /// // ✅ Thread-safe read callback
    /// let read_callback = {
    ///     let file = Arc::clone(&file);
    ///     move |buf: &mut [u8]| -> i32 {
    ///         let mut file = file.lock().unwrap();
    ///         match file.read(buf) {
    ///             Ok(0) => {
    ///                 println!("Read EOF");
    ///                 ffmpeg_sys_next::AVERROR_EOF
    ///             }
    ///             Ok(bytes_read) => bytes_read as i32,
    ///             Err(e) => {
    ///                 println!("Read error: {}", e);
    ///                 ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO)
    ///             }
    ///         }
    ///     }
    /// };
    ///
    /// // ✅ Thread-safe seek callback
    /// let seek_callback = {
    ///     let file = Arc::clone(&file);
    ///     Box::new(move |offset: i64, whence: i32| -> i64 {
    ///         let mut file = file.lock().unwrap();
    ///
    ///         // ✅ Handle AVSEEK_SIZE: Return total file size
    ///         if whence == ffmpeg_sys_next::AVSEEK_SIZE {
    ///             if let Ok(size) = file.metadata().map(|m| m.len() as i64) {
    ///                 println!("FFmpeg requested stream size: {}", size);
    ///                 return size;
    ///             }
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64;
    ///         }
    ///
    ///         // ✅ Ignore AVSEEK_FORCE flag
    ///         let actual_whence = whence & !ffmpeg_sys_next::AVSEEK_FORCE;
    ///
    ///         // ✅ Handle AVSEEK_FLAG_BYTE: Perform byte-based seek
    ///         if actual_whence & ffmpeg_sys_next::AVSEEK_FLAG_BYTE != 0 {
    ///             println!("FFmpeg requested byte-based seeking. Seeking to byte offset: {}", offset);
    ///             if let Ok(new_pos) = file.seek(SeekFrom::Start(offset as u64)) {
    ///                 return new_pos as i64;
    ///             }
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64;
    ///         }
    ///
    ///         // ✅ Handle SEEK_HOLE and SEEK_DATA (Linux only)
    ///         #[cfg(target_os = "linux")]
    ///         if actual_whence == ffmpeg_sys_next::SEEK_HOLE {
    ///             println!("FFmpeg requested SEEK_HOLE, but Rust std::fs does not support it.");
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///         }
    ///         #[cfg(target_os = "linux")]
    ///         if actual_whence == ffmpeg_sys_next::SEEK_DATA {
    ///             println!("FFmpeg requested SEEK_DATA, but Rust std::fs does not support it.");
    ///             return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///         }
    ///
    ///         // ✅ Standard seek modes
    ///         let seek_result = match actual_whence {
    ///             ffmpeg_sys_next::SEEK_SET => file.seek(SeekFrom::Start(offset as u64)),
    ///             ffmpeg_sys_next::SEEK_CUR => file.seek(SeekFrom::Current(offset)),
    ///             ffmpeg_sys_next::SEEK_END => file.seek(SeekFrom::End(offset)),
    ///             _ => {
    ///                 println!("Unsupported seek mode: {}", whence);
    ///                 return ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::ESPIPE) as i64;
    ///             }
    ///         };
    ///
    ///         match seek_result {
    ///             Ok(new_pos) => {
    ///                 println!("Seek successful, new position: {}", new_pos);
    ///                 new_pos as i64
    ///             }
    ///             Err(e) => {
    ///                 println!("Seek failed: {}", e);
    ///                 ffmpeg_sys_next::AVERROR(ffmpeg_sys_next::EIO) as i64
    ///             }
    ///         }
    ///     })
    /// };
    ///
    /// let input = Input::new_by_read_callback(read_callback).set_seek_callback(seek_callback);
    /// ```
    pub fn set_seek_callback<F>(mut self, seek_callback: F) -> Self
    where
        F: FnMut(i64, i32) -> i64 + 'static,
    {
        self.seek_callback = Some(Box::new(seek_callback) as Box<dyn FnMut(i64, i32) -> i64>);
        self
    }

    /// Replaces the entire frame-processing pipeline with a new sequence
    /// of transformations for **post-decoding** frames on this `Input`.
    ///
    /// This method clears any previously set pipelines and replaces them with the provided list.
    ///
    /// # Parameters
    /// * `frame_pipelines` - A list of [`FramePipeline`] instances defining the
    ///   transformations to apply to decoded frames.
    ///
    /// # Returns
    /// * `Self` - Returns the modified `Input`, enabling method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("my_video.mp4")
    ///     .set_frame_pipelines(vec![
    ///         FramePipelineBuilder::new(AVMediaType::AVMEDIA_TYPE_VIDEO).filter("opengl", Box::new(my_filter)),
    ///         // Additional pipelines...
    ///     ]);
    /// ```
    pub fn set_frame_pipelines(mut self, frame_pipelines: Vec<impl Into<FramePipeline>>) -> Self {
        self.frame_pipelines = Some(
            frame_pipelines
                .into_iter()
                .map(|frame_pipeline| frame_pipeline.into())
                .collect(),
        );
        self
    }

    /// Adds a single [`FramePipeline`] to the existing pipeline list.
    ///
    /// If no pipelines are currently defined, this method creates a new pipeline list.
    /// Otherwise, it appends the provided pipeline to the existing transformations.
    ///
    /// # Parameters
    /// * `frame_pipeline` - A [`FramePipeline`] defining a transformation.
    ///
    /// # Returns
    /// * `Self` - Returns the modified `Input`, enabling method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("my_video.mp4")
    ///     .add_frame_pipeline(FramePipelineBuilder::new(AVMediaType::AVMEDIA_TYPE_VIDEO).filter("opengl", Box::new(my_filter)).build())
    ///     .add_frame_pipeline(FramePipelineBuilder::new(AVMediaType::AVMEDIA_TYPE_AUDIO).filter("my_custom_filter1", Box::new(...)).filter("my_custom_filter2", Box::new(...)).build());
    /// ```
    pub fn add_frame_pipeline(mut self, frame_pipeline: impl Into<FramePipeline>) -> Self {
        self.frame_pipelines
            .get_or_insert_with(Vec::new)
            .push(frame_pipeline.into());
        self
    }

    /// Sets the input format for the container or device.
    ///
    /// If no format is specified, FFmpeg will attempt to detect it automatically.
    /// However, certain use cases require specifying the format explicitly:
    /// - Using device-specific inputs (e.g., `avfoundation` on macOS, `dshow` on Windows).
    /// - Handling raw streams or formats that FFmpeg may not detect automatically.
    ///
    /// ### Parameters:
    /// - `format`: A string specifying the desired input format (e.g., `mp4`, `flv`, `avfoundation`).
    ///
    /// ### Return Value:
    /// - Returns the `Input` instance with the newly set format.
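    ///
    /// ### Example (forcing the `flv` demuxer for an RTMP source; the URL is illustrative):
    /// ```rust
    /// let input = Input::from("rtmp://example.com/live/stream")
    ///     .set_format("flv");
    /// ```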
    pub fn set_format(mut self, format: impl Into<String>) -> Self {
        self.format = Some(format.into());
        self
    }

    /// Sets the **video codec** to be used for decoding.
    ///
    /// By default, FFmpeg will automatically select an appropriate video codec
    /// based on the input format and available decoders. However, this method
    /// allows you to override that selection and force a specific codec.
    ///
    /// # Common Video Codecs:
    /// | Codec | Description |
    /// |-------|-------------|
    /// | `h264` | H.264 (AVC), widely supported and efficient |
    /// | `hevc` | H.265 (HEVC), better compression at higher complexity |
    /// | `vp9` | VP9, open-source alternative to H.265 |
    /// | `av1` | AV1, newer open-source codec with improved compression |
    /// | `mpeg4` | MPEG-4 Part 2, older but still used in some cases |
    ///
    /// # Arguments
    /// * `video_codec` - A string representing the desired video codec (e.g., `"h264"`, `"hevc"`).
    ///
    /// # Returns
    /// * `Self` - Returns the modified `Input` struct, allowing for method chaining.
    ///
    /// # Example:
    /// ```rust
    /// let input = Input::from("video.mp4").set_video_codec("h264");
    /// ```
    pub fn set_video_codec(mut self, video_codec: impl Into<String>) -> Self {
        self.video_codec = Some(video_codec.into());
        self
    }

    /// Sets the **audio codec** to be used for decoding.
    ///
    /// By default, FFmpeg will automatically select an appropriate audio codec
    /// based on the input format and available decoders. However, this method
    /// allows you to specify a preferred codec.
    ///
    /// # Common Audio Codecs:
    /// | Codec | Description |
    /// |-------|-------------|
    /// | `aac` | AAC, commonly used for MP4 and streaming |
    /// | `mp3` | MP3, widely supported but lower efficiency |
    /// | `opus` | Opus, high-quality open-source codec |
    /// | `vorbis` | Vorbis, used in Ogg containers |
    /// | `flac` | FLAC, lossless audio format |
    ///
    /// # Arguments
    /// * `audio_codec` - A string representing the desired audio codec (e.g., `"aac"`, `"mp3"`).
    ///
    /// # Returns
    /// * `Self` - Returns the modified `Input` struct, allowing for method chaining.
    ///
    /// # Example:
    /// ```rust
    /// let input = Input::from("audio.mp3").set_audio_codec("aac");
    /// ```
    pub fn set_audio_codec(mut self, audio_codec: impl Into<String>) -> Self {
        self.audio_codec = Some(audio_codec.into());
        self
    }

    /// Sets the **subtitle codec** to be used for decoding.
    ///
    /// By default, FFmpeg will automatically select an appropriate subtitle codec
    /// based on the input format and available decoders. This method lets you specify
    /// a particular subtitle codec.
    ///
    /// # Common Subtitle Codecs:
    /// | Codec | Description |
    /// |-------|-------------|
    /// | `ass` | Advanced SubStation Alpha (ASS) subtitles |
    /// | `srt` | SubRip Subtitle format (SRT) |
    /// | `mov_text` | Subtitles in MP4 containers |
    /// | `subrip` | Plain-text subtitle format |
    ///
    /// # Arguments
    /// * `subtitle_codec` - A string representing the desired subtitle codec (e.g., `"mov_text"`, `"ass"`, `"srt"`).
    ///
    /// # Returns
    /// * `Self` - Returns the modified `Input` struct, allowing for method chaining.
    ///
    /// # Example:
    /// ```rust
    /// let input = Input::from("movie.mkv").set_subtitle_codec("ass");
    /// ```
    pub fn set_subtitle_codec(mut self, subtitle_codec: impl Into<String>) -> Self {
        self.subtitle_codec = Some(subtitle_codec.into());
        self
    }

    /// Enables or disables **exit on error** behavior for the input.
    ///
    /// If set to `true`, FFmpeg will exit (stop processing) if it encounters any
    /// decoding or demuxing error on this input. If set to `false` (the default),
    /// FFmpeg may attempt to continue despite errors, skipping damaged portions.
    ///
    /// # Parameters
    /// - `exit_on_error`: `true` to stop on errors, `false` to keep going.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("test.mp4")
    ///     .set_exit_on_error(true);
    /// ```
    pub fn set_exit_on_error(mut self, exit_on_error: bool) -> Self {
        self.exit_on_error = Some(exit_on_error);
        self
    }

    /// Sets a **read rate** for this input, controlling how quickly frames are read.
    ///
    /// - If set to `1.0`, frames are read at their native frame rate.
    /// - If set to another value (e.g., `0.5` or `2.0`), FFmpeg may attempt to read
    ///   slower or faster, simulating changes in real-time playback speed.
    ///
    /// # Parameters
    /// - `rate`: A floating-point value indicating the read rate multiplier.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("video.mp4")
    ///     .set_readrate(0.5); // read at half speed
    /// ```
    pub fn set_readrate(mut self, rate: f32) -> Self {
        self.readrate = Some(rate);
        self
    }

    /// Sets the **start time** (in microseconds) from which to begin reading.
    ///
    /// FFmpeg will skip all data before this timestamp. This can be used to
    /// implement “input seeking” or to only process a portion of the input.
    ///
    /// # Parameters
    /// - `start_time_us`: The timestamp (in microseconds) at which to start reading.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("long_clip.mp4")
    ///     .set_start_time_us(2_000_000); // Start at 2 seconds
    /// ```
    pub fn set_start_time_us(mut self, start_time_us: i64) -> Self {
        self.start_time_us = Some(start_time_us);
        self
    }

    /// Sets the **recording time** (in microseconds) for this input.
    ///
    /// FFmpeg will only read for the specified duration, ignoring data past this
    /// limit. This can be used to trim or limit how much of the input is processed.
    ///
    /// # Parameters
    /// - `recording_time_us`: The number of microseconds to read from the input.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("long_clip.mp4")
    ///     .set_recording_time_us(5_000_000); // Only read 5 seconds
    /// ```
    pub fn set_recording_time_us(mut self, recording_time_us: i64) -> Self {
        self.recording_time_us = Some(recording_time_us);
        self
    }

    /// Sets a **stop time** (in microseconds) beyond which input data will be ignored.
    ///
    /// This is similar to [`set_recording_time_us`](Self::set_recording_time_us) but
    /// specifically references an absolute timestamp in the stream. Once this timestamp
    /// is reached, FFmpeg stops reading.
    ///
    /// # Parameters
    /// - `stop_time_us`: The absolute timestamp (in microseconds) at which to stop reading.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("long_clip.mp4")
    ///     .set_stop_time_us(10_000_000); // Stop reading at 10 seconds
    /// ```
    pub fn set_stop_time_us(mut self, stop_time_us: i64) -> Self {
        self.stop_time_us = Some(stop_time_us);
        self
    }

    /// Sets the number of **loops** to perform on this input stream.
    ///
    /// If FFmpeg reaches the end of the input, it can loop back and start from the
    /// beginning, effectively repeating the content `stream_loop` times.
    /// A value of `0` means no looping, and `-1` means loop indefinitely
    /// (matching FFmpeg's `-stream_loop` option).
    ///
    /// # Parameters
    /// - `count`: How many extra times to loop (e.g., `1` repeats the input once, `-1` loops forever).
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("music.mp3")
    ///     .set_stream_loop(2); // play the input 2 extra times
    /// ```
    pub fn set_stream_loop(mut self, count: i32) -> Self {
        self.stream_loop = Some(count);
        self
    }

    /// Specifies a **hardware acceleration** method for decoding this input.
    ///
    /// Common values might include `"cuda"`, `"vaapi"`, `"dxva2"`, `"videotoolbox"`, etc.
    /// Whether it works depends on your FFmpeg build and the hardware you have available.
    ///
    /// # Parameters
    /// - `hwaccel_name`: A string naming the hardware acceleration method to use.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("video.mp4")
    ///     .set_hwaccel("cuda");
    /// ```
    pub fn set_hwaccel(mut self, hwaccel_name: impl Into<String>) -> Self {
        self.hwaccel = Some(hwaccel_name.into());
        self
    }

    /// Selects a **hardware acceleration device** for decoding.
    ///
    /// For example, if you have multiple GPUs or want to specify a device node (like
    /// `"/dev/dri/renderD128"` on Linux for VAAPI), you can pass it here. This option
    /// must match the hardware acceleration method you set via [`set_hwaccel`](Self::set_hwaccel)
    /// if you expect decoding to succeed.
    ///
    /// # Parameters
    /// - `device`: A string indicating the device path or identifier.
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("video.mp4")
    ///     .set_hwaccel("vaapi")
    ///     .set_hwaccel_device("/dev/dri/renderD128");
    /// ```
    pub fn set_hwaccel_device(mut self, device: impl Into<String>) -> Self {
        self.hwaccel_device = Some(device.into());
        self
    }

    /// Sets the **output pixel format** to be used with hardware-accelerated decoding.
    ///
    /// Certain hardware decoders can produce various output pixel formats. This option
    /// lets you specify which format (e.g., `"nv12"`, `"vaapi"`, etc.) is used during
    /// the decode process. It must be compatible with the chosen hardware acceleration
    /// method and device.
    ///
    /// # Parameters
    /// - `format`: A string naming the desired output pixel format (e.g. `"nv12"`).
    ///
    /// # Returns
    /// * `Self` - allowing method chaining.
    ///
    /// # Example
    /// ```rust
    /// let input = Input::from("video.mp4")
    ///     .set_hwaccel("cuda")
    ///     .set_hwaccel_output_format("cuda");
    /// ```
    pub fn set_hwaccel_output_format(mut self, format: impl Into<String>) -> Self {
        self.hwaccel_output_format = Some(format.into());
        self
    }

    /// Sets a single input format-specific option.
    ///
    /// This method allows you to configure a single key-value pair that will be passed
    /// to the FFmpeg demuxer. If the same key already exists, it will be overwritten.
    ///
    /// **Example Usage:**
    /// ```rust
    /// let input = Input::new("avfoundation:0")
    ///     .set_format_opt("framerate", "30");
    /// ```
    ///
    /// ### Parameters:
    /// - `key`: The format option name (e.g., `"framerate"`, `"probesize"`).
    /// - `value`: The value to set (e.g., `"30"`, `"5000000"`).
    ///
    /// ### Return Value:
    /// - Returns the modified `Input` instance for chaining.
    pub fn set_format_opt(mut self, key: impl Into<String>, value: impl Into<String>) -> Self {
        self.format_opts
            .get_or_insert_with(HashMap::new)
            .insert(key.into(), value.into());
        self
    }

    /// Sets multiple input format-specific options at once.
    ///
    /// This method allows setting multiple key-value pairs in a single call.
    /// Each provided key-value pair will be inserted into the `format_opts` map,
    /// overwriting any existing keys with the same name.
    ///
    /// **Example Usage:**
    /// ```rust
    /// let input = Input::new("avfoundation:0")
    ///     .set_format_opts(vec![
    ///         ("framerate", "30"),
    ///         ("video_size", "1280x720"),
    ///     ]);
    /// ```
    ///
    /// ### Parameters:
    /// - `opts`: A vector of key-value pairs representing demuxer options.
    ///
    /// ### Return Value:
    /// - Returns the modified `Input` instance for chaining.
    pub fn set_format_opts(mut self, opts: Vec<(impl Into<String>, impl Into<String>)>) -> Self {
        let format_opts = self.format_opts.get_or_insert_with(HashMap::new);
        for (key, value) in opts {
            format_opts.insert(key.into(), value.into());
        }
        self
    }
}

impl From<Box<dyn FnMut(&mut [u8]) -> i32>> for Input {
    fn from(read_callback: Box<dyn FnMut(&mut [u8]) -> i32>) -> Self {
        Self {
            url: None,
            read_callback: Some(read_callback),
            seek_callback: None,
            frame_pipelines: None,
            format: None,
            video_codec: None,
            audio_codec: None,
            subtitle_codec: None,
            exit_on_error: None,
            readrate: None,
            start_time_us: None,
            recording_time_us: None,
            stop_time_us: None,
            stream_loop: None,
            hwaccel: None,
            hwaccel_device: None,
            hwaccel_output_format: None,
            format_opts: None,
        }
    }
}

impl From<String> for Input {
    fn from(url: String) -> Self {
        Self {
            url: Some(url),
            read_callback: None,
            seek_callback: None,
            frame_pipelines: None,
            format: None,
            video_codec: None,
            audio_codec: None,
            subtitle_codec: None,
            exit_on_error: None,
            readrate: None,
            start_time_us: None,
            recording_time_us: None,
            stop_time_us: None,
            stream_loop: None,
            hwaccel: None,
            hwaccel_device: None,
            hwaccel_output_format: None,
            format_opts: None,
        }
    }
}

impl From<&str> for Input {
    fn from(url: &str) -> Self {
        Self::from(String::from(url))
    }
}

#[cfg(test)]
mod tests {
    use crate::core::context::input::Input;

    #[test]
    fn test_new_by_read_callback() {
        let data_source = b"example custom data source".to_vec();
        let input = Input::new_by_read_callback(move |buf| {
            let len = data_source.len().min(buf.len());
            buf[..len].copy_from_slice(&data_source[..len]);
            len as i32 // Return the number of bytes written
        });
        assert!(input.read_callback.is_some());
        assert!(input.url.is_none());

        let data_source2 = b"example custom data source2".to_vec();
        let input2 = Input::new_by_read_callback(move |buf2| {
            let len = data_source2.len().min(buf2.len());
            buf2[..len].copy_from_slice(&data_source2[..len]);
            len as i32 // Return the number of bytes written
        });
        assert!(input2.read_callback.is_some());
    }
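
    // A small additional check of the builder-style setters. The concrete option
    // values used here (format name, read rate, timestamps, probesize) are
    // arbitrary illustrations, not recommended settings.
    #[test]
    fn test_builder_setters() {
        let input = Input::from("file:///path/to/video.mp4")
            .set_format("mp4")
            .set_readrate(1.0)
            .set_start_time_us(2_000_000)
            .set_format_opt("probesize", "5000000");

        assert_eq!(input.url.as_deref(), Some("file:///path/to/video.mp4"));
        assert_eq!(input.format.as_deref(), Some("mp4"));
        assert_eq!(input.readrate, Some(1.0));
        assert_eq!(input.start_time_us, Some(2_000_000));
        assert_eq!(
            input
                .format_opts
                .as_ref()
                .and_then(|opts| opts.get("probesize").cloned()),
            Some("5000000".to_string())
        );
    }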
903}