// ff_decode/video/builder.rs
1//! Video decoder builder for constructing video decoders with custom configuration.
2//!
3//! This module provides the [`VideoDecoderBuilder`] type which enables fluent
4//! configuration of video decoders. Use [`VideoDecoder::open()`] to start building.
5//!
6//! # Examples
7//!
8//! ```ignore
9//! use ff_decode::{VideoDecoder, HardwareAccel};
10//! use ff_format::PixelFormat;
11//!
12//! let decoder = VideoDecoder::open("video.mp4")?
13//! .output_format(PixelFormat::Rgba)
14//! .hardware_accel(HardwareAccel::Auto)
15//! .thread_count(4)
16//! .build()?;
17//! ```
18
19use std::path::{Path, PathBuf};
20use std::sync::Arc;
21use std::time::Duration;
22
23use ff_format::{PixelFormat, VideoFrame, VideoStreamInfo};
24
25use crate::HardwareAccel;
26use crate::error::DecodeError;
27use crate::video::decoder_inner::VideoDecoderInner;
28use ff_common::FramePool;
29
/// Internal configuration for the decoder.
///
/// Mirrors the builder's decode-related settings so they remain available on
/// the constructed [`VideoDecoder`] after the builder is consumed.
///
/// NOTE: Fields are currently unused but will be used when `FFmpeg` integration
/// is implemented in a future issue.
#[derive(Debug, Clone)]
#[allow(dead_code)] // see NOTE above: retained for the upcoming FFmpeg integration
pub(crate) struct VideoDecoderConfig {
    /// Output pixel format (None = use source format)
    pub output_format: Option<PixelFormat>,
    /// Hardware acceleration setting
    pub hardware_accel: HardwareAccel,
    /// Number of decoding threads (0 = auto)
    pub thread_count: usize,
}
44
45impl Default for VideoDecoderConfig {
46 fn default() -> Self {
47 Self {
48 output_format: None,
49 hardware_accel: HardwareAccel::Auto,
50 thread_count: 0, // Auto-detect
51 }
52 }
53}
54
55/// Builder for configuring and constructing a [`VideoDecoder`].
56///
57/// This struct provides a fluent interface for setting up decoder options
58/// before opening a video file. It is created by calling [`VideoDecoder::open()`].
59///
60/// # Examples
61///
62/// ## Basic Usage
63///
64/// ```ignore
65/// use ff_decode::VideoDecoder;
66///
67/// let decoder = VideoDecoder::open("video.mp4")?
68/// .build()?;
69/// ```
70///
71/// ## With Custom Format
72///
73/// ```ignore
74/// use ff_decode::VideoDecoder;
75/// use ff_format::PixelFormat;
76///
77/// let decoder = VideoDecoder::open("video.mp4")?
78/// .output_format(PixelFormat::Rgba)
79/// .build()?;
80/// ```
81///
82/// ## With Hardware Acceleration
83///
84/// ```ignore
85/// use ff_decode::{VideoDecoder, HardwareAccel};
86///
87/// let decoder = VideoDecoder::open("video.mp4")?
88/// .hardware_accel(HardwareAccel::Nvdec)
89/// .build()?;
90/// ```
91///
92/// ## With Frame Pool
93///
94/// ```ignore
95/// use ff_decode::{VideoDecoder, FramePool};
96/// use std::sync::Arc;
97///
98/// let pool: Arc<dyn FramePool> = create_frame_pool();
99/// let decoder = VideoDecoder::open("video.mp4")?
100/// .frame_pool(pool)
101/// .build()?;
102/// ```
#[derive(Debug)]
pub struct VideoDecoderBuilder {
    /// Path to the media file (existence is validated only in `build()`)
    path: PathBuf,
    /// Output pixel format (None = use source format)
    output_format: Option<PixelFormat>,
    /// Hardware acceleration setting (defaults to `Auto`)
    hardware_accel: HardwareAccel,
    /// Number of decoding threads (0 = auto)
    thread_count: usize,
    /// Optional frame pool for memory reuse
    frame_pool: Option<Arc<dyn FramePool>>,
}
116
117impl VideoDecoderBuilder {
118 /// Creates a new builder for the specified file path.
119 ///
120 /// This is an internal constructor; use [`VideoDecoder::open()`] instead.
121 pub(crate) fn new(path: PathBuf) -> Self {
122 Self {
123 path,
124 output_format: None,
125 hardware_accel: HardwareAccel::Auto,
126 thread_count: 0,
127 frame_pool: None,
128 }
129 }
130
131 /// Sets the output pixel format for decoded frames.
132 ///
133 /// If not set, frames are returned in the source format. Setting an
134 /// output format enables automatic conversion during decoding.
135 ///
136 /// # Common Formats
137 ///
138 /// - [`PixelFormat::Rgba`] - Best for UI rendering, includes alpha
139 /// - [`PixelFormat::Rgb24`] - RGB without alpha, smaller memory footprint
140 /// - [`PixelFormat::Yuv420p`] - Source format for most H.264/H.265 videos
141 ///
142 /// # Examples
143 ///
144 /// ```ignore
145 /// use ff_decode::VideoDecoder;
146 /// use ff_format::PixelFormat;
147 ///
148 /// let decoder = VideoDecoder::open("video.mp4")?
149 /// .output_format(PixelFormat::Rgba)
150 /// .build()?;
151 /// ```
152 #[must_use]
153 pub fn output_format(mut self, format: PixelFormat) -> Self {
154 self.output_format = Some(format);
155 self
156 }
157
158 /// Sets the hardware acceleration mode.
159 ///
160 /// Hardware acceleration can significantly improve decoding performance,
161 /// especially for high-resolution video (4K and above).
162 ///
163 /// # Available Modes
164 ///
165 /// - [`HardwareAccel::Auto`] - Automatically detect and use available hardware (default)
166 /// - [`HardwareAccel::None`] - Disable hardware acceleration (CPU only)
167 /// - [`HardwareAccel::Nvdec`] - NVIDIA NVDEC (requires NVIDIA GPU)
168 /// - [`HardwareAccel::Qsv`] - Intel Quick Sync Video
169 /// - [`HardwareAccel::Amf`] - AMD Advanced Media Framework
170 /// - [`HardwareAccel::VideoToolbox`] - Apple `VideoToolbox` (macOS/iOS)
171 /// - [`HardwareAccel::Vaapi`] - VA-API (Linux)
172 ///
173 /// # Fallback Behavior
174 ///
175 /// If the requested hardware accelerator is unavailable, the decoder
176 /// will fall back to software decoding unless
177 /// [`DecodeError::HwAccelUnavailable`] is explicitly requested.
178 ///
179 /// # Examples
180 ///
181 /// ```ignore
182 /// use ff_decode::{VideoDecoder, HardwareAccel};
183 ///
184 /// // Use NVIDIA NVDEC if available
185 /// let decoder = VideoDecoder::open("video.mp4")?
186 /// .hardware_accel(HardwareAccel::Nvdec)
187 /// .build()?;
188 ///
189 /// // Force CPU decoding
190 /// let cpu_decoder = Decoder::open("video.mp4")?
191 /// .hardware_accel(HardwareAccel::None)
192 /// .build()?;
193 /// ```
194 #[must_use]
195 pub fn hardware_accel(mut self, accel: HardwareAccel) -> Self {
196 self.hardware_accel = accel;
197 self
198 }
199
200 /// Sets the number of decoding threads.
201 ///
202 /// More threads can improve decoding throughput, especially for
203 /// high-resolution videos or codecs that support parallel decoding.
204 ///
205 /// # Thread Count Values
206 ///
207 /// - `0` - Auto-detect based on CPU cores (default)
208 /// - `1` - Single-threaded decoding
209 /// - `N` - Use N threads for decoding
210 ///
211 /// # Performance Notes
212 ///
213 /// - H.264/H.265: Benefit significantly from multi-threading
214 /// - VP9: Good parallel decoding support
215 /// - `ProRes`: Limited threading benefit
216 ///
217 /// Setting too many threads may increase memory usage without
218 /// proportional performance gains.
219 ///
220 /// # Examples
221 ///
222 /// ```ignore
223 /// use ff_decode::VideoDecoder;
224 ///
225 /// // Use 4 threads for decoding
226 /// let decoder = VideoDecoder::open("video.mp4")?
227 /// .thread_count(4)
228 /// .build()?;
229 ///
230 /// // Single-threaded for minimal memory
231 /// let decoder = VideoDecoder::open("video.mp4")?
232 /// .thread_count(1)
233 /// .build()?;
234 /// ```
235 #[must_use]
236 pub fn thread_count(mut self, count: usize) -> Self {
237 self.thread_count = count;
238 self
239 }
240
241 /// Sets a frame pool for memory reuse.
242 ///
243 /// Using a frame pool can significantly reduce allocation overhead
244 /// during continuous video playback by reusing frame buffers.
245 ///
246 /// # Memory Management
247 ///
248 /// When a frame pool is set:
249 /// - Decoded frames attempt to acquire buffers from the pool
250 /// - When frames are dropped, their buffers are returned to the pool
251 /// - If the pool is exhausted, new buffers are allocated normally
252 ///
253 /// # Examples
254 ///
255 /// ```ignore
256 /// use ff_decode::{VideoDecoder, FramePool, PooledBuffer};
257 /// use std::sync::{Arc, Mutex};
258 ///
259 /// // Create a simple frame pool
260 /// struct SimplePool {
261 /// buffers: Mutex<Vec<Vec<u8>>>,
262 /// }
263 ///
264 /// impl FramePool for SimplePool {
265 /// fn acquire(&self, size: usize) -> Option<PooledBuffer> {
266 /// // Implementation...
267 /// None
268 /// }
269 /// }
270 ///
271 /// let pool = Arc::new(SimplePool {
272 /// buffers: Mutex::new(vec![]),
273 /// });
274 ///
275 /// let decoder = VideoDecoder::open("video.mp4")?
276 /// .frame_pool(pool)
277 /// .build()?;
278 /// ```
279 #[must_use]
280 pub fn frame_pool(mut self, pool: Arc<dyn FramePool>) -> Self {
281 self.frame_pool = Some(pool);
282 self
283 }
284
285 /// Returns the configured file path.
286 #[must_use]
287 pub fn path(&self) -> &Path {
288 &self.path
289 }
290
291 /// Returns the configured output format, if any.
292 #[must_use]
293 pub fn get_output_format(&self) -> Option<PixelFormat> {
294 self.output_format
295 }
296
297 /// Returns the configured hardware acceleration mode.
298 #[must_use]
299 pub fn get_hardware_accel(&self) -> HardwareAccel {
300 self.hardware_accel
301 }
302
303 /// Returns the configured thread count.
304 #[must_use]
305 pub fn get_thread_count(&self) -> usize {
306 self.thread_count
307 }
308
309 /// Builds the decoder with the configured options.
310 ///
311 /// This method opens the media file, initializes the decoder context,
312 /// and prepares for frame decoding.
313 ///
314 /// # Errors
315 ///
316 /// Returns an error if:
317 /// - The file cannot be found ([`DecodeError::FileNotFound`])
318 /// - The file contains no video stream ([`DecodeError::NoVideoStream`])
319 /// - The codec is not supported ([`DecodeError::UnsupportedCodec`])
320 /// - Hardware acceleration is unavailable ([`DecodeError::HwAccelUnavailable`])
321 /// - Other `FFmpeg` errors occur ([`DecodeError::Ffmpeg`])
322 ///
323 /// # Examples
324 ///
325 /// ```ignore
326 /// use ff_decode::VideoDecoder;
327 ///
328 /// let decoder = VideoDecoder::open("video.mp4")?
329 /// .build()?;
330 ///
331 /// // Start decoding
332 /// for frame in decoder.frames().take(100) {
333 /// let frame = frame?;
334 /// // Process frame...
335 /// }
336 /// ```
337 pub fn build(self) -> Result<VideoDecoder, DecodeError> {
338 // Verify the file exists
339 if !self.path.exists() {
340 return Err(DecodeError::FileNotFound {
341 path: self.path.clone(),
342 });
343 }
344
345 // Build the internal configuration
346 let config = VideoDecoderConfig {
347 output_format: self.output_format,
348 hardware_accel: self.hardware_accel,
349 thread_count: self.thread_count,
350 };
351
352 // Create the decoder inner
353 let (inner, stream_info) = VideoDecoderInner::new(
354 &self.path,
355 self.output_format,
356 self.hardware_accel,
357 self.thread_count,
358 self.frame_pool.clone(),
359 )?;
360
361 Ok(VideoDecoder {
362 path: self.path,
363 config,
364 frame_pool: self.frame_pool,
365 inner,
366 stream_info,
367 })
368 }
369}
370
371/// A video decoder for extracting frames from media files.
372///
373/// The decoder provides frame-by-frame access to video content with support
374/// for seeking, hardware acceleration, and format conversion.
375///
376/// # Construction
377///
378/// Use [`VideoDecoder::open()`] to create a builder, then call [`VideoDecoderBuilder::build()`]:
379///
380/// ```ignore
381/// use ff_decode::VideoDecoder;
382/// use ff_format::PixelFormat;
383///
384/// let decoder = VideoDecoder::open("video.mp4")?
385/// .output_format(PixelFormat::Rgba)
386/// .build()?;
387/// ```
388///
389/// # Frame Decoding
390///
391/// Frames can be decoded one at a time or using an iterator:
392///
393/// ```ignore
394/// // Decode one frame
395/// if let Some(frame) = decoder.decode_one()? {
396/// println!("Frame at {:?}", frame.timestamp().as_duration());
397/// }
398///
399/// // Use iterator
400/// for frame in decoder.frames().take(100) {
401/// let frame = frame?;
402/// // Process frame...
403/// }
404/// ```
405///
406/// # Seeking
407///
408/// The decoder supports efficient seeking:
409///
410/// ```ignore
411/// use ff_decode::SeekMode;
412/// use std::time::Duration;
413///
414/// // Seek to 30 seconds (keyframe)
415/// decoder.seek(Duration::from_secs(30), SeekMode::Keyframe)?;
416///
417/// // Seek to exact frame
418/// decoder.seek(Duration::from_secs(30), SeekMode::Exact)?;
419/// ```
420pub struct VideoDecoder {
421 /// Path to the media file
422 path: PathBuf,
423 /// Decoder configuration
424 ///
425 /// NOTE: Currently unused but will be used when `FFmpeg` integration
426 /// is implemented in a future issue.
427 #[allow(dead_code)]
428 config: VideoDecoderConfig,
429 /// Optional frame pool for memory reuse
430 frame_pool: Option<Arc<dyn FramePool>>,
431 /// Internal decoder state
432 inner: VideoDecoderInner,
433 /// Video stream information
434 stream_info: VideoStreamInfo,
435}
436
437impl VideoDecoder {
438 /// Opens a media file and returns a builder for configuring the decoder.
439 ///
440 /// This is the entry point for creating a decoder. The returned builder
441 /// allows setting options before the decoder is fully initialized.
442 ///
443 /// # Arguments
444 ///
445 /// * `path` - Path to the media file to decode.
446 ///
447 /// # Examples
448 ///
449 /// ```ignore
450 /// use ff_decode::VideoDecoder;
451 ///
452 /// // Simple usage
453 /// let decoder = VideoDecoder::open("video.mp4")?
454 /// .build()?;
455 ///
456 /// // With options
457 /// let decoder = VideoDecoder::open("video.mp4")?
458 /// .output_format(PixelFormat::Rgba)
459 /// .hardware_accel(HardwareAccel::Auto)
460 /// .build()?;
461 /// ```
462 ///
463 /// # Note
464 ///
465 /// This method does not validate that the file exists or is a valid
466 /// media file. Validation occurs when [`VideoDecoderBuilder::build()`] is called.
467 pub fn open(path: impl AsRef<Path>) -> VideoDecoderBuilder {
468 VideoDecoderBuilder::new(path.as_ref().to_path_buf())
469 }
470
471 // =========================================================================
472 // Information Methods
473 // =========================================================================
474
475 /// Returns the video stream information.
476 ///
477 /// This contains metadata about the video stream including resolution,
478 /// frame rate, codec, and color characteristics.
479 #[must_use]
480 pub fn stream_info(&self) -> &VideoStreamInfo {
481 &self.stream_info
482 }
483
484 /// Returns the video width in pixels.
485 #[must_use]
486 pub fn width(&self) -> u32 {
487 self.stream_info.width()
488 }
489
490 /// Returns the video height in pixels.
491 #[must_use]
492 pub fn height(&self) -> u32 {
493 self.stream_info.height()
494 }
495
496 /// Returns the frame rate in frames per second.
497 #[must_use]
498 pub fn frame_rate(&self) -> f64 {
499 self.stream_info.fps()
500 }
501
502 /// Returns the total duration of the video.
503 ///
504 /// Returns [`Duration::ZERO`] if duration is unknown.
505 #[must_use]
506 pub fn duration(&self) -> Duration {
507 self.stream_info.duration().unwrap_or(Duration::ZERO)
508 }
509
510 /// Returns the current playback position.
511 #[must_use]
512 pub fn position(&self) -> Duration {
513 self.inner.position()
514 }
515
516 /// Returns `true` if the end of stream has been reached.
517 #[must_use]
518 pub fn is_eof(&self) -> bool {
519 self.inner.is_eof()
520 }
521
522 /// Returns the file path being decoded.
523 #[must_use]
524 pub fn path(&self) -> &Path {
525 &self.path
526 }
527
528 /// Returns a reference to the frame pool, if configured.
529 #[must_use]
530 pub fn frame_pool(&self) -> Option<&Arc<dyn FramePool>> {
531 self.frame_pool.as_ref()
532 }
533
534 /// Returns the currently active hardware acceleration mode.
535 ///
536 /// This method returns the actual hardware acceleration being used,
537 /// which may differ from what was requested:
538 ///
539 /// - If [`HardwareAccel::Auto`] was requested, this returns the specific
540 /// accelerator that was successfully initialized (e.g., [`HardwareAccel::Nvdec`]),
541 /// or [`HardwareAccel::None`] if no hardware acceleration is available.
542 /// - If a specific accelerator was requested and initialization failed,
543 /// the decoder creation would have returned an error.
544 /// - If [`HardwareAccel::None`] was requested, this always returns [`HardwareAccel::None`].
545 ///
546 /// # Examples
547 ///
548 /// ```ignore
549 /// use ff_decode::{VideoDecoder, HardwareAccel};
550 ///
551 /// // Request automatic hardware acceleration
552 /// let decoder = VideoDecoder::open("video.mp4")?
553 /// .hardware_accel(HardwareAccel::Auto)
554 /// .build()?;
555 ///
556 /// // Check which accelerator was selected
557 /// match decoder.hardware_accel() {
558 /// HardwareAccel::None => println!("Using software decoding"),
559 /// HardwareAccel::Nvdec => println!("Using NVIDIA NVDEC"),
560 /// HardwareAccel::Qsv => println!("Using Intel Quick Sync"),
561 /// HardwareAccel::VideoToolbox => println!("Using Apple VideoToolbox"),
562 /// HardwareAccel::Vaapi => println!("Using VA-API"),
563 /// HardwareAccel::Amf => println!("Using AMD AMF"),
564 /// _ => unreachable!(),
565 /// }
566 /// ```
    #[must_use]
    pub fn hardware_accel(&self) -> HardwareAccel {
        // The inner decoder records the accelerator that was actually
        // initialized, which may differ from the mode originally requested.
        self.inner.hardware_accel()
    }
571
572 // =========================================================================
573 // Decoding Methods
574 // =========================================================================
575
576 /// Decodes the next video frame.
577 ///
578 /// This method reads and decodes a single frame from the video stream.
579 /// Frames are returned in presentation order.
580 ///
581 /// # Returns
582 ///
583 /// - `Ok(Some(frame))` - A frame was successfully decoded
584 /// - `Ok(None)` - End of stream reached, no more frames
585 /// - `Err(_)` - An error occurred during decoding
586 ///
587 /// # Errors
588 ///
589 /// Returns [`DecodeError`] if:
590 /// - Reading from the file fails
591 /// - Decoding the frame fails
592 /// - Pixel format conversion fails
593 ///
594 /// # Examples
595 ///
596 /// ```ignore
597 /// use ff_decode::VideoDecoder;
598 ///
599 /// let mut decoder = VideoDecoder::open("video.mp4")?.build()?;
600 ///
601 /// while let Some(frame) = decoder.decode_one()? {
602 /// println!("Frame at {:?}", frame.timestamp().as_duration());
603 /// // Process frame...
604 /// }
605 /// ```
    pub fn decode_one(&mut self) -> Result<Option<VideoFrame>, DecodeError> {
        // All decoding state lives in `inner`; this is a thin delegation.
        self.inner.decode_one()
    }
609
610 /// Returns an iterator over decoded frames.
611 ///
612 /// This provides a convenient way to iterate over all frames in the video.
613 /// The iterator will continue until end of stream or an error occurs.
614 ///
615 /// # Examples
616 ///
617 /// ```ignore
618 /// use ff_decode::VideoDecoder;
619 ///
620 /// let mut decoder = VideoDecoder::open("video.mp4")?.build()?;
621 ///
622 /// // Process first 100 frames
623 /// for frame in decoder.frames().take(100) {
624 /// let frame = frame?;
625 /// // Process frame...
626 /// }
627 /// ```
    pub fn frames(&mut self) -> VideoFrameIterator<'_> {
        // The iterator borrows the decoder mutably and pulls frames
        // one at a time via `decode_one()`.
        VideoFrameIterator { decoder: self }
    }
631
632 /// Decodes all frames within a specified time range.
633 ///
634 /// This method seeks to the start position and decodes all frames until
635 /// the end position is reached. Frames outside the range are skipped.
636 ///
637 /// # Performance
638 ///
639 /// - The method performs a keyframe seek to the start position
640 /// - Frames before `start` (from nearest keyframe) are decoded but discarded
641 /// - All frames within `[start, end)` are collected and returned
642 /// - The decoder position after this call will be at or past `end`
643 ///
644 /// For large time ranges or high frame rates, this may allocate significant
645 /// memory. Consider using [`frames()`](Self::frames) with manual filtering
646 /// for very large ranges.
647 ///
648 /// # Arguments
649 ///
650 /// * `start` - Start of the time range (inclusive).
651 /// * `end` - End of the time range (exclusive).
652 ///
653 /// # Returns
654 ///
655 /// A vector of frames with timestamps in the range `[start, end)`.
656 /// Frames are returned in presentation order.
657 ///
658 /// # Errors
659 ///
660 /// Returns [`DecodeError`] if:
661 /// - Seeking to the start position fails
662 /// - Decoding frames fails
663 /// - The time range is invalid (start >= end)
664 ///
665 /// # Examples
666 ///
667 /// ```ignore
668 /// use ff_decode::VideoDecoder;
669 /// use std::time::Duration;
670 ///
671 /// let mut decoder = VideoDecoder::open("video.mp4")?.build()?;
672 ///
673 /// // Decode frames from 5s to 10s
674 /// let frames = decoder.decode_range(
675 /// Duration::from_secs(5),
676 /// Duration::from_secs(10),
677 /// )?;
678 ///
679 /// println!("Decoded {} frames", frames.len());
680 /// for frame in frames {
681 /// println!("Frame at {:?}", frame.timestamp().as_duration());
682 /// }
683 /// ```
684 ///
685 /// # Memory Usage
686 ///
687 /// At 30fps, a 5-second range will allocate ~150 frames. For 1080p RGBA:
688 /// - Each frame: ~8.3 MB (1920 × 1080 × 4 bytes)
689 /// - 150 frames: ~1.25 GB
690 ///
691 /// Consider using a frame pool to reduce allocation overhead.
692 pub fn decode_range(
693 &mut self,
694 start: Duration,
695 end: Duration,
696 ) -> Result<Vec<VideoFrame>, DecodeError> {
697 // Validate range
698 if start >= end {
699 return Err(DecodeError::DecodingFailed {
700 timestamp: Some(start),
701 reason: format!(
702 "Invalid time range: start ({start:?}) must be before end ({end:?})"
703 ),
704 });
705 }
706
707 // Seek to start position (keyframe mode for efficiency)
708 self.seek(start, crate::SeekMode::Keyframe)?;
709
710 // Collect frames in the range
711 let mut frames = Vec::new();
712
713 for frame_result in self.frames() {
714 let frame = frame_result?;
715 let frame_time = frame.timestamp().as_duration();
716
717 // Stop if we've passed the end of the range
718 if frame_time >= end {
719 break;
720 }
721
722 // Only collect frames within the range
723 if frame_time >= start {
724 frames.push(frame);
725 }
726 // Frames before start are automatically discarded
727 }
728
729 Ok(frames)
730 }
731
732 // =========================================================================
733 // Seeking Methods
734 // =========================================================================
735
736 /// Seeks to a specified position in the video stream.
737 ///
738 /// This method performs efficient seeking without reopening the file,
739 /// providing significantly better performance than file-reopen-based seeking
740 /// (5-10ms vs 50-100ms).
741 ///
742 /// # Performance
743 ///
744 /// - **Keyframe seeking**: 5-10ms (typical GOP 1-2s)
745 /// - **Exact seeking**: 10-50ms depending on GOP size
746 /// - **Backward seeking**: Similar to keyframe seeking
747 ///
748 /// For videos with large GOP sizes (>5 seconds), exact seeking may take longer
749 /// as it requires decoding all frames from the nearest keyframe to the target.
750 ///
751 /// # Choosing a Seek Mode
752 ///
753 /// - **Use [`crate::SeekMode::Keyframe`]** for:
754 /// - Video player scrubbing (approximate positioning)
755 /// - Thumbnail generation
756 /// - Quick preview navigation
757 ///
758 /// - **Use [`crate::SeekMode::Exact`]** for:
759 /// - Frame-accurate editing
760 /// - Precise timestamp extraction
761 /// - Quality-critical operations
762 ///
763 /// - **Use [`crate::SeekMode::Backward`]** for:
764 /// - Guaranteed keyframe positioning
765 /// - Preparing for forward playback
766 ///
767 /// # Arguments
768 ///
769 /// * `position` - Target position to seek to.
770 /// * `mode` - Seek mode determining accuracy and performance.
771 ///
772 /// # Errors
773 ///
774 /// Returns [`DecodeError::SeekFailed`] if:
775 /// - The target position is beyond the video duration
776 /// - The file format doesn't support seeking
777 /// - The seek operation fails internally
778 ///
779 /// # Examples
780 ///
781 /// ```ignore
782 /// use ff_decode::{VideoDecoder, SeekMode};
783 /// use std::time::Duration;
784 ///
785 /// let mut decoder = VideoDecoder::open("video.mp4")?.build()?;
786 ///
787 /// // Fast seek to 30 seconds (keyframe)
788 /// decoder.seek(Duration::from_secs(30), SeekMode::Keyframe)?;
789 ///
790 /// // Exact seek to 1 minute
791 /// decoder.seek(Duration::from_secs(60), SeekMode::Exact)?;
792 ///
793 /// // Seek and decode next frame
794 /// decoder.seek(Duration::from_secs(10), SeekMode::Keyframe)?;
795 /// if let Some(frame) = decoder.decode_one()? {
796 /// println!("Frame at {:?}", frame.timestamp().as_duration());
797 /// }
798 /// ```
    pub fn seek(&mut self, position: Duration, mode: crate::SeekMode) -> Result<(), DecodeError> {
        // Seeking (including any required buffer flush) is handled by `inner`.
        self.inner.seek(position, mode)
    }
802
803 /// Flushes the decoder's internal buffers.
804 ///
805 /// This method clears any cached frames and resets the decoder state.
806 /// The decoder is ready to receive new packets after flushing.
807 ///
808 /// # When to Use
809 ///
810 /// - After seeking to ensure clean state
811 /// - Before switching between different parts of the video
812 /// - To clear buffered frames after errors
813 ///
814 /// # Examples
815 ///
816 /// ```ignore
817 /// use ff_decode::VideoDecoder;
818 ///
819 /// let mut decoder = VideoDecoder::open("video.mp4")?.build()?;
820 ///
821 /// // Decode some frames...
822 /// for _ in 0..10 {
823 /// decoder.decode_one()?;
824 /// }
825 ///
826 /// // Flush and start fresh
827 /// decoder.flush();
828 /// ```
829 ///
830 /// # Note
831 ///
832 /// Calling [`seek()`](Self::seek) automatically flushes the decoder,
833 /// so you don't need to call this method explicitly after seeking.
    pub fn flush(&mut self) {
        // Clears cached frames and resets internal decoder state.
        self.inner.flush();
    }
837
838 // =========================================================================
839 // Thumbnail Generation Methods
840 // =========================================================================
841
842 /// Generates a thumbnail at a specific timestamp.
843 ///
844 /// This method seeks to the specified position, decodes a frame, and scales
845 /// it to the target dimensions. It's optimized for thumbnail generation by
846 /// using keyframe seeking for speed.
847 ///
848 /// # Performance
849 ///
850 /// - Seeking: 5-10ms (keyframe mode)
851 /// - Decoding: 5-10ms for 1080p H.264
852 /// - Scaling: 1-3ms for 1080p → 320x180
853 /// - **Total: ~10-25ms per thumbnail**
854 ///
855 /// # Aspect Ratio
856 ///
857 /// The thumbnail preserves the video's aspect ratio using a "fit-within"
858 /// strategy. The output dimensions will be at most the target size, with
859 /// at least one dimension matching the target. No letterboxing is applied.
860 ///
861 /// # Arguments
862 ///
863 /// * `position` - Timestamp to extract the thumbnail from.
864 /// * `width` - Target thumbnail width in pixels.
865 /// * `height` - Target thumbnail height in pixels.
866 ///
867 /// # Returns
868 ///
869 /// A scaled `VideoFrame` representing the thumbnail.
870 ///
871 /// # Errors
872 ///
873 /// Returns [`DecodeError`] if:
874 /// - Seeking to the position fails
875 /// - No frame can be decoded at that position ([`DecodeError::EndOfStream`])
876 /// - Scaling fails
877 ///
878 /// # Examples
879 ///
880 /// ```ignore
881 /// use ff_decode::VideoDecoder;
882 /// use std::time::Duration;
883 ///
884 /// let mut decoder = VideoDecoder::open("video.mp4")?.build()?;
885 ///
886 /// // Generate a 320x180 thumbnail at 5 seconds
887 /// let thumbnail = decoder.thumbnail_at(
888 /// Duration::from_secs(5),
889 /// 320,
890 /// 180,
891 /// )?;
892 ///
893 /// assert_eq!(thumbnail.width(), 320);
894 /// assert_eq!(thumbnail.height(), 180);
895 /// ```
896 ///
897 /// # Use Cases
898 ///
899 /// - Video player scrubbing preview
900 /// - Timeline thumbnail strips
901 /// - Gallery view thumbnails
902 /// - Social media preview images
903 pub fn thumbnail_at(
904 &mut self,
905 position: Duration,
906 width: u32,
907 height: u32,
908 ) -> Result<VideoFrame, DecodeError> {
909 // 1. Seek to the specified position (keyframe mode for speed)
910 self.seek(position, crate::SeekMode::Keyframe)?;
911
912 // 2. Decode one frame
913 let frame = self.decode_one()?.ok_or(DecodeError::EndOfStream)?;
914
915 // 3. Scale the frame to target dimensions
916 self.inner.scale_frame(&frame, width, height)
917 }
918
919 /// Generates multiple thumbnails evenly distributed across the video.
920 ///
921 /// This method creates a series of thumbnails by dividing the video duration
922 /// into equal intervals and extracting a frame at each position. This is
923 /// commonly used for timeline preview strips or video galleries.
924 ///
925 /// # Performance
926 ///
927 /// For a 2-minute video generating 10 thumbnails:
928 /// - Per thumbnail: ~10-25ms (see [`thumbnail_at()`](Self::thumbnail_at))
929 /// - **Total: ~100-250ms**
930 ///
931 /// Performance scales linearly with the number of thumbnails.
932 ///
933 /// # Thumbnail Positions
934 ///
935 /// Thumbnails are extracted at evenly spaced intervals:
936 /// - Position 0: `0s`
937 /// - Position 1: `duration / count`
938 /// - Position 2: `2 * (duration / count)`
939 /// - ...
940 /// - Position N-1: `(N-1) * (duration / count)`
941 ///
942 /// # Arguments
943 ///
944 /// * `count` - Number of thumbnails to generate.
945 /// * `width` - Target thumbnail width in pixels.
946 /// * `height` - Target thumbnail height in pixels.
947 ///
948 /// # Returns
949 ///
950 /// A vector of `VideoFrame` thumbnails in temporal order.
951 ///
952 /// # Errors
953 ///
954 /// Returns [`DecodeError`] if:
955 /// - Any individual thumbnail generation fails (see [`thumbnail_at()`](Self::thumbnail_at))
956 /// - The video duration is unknown ([`Duration::ZERO`])
957 /// - Count is zero
958 ///
959 /// # Examples
960 ///
961 /// ```ignore
962 /// use ff_decode::VideoDecoder;
963 ///
964 /// let mut decoder = VideoDecoder::open("video.mp4")?.build()?;
965 ///
966 /// // Generate 10 thumbnails at 160x90 resolution
967 /// let thumbnails = decoder.thumbnails(10, 160, 90)?;
968 ///
969 /// assert_eq!(thumbnails.len(), 10);
970 /// for thumb in thumbnails {
971 /// assert_eq!(thumb.width(), 160);
972 /// assert_eq!(thumb.height(), 90);
973 /// }
974 /// ```
975 ///
976 /// # Use Cases
977 ///
978 /// - Timeline preview strips (like `YouTube`'s timeline hover)
979 /// - Video gallery grid views
980 /// - Storyboard generation for editing
981 /// - Video summary/preview pages
982 ///
983 /// # Memory Usage
984 ///
985 /// For 10 thumbnails at 160x90 RGBA:
986 /// - Per thumbnail: ~56 KB (160 × 90 × 4 bytes)
987 /// - Total: ~560 KB
988 ///
989 /// This is typically acceptable, but consider using a smaller resolution
990 /// or generating thumbnails on-demand for very large thumbnail counts.
991 pub fn thumbnails(
992 &mut self,
993 count: usize,
994 width: u32,
995 height: u32,
996 ) -> Result<Vec<VideoFrame>, DecodeError> {
997 // Validate count
998 if count == 0 {
999 return Err(DecodeError::DecodingFailed {
1000 timestamp: None,
1001 reason: "Thumbnail count must be greater than zero".to_string(),
1002 });
1003 }
1004
1005 let duration = self.duration();
1006
1007 // Check if duration is valid
1008 if duration.is_zero() {
1009 return Err(DecodeError::DecodingFailed {
1010 timestamp: None,
1011 reason: "Cannot generate thumbnails: video duration is unknown".to_string(),
1012 });
1013 }
1014
1015 // Calculate interval between thumbnails
1016 let interval_nanos = duration.as_nanos() / count as u128;
1017
1018 // Generate thumbnails
1019 let mut thumbnails = Vec::with_capacity(count);
1020
1021 for i in 0..count {
1022 // Use saturating_mul to prevent u128 overflow
1023 let position_nanos = interval_nanos.saturating_mul(i as u128);
1024 // Clamp to u64::MAX to prevent overflow when converting to Duration
1025 #[allow(clippy::cast_possible_truncation)]
1026 let position_nanos_u64 = position_nanos.min(u128::from(u64::MAX)) as u64;
1027 let position = Duration::from_nanos(position_nanos_u64);
1028
1029 let thumbnail = self.thumbnail_at(position, width, height)?;
1030 thumbnails.push(thumbnail);
1031 }
1032
1033 Ok(thumbnails)
1034 }
1035}
1036
/// Iterator over decoded video frames.
///
/// Created by calling [`VideoDecoder::frames()`]. Yields frames until the end
/// of the stream is reached or an error occurs.
///
/// Items are `Result<VideoFrame, DecodeError>`, so decode failures surface as
/// `Err` items rather than terminating iteration silently.
pub struct VideoFrameIterator<'a> {
    // Exclusive borrow of the decoder: ties the iterator's lifetime to the
    // decoder and prevents any other use of it while iteration is in progress.
    decoder: &'a mut VideoDecoder,
}
1044
1045impl Iterator for VideoFrameIterator<'_> {
1046 type Item = Result<VideoFrame, DecodeError>;
1047
1048 fn next(&mut self) -> Option<Self::Item> {
1049 match self.decoder.decode_one() {
1050 Ok(Some(frame)) => Some(Ok(frame)),
1051 Ok(None) => None, // EOF
1052 Err(e) => Some(Err(e)),
1053 }
1054 }
1055}
1056
#[cfg(test)]
#[allow(clippy::panic, clippy::expect_used, clippy::float_cmp)]
mod tests {
    use super::*;
    use std::path::PathBuf;
    use std::time::Duration;

    /// Aspect-ratio-preserving fit of a source rectangle into a target box,
    /// mirroring the scaling rule the thumbnail pipeline is expected to use:
    /// the result never exceeds the target in either dimension, keeps the
    /// source aspect ratio, and rounds to whole pixels.
    fn fit_dimensions(src_w: f64, src_h: f64, dst_w: f64, dst_h: f64) -> (f64, f64) {
        let src_aspect = src_w / src_h;
        if src_aspect > dst_w / dst_h {
            // Proportionally wider source: width is the limiting dimension.
            (dst_w, (dst_w / src_aspect).round())
        } else {
            // Proportionally taller (or equal) source: height limits.
            ((dst_h * src_aspect).round(), dst_h)
        }
    }

    #[test]
    fn test_builder_default_values() {
        let b = VideoDecoderBuilder::new(PathBuf::from("test.mp4"));

        assert_eq!(b.path(), Path::new("test.mp4"));
        assert_eq!(b.get_output_format(), None);
        assert_eq!(b.get_hardware_accel(), HardwareAccel::Auto);
        assert_eq!(b.get_thread_count(), 0);
    }

    #[test]
    fn test_builder_output_format() {
        let b = VideoDecoderBuilder::new(PathBuf::from("test.mp4"))
            .output_format(PixelFormat::Rgba);

        assert_eq!(b.get_output_format(), Some(PixelFormat::Rgba));
    }

    #[test]
    fn test_builder_hardware_accel() {
        let b = VideoDecoderBuilder::new(PathBuf::from("test.mp4"))
            .hardware_accel(HardwareAccel::Nvdec);

        assert_eq!(b.get_hardware_accel(), HardwareAccel::Nvdec);
    }

    #[test]
    fn test_builder_thread_count() {
        let b = VideoDecoderBuilder::new(PathBuf::from("test.mp4")).thread_count(8);

        assert_eq!(b.get_thread_count(), 8);
    }

    #[test]
    fn test_builder_chaining() {
        // All three setters in one fluent chain; each must stick.
        let b = VideoDecoderBuilder::new(PathBuf::from("test.mp4"))
            .output_format(PixelFormat::Bgra)
            .hardware_accel(HardwareAccel::Qsv)
            .thread_count(4);

        assert_eq!(b.get_output_format(), Some(PixelFormat::Bgra));
        assert_eq!(b.get_hardware_accel(), HardwareAccel::Qsv);
        assert_eq!(b.get_thread_count(), 4);
    }

    #[test]
    fn test_decoder_open() {
        assert_eq!(
            VideoDecoder::open("video.mp4").path(),
            Path::new("video.mp4")
        );
    }

    #[test]
    fn test_decoder_open_pathbuf() {
        let p = PathBuf::from("/path/to/video.mp4");
        let b = VideoDecoder::open(&p);
        assert_eq!(b.path(), p.as_path());
    }

    #[test]
    fn test_build_file_not_found() {
        // The error must carry the offending path.
        match VideoDecoder::open("nonexistent_file_12345.mp4").build() {
            Err(DecodeError::FileNotFound { path }) => {
                assert!(
                    path.to_string_lossy()
                        .contains("nonexistent_file_12345.mp4")
                );
            }
            Err(e) => panic!("Expected FileNotFound error, got: {e:?}"),
            Ok(_) => panic!("Expected error, got Ok"),
        }
    }

    #[test]
    fn test_decoder_initial_state_with_invalid_file() {
        // A plain text file is not a valid container, so `build` must fail.
        let path = std::env::temp_dir().join("ff_decode_test_file.txt");
        std::fs::write(&path, "test").expect("Failed to create test file");

        let built = VideoDecoder::open(&path).build();

        // Remove the scratch file before asserting anything.
        let _ = std::fs::remove_file(&path);

        assert!(built.is_err());
        if let Err(e) = built {
            // Either "no video stream" or a lower-level FFmpeg failure is
            // acceptable here.
            assert!(matches!(
                e,
                DecodeError::NoVideoStream { .. } | DecodeError::Ffmpeg { .. }
            ));
        }
    }

    #[test]
    fn test_decoder_config_default() {
        let cfg = VideoDecoderConfig::default();

        assert!(cfg.output_format.is_none());
        assert_eq!(cfg.hardware_accel, HardwareAccel::Auto);
        assert_eq!(cfg.thread_count, 0);
    }

    #[test]
    fn test_seek_mode_variants() {
        use crate::SeekMode;

        // All three variants must exist, be comparable, and be distinct.
        assert_eq!(SeekMode::Keyframe, SeekMode::Keyframe);
        assert_eq!(SeekMode::Exact, SeekMode::Exact);
        assert_eq!(SeekMode::Backward, SeekMode::Backward);
        assert_ne!(SeekMode::Keyframe, SeekMode::Exact);
        assert_ne!(SeekMode::Exact, SeekMode::Backward);
    }

    #[test]
    fn test_seek_mode_default() {
        use crate::SeekMode;

        assert_eq!(SeekMode::default(), SeekMode::Keyframe);
    }

    #[test]
    fn test_frame_iterator_structure() {
        // Compile-time check only: the builder type is constructible here.
        // Real iteration is covered by integration tests with actual videos.
        let _ = VideoDecoderBuilder::new(PathBuf::from("test.mp4"));
    }

    #[test]
    fn test_decode_range_invalid_range() {
        let path = std::env::temp_dir().join("ff_decode_range_test.txt");
        std::fs::write(&path, "test").expect("Failed to create test file");

        let built = VideoDecoder::open(&path).build();

        let _ = std::fs::remove_file(&path);

        // Building from a text file should fail; if a decoder was somehow
        // produced, verify a reversed range (end < start) is rejected.
        if let Ok(mut decoder) = built {
            let outcome = decoder.decode_range(Duration::from_secs(10), Duration::from_secs(5));
            assert!(outcome.is_err());

            if let Err(DecodeError::DecodingFailed { reason, .. }) = outcome {
                assert!(reason.contains("Invalid time range"));
            }
        }
    }

    #[test]
    fn test_decode_range_equal_start_end() {
        // A zero-length range (start == end) counts as invalid too.
        let path = std::env::temp_dir().join("ff_decode_range_equal_test.txt");
        std::fs::write(&path, "test").expect("Failed to create test file");

        let built = VideoDecoder::open(&path).build();

        let _ = std::fs::remove_file(&path);

        if let Ok(mut decoder) = built {
            let instant = Duration::from_secs(5);
            let outcome = decoder.decode_range(instant, instant);
            assert!(outcome.is_err());

            if let Err(DecodeError::DecodingFailed { reason, .. }) = outcome {
                assert!(reason.contains("Invalid time range"));
            }
        }
    }

    #[test]
    fn test_thumbnails_zero_count() {
        let path = std::env::temp_dir().join("ff_decode_thumbnails_zero_test.txt");
        std::fs::write(&path, "test").expect("Failed to create test file");

        let built = VideoDecoder::open(&path).build();

        let _ = std::fs::remove_file(&path);

        // If a decoder was (unexpectedly) produced, asking for zero
        // thumbnails must be rejected.
        if let Ok(mut decoder) = built {
            let outcome = decoder.thumbnails(0, 160, 90);
            assert!(outcome.is_err());

            if let Err(DecodeError::DecodingFailed { reason, .. }) = outcome {
                assert!(reason.contains("Thumbnail count must be greater than zero"));
            }
        }
    }

    #[test]
    fn test_thumbnail_api_exists() {
        // Compile-time surface check: opening a (nonexistent) path yields a
        // builder. Actual thumbnail generation needs real video files and is
        // exercised in integration tests.
        let _ = VideoDecoder::open("nonexistent.mp4");
    }

    #[test]
    fn test_thumbnail_dimensions_calculation() {
        // 16:9 source into a 16:9 target fits exactly.
        let (w, h) = fit_dimensions(1920.0, 1080.0, 320.0, 180.0);
        assert_eq!(w, 320.0);
        assert_eq!(h, 180.0);
    }

    #[test]
    fn test_thumbnail_aspect_ratio_wide_source() {
        // 16:9 source into a square target: width fits, height shrinks.
        let (w, h) = fit_dimensions(1920.0, 1080.0, 180.0, 180.0);
        assert_eq!(w, 180.0);
        // 180 / (16/9) = 101.25 → 101
        assert!((h - 101.0).abs() < 1.0);
    }

    #[test]
    fn test_thumbnail_aspect_ratio_tall_source() {
        // 9:16 portrait source into a square target: height fits, width shrinks.
        let (w, h) = fit_dimensions(1080.0, 1920.0, 180.0, 180.0);
        // 180 * (9/16) = 101.25 → 101
        assert!((w - 101.0).abs() < 1.0);
        assert_eq!(h, 180.0);
    }
}