// ff_decode/video/decoder_inner.rs

1//! Internal video decoder implementation using FFmpeg.
2//!
3//! This module contains the low-level decoder logic that directly interacts
4//! with FFmpeg's C API through the ff-sys crate. It is not exposed publicly.
5
6// Allow unsafe code in this module as it's necessary for FFmpeg FFI
7#![allow(unsafe_code)]
8// Allow specific clippy lints for FFmpeg FFI code
9#![allow(clippy::similar_names)]
10#![allow(clippy::too_many_lines)]
11#![allow(clippy::cast_sign_loss)]
12#![allow(clippy::cast_possible_truncation)]
13#![allow(clippy::cast_possible_wrap)]
14#![allow(clippy::module_name_repetitions)]
15#![allow(clippy::match_same_arms)]
16#![allow(clippy::ptr_as_ptr)]
17#![allow(clippy::doc_markdown)]
18#![allow(clippy::unnecessary_cast)]
19#![allow(clippy::if_not_else)]
20#![allow(clippy::unnecessary_wraps)]
21#![allow(clippy::cast_precision_loss)]
22#![allow(clippy::if_same_then_else)]
23#![allow(clippy::cast_lossless)]
24
25use std::path::Path;
26use std::ptr;
27use std::sync::Arc;
28use std::time::Duration;
29
30use ff_format::PooledBuffer;
31use ff_format::codec::VideoCodec;
32use ff_format::color::{ColorPrimaries, ColorRange, ColorSpace};
33use ff_format::time::{Rational, Timestamp};
34use ff_format::{PixelFormat, VideoFrame, VideoStreamInfo};
35use ff_sys::{
36    AVBufferRef, AVCodecContext, AVCodecID, AVColorPrimaries, AVColorRange, AVColorSpace,
37    AVFormatContext, AVFrame, AVHWDeviceType, AVMediaType_AVMEDIA_TYPE_VIDEO, AVPacket,
38    AVPixelFormat, SwsContext,
39};
40
41use crate::HardwareAccel;
42use crate::error::DecodeError;
43use crate::pool::FramePool;
44
/// Tolerance in seconds for keyframe/backward seek modes.
///
/// When seeking in Keyframe or Backward mode, frames are skipped until we're within
/// this tolerance of the target position. This balances accuracy with performance for
/// typical GOP sizes (1-2 seconds).
// NOTE(review): consumed by the seek logic further down this file (outside this
// view) — confirm there that the tolerance is applied as intended (inclusive bound).
const KEYFRAME_SEEK_TOLERANCE_SECS: u64 = 1;
51
52/// RAII guard for `AVFormatContext` to ensure proper cleanup.
53struct AvFormatContextGuard(*mut AVFormatContext);
54
55impl AvFormatContextGuard {
56    /// Creates a new guard by opening an input file.
57    ///
58    /// # Safety
59    ///
60    /// Caller must ensure FFmpeg is initialized and path is valid.
61    unsafe fn new(path: &Path) -> Result<Self, DecodeError> {
62        // SAFETY: Caller ensures FFmpeg is initialized and path is valid
63        let format_ctx = unsafe {
64            ff_sys::avformat::open_input(path).map_err(|e| {
65                DecodeError::Ffmpeg(format!(
66                    "Failed to open file: {}",
67                    ff_sys::av_error_string(e)
68                ))
69            })?
70        };
71        Ok(Self(format_ctx))
72    }
73
74    /// Returns the raw pointer.
75    const fn as_ptr(&self) -> *mut AVFormatContext {
76        self.0
77    }
78
79    /// Consumes the guard and returns the raw pointer without dropping.
80    fn into_raw(self) -> *mut AVFormatContext {
81        let ptr = self.0;
82        std::mem::forget(self);
83        ptr
84    }
85}
86
87impl Drop for AvFormatContextGuard {
88    fn drop(&mut self) {
89        if !self.0.is_null() {
90            // SAFETY: self.0 is valid and owned by this guard
91            unsafe {
92                ff_sys::avformat::close_input(&mut (self.0 as *mut _));
93            }
94        }
95    }
96}
97
98/// RAII guard for `AVCodecContext` to ensure proper cleanup.
99struct AvCodecContextGuard(*mut AVCodecContext);
100
101impl AvCodecContextGuard {
102    /// Creates a new guard by allocating a codec context.
103    ///
104    /// # Safety
105    ///
106    /// Caller must ensure codec pointer is valid.
107    unsafe fn new(codec: *const ff_sys::AVCodec) -> Result<Self, DecodeError> {
108        // SAFETY: Caller ensures codec pointer is valid
109        let codec_ctx = unsafe {
110            ff_sys::avcodec::alloc_context3(codec).map_err(|e| {
111                DecodeError::Ffmpeg(format!("Failed to allocate codec context: {e}"))
112            })?
113        };
114        Ok(Self(codec_ctx))
115    }
116
117    /// Returns the raw pointer.
118    const fn as_ptr(&self) -> *mut AVCodecContext {
119        self.0
120    }
121
122    /// Consumes the guard and returns the raw pointer without dropping.
123    fn into_raw(self) -> *mut AVCodecContext {
124        let ptr = self.0;
125        std::mem::forget(self);
126        ptr
127    }
128}
129
130impl Drop for AvCodecContextGuard {
131    fn drop(&mut self) {
132        if !self.0.is_null() {
133            // SAFETY: self.0 is valid and owned by this guard
134            unsafe {
135                ff_sys::avcodec::free_context(&mut (self.0 as *mut _));
136            }
137        }
138    }
139}
140
141/// RAII guard for `AVPacket` to ensure proper cleanup.
142struct AvPacketGuard(*mut AVPacket);
143
144impl AvPacketGuard {
145    /// Creates a new guard by allocating a packet.
146    ///
147    /// # Safety
148    ///
149    /// Must be called after FFmpeg initialization.
150    unsafe fn new() -> Result<Self, DecodeError> {
151        // SAFETY: Caller ensures FFmpeg is initialized
152        let packet = unsafe { ff_sys::av_packet_alloc() };
153        if packet.is_null() {
154            return Err(DecodeError::Ffmpeg("Failed to allocate packet".to_string()));
155        }
156        Ok(Self(packet))
157    }
158
159    /// Returns the raw pointer.
160    #[allow(dead_code)]
161    const fn as_ptr(&self) -> *mut AVPacket {
162        self.0
163    }
164
165    /// Consumes the guard and returns the raw pointer without dropping.
166    fn into_raw(self) -> *mut AVPacket {
167        let ptr = self.0;
168        std::mem::forget(self);
169        ptr
170    }
171}
172
173impl Drop for AvPacketGuard {
174    fn drop(&mut self) {
175        if !self.0.is_null() {
176            // SAFETY: self.0 is valid and owned by this guard
177            unsafe {
178                ff_sys::av_packet_free(&mut (self.0 as *mut _));
179            }
180        }
181    }
182}
183
184/// RAII guard for `AVFrame` to ensure proper cleanup.
185struct AvFrameGuard(*mut AVFrame);
186
187impl AvFrameGuard {
188    /// Creates a new guard by allocating a frame.
189    ///
190    /// # Safety
191    ///
192    /// Must be called after FFmpeg initialization.
193    unsafe fn new() -> Result<Self, DecodeError> {
194        // SAFETY: Caller ensures FFmpeg is initialized
195        let frame = unsafe { ff_sys::av_frame_alloc() };
196        if frame.is_null() {
197            return Err(DecodeError::Ffmpeg("Failed to allocate frame".to_string()));
198        }
199        Ok(Self(frame))
200    }
201
202    /// Returns the raw pointer.
203    const fn as_ptr(&self) -> *mut AVFrame {
204        self.0
205    }
206
207    /// Consumes the guard and returns the raw pointer without dropping.
208    fn into_raw(self) -> *mut AVFrame {
209        let ptr = self.0;
210        std::mem::forget(self);
211        ptr
212    }
213}
214
215impl Drop for AvFrameGuard {
216    fn drop(&mut self) {
217        if !self.0.is_null() {
218            // SAFETY: self.0 is valid and owned by this guard
219            unsafe {
220                ff_sys::av_frame_free(&mut (self.0 as *mut _));
221            }
222        }
223    }
224}
225
/// Internal decoder state holding FFmpeg contexts.
///
/// This structure manages the lifecycle of FFmpeg objects and is responsible
/// for proper cleanup when dropped.
///
/// All raw pointers below are owned by this struct once [`VideoDecoderInner::new`]
/// succeeds (the constructor's RAII guards hand them over via `into_raw`).
// NOTE(review): the `Drop` impl is outside this view — confirm it releases every
// pointer-bearing field, including both SwScale contexts and `hw_device_ctx`.
pub(crate) struct VideoDecoderInner {
    /// Format context for reading the media file
    format_ctx: *mut AVFormatContext,
    /// Codec context for decoding video frames
    codec_ctx: *mut AVCodecContext,
    /// Video stream index in the format context
    stream_index: i32,
    /// SwScale context for pixel format conversion (optional, created lazily)
    sws_ctx: Option<*mut SwsContext>,
    /// Target output pixel format (if conversion is needed)
    output_format: Option<PixelFormat>,
    /// Whether end of file has been reached
    eof: bool,
    /// Current playback position (updated from decoded frame timestamps)
    position: Duration,
    /// Reusable packet for reading from file
    packet: *mut AVPacket,
    /// Reusable frame for decoding
    frame: *mut AVFrame,
    /// Cached SwScale context for thumbnail generation
    thumbnail_sws_ctx: Option<*mut SwsContext>,
    /// Last thumbnail dimensions (for cache invalidation)
    thumbnail_cache_key: Option<(u32, u32, u32, u32, AVPixelFormat)>,
    /// Hardware device context (if hardware acceleration is active);
    /// this is the decoder's own reference, separate from the one held
    /// by `codec_ctx`
    hw_device_ctx: Option<*mut AVBufferRef>,
    /// Active hardware acceleration mode (post-fallback, see `hardware_accel()`)
    active_hw_accel: HardwareAccel,
    /// Optional frame pool for memory reuse
    frame_pool: Option<Arc<dyn FramePool>>,
}
260
261impl VideoDecoderInner {
262    /// Maps our `HardwareAccel` enum to the corresponding FFmpeg `AVHWDeviceType`.
263    ///
264    /// Returns `None` for `Auto` and `None` variants as they require special handling.
265    fn hw_accel_to_device_type(accel: HardwareAccel) -> Option<AVHWDeviceType> {
266        match accel {
267            HardwareAccel::Auto => None,
268            HardwareAccel::None => None,
269            HardwareAccel::Nvdec => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_CUDA),
270            HardwareAccel::Qsv => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_QSV),
271            HardwareAccel::Amf => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_D3D11VA), // AMF uses D3D11
272            HardwareAccel::VideoToolbox => {
273                Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VIDEOTOOLBOX)
274            }
275            HardwareAccel::Vaapi => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VAAPI),
276        }
277    }
278
    /// Returns the hardware decoders to try in priority order for Auto mode.
    ///
    /// The order is significant: `init_hardware_accel` tries each entry in turn
    /// and uses the first one that initializes successfully.
    const fn hw_accel_auto_priority() -> &'static [HardwareAccel] {
        // Priority order: NVDEC, QSV, VideoToolbox, VA-API, AMF
        &[
            HardwareAccel::Nvdec,
            HardwareAccel::Qsv,
            HardwareAccel::VideoToolbox,
            HardwareAccel::Vaapi,
            HardwareAccel::Amf,
        ]
    }
290
    /// Attempts to initialize hardware acceleration.
    ///
    /// # Arguments
    ///
    /// * `codec_ctx` - The codec context to configure
    /// * `accel` - Requested hardware acceleration mode
    ///
    /// # Returns
    ///
    /// Returns `Ok((hw_device_ctx, active_accel))` if hardware acceleration was initialized,
    /// or `Ok((None, HardwareAccel::None))` if software decoding should be used.
    ///
    /// # Errors
    ///
    /// Returns an error only if a specific hardware accelerator was requested but failed to initialize.
    /// In `Auto` mode, per-device failures are swallowed and software decoding is the fallback.
    ///
    /// # Safety
    ///
    /// Caller must ensure `codec_ctx` is valid, not yet opened, and not yet
    /// configured with a hardware device context.
    unsafe fn init_hardware_accel(
        codec_ctx: *mut AVCodecContext,
        accel: HardwareAccel,
    ) -> Result<(Option<*mut AVBufferRef>, HardwareAccel), DecodeError> {
        match accel {
            HardwareAccel::Auto => {
                // Try hardware accelerators in priority order
                for &hw_type in Self::hw_accel_auto_priority() {
                    // SAFETY: Caller ensures codec_ctx is valid and not yet configured with hardware
                    if let Ok((Some(ctx), active)) =
                        unsafe { Self::try_init_hw_device(codec_ctx, hw_type) }
                    {
                        return Ok((Some(ctx), active));
                    }
                    // Ignore errors in Auto mode and try the next one
                }
                // All hardware accelerators failed, fall back to software
                Ok((None, HardwareAccel::None))
            }
            HardwareAccel::None => {
                // Software decoding explicitly requested
                Ok((None, HardwareAccel::None))
            }
            _ => {
                // Specific hardware accelerator requested; errors propagate to the caller
                // SAFETY: Caller ensures codec_ctx is valid and not yet configured with hardware
                unsafe { Self::try_init_hw_device(codec_ctx, accel) }
            }
        }
    }
336
    /// Tries to initialize a specific hardware device.
    ///
    /// On success, `codec_ctx` holds one reference to the new device context and
    /// the returned pointer is a second, independent reference that the decoder
    /// keeps so it can release it during its own cleanup.
    ///
    /// # Safety
    ///
    /// Caller must ensure `codec_ctx` is valid and not yet configured with a hardware device.
    unsafe fn try_init_hw_device(
        codec_ctx: *mut AVCodecContext,
        accel: HardwareAccel,
    ) -> Result<(Option<*mut AVBufferRef>, HardwareAccel), DecodeError> {
        // Get the FFmpeg device type; Auto/None have no single device type and
        // yield a software-decoding result here
        let Some(device_type) = Self::hw_accel_to_device_type(accel) else {
            return Ok((None, HardwareAccel::None));
        };

        // Create hardware device context
        // SAFETY: FFmpeg is initialized, device_type is valid
        let mut hw_device_ctx: *mut AVBufferRef = ptr::null_mut();
        let ret = unsafe {
            ff_sys::av_hwdevice_ctx_create(
                ptr::addr_of_mut!(hw_device_ctx),
                device_type,
                ptr::null(),     // device: null for default device
                ptr::null_mut(), // opts: null for default options
                0,               // flags: currently unused by FFmpeg
            )
        };

        if ret < 0 {
            // Hardware device creation failed
            return Err(DecodeError::HwAccelUnavailable { accel });
        }

        // Assign hardware device context to codec context
        // We transfer ownership of the reference to codec_ctx
        // SAFETY: codec_ctx and hw_device_ctx are valid
        unsafe {
            (*codec_ctx).hw_device_ctx = hw_device_ctx;
        }

        // We keep our own reference for cleanup in Drop
        // SAFETY: hw_device_ctx is valid
        let our_ref = unsafe { ff_sys::av_buffer_ref(hw_device_ctx) };
        if our_ref.is_null() {
            // Failed to create our reference (allocation failure).
            // codec_ctx still owns the original, so we don't need to clean it up here;
            // it is released when the codec context is freed.
            return Err(DecodeError::HwAccelUnavailable { accel });
        }

        Ok((Some(our_ref), accel))
    }
387
    /// Returns the currently active hardware acceleration mode.
    ///
    /// This is the mode actually in use after initialization (e.g. `None` when
    /// an `Auto` request fell back to software decoding), not necessarily the
    /// mode originally requested.
    pub(crate) fn hardware_accel(&self) -> HardwareAccel {
        self.active_hw_accel
    }
392
393    /// Checks if a pixel format is a hardware format.
394    ///
395    /// Hardware formats include: D3D11, CUDA, VAAPI, VideoToolbox, QSV, etc.
396    const fn is_hardware_format(format: AVPixelFormat) -> bool {
397        matches!(
398            format,
399            ff_sys::AVPixelFormat_AV_PIX_FMT_D3D11
400                | ff_sys::AVPixelFormat_AV_PIX_FMT_CUDA
401                | ff_sys::AVPixelFormat_AV_PIX_FMT_VAAPI
402                | ff_sys::AVPixelFormat_AV_PIX_FMT_VIDEOTOOLBOX
403                | ff_sys::AVPixelFormat_AV_PIX_FMT_QSV
404                | ff_sys::AVPixelFormat_AV_PIX_FMT_VDPAU
405                | ff_sys::AVPixelFormat_AV_PIX_FMT_DXVA2_VLD
406                | ff_sys::AVPixelFormat_AV_PIX_FMT_OPENCL
407                | ff_sys::AVPixelFormat_AV_PIX_FMT_MEDIACODEC
408                | ff_sys::AVPixelFormat_AV_PIX_FMT_VULKAN
409        )
410    }
411
    /// Transfers a hardware frame to CPU memory if needed.
    ///
    /// If `self.frame` is a hardware frame, creates a new software frame
    /// and transfers the data from GPU to CPU memory. Software frames are
    /// left untouched.
    ///
    /// # Errors
    ///
    /// Returns `DecodeError::Ffmpeg` if the temporary frame cannot be allocated
    /// or if `av_hwframe_transfer_data` fails.
    ///
    /// # Safety
    ///
    /// Caller must ensure `self.frame` contains a valid decoded frame.
    unsafe fn transfer_hardware_frame_if_needed(&mut self) -> Result<(), DecodeError> {
        // SAFETY: self.frame is valid and owned by this instance
        let frame_format = unsafe { (*self.frame).format };

        if !Self::is_hardware_format(frame_format) {
            // Not a hardware frame, no transfer needed
            return Ok(());
        }

        // Create a temporary software frame for transfer
        // SAFETY: FFmpeg is initialized
        let sw_frame = unsafe { ff_sys::av_frame_alloc() };
        if sw_frame.is_null() {
            return Err(DecodeError::Ffmpeg(
                "Failed to allocate software frame for hardware transfer".to_string(),
            ));
        }

        // Transfer data from hardware frame to software frame
        // SAFETY: self.frame and sw_frame are valid
        let ret = unsafe {
            ff_sys::av_hwframe_transfer_data(
                sw_frame, self.frame, 0, // flags: currently unused
            )
        };

        if ret < 0 {
            // Transfer failed, clean up the temporary frame before reporting
            unsafe {
                ff_sys::av_frame_free(&mut (sw_frame as *mut _));
            }
            return Err(DecodeError::Ffmpeg(format!(
                "Failed to transfer hardware frame to CPU memory: {}",
                ff_sys::av_error_string(ret)
            )));
        }

        // Copy metadata (pts, duration, etc.) from hardware frame to software frame
        // SAFETY: Both frames are valid
        unsafe {
            (*sw_frame).pts = (*self.frame).pts;
            (*sw_frame).pkt_dts = (*self.frame).pkt_dts;
            (*sw_frame).duration = (*self.frame).duration;
            (*sw_frame).time_base = (*self.frame).time_base;
        }

        // Replace self.frame with the software frame: drop the hardware frame's
        // buffers, move the software frame's ownership in, then free the now-empty
        // temporary frame struct
        // SAFETY: self.frame is valid and owned by this instance
        unsafe {
            ff_sys::av_frame_unref(self.frame);
            ff_sys::av_frame_move_ref(self.frame, sw_frame);
            ff_sys::av_frame_free(&mut (sw_frame as *mut _));
        }

        Ok(())
    }
476
    /// Opens a media file and initializes the decoder.
    ///
    /// # Arguments
    ///
    /// * `path` - Path to the media file
    /// * `output_format` - Optional target pixel format for conversion
    /// * `hardware_accel` - Hardware acceleration mode
    /// * `thread_count` - Number of decoding threads (0 = auto)
    /// * `frame_pool` - Optional frame pool for buffer reuse
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - The file cannot be opened
    /// - No video stream is found
    /// - The codec is not supported
    /// - Decoder initialization fails
    pub(crate) fn new(
        path: &Path,
        output_format: Option<PixelFormat>,
        hardware_accel: HardwareAccel,
        thread_count: usize,
        frame_pool: Option<Arc<dyn FramePool>>,
    ) -> Result<(Self, VideoStreamInfo), DecodeError> {
        // Ensure FFmpeg is initialized (thread-safe and idempotent)
        ff_sys::ensure_initialized();

        // Open the input file (with RAII guard so early returns clean up)
        // SAFETY: Path is valid, AvFormatContextGuard ensures cleanup
        let format_ctx_guard = unsafe { AvFormatContextGuard::new(path)? };
        let format_ctx = format_ctx_guard.as_ptr();

        // Read stream information
        // SAFETY: format_ctx is valid and owned by guard
        unsafe {
            ff_sys::avformat::find_stream_info(format_ctx).map_err(|e| {
                DecodeError::Ffmpeg(format!(
                    "Failed to find stream info: {}",
                    ff_sys::av_error_string(e)
                ))
            })?;
        }

        // Find the video stream
        // SAFETY: format_ctx is valid
        let (stream_index, codec_id) =
            unsafe { Self::find_video_stream(format_ctx) }.ok_or_else(|| {
                DecodeError::NoVideoStream {
                    path: path.to_path_buf(),
                }
            })?;

        // Find the decoder for this codec
        // SAFETY: codec_id is valid from FFmpeg
        let codec = unsafe {
            ff_sys::avcodec::find_decoder(codec_id).ok_or_else(|| {
                DecodeError::UnsupportedCodec {
                    codec: format!("codec_id={codec_id:?}"),
                }
            })?
        };

        // Allocate codec context (with RAII guard)
        // SAFETY: codec pointer is valid, AvCodecContextGuard ensures cleanup
        let codec_ctx_guard = unsafe { AvCodecContextGuard::new(codec)? };
        let codec_ctx = codec_ctx_guard.as_ptr();

        // Copy codec parameters from stream to context
        // SAFETY: format_ctx and codec_ctx are valid, stream_index is valid
        unsafe {
            let stream = (*format_ctx).streams.add(stream_index as usize);
            let codecpar = (*(*stream)).codecpar;
            ff_sys::avcodec::parameters_to_context(codec_ctx, codecpar).map_err(|e| {
                DecodeError::Ffmpeg(format!(
                    "Failed to copy codec parameters: {}",
                    ff_sys::av_error_string(e)
                ))
            })?;

            // Set thread count (0 keeps FFmpeg's automatic choice)
            if thread_count > 0 {
                (*codec_ctx).thread_count = thread_count as i32;
            }
        }

        // Initialize hardware acceleration if requested; must happen before open2
        // SAFETY: codec_ctx is valid and not yet opened
        let (hw_device_ctx, active_hw_accel) =
            unsafe { Self::init_hardware_accel(codec_ctx, hardware_accel)? };

        // Open the codec
        // SAFETY: codec_ctx and codec are valid, hardware device context is set if requested
        unsafe {
            ff_sys::avcodec::open2(codec_ctx, codec, ptr::null_mut()).map_err(|e| {
                // If codec opening failed, we still own our reference to hw_device_ctx
                // but it will be cleaned up when codec_ctx is freed (which happens
                // when codec_ctx_guard is dropped)
                // Our reference in hw_device_ctx will be cleaned up here
                if let Some(hw_ctx) = hw_device_ctx {
                    ff_sys::av_buffer_unref(&mut (hw_ctx as *mut _));
                }
                DecodeError::Ffmpeg(format!(
                    "Failed to open codec: {}",
                    ff_sys::av_error_string(e)
                ))
            })?;
        }

        // Extract stream information
        // SAFETY: All pointers are valid
        let stream_info =
            unsafe { Self::extract_stream_info(format_ctx, stream_index as i32, codec_ctx)? };

        // Allocate packet and frame (with RAII guards)
        // SAFETY: FFmpeg is initialized, guards ensure cleanup
        let packet_guard = unsafe { AvPacketGuard::new()? };
        let frame_guard = unsafe { AvFrameGuard::new()? };

        // All initialization successful - transfer ownership to VideoDecoderInner
        // via into_raw so the guards do not free the contexts on return
        Ok((
            Self {
                format_ctx: format_ctx_guard.into_raw(),
                codec_ctx: codec_ctx_guard.into_raw(),
                stream_index: stream_index as i32,
                sws_ctx: None,
                output_format,
                eof: false,
                position: Duration::ZERO,
                packet: packet_guard.into_raw(),
                frame: frame_guard.into_raw(),
                thumbnail_sws_ctx: None,
                thumbnail_cache_key: None,
                hw_device_ctx,
                active_hw_accel,
                frame_pool,
            },
            stream_info,
        ))
    }
615
616    /// Finds the first video stream in the format context.
617    ///
618    /// # Returns
619    ///
620    /// Returns `Some((index, codec_id))` if a video stream is found, `None` otherwise.
621    ///
622    /// # Safety
623    ///
624    /// Caller must ensure `format_ctx` is valid and initialized.
625    unsafe fn find_video_stream(format_ctx: *mut AVFormatContext) -> Option<(usize, AVCodecID)> {
626        // SAFETY: Caller ensures format_ctx is valid
627        unsafe {
628            let nb_streams = (*format_ctx).nb_streams as usize;
629
630            for i in 0..nb_streams {
631                let stream = (*format_ctx).streams.add(i);
632                let codecpar = (*(*stream)).codecpar;
633
634                if (*codecpar).codec_type == AVMediaType_AVMEDIA_TYPE_VIDEO {
635                    return Some((i, (*codecpar).codec_id));
636                }
637            }
638
639            None
640        }
641    }
642
    /// Extracts video stream information from FFmpeg structures.
    ///
    /// Missing or invalid values are replaced with defaults and logged:
    /// a zero frame-rate denominator falls back to 30 fps, and a non-positive
    /// container duration is reported as `None`.
    ///
    /// # Safety
    ///
    /// Caller must ensure `format_ctx` and `codec_ctx` are valid and that
    /// `stream_index` refers to an existing stream in `format_ctx`.
    unsafe fn extract_stream_info(
        format_ctx: *mut AVFormatContext,
        stream_index: i32,
        codec_ctx: *mut AVCodecContext,
    ) -> Result<VideoStreamInfo, DecodeError> {
        // SAFETY: Caller ensures all pointers are valid
        let (
            width,
            height,
            fps_rational,
            duration_val,
            pix_fmt,
            color_space_val,
            color_range_val,
            color_primaries_val,
            codec_id,
        ) = unsafe {
            let stream = (*format_ctx).streams.add(stream_index as usize);
            let codecpar = (*(*stream)).codecpar;

            (
                (*codecpar).width as u32,
                (*codecpar).height as u32,
                (*(*stream)).avg_frame_rate,
                (*format_ctx).duration,
                (*codec_ctx).pix_fmt,
                (*codecpar).color_space,
                (*codecpar).color_range,
                (*codecpar).color_primaries,
                (*codecpar).codec_id,
            )
        };

        // Extract frame rate (guard against a zero denominator before use)
        let frame_rate = if fps_rational.den != 0 {
            Rational::new(fps_rational.num as i32, fps_rational.den as i32)
        } else {
            log::warn!(
                "invalid frame rate, falling back to 30fps num={} den=0 fallback=30/1",
                fps_rational.num
            );
            Rational::new(30, 1)
        };

        // Extract duration; the container-level duration is in AV_TIME_BASE
        // (microsecond) units, hence the division by 1_000_000
        let duration = if duration_val > 0 {
            let duration_secs = duration_val as f64 / 1_000_000.0;
            Some(Duration::from_secs_f64(duration_secs))
        } else {
            None
        };

        // Extract pixel format
        let pixel_format = Self::convert_pixel_format(pix_fmt);

        // Extract color information
        let color_space = Self::convert_color_space(color_space_val);
        let color_range = Self::convert_color_range(color_range_val);
        let color_primaries = Self::convert_color_primaries(color_primaries_val);

        // Extract codec
        let codec = Self::convert_codec(codec_id);

        // Build stream info
        let mut builder = VideoStreamInfo::builder()
            .index(stream_index as u32)
            .codec(codec)
            .width(width)
            .height(height)
            .frame_rate(frame_rate)
            .pixel_format(pixel_format)
            .color_space(color_space)
            .color_range(color_range)
            .color_primaries(color_primaries);

        if let Some(d) = duration {
            builder = builder.duration(d);
        }

        Ok(builder.build())
    }
725
726    /// Converts FFmpeg pixel format to our PixelFormat enum.
727    fn convert_pixel_format(fmt: AVPixelFormat) -> PixelFormat {
728        if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P {
729            PixelFormat::Yuv420p
730        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P {
731            PixelFormat::Yuv422p
732        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P {
733            PixelFormat::Yuv444p
734        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24 {
735            PixelFormat::Rgb24
736        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24 {
737            PixelFormat::Bgr24
738        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA {
739            PixelFormat::Rgba
740        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA {
741            PixelFormat::Bgra
742        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8 {
743            PixelFormat::Gray8
744        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_NV12 {
745            PixelFormat::Nv12
746        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_NV21 {
747            PixelFormat::Nv21
748        } else {
749            log::warn!(
750                "pixel_format unsupported, falling back to Yuv420p requested={fmt} fallback=Yuv420p"
751            );
752            PixelFormat::Yuv420p
753        }
754    }
755
756    /// Converts FFmpeg color space to our ColorSpace enum.
757    fn convert_color_space(space: AVColorSpace) -> ColorSpace {
758        if space == ff_sys::AVColorSpace_AVCOL_SPC_BT709 {
759            ColorSpace::Bt709
760        } else if space == ff_sys::AVColorSpace_AVCOL_SPC_BT470BG
761            || space == ff_sys::AVColorSpace_AVCOL_SPC_SMPTE170M
762        {
763            ColorSpace::Bt601
764        } else if space == ff_sys::AVColorSpace_AVCOL_SPC_BT2020_NCL {
765            ColorSpace::Bt2020
766        } else {
767            log::warn!(
768                "color_space unsupported, falling back to Bt709 requested={space} fallback=Bt709"
769            );
770            ColorSpace::Bt709
771        }
772    }
773
774    /// Converts FFmpeg color range to our ColorRange enum.
775    fn convert_color_range(range: AVColorRange) -> ColorRange {
776        if range == ff_sys::AVColorRange_AVCOL_RANGE_JPEG {
777            ColorRange::Full
778        } else if range == ff_sys::AVColorRange_AVCOL_RANGE_MPEG {
779            ColorRange::Limited
780        } else {
781            log::warn!(
782                "color_range unsupported, falling back to Limited requested={range} fallback=Limited"
783            );
784            ColorRange::Limited
785        }
786    }
787
788    /// Converts FFmpeg color primaries to our ColorPrimaries enum.
789    fn convert_color_primaries(primaries: AVColorPrimaries) -> ColorPrimaries {
790        if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT709 {
791            ColorPrimaries::Bt709
792        } else if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT470BG
793            || primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_SMPTE170M
794        {
795            ColorPrimaries::Bt601
796        } else if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT2020 {
797            ColorPrimaries::Bt2020
798        } else {
799            log::warn!(
800                "color_primaries unsupported, falling back to Bt709 requested={primaries} fallback=Bt709"
801            );
802            ColorPrimaries::Bt709
803        }
804    }
805
806    /// Converts FFmpeg codec ID to our VideoCodec enum.
807    fn convert_codec(codec_id: AVCodecID) -> VideoCodec {
808        if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_H264 {
809            VideoCodec::H264
810        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_HEVC {
811            VideoCodec::H265
812        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_VP8 {
813            VideoCodec::Vp8
814        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_VP9 {
815            VideoCodec::Vp9
816        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_AV1 {
817            VideoCodec::Av1
818        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_MPEG4 {
819            VideoCodec::Mpeg4
820        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_PRORES {
821            VideoCodec::ProRes
822        } else {
823            log::warn!(
824                "video codec unsupported, falling back to H264 codec_id={codec_id} fallback=H264"
825            );
826            VideoCodec::H264
827        }
828    }
829
    /// Decodes the next video frame.
    ///
    /// Drives the standard FFmpeg send/receive loop: packets are read from the
    /// container and fed to the decoder until it yields a frame, EOF is
    /// reached, or an error occurs. Updates `self.position` from each decoded
    /// frame's PTS.
    ///
    /// # Returns
    ///
    /// - `Ok(Some(frame))` - Successfully decoded a frame
    /// - `Ok(None)` - End of stream reached (decoder fully drained)
    /// - `Err(_)` - Decoding error occurred
    pub(crate) fn decode_one(&mut self) -> Result<Option<VideoFrame>, DecodeError> {
        if self.eof {
            return Ok(None);
        }

        unsafe {
            loop {
                // Try to receive a frame from the decoder
                let ret = ff_sys::avcodec_receive_frame(self.codec_ctx, self.frame);

                if ret == 0 {
                    // Successfully received a frame.
                    // Check if this is a hardware frame and transfer to CPU memory if needed.
                    self.transfer_hardware_frame_if_needed()?;

                    let video_frame = self.convert_frame_to_video_frame()?;

                    // Update position from the frame's PTS (expressed in stream
                    // time_base units); frames without a PTS leave it unchanged.
                    let pts = (*self.frame).pts;
                    if pts != ff_sys::AV_NOPTS_VALUE {
                        let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
                        let time_base = (*(*stream)).time_base;
                        let timestamp_secs =
                            pts as f64 * time_base.num as f64 / time_base.den as f64;
                        self.position = Duration::from_secs_f64(timestamp_secs);
                    }

                    return Ok(Some(video_frame));
                } else if ret == ff_sys::error_codes::EAGAIN {
                    // Decoder needs more input; read the next packet from the container.
                    let read_ret = ff_sys::av_read_frame(self.format_ctx, self.packet);

                    if read_ret == ff_sys::error_codes::EOF {
                        // End of file: send a NULL packet to put the decoder in
                        // draining mode, then loop to receive buffered frames.
                        ff_sys::avcodec_send_packet(self.codec_ctx, ptr::null());
                        self.eof = true;
                        continue;
                    } else if read_ret < 0 {
                        return Err(DecodeError::Ffmpeg(format!(
                            "Failed to read frame: {}",
                            ff_sys::av_error_string(read_ret)
                        )));
                    }

                    // Only feed packets that belong to the selected video stream.
                    if (*self.packet).stream_index == self.stream_index {
                        // Send the packet, then unref it unconditionally.
                        let send_ret = ff_sys::avcodec_send_packet(self.codec_ctx, self.packet);
                        ff_sys::av_packet_unref(self.packet);

                        // EAGAIN from send is tolerated (not treated as an error):
                        // we only send after receive reported EAGAIN above.
                        if send_ret < 0 && send_ret != ff_sys::error_codes::EAGAIN {
                            return Err(DecodeError::Ffmpeg(format!(
                                "Failed to send packet: {}",
                                ff_sys::av_error_string(send_ret)
                            )));
                        }
                    } else {
                        // Packet from another stream (audio, subtitles, ...): discard.
                        ff_sys::av_packet_unref(self.packet);
                    }
                } else if ret == ff_sys::error_codes::EOF {
                    // Decoder has been fully flushed; no more frames will come.
                    self.eof = true;
                    return Ok(None);
                } else {
                    return Err(DecodeError::DecodingFailed {
                        timestamp: Some(self.position),
                        reason: ff_sys::av_error_string(ret),
                    });
                }
            }
        }
    }
911
912    /// Converts an AVFrame to a VideoFrame, applying pixel format conversion if needed.
913    unsafe fn convert_frame_to_video_frame(&mut self) -> Result<VideoFrame, DecodeError> {
914        // SAFETY: Caller ensures self.frame is valid
915        unsafe {
916            let width = (*self.frame).width as u32;
917            let height = (*self.frame).height as u32;
918            let src_format = (*self.frame).format;
919
920            // Determine output format
921            let dst_format = if let Some(fmt) = self.output_format {
922                Self::pixel_format_to_av(fmt)
923            } else {
924                src_format
925            };
926
927            // Check if conversion is needed
928            let needs_conversion = src_format != dst_format;
929
930            if needs_conversion {
931                self.convert_with_sws(width, height, src_format, dst_format)
932            } else {
933                self.av_frame_to_video_frame(self.frame)
934            }
935        }
936    }
937
    /// Converts the current frame's pixel format using SwScale.
    ///
    /// Lazily creates a `SwsContext` on first use and caches it in
    /// `self.sws_ctx`, allocates a destination frame, runs the conversion,
    /// and wraps the result in a [`VideoFrame`].
    ///
    /// NOTE(review): the cached context is created from the first frame's
    /// width/height/src_format and never invalidated — this assumes those
    /// never change mid-stream. Confirm resolution changes are handled (or
    /// rejected) upstream.
    unsafe fn convert_with_sws(
        &mut self,
        width: u32,
        height: u32,
        src_format: i32,
        dst_format: i32,
    ) -> Result<VideoFrame, DecodeError> {
        // SAFETY: Caller ensures frame and context pointers are valid
        unsafe {
            // Get or create the cached SwScale context (bilinear filtering)
            if self.sws_ctx.is_none() {
                let ctx = ff_sys::swscale::get_context(
                    width as i32,
                    height as i32,
                    src_format,
                    width as i32,
                    height as i32,
                    dst_format,
                    ff_sys::swscale::scale_flags::BILINEAR,
                )
                .map_err(|e| DecodeError::Ffmpeg(format!("Failed to create sws context: {e}")))?;

                self.sws_ctx = Some(ctx);
            }

            let Some(sws_ctx) = self.sws_ctx else {
                // Defensive: unreachable in practice, the context was stored just above.
                return Err(DecodeError::Ffmpeg(
                    "SwsContext not initialized".to_string(),
                ));
            };

            // Allocate destination frame (RAII guard frees it on every exit
            // path, including the error returns below)
            let dst_frame_guard = AvFrameGuard::new()?;
            let dst_frame = dst_frame_guard.as_ptr();

            (*dst_frame).width = width as i32;
            (*dst_frame).height = height as i32;
            (*dst_frame).format = dst_format;

            // Allocate the pixel buffers for the destination frame
            let buffer_ret = ff_sys::av_frame_get_buffer(dst_frame, 0);
            if buffer_ret < 0 {
                return Err(DecodeError::Ffmpeg(format!(
                    "Failed to allocate frame buffer: {}",
                    ff_sys::av_error_string(buffer_ret)
                )));
            }

            // Perform the conversion over the full frame height (slice 0..height)
            ff_sys::swscale::scale(
                sws_ctx,
                (*self.frame).data.as_ptr() as *const *const u8,
                (*self.frame).linesize.as_ptr(),
                0,
                height as i32,
                (*dst_frame).data.as_ptr() as *const *mut u8,
                (*dst_frame).linesize.as_ptr(),
            )
            .map_err(|e| DecodeError::Ffmpeg(format!("Failed to scale frame: {e}")))?;

            // Carry the presentation timestamp over to the converted frame
            (*dst_frame).pts = (*self.frame).pts;

            // Convert to VideoFrame (copies pixel data out of dst_frame)
            let video_frame = self.av_frame_to_video_frame(dst_frame)?;

            // dst_frame is automatically freed when guard drops

            Ok(video_frame)
        }
    }
1010
    /// Converts an AVFrame to a VideoFrame.
    ///
    /// Copies the frame's pixel planes into owned buffers and attaches a
    /// timestamp derived from the frame's PTS and the stream's time base
    /// (or `Timestamp::default()` when the PTS is unset).
    ///
    /// # Safety
    ///
    /// Caller must ensure `frame` points to a valid, fully decoded AVFrame
    /// and that `self.format_ctx` / `self.stream_index` are valid.
    unsafe fn av_frame_to_video_frame(
        &self,
        frame: *const AVFrame,
    ) -> Result<VideoFrame, DecodeError> {
        // SAFETY: Caller ensures frame and format_ctx are valid
        unsafe {
            let width = (*frame).width as u32;
            let height = (*frame).height as u32;
            let format = Self::convert_pixel_format((*frame).format);

            // Extract timestamp in stream time_base units, if present
            let pts = (*frame).pts;
            let timestamp = if pts != ff_sys::AV_NOPTS_VALUE {
                let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
                let time_base = (*(*stream)).time_base;
                Timestamp::new(
                    pts as i64,
                    Rational::new(time_base.num as i32, time_base.den as i32),
                )
            } else {
                Timestamp::default()
            };

            // Copy pixel data out of the AVFrame into owned plane buffers
            let (planes, strides) =
                self.extract_planes_and_strides(frame, width, height, format)?;

            // NOTE(review): semantics of the trailing `false` flag are defined
            // by VideoFrame::new in ff_format — confirm against that crate.
            VideoFrame::new(planes, strides, width, height, format, timestamp, false)
                .map_err(|e| DecodeError::Ffmpeg(format!("Failed to create VideoFrame: {e}")))
        }
    }
1043
1044    /// Allocates a buffer, optionally using the frame pool.
1045    ///
1046    /// If a frame pool is configured and has available buffers, uses the pool.
1047    /// Otherwise, allocates a new Vec<u8>.
1048    ///
1049    /// Allocates a buffer for decoded frame data.
1050    ///
1051    /// If a frame pool is configured, attempts to acquire a buffer from the pool.
1052    /// The returned PooledBuffer will automatically be returned to the pool when dropped.
1053    fn allocate_buffer(&self, size: usize) -> PooledBuffer {
1054        if let Some(ref pool) = self.frame_pool
1055            && let Some(pooled_buffer) = pool.acquire(size)
1056        {
1057            // Return the pooled buffer directly - it will be automatically
1058            // returned to the pool when the VideoFrame is dropped
1059            return pooled_buffer;
1060        }
1061
1062        // Pool not available or exhausted - allocate a standalone buffer
1063        PooledBuffer::standalone(vec![0u8; size])
1064    }
1065
1066    /// Extracts planes and strides from an AVFrame.
1067    unsafe fn extract_planes_and_strides(
1068        &self,
1069        frame: *const AVFrame,
1070        width: u32,
1071        height: u32,
1072        format: PixelFormat,
1073    ) -> Result<(Vec<PooledBuffer>, Vec<usize>), DecodeError> {
1074        // Bytes per pixel constants for different pixel formats
1075        const BYTES_PER_PIXEL_RGBA: usize = 4;
1076        const BYTES_PER_PIXEL_RGB24: usize = 3;
1077
1078        // SAFETY: Caller ensures frame is valid and format matches actual frame format
1079        unsafe {
1080            let mut planes = Vec::new();
1081            let mut strides = Vec::new();
1082
1083            #[allow(clippy::match_same_arms)]
1084            match format {
1085                PixelFormat::Rgba | PixelFormat::Bgra | PixelFormat::Rgb24 | PixelFormat::Bgr24 => {
1086                    // Packed formats - single plane
1087                    let stride = (*frame).linesize[0] as usize;
1088                    let bytes_per_pixel = if matches!(format, PixelFormat::Rgba | PixelFormat::Bgra)
1089                    {
1090                        BYTES_PER_PIXEL_RGBA
1091                    } else {
1092                        BYTES_PER_PIXEL_RGB24
1093                    };
1094                    let row_size = (width as usize) * bytes_per_pixel;
1095                    let buffer_size = row_size * height as usize;
1096                    let mut plane_data = self.allocate_buffer(buffer_size);
1097
1098                    for y in 0..height as usize {
1099                        let src_offset = y * stride;
1100                        let dst_offset = y * row_size;
1101                        let src_ptr = (*frame).data[0].add(src_offset);
1102                        let plane_slice = plane_data.as_mut();
1103                        // SAFETY: We copy exactly `row_size` bytes per row. The source pointer
1104                        // is valid (from FFmpeg frame data), destination has sufficient capacity
1105                        // (allocated with height * row_size), and ranges don't overlap.
1106                        std::ptr::copy_nonoverlapping(
1107                            src_ptr,
1108                            plane_slice[dst_offset..].as_mut_ptr(),
1109                            row_size,
1110                        );
1111                    }
1112
1113                    planes.push(plane_data);
1114                    strides.push(row_size);
1115                }
1116                PixelFormat::Yuv420p | PixelFormat::Yuv422p | PixelFormat::Yuv444p => {
1117                    // Planar YUV formats
1118                    let (chroma_width, chroma_height) = match format {
1119                        PixelFormat::Yuv420p => (width / 2, height / 2),
1120                        PixelFormat::Yuv422p => (width / 2, height),
1121                        PixelFormat::Yuv444p => (width, height),
1122                        _ => unreachable!(),
1123                    };
1124
1125                    // Y plane
1126                    let y_stride = width as usize;
1127                    let y_size = y_stride * height as usize;
1128                    let mut y_data = self.allocate_buffer(y_size);
1129                    for y in 0..height as usize {
1130                        let src_offset = y * (*frame).linesize[0] as usize;
1131                        let dst_offset = y * y_stride;
1132                        let src_ptr = (*frame).data[0].add(src_offset);
1133                        let y_slice = y_data.as_mut();
1134                        // SAFETY: Copying Y plane row-by-row. Source is valid FFmpeg data,
1135                        // destination has sufficient capacity, no overlap.
1136                        std::ptr::copy_nonoverlapping(
1137                            src_ptr,
1138                            y_slice[dst_offset..].as_mut_ptr(),
1139                            width as usize,
1140                        );
1141                    }
1142                    planes.push(y_data);
1143                    strides.push(y_stride);
1144
1145                    // U plane
1146                    let u_stride = chroma_width as usize;
1147                    let u_size = u_stride * chroma_height as usize;
1148                    let mut u_data = self.allocate_buffer(u_size);
1149                    for y in 0..chroma_height as usize {
1150                        let src_offset = y * (*frame).linesize[1] as usize;
1151                        let dst_offset = y * u_stride;
1152                        let src_ptr = (*frame).data[1].add(src_offset);
1153                        let u_slice = u_data.as_mut();
1154                        // SAFETY: Copying U (chroma) plane row-by-row. Valid source,
1155                        // sufficient destination capacity, no overlap.
1156                        std::ptr::copy_nonoverlapping(
1157                            src_ptr,
1158                            u_slice[dst_offset..].as_mut_ptr(),
1159                            chroma_width as usize,
1160                        );
1161                    }
1162                    planes.push(u_data);
1163                    strides.push(u_stride);
1164
1165                    // V plane
1166                    let v_stride = chroma_width as usize;
1167                    let v_size = v_stride * chroma_height as usize;
1168                    let mut v_data = self.allocate_buffer(v_size);
1169                    for y in 0..chroma_height as usize {
1170                        let src_offset = y * (*frame).linesize[2] as usize;
1171                        let dst_offset = y * v_stride;
1172                        let src_ptr = (*frame).data[2].add(src_offset);
1173                        let v_slice = v_data.as_mut();
1174                        // SAFETY: Copying V (chroma) plane row-by-row. Valid source,
1175                        // sufficient destination capacity, no overlap.
1176                        std::ptr::copy_nonoverlapping(
1177                            src_ptr,
1178                            v_slice[dst_offset..].as_mut_ptr(),
1179                            chroma_width as usize,
1180                        );
1181                    }
1182                    planes.push(v_data);
1183                    strides.push(v_stride);
1184                }
1185                PixelFormat::Gray8 => {
1186                    // Single plane grayscale
1187                    let stride = width as usize;
1188                    let mut plane_data = self.allocate_buffer(stride * height as usize);
1189
1190                    for y in 0..height as usize {
1191                        let src_offset = y * (*frame).linesize[0] as usize;
1192                        let dst_offset = y * stride;
1193                        let src_ptr = (*frame).data[0].add(src_offset);
1194                        let plane_slice = plane_data.as_mut();
1195                        // SAFETY: Copying grayscale plane row-by-row. Valid source,
1196                        // sufficient destination capacity, no overlap.
1197                        std::ptr::copy_nonoverlapping(
1198                            src_ptr,
1199                            plane_slice[dst_offset..].as_mut_ptr(),
1200                            width as usize,
1201                        );
1202                    }
1203
1204                    planes.push(plane_data);
1205                    strides.push(stride);
1206                }
1207                PixelFormat::Nv12 | PixelFormat::Nv21 => {
1208                    // Semi-planar formats
1209                    let uv_height = height / 2;
1210
1211                    // Y plane
1212                    let y_stride = width as usize;
1213                    let mut y_data = self.allocate_buffer(y_stride * height as usize);
1214                    for y in 0..height as usize {
1215                        let src_offset = y * (*frame).linesize[0] as usize;
1216                        let dst_offset = y * y_stride;
1217                        let src_ptr = (*frame).data[0].add(src_offset);
1218                        let y_slice = y_data.as_mut();
1219                        // SAFETY: Copying Y plane (semi-planar) row-by-row. Valid source,
1220                        // sufficient destination capacity, no overlap.
1221                        std::ptr::copy_nonoverlapping(
1222                            src_ptr,
1223                            y_slice[dst_offset..].as_mut_ptr(),
1224                            width as usize,
1225                        );
1226                    }
1227                    planes.push(y_data);
1228                    strides.push(y_stride);
1229
1230                    // UV plane
1231                    let uv_stride = width as usize;
1232                    let mut uv_data = self.allocate_buffer(uv_stride * uv_height as usize);
1233                    for y in 0..uv_height as usize {
1234                        let src_offset = y * (*frame).linesize[1] as usize;
1235                        let dst_offset = y * uv_stride;
1236                        let src_ptr = (*frame).data[1].add(src_offset);
1237                        let uv_slice = uv_data.as_mut();
1238                        // SAFETY: Copying interleaved UV plane (semi-planar) row-by-row.
1239                        // Valid source, sufficient destination capacity, no overlap.
1240                        std::ptr::copy_nonoverlapping(
1241                            src_ptr,
1242                            uv_slice[dst_offset..].as_mut_ptr(),
1243                            width as usize,
1244                        );
1245                    }
1246                    planes.push(uv_data);
1247                    strides.push(uv_stride);
1248                }
1249                _ => {
1250                    return Err(DecodeError::Ffmpeg(format!(
1251                        "Unsupported pixel format: {format:?}"
1252                    )));
1253                }
1254            }
1255
1256            Ok((planes, strides))
1257        }
1258    }
1259
1260    /// Converts our `PixelFormat` to FFmpeg `AVPixelFormat`.
1261    fn pixel_format_to_av(format: PixelFormat) -> AVPixelFormat {
1262        match format {
1263            PixelFormat::Yuv420p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P,
1264            PixelFormat::Yuv422p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P,
1265            PixelFormat::Yuv444p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P,
1266            PixelFormat::Rgb24 => ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24,
1267            PixelFormat::Bgr24 => ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24,
1268            PixelFormat::Rgba => ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA,
1269            PixelFormat::Bgra => ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA,
1270            PixelFormat::Gray8 => ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8,
1271            PixelFormat::Nv12 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV12,
1272            PixelFormat::Nv21 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV21,
1273            _ => {
1274                log::warn!(
1275                    "pixel_format has no AV mapping, falling back to Yuv420p format={format:?} fallback=AV_PIX_FMT_YUV420P"
1276                );
1277                ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P
1278            }
1279        }
1280    }
1281
    /// Returns the current playback position.
    ///
    /// Updated by `decode_one` from each decoded frame's PTS; frames without
    /// a valid PTS leave the position unchanged.
    pub(crate) fn position(&self) -> Duration {
        self.position
    }
1286
    /// Returns whether end of file has been reached.
    ///
    /// Set by `decode_one` once the demuxer hit EOF and the decoder has been
    /// fully drained; cleared again by `seek` and `flush`.
    pub(crate) fn is_eof(&self) -> bool {
        self.eof
    }
1291
1292    /// Converts a `Duration` to a presentation timestamp (PTS) in stream time_base units.
1293    ///
1294    /// # Arguments
1295    ///
1296    /// * `duration` - The duration to convert.
1297    ///
1298    /// # Returns
1299    ///
1300    /// The timestamp in stream time_base units.
1301    ///
1302    /// # Note
1303    ///
1304    /// av_seek_frame expects timestamps in stream time_base units when using a specific stream_index.
1305    fn duration_to_pts(&self, duration: Duration) -> i64 {
1306        // Convert duration to stream time_base units for seeking
1307        // SAFETY:
1308        // - format_ctx is valid: owned by VideoDecoderInner, initialized in constructor via avformat_open_input
1309        // - stream_index is valid: validated during decoder creation (find_stream_info + codec opening)
1310        // - streams array access is valid: guaranteed by FFmpeg after successful avformat_open_input
1311        let time_base = unsafe {
1312            let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
1313            (*(*stream)).time_base
1314        };
1315
1316        // Convert: duration (seconds) * (time_base.den / time_base.num) = PTS
1317        let time_base_f64 = time_base.den as f64 / time_base.num as f64;
1318        (duration.as_secs_f64() * time_base_f64) as i64
1319    }
1320
1321    /// Converts a presentation timestamp (PTS) to a `Duration`.
1322    ///
1323    /// # Arguments
1324    ///
1325    /// * `pts` - The presentation timestamp in stream time base units.
1326    ///
1327    /// # Returns
1328    ///
1329    /// The duration corresponding to the PTS.
1330    ///
1331    /// # Safety
1332    ///
1333    /// Caller must ensure that `format_ctx` and `stream_index` are valid.
1334    ///
1335    /// # Note
1336    ///
1337    /// Currently unused but kept for potential future use in more advanced seeking scenarios.
1338    #[allow(dead_code)]
1339    fn pts_to_duration(&self, pts: i64) -> Duration {
1340        // SAFETY: Caller ensures format_ctx and stream_index are valid
1341        unsafe {
1342            let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
1343            let time_base = (*(*stream)).time_base;
1344
1345            // Convert PTS to duration
1346            let duration_secs = pts as f64 * time_base.num as f64 / time_base.den as f64;
1347            Duration::from_secs_f64(duration_secs)
1348        }
1349    }
1350
    /// Seeks to a specified position in the video stream.
    ///
    /// This method performs efficient seeking without reopening the file.
    /// It uses `av_seek_frame` internally and flushes the decoder buffers.
    ///
    /// # Performance Characteristics
    ///
    /// - **Keyframe seek**: 5-10ms for typical GOP sizes (1-2 seconds)
    /// - **Exact seek**: Proportional to distance from nearest keyframe
    /// - **Large GOP videos**: May require sequential decoding from distant keyframe
    ///
    /// For videos with sparse keyframes (GOP > 2 seconds), the method will
    /// decode frames sequentially from the nearest keyframe to reach the target.
    /// This ensures correct frame data but may take longer (10-50ms for very large GOPs).
    ///
    /// # Arguments
    ///
    /// * `position` - Target position to seek to.
    /// * `mode` - Seek mode (Keyframe, Exact, or Backward).
    ///
    /// # Note
    ///
    /// In every mode the frame that first satisfies the stop condition is
    /// itself decoded and discarded during the skip-forward step, so the next
    /// `decode_one()` call returns the frame *after* it.
    ///
    /// # Errors
    ///
    /// Returns [`DecodeError::SeekFailed`] if the seek operation fails.
    pub(crate) fn seek(
        &mut self,
        position: Duration,
        mode: crate::SeekMode,
    ) -> Result<(), DecodeError> {
        use crate::SeekMode;

        let timestamp = self.duration_to_pts(position);

        // All seek modes use BACKWARD flag to find the nearest keyframe at or before target.
        // The difference between modes is in the post-seek processing below.
        let flags = ff_sys::avformat::seek_flags::BACKWARD;

        // 1. Clear any pending packet and frame to avoid reading stale data after seek
        // SAFETY:
        // - packet is valid: allocated in constructor, owned by VideoDecoderInner
        // - frame is valid: allocated in constructor, owned by VideoDecoderInner
        unsafe {
            ff_sys::av_packet_unref(self.packet);
            ff_sys::av_frame_unref(self.frame);
        }

        // 2. Seek in the format context (file is NOT reopened)
        // Use av_seek_frame with the stream index and timestamp in stream time_base units
        // SAFETY:
        // - format_ctx is valid: owned by VideoDecoderInner, initialized via avformat_open_input
        // - stream_index is valid: validated during decoder creation
        // - timestamp is valid: converted from Duration using stream's time_base
        unsafe {
            ff_sys::avformat::seek_frame(
                self.format_ctx,
                self.stream_index as i32,
                timestamp,
                flags,
            )
            .map_err(|e| DecodeError::SeekFailed {
                target: position,
                reason: ff_sys::av_error_string(e),
            })?;
        }

        // 3. Flush decoder buffers to clear any cached frames
        // SAFETY: codec_ctx is valid: owned by VideoDecoderInner, initialized via avcodec_open2
        unsafe {
            ff_sys::avcodec::flush_buffers(self.codec_ctx);
        }

        // 4. Drain any remaining frames from the decoder after flush
        // This ensures no stale frames are returned after the seek
        // SAFETY:
        // - codec_ctx is valid: owned by VideoDecoderInner, initialized via avcodec_open2
        // - frame is valid: allocated in constructor, owned by VideoDecoderInner
        unsafe {
            loop {
                let ret = ff_sys::avcodec_receive_frame(self.codec_ctx, self.frame);
                if ret == ff_sys::error_codes::EAGAIN || ret == ff_sys::error_codes::EOF {
                    // No more frames in the decoder buffer
                    break;
                } else if ret == 0 {
                    // Got a stale frame: unref it and continue draining
                    ff_sys::av_frame_unref(self.frame);
                } else {
                    // Other error: stop draining; decode_one will surface real errors
                    break;
                }
            }
        }

        // 5. Reset internal state
        self.eof = false;
        // Note: We don't update self.position here because it will be updated
        // when the next frame is decoded. This ensures position reflects actual decoded position.

        // 6. Skip forward to the target position
        //
        // Context: av_seek_frame with BACKWARD flag seeks to the nearest keyframe *at or before*
        // the target timestamp. For videos with sparse keyframes (large GOP size), this may
        // land far from the target (e.g., at the first keyframe for GOP=entire video).
        //
        // Solution: Decode frames sequentially from the keyframe until reaching the target.
        // This is necessary because H.264/H.265 P-frames and B-frames depend on previous
        // frames for reconstruction, so we must decode all intermediate frames.
        //
        // Performance Impact:
        // - Typical GOP (1-2s): 30-60 frames to skip, ~5-10ms overhead
        // - Large GOP (5-10s): 150-300 frames to skip, ~20-50ms overhead
        // - Worst case (single keyframe): May decode entire video, ~100ms-1s
        if mode == SeekMode::Exact {
            // For exact mode, decode until we reach or pass the exact target
            self.skip_to_exact(position)?;
        } else {
            // For keyframe/backward modes, decode until we're reasonably close to the target
            // Rationale: Balances accuracy with performance for common use cases
            let tolerance = Duration::from_secs(KEYFRAME_SEEK_TOLERANCE_SECS);
            let min_position = position.saturating_sub(tolerance);

            while let Some(frame) = self.decode_one()? {
                let frame_time = frame.timestamp().as_duration();
                if frame_time >= min_position {
                    // We're close enough to the target
                    break;
                }
                // Continue decoding to get closer (frames are automatically dropped)
            }
        }

        Ok(())
    }
1482
1483    /// Skips frames until reaching the exact target position.
1484    ///
1485    /// This is used by [`Self::seek`] when `SeekMode::Exact` is specified.
1486    /// It decodes and discards frames from the nearest keyframe until
1487    /// reaching the target position.
1488    ///
1489    /// # Performance
1490    ///
1491    /// Time complexity is O(n) where n is the number of frames between the
1492    /// keyframe and target. For a 30fps video with 2-second GOP:
1493    /// - Worst case: ~60 frames to decode, ~10-20ms
1494    /// - Average case: ~30 frames to decode, ~5-10ms
1495    ///
1496    /// # Arguments
1497    ///
1498    /// * `target` - The exact target position.
1499    ///
1500    /// # Errors
1501    ///
1502    /// Returns [`DecodeError::SeekFailed`] if EOF is reached before the target position.
1503    fn skip_to_exact(&mut self, target: Duration) -> Result<(), DecodeError> {
1504        loop {
1505            match self.decode_one()? {
1506                Some(frame) => {
1507                    let frame_time = frame.timestamp().as_duration();
1508                    if frame_time >= target {
1509                        // Reached or passed the target frame
1510                        // Position will be updated by decode_one() which was just called
1511                        break;
1512                    }
1513                    // Continue decoding (frame is automatically dropped)
1514                }
1515                None => {
1516                    // Reached EOF before finding target frame
1517                    return Err(DecodeError::SeekFailed {
1518                        target,
1519                        reason: "Reached end of stream before target position".to_string(),
1520                    });
1521                }
1522            }
1523        }
1524        Ok(())
1525    }
1526
1527    /// Flushes the decoder's internal buffers.
1528    ///
1529    /// This clears any cached frames and resets the decoder state.
1530    /// The decoder is ready to receive new packets after flushing.
1531    pub(crate) fn flush(&mut self) {
1532        // SAFETY: codec_ctx is valid and owned by this instance
1533        unsafe {
1534            ff_sys::avcodec::flush_buffers(self.codec_ctx);
1535        }
1536        self.eof = false;
1537    }
1538
1539    /// Scales a video frame to the specified dimensions while preserving aspect ratio.
1540    ///
1541    /// This method uses SwScale to resize frames efficiently using a "fit-within"
1542    /// strategy that preserves the original aspect ratio.
1543    ///
1544    /// # Aspect Ratio Preservation
1545    ///
1546    /// The frame is scaled to fit within `(target_width, target_height)` while
1547    /// maintaining its original aspect ratio. The output dimensions will be at most
1548    /// the target size, with at least one dimension matching the target. No letterboxing
1549    /// or pillarboxing is applied - the frame is simply scaled down to fit.
1550    ///
1551    /// # Arguments
1552    ///
1553    /// * `frame` - The source frame to scale.
1554    /// * `target_width` - Desired width in pixels.
1555    /// * `target_height` - Desired height in pixels.
1556    ///
1557    /// # Returns
1558    ///
1559    /// A new `VideoFrame` scaled to fit within the target dimensions.
1560    ///
1561    /// # Errors
1562    ///
1563    /// Returns [`DecodeError`] if SwScale context creation or scaling fails.
1564    ///
1565    /// # Performance
1566    ///
1567    /// - Caches SwScale context for repeated calls with same dimensions
1568    /// - Context creation: ~0.1-0.5ms (only on first call or dimension change)
1569    /// - Typical scaling time: 1-3ms for 1080p → 320x180
1570    /// - Uses bilinear interpolation for quality/performance balance
1571    ///
1572    /// # Cache Behavior
1573    ///
1574    /// The SwScale context is cached based on source/target dimensions and format.
1575    /// When generating multiple thumbnails with the same size (e.g., via `thumbnails()`),
1576    /// the context is reused, eliminating the ~0.1-0.5ms creation overhead per thumbnail.
1577    pub(crate) fn scale_frame(
1578        &mut self,
1579        frame: &VideoFrame,
1580        target_width: u32,
1581        target_height: u32,
1582    ) -> Result<VideoFrame, DecodeError> {
1583        let src_width = frame.width();
1584        let src_height = frame.height();
1585        let src_format = frame.format();
1586
1587        // Calculate scaled dimensions to preserve aspect ratio (fit within target)
1588        let src_aspect = src_width as f64 / src_height as f64;
1589        let target_aspect = target_width as f64 / target_height as f64;
1590
1591        let (scaled_width, scaled_height) = if src_aspect > target_aspect {
1592            // Source is wider - fit to width
1593            let height = (target_width as f64 / src_aspect).round() as u32;
1594            (target_width, height)
1595        } else {
1596            // Source is taller or equal - fit to height
1597            let width = (target_height as f64 * src_aspect).round() as u32;
1598            (width, target_height)
1599        };
1600
1601        // Convert pixel format to FFmpeg format
1602        let av_format = Self::pixel_format_to_av(src_format);
1603
1604        // Cache key: (src_width, src_height, scaled_width, scaled_height, format)
1605        let cache_key = (
1606            src_width,
1607            src_height,
1608            scaled_width,
1609            scaled_height,
1610            av_format,
1611        );
1612
1613        // SAFETY: We're creating temporary FFmpeg objects for scaling
1614        unsafe {
1615            // Check if we can reuse the cached SwScale context
1616            let (sws_ctx, is_cached) = if let (Some(cached_ctx), Some(cached_key)) =
1617                (self.thumbnail_sws_ctx, self.thumbnail_cache_key)
1618            {
1619                if cached_key == cache_key {
1620                    // Cache hit - reuse existing context
1621                    (cached_ctx, true)
1622                } else {
1623                    // Cache miss - free old context and create new one
1624                    ff_sys::swscale::free_context(cached_ctx);
1625                    // Clear cache immediately to prevent dangling pointer
1626                    self.thumbnail_sws_ctx = None;
1627                    self.thumbnail_cache_key = None;
1628
1629                    let new_ctx = ff_sys::swscale::get_context(
1630                        src_width as i32,
1631                        src_height as i32,
1632                        av_format,
1633                        scaled_width as i32,
1634                        scaled_height as i32,
1635                        av_format,
1636                        ff_sys::swscale::scale_flags::BILINEAR,
1637                    )
1638                    .map_err(|e| {
1639                        DecodeError::Ffmpeg(format!("Failed to create scaling context: {e}"))
1640                    })?;
1641
1642                    // Don't cache yet - will cache after successful scaling
1643                    (new_ctx, false)
1644                }
1645            } else {
1646                // No cache - create new context
1647                let new_ctx = ff_sys::swscale::get_context(
1648                    src_width as i32,
1649                    src_height as i32,
1650                    av_format,
1651                    scaled_width as i32,
1652                    scaled_height as i32,
1653                    av_format,
1654                    ff_sys::swscale::scale_flags::BILINEAR,
1655                )
1656                .map_err(|e| {
1657                    DecodeError::Ffmpeg(format!("Failed to create scaling context: {e}"))
1658                })?;
1659
1660                // Don't cache yet - will cache after successful scaling
1661                (new_ctx, false)
1662            };
1663
1664            // Set up source frame with VideoFrame data
1665            let src_frame_guard = AvFrameGuard::new()?;
1666            let src_frame = src_frame_guard.as_ptr();
1667
1668            (*src_frame).width = src_width as i32;
1669            (*src_frame).height = src_height as i32;
1670            (*src_frame).format = av_format;
1671
1672            // Set up source frame data pointers directly from VideoFrame (no copy)
1673            let planes = frame.planes();
1674            let strides = frame.strides();
1675
1676            for (i, plane_data) in planes.iter().enumerate() {
1677                if i >= ff_sys::AV_NUM_DATA_POINTERS as usize {
1678                    break;
1679                }
1680                (*src_frame).data[i] = plane_data.as_ref().as_ptr().cast_mut();
1681                (*src_frame).linesize[i] = strides[i] as i32;
1682            }
1683
1684            // Allocate destination frame
1685            let dst_frame_guard = AvFrameGuard::new()?;
1686            let dst_frame = dst_frame_guard.as_ptr();
1687
1688            (*dst_frame).width = scaled_width as i32;
1689            (*dst_frame).height = scaled_height as i32;
1690            (*dst_frame).format = av_format;
1691
1692            // Allocate buffer for destination frame
1693            let buffer_ret = ff_sys::av_frame_get_buffer(dst_frame, 0);
1694            if buffer_ret < 0 {
1695                // Clean up context if not cached
1696                if !is_cached {
1697                    ff_sys::swscale::free_context(sws_ctx);
1698                }
1699                return Err(DecodeError::Ffmpeg(format!(
1700                    "Failed to allocate destination frame buffer: {}",
1701                    ff_sys::av_error_string(buffer_ret)
1702                )));
1703            }
1704
1705            // Perform scaling
1706            let scale_result = ff_sys::swscale::scale(
1707                sws_ctx,
1708                (*src_frame).data.as_ptr() as *const *const u8,
1709                (*src_frame).linesize.as_ptr(),
1710                0,
1711                src_height as i32,
1712                (*dst_frame).data.as_ptr() as *const *mut u8,
1713                (*dst_frame).linesize.as_ptr(),
1714            );
1715
1716            if let Err(e) = scale_result {
1717                // Clean up context if not cached
1718                if !is_cached {
1719                    ff_sys::swscale::free_context(sws_ctx);
1720                }
1721                return Err(DecodeError::Ffmpeg(format!("Failed to scale frame: {e}")));
1722            }
1723
1724            // Scaling successful - cache the context if it's new
1725            if !is_cached {
1726                self.thumbnail_sws_ctx = Some(sws_ctx);
1727                self.thumbnail_cache_key = Some(cache_key);
1728            }
1729
1730            // Copy timestamp
1731            (*dst_frame).pts = frame.timestamp().pts();
1732
1733            // Convert destination frame to VideoFrame
1734            let video_frame = self.av_frame_to_video_frame(dst_frame)?;
1735
1736            Ok(video_frame)
1737        }
1738    }
1739}
1740
impl Drop for VideoDecoderInner {
    /// Releases every FFmpeg resource owned by the decoder.
    ///
    /// Teardown proceeds in a fixed order: the two SwScale contexts and the
    /// hardware device buffer first, then the scratch frame and packet, then
    /// the codec context, and finally the format context everything was
    /// opened from.
    ///
    /// Each free/close call is handed a pointer to a *temporary* copy of the
    /// field (`&mut (x as *mut _)`), so FFmpeg nulls only the temporary and
    /// the struct field itself is left dangling. That is harmless here: the
    /// struct is being destroyed and none of the fields can be observed
    /// again.
    fn drop(&mut self) {
        // Free SwScale context if allocated
        if let Some(sws_ctx) = self.sws_ctx {
            // SAFETY: sws_ctx is valid and owned by this instance
            unsafe {
                ff_sys::swscale::free_context(sws_ctx);
            }
        }

        // Free cached thumbnail SwScale context if allocated
        // (kept separate from sws_ctx; cached by scale_frame)
        if let Some(thumbnail_ctx) = self.thumbnail_sws_ctx {
            // SAFETY: thumbnail_ctx is valid and owned by this instance
            unsafe {
                ff_sys::swscale::free_context(thumbnail_ctx);
            }
        }

        // Free hardware device context if allocated. av_buffer_unref drops
        // our reference; the device itself is destroyed once the refcount
        // reaches zero.
        if let Some(hw_ctx) = self.hw_device_ctx {
            // SAFETY: hw_ctx is valid and owned by this instance
            unsafe {
                ff_sys::av_buffer_unref(&mut (hw_ctx as *mut _));
            }
        }

        // Free frame and packet
        if !self.frame.is_null() {
            // SAFETY: self.frame is valid and owned by this instance
            unsafe {
                ff_sys::av_frame_free(&mut (self.frame as *mut _));
            }
        }

        if !self.packet.is_null() {
            // SAFETY: self.packet is valid and owned by this instance
            unsafe {
                ff_sys::av_packet_free(&mut (self.packet as *mut _));
            }
        }

        // Free codec context (before the format context it was opened from)
        if !self.codec_ctx.is_null() {
            // SAFETY: self.codec_ctx is valid and owned by this instance
            unsafe {
                ff_sys::avcodec::free_context(&mut (self.codec_ctx as *mut _));
            }
        }

        // Close format context last
        if !self.format_ctx.is_null() {
            // SAFETY: self.format_ctx is valid and owned by this instance
            unsafe {
                ff_sys::avformat::close_input(&mut (self.format_ctx as *mut _));
            }
        }
    }
}
1799
// SAFETY: VideoDecoderInner exclusively owns all of its raw FFmpeg pointers
// (format/codec contexts, frame, packet, SwScale contexts, optional hardware
// device buffer); nothing else holds them, so moving the whole struct to
// another thread transfers that exclusive ownership intact. All mutation goes
// through &mut self, so the contexts are never used from two threads at once.
// Note: Sync is intentionally not implemented — shared references across
// threads would not be safe for these contexts.
// NOTE(review): assumes no FFmpeg callback registered on these contexts
// captures thread-affine state — confirm for hardware device contexts.
unsafe impl Send for VideoDecoderInner {}
1803
#[cfg(test)]
mod tests {
    use ff_format::PixelFormat;
    use ff_format::codec::VideoCodec;
    use ff_format::color::{ColorPrimaries, ColorRange, ColorSpace};

    use crate::HardwareAccel;

    use super::VideoDecoderInner;

    /// Expands to a named `#[test]` asserting `$actual == $expected`.
    ///
    /// Every conversion mapping keeps its own named test so a failure points
    /// straight at the offending case in the test report.
    macro_rules! conversion_case {
        ($name:ident, $actual:expr, $expected:expr $(,)?) => {
            #[test]
            fn $name() {
                assert_eq!($actual, $expected);
            }
        };
    }

    // -------------------------------------------------------------------------
    // convert_pixel_format
    // -------------------------------------------------------------------------

    conversion_case!(
        pixel_format_yuv420p,
        VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P),
        PixelFormat::Yuv420p
    );

    conversion_case!(
        pixel_format_yuv422p,
        VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P),
        PixelFormat::Yuv422p
    );

    conversion_case!(
        pixel_format_yuv444p,
        VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P),
        PixelFormat::Yuv444p
    );

    conversion_case!(
        pixel_format_rgb24,
        VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24),
        PixelFormat::Rgb24
    );

    conversion_case!(
        pixel_format_bgr24,
        VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24),
        PixelFormat::Bgr24
    );

    conversion_case!(
        pixel_format_rgba,
        VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA),
        PixelFormat::Rgba
    );

    conversion_case!(
        pixel_format_bgra,
        VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA),
        PixelFormat::Bgra
    );

    conversion_case!(
        pixel_format_gray8,
        VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8),
        PixelFormat::Gray8
    );

    conversion_case!(
        pixel_format_nv12,
        VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_NV12),
        PixelFormat::Nv12
    );

    conversion_case!(
        pixel_format_nv21,
        VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_NV21),
        PixelFormat::Nv21
    );

    conversion_case!(
        pixel_format_unknown_falls_back_to_yuv420p,
        VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_NONE),
        PixelFormat::Yuv420p
    );

    // -------------------------------------------------------------------------
    // convert_color_space
    // -------------------------------------------------------------------------

    conversion_case!(
        color_space_bt709,
        VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_BT709),
        ColorSpace::Bt709
    );

    conversion_case!(
        color_space_bt470bg_yields_bt601,
        VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_BT470BG),
        ColorSpace::Bt601
    );

    conversion_case!(
        color_space_smpte170m_yields_bt601,
        VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_SMPTE170M),
        ColorSpace::Bt601
    );

    conversion_case!(
        color_space_bt2020_ncl,
        VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_BT2020_NCL),
        ColorSpace::Bt2020
    );

    conversion_case!(
        color_space_unknown_falls_back_to_bt709,
        VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_UNSPECIFIED),
        ColorSpace::Bt709
    );

    // -------------------------------------------------------------------------
    // convert_color_range
    // -------------------------------------------------------------------------

    conversion_case!(
        color_range_jpeg_yields_full,
        VideoDecoderInner::convert_color_range(ff_sys::AVColorRange_AVCOL_RANGE_JPEG),
        ColorRange::Full
    );

    conversion_case!(
        color_range_mpeg_yields_limited,
        VideoDecoderInner::convert_color_range(ff_sys::AVColorRange_AVCOL_RANGE_MPEG),
        ColorRange::Limited
    );

    conversion_case!(
        color_range_unknown_falls_back_to_limited,
        VideoDecoderInner::convert_color_range(ff_sys::AVColorRange_AVCOL_RANGE_UNSPECIFIED),
        ColorRange::Limited
    );

    // -------------------------------------------------------------------------
    // convert_color_primaries
    // -------------------------------------------------------------------------

    conversion_case!(
        color_primaries_bt709,
        VideoDecoderInner::convert_color_primaries(ff_sys::AVColorPrimaries_AVCOL_PRI_BT709),
        ColorPrimaries::Bt709
    );

    conversion_case!(
        color_primaries_bt470bg_yields_bt601,
        VideoDecoderInner::convert_color_primaries(ff_sys::AVColorPrimaries_AVCOL_PRI_BT470BG),
        ColorPrimaries::Bt601
    );

    conversion_case!(
        color_primaries_smpte170m_yields_bt601,
        VideoDecoderInner::convert_color_primaries(ff_sys::AVColorPrimaries_AVCOL_PRI_SMPTE170M),
        ColorPrimaries::Bt601
    );

    conversion_case!(
        color_primaries_bt2020,
        VideoDecoderInner::convert_color_primaries(ff_sys::AVColorPrimaries_AVCOL_PRI_BT2020),
        ColorPrimaries::Bt2020
    );

    conversion_case!(
        color_primaries_unknown_falls_back_to_bt709,
        VideoDecoderInner::convert_color_primaries(
            ff_sys::AVColorPrimaries_AVCOL_PRI_UNSPECIFIED
        ),
        ColorPrimaries::Bt709
    );

    // -------------------------------------------------------------------------
    // convert_codec
    // -------------------------------------------------------------------------

    conversion_case!(
        codec_h264,
        VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_H264),
        VideoCodec::H264
    );

    conversion_case!(
        codec_hevc_yields_h265,
        VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_HEVC),
        VideoCodec::H265
    );

    conversion_case!(
        codec_vp8,
        VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_VP8),
        VideoCodec::Vp8
    );

    conversion_case!(
        codec_vp9,
        VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_VP9),
        VideoCodec::Vp9
    );

    conversion_case!(
        codec_av1,
        VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_AV1),
        VideoCodec::Av1
    );

    conversion_case!(
        codec_mpeg4,
        VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_MPEG4),
        VideoCodec::Mpeg4
    );

    conversion_case!(
        codec_prores,
        VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_PRORES),
        VideoCodec::ProRes
    );

    conversion_case!(
        codec_unknown_falls_back_to_h264,
        VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_NONE),
        VideoCodec::H264
    );

    // -------------------------------------------------------------------------
    // hw_accel_to_device_type
    // -------------------------------------------------------------------------

    conversion_case!(
        hw_accel_auto_yields_none,
        VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Auto),
        None
    );

    conversion_case!(
        hw_accel_none_yields_none,
        VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::None),
        None
    );

    conversion_case!(
        hw_accel_nvdec_yields_cuda,
        VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Nvdec),
        Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_CUDA)
    );

    conversion_case!(
        hw_accel_qsv_yields_qsv,
        VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Qsv),
        Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_QSV)
    );

    conversion_case!(
        hw_accel_amf_yields_d3d11va,
        VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Amf),
        Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_D3D11VA)
    );

    conversion_case!(
        hw_accel_videotoolbox,
        VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::VideoToolbox),
        Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VIDEOTOOLBOX)
    );

    conversion_case!(
        hw_accel_vaapi,
        VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Vaapi),
        Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VAAPI)
    );

    // -------------------------------------------------------------------------
    // pixel_format_to_av — round-trip
    // -------------------------------------------------------------------------

    conversion_case!(
        pixel_format_to_av_round_trip_yuv420p,
        VideoDecoderInner::convert_pixel_format(VideoDecoderInner::pixel_format_to_av(
            PixelFormat::Yuv420p
        )),
        PixelFormat::Yuv420p
    );

    conversion_case!(
        pixel_format_to_av_round_trip_yuv422p,
        VideoDecoderInner::convert_pixel_format(VideoDecoderInner::pixel_format_to_av(
            PixelFormat::Yuv422p
        )),
        PixelFormat::Yuv422p
    );

    conversion_case!(
        pixel_format_to_av_round_trip_yuv444p,
        VideoDecoderInner::convert_pixel_format(VideoDecoderInner::pixel_format_to_av(
            PixelFormat::Yuv444p
        )),
        PixelFormat::Yuv444p
    );

    conversion_case!(
        pixel_format_to_av_round_trip_rgb24,
        VideoDecoderInner::convert_pixel_format(VideoDecoderInner::pixel_format_to_av(
            PixelFormat::Rgb24
        )),
        PixelFormat::Rgb24
    );

    conversion_case!(
        pixel_format_to_av_round_trip_bgr24,
        VideoDecoderInner::convert_pixel_format(VideoDecoderInner::pixel_format_to_av(
            PixelFormat::Bgr24
        )),
        PixelFormat::Bgr24
    );

    conversion_case!(
        pixel_format_to_av_round_trip_rgba,
        VideoDecoderInner::convert_pixel_format(VideoDecoderInner::pixel_format_to_av(
            PixelFormat::Rgba
        )),
        PixelFormat::Rgba
    );

    conversion_case!(
        pixel_format_to_av_round_trip_bgra,
        VideoDecoderInner::convert_pixel_format(VideoDecoderInner::pixel_format_to_av(
            PixelFormat::Bgra
        )),
        PixelFormat::Bgra
    );

    conversion_case!(
        pixel_format_to_av_round_trip_gray8,
        VideoDecoderInner::convert_pixel_format(VideoDecoderInner::pixel_format_to_av(
            PixelFormat::Gray8
        )),
        PixelFormat::Gray8
    );

    conversion_case!(
        pixel_format_to_av_round_trip_nv12,
        VideoDecoderInner::convert_pixel_format(VideoDecoderInner::pixel_format_to_av(
            PixelFormat::Nv12
        )),
        PixelFormat::Nv12
    );

    conversion_case!(
        pixel_format_to_av_round_trip_nv21,
        VideoDecoderInner::convert_pixel_format(VideoDecoderInner::pixel_format_to_av(
            PixelFormat::Nv21
        )),
        PixelFormat::Nv21
    );

    // Yuv420p10le has no explicit mapping in pixel_format_to_av, so it hits the _ arm
    conversion_case!(
        pixel_format_to_av_unknown_falls_back_to_yuv420p_av,
        VideoDecoderInner::pixel_format_to_av(PixelFormat::Yuv420p10le),
        ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P
    );
}