// ff_decode/video/decoder_inner.rs
1//! Internal video decoder implementation using FFmpeg.
2//!
3//! This module contains the low-level decoder logic that directly interacts
4//! with FFmpeg's C API through the ff-sys crate. It is not exposed publicly.
5
6// Allow unsafe code in this module as it's necessary for FFmpeg FFI
7#![allow(unsafe_code)]
8// Allow specific clippy lints for FFmpeg FFI code
9#![allow(clippy::similar_names)]
10#![allow(clippy::too_many_lines)]
11#![allow(clippy::cast_sign_loss)]
12#![allow(clippy::cast_possible_truncation)]
13#![allow(clippy::cast_possible_wrap)]
14#![allow(clippy::module_name_repetitions)]
15#![allow(clippy::match_same_arms)]
16#![allow(clippy::ptr_as_ptr)]
17#![allow(clippy::doc_markdown)]
18#![allow(clippy::unnecessary_cast)]
19#![allow(clippy::if_not_else)]
20#![allow(clippy::unnecessary_wraps)]
21#![allow(clippy::cast_precision_loss)]
22#![allow(clippy::if_same_then_else)]
23#![allow(clippy::cast_lossless)]
24
25use std::path::Path;
26use std::ptr;
27use std::sync::Arc;
28use std::time::Duration;
29
30use ff_format::PooledBuffer;
31use ff_format::codec::VideoCodec;
32use ff_format::color::{ColorPrimaries, ColorRange, ColorSpace};
33use ff_format::time::{Rational, Timestamp};
34use ff_format::{PixelFormat, VideoFrame, VideoStreamInfo};
35use ff_sys::{
36    AVBufferRef, AVCodecContext, AVCodecID, AVColorPrimaries, AVColorRange, AVColorSpace,
37    AVFormatContext, AVFrame, AVHWDeviceType, AVMediaType_AVMEDIA_TYPE_VIDEO, AVPacket,
38    AVPixelFormat, SwsContext,
39};
40
41use crate::HardwareAccel;
42use crate::error::DecodeError;
43use ff_common::FramePool;
44
/// Tolerance in seconds for keyframe/backward seek modes.
///
/// When seeking in Keyframe or Backward mode, frames are skipped until we're within
/// this tolerance of the target position. This balances accuracy with performance for
/// typical GOP sizes (1-2 seconds). Increasing it makes seeks faster but less
/// precise; decreasing it may force decoding many frames past the keyframe.
const KEYFRAME_SEEK_TOLERANCE_SECS: u64 = 1;
51
52/// RAII guard for `AVFormatContext` to ensure proper cleanup.
53struct AvFormatContextGuard(*mut AVFormatContext);
54
55impl AvFormatContextGuard {
56    /// Creates a new guard by opening an input file.
57    ///
58    /// # Safety
59    ///
60    /// Caller must ensure FFmpeg is initialized and path is valid.
61    unsafe fn new(path: &Path) -> Result<Self, DecodeError> {
62        // SAFETY: Caller ensures FFmpeg is initialized and path is valid
63        let format_ctx = unsafe {
64            ff_sys::avformat::open_input(path).map_err(|e| DecodeError::Ffmpeg {
65                code: e,
66                message: format!("Failed to open file: {}", ff_sys::av_error_string(e)),
67            })?
68        };
69        Ok(Self(format_ctx))
70    }
71
72    /// Returns the raw pointer.
73    const fn as_ptr(&self) -> *mut AVFormatContext {
74        self.0
75    }
76
77    /// Consumes the guard and returns the raw pointer without dropping.
78    fn into_raw(self) -> *mut AVFormatContext {
79        let ptr = self.0;
80        std::mem::forget(self);
81        ptr
82    }
83}
84
85impl Drop for AvFormatContextGuard {
86    fn drop(&mut self) {
87        if !self.0.is_null() {
88            // SAFETY: self.0 is valid and owned by this guard
89            unsafe {
90                ff_sys::avformat::close_input(&mut (self.0 as *mut _));
91            }
92        }
93    }
94}
95
96/// RAII guard for `AVCodecContext` to ensure proper cleanup.
97struct AvCodecContextGuard(*mut AVCodecContext);
98
99impl AvCodecContextGuard {
100    /// Creates a new guard by allocating a codec context.
101    ///
102    /// # Safety
103    ///
104    /// Caller must ensure codec pointer is valid.
105    unsafe fn new(codec: *const ff_sys::AVCodec) -> Result<Self, DecodeError> {
106        // SAFETY: Caller ensures codec pointer is valid
107        let codec_ctx = unsafe {
108            ff_sys::avcodec::alloc_context3(codec).map_err(|e| DecodeError::Ffmpeg {
109                code: e,
110                message: format!("Failed to allocate codec context: {e}"),
111            })?
112        };
113        Ok(Self(codec_ctx))
114    }
115
116    /// Returns the raw pointer.
117    const fn as_ptr(&self) -> *mut AVCodecContext {
118        self.0
119    }
120
121    /// Consumes the guard and returns the raw pointer without dropping.
122    fn into_raw(self) -> *mut AVCodecContext {
123        let ptr = self.0;
124        std::mem::forget(self);
125        ptr
126    }
127}
128
129impl Drop for AvCodecContextGuard {
130    fn drop(&mut self) {
131        if !self.0.is_null() {
132            // SAFETY: self.0 is valid and owned by this guard
133            unsafe {
134                ff_sys::avcodec::free_context(&mut (self.0 as *mut _));
135            }
136        }
137    }
138}
139
140/// RAII guard for `AVPacket` to ensure proper cleanup.
141struct AvPacketGuard(*mut AVPacket);
142
143impl AvPacketGuard {
144    /// Creates a new guard by allocating a packet.
145    ///
146    /// # Safety
147    ///
148    /// Must be called after FFmpeg initialization.
149    unsafe fn new() -> Result<Self, DecodeError> {
150        // SAFETY: Caller ensures FFmpeg is initialized
151        let packet = unsafe { ff_sys::av_packet_alloc() };
152        if packet.is_null() {
153            return Err(DecodeError::Ffmpeg {
154                code: 0,
155                message: "Failed to allocate packet".to_string(),
156            });
157        }
158        Ok(Self(packet))
159    }
160
161    /// Returns the raw pointer.
162    #[allow(dead_code)]
163    const fn as_ptr(&self) -> *mut AVPacket {
164        self.0
165    }
166
167    /// Consumes the guard and returns the raw pointer without dropping.
168    fn into_raw(self) -> *mut AVPacket {
169        let ptr = self.0;
170        std::mem::forget(self);
171        ptr
172    }
173}
174
175impl Drop for AvPacketGuard {
176    fn drop(&mut self) {
177        if !self.0.is_null() {
178            // SAFETY: self.0 is valid and owned by this guard
179            unsafe {
180                ff_sys::av_packet_free(&mut (self.0 as *mut _));
181            }
182        }
183    }
184}
185
186/// RAII guard for `AVFrame` to ensure proper cleanup.
187struct AvFrameGuard(*mut AVFrame);
188
189impl AvFrameGuard {
190    /// Creates a new guard by allocating a frame.
191    ///
192    /// # Safety
193    ///
194    /// Must be called after FFmpeg initialization.
195    unsafe fn new() -> Result<Self, DecodeError> {
196        // SAFETY: Caller ensures FFmpeg is initialized
197        let frame = unsafe { ff_sys::av_frame_alloc() };
198        if frame.is_null() {
199            return Err(DecodeError::Ffmpeg {
200                code: 0,
201                message: "Failed to allocate frame".to_string(),
202            });
203        }
204        Ok(Self(frame))
205    }
206
207    /// Returns the raw pointer.
208    const fn as_ptr(&self) -> *mut AVFrame {
209        self.0
210    }
211
212    /// Consumes the guard and returns the raw pointer without dropping.
213    fn into_raw(self) -> *mut AVFrame {
214        let ptr = self.0;
215        std::mem::forget(self);
216        ptr
217    }
218}
219
220impl Drop for AvFrameGuard {
221    fn drop(&mut self) {
222        if !self.0.is_null() {
223            // SAFETY: self.0 is valid and owned by this guard
224            unsafe {
225                ff_sys::av_frame_free(&mut (self.0 as *mut _));
226            }
227        }
228    }
229}
230
/// Internal decoder state holding FFmpeg contexts.
///
/// This structure manages the lifecycle of FFmpeg objects and is responsible
/// for proper cleanup when dropped.
///
/// All raw pointers here are owned by this struct (they are produced by
/// `VideoDecoderInner::new` via guards whose ownership is transferred with
/// `into_raw`). NOTE(review): the matching `Drop` impl is not visible in this
/// chunk — confirm it frees every pointer below.
pub(crate) struct VideoDecoderInner {
    /// Format context for reading the media file
    format_ctx: *mut AVFormatContext,
    /// Codec context for decoding video frames
    codec_ctx: *mut AVCodecContext,
    /// Video stream index in the format context
    stream_index: i32,
    /// SwScale context for pixel format conversion (optional, created lazily)
    sws_ctx: Option<*mut SwsContext>,
    /// Target output pixel format (if conversion is needed)
    output_format: Option<PixelFormat>,
    /// Whether end of file has been reached
    eof: bool,
    /// Current playback position
    position: Duration,
    /// Reusable packet for reading from file
    packet: *mut AVPacket,
    /// Reusable frame for decoding
    frame: *mut AVFrame,
    /// Cached SwScale context for thumbnail generation
    thumbnail_sws_ctx: Option<*mut SwsContext>,
    /// Last thumbnail dimensions (for cache invalidation)
    thumbnail_cache_key: Option<(u32, u32, u32, u32, AVPixelFormat)>,
    /// Hardware device context (if hardware acceleration is active).
    /// This is our own `av_buffer_ref` clone; the codec context holds another.
    hw_device_ctx: Option<*mut AVBufferRef>,
    /// Active hardware acceleration mode (what actually initialized, not what
    /// was requested — see `init_hardware_accel`)
    active_hw_accel: HardwareAccel,
    /// Optional frame pool for memory reuse
    frame_pool: Option<Arc<dyn FramePool>>,
}
265
266impl VideoDecoderInner {
267    /// Maps our `HardwareAccel` enum to the corresponding FFmpeg `AVHWDeviceType`.
268    ///
269    /// Returns `None` for `Auto` and `None` variants as they require special handling.
270    fn hw_accel_to_device_type(accel: HardwareAccel) -> Option<AVHWDeviceType> {
271        match accel {
272            HardwareAccel::Auto => None,
273            HardwareAccel::None => None,
274            HardwareAccel::Nvdec => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_CUDA),
275            HardwareAccel::Qsv => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_QSV),
276            HardwareAccel::Amf => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_D3D11VA), // AMF uses D3D11
277            HardwareAccel::VideoToolbox => {
278                Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VIDEOTOOLBOX)
279            }
280            HardwareAccel::Vaapi => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VAAPI),
281        }
282    }
283
    /// Returns the hardware decoders to try in priority order for Auto mode.
    ///
    /// Used by `init_hardware_accel` when `HardwareAccel::Auto` is requested:
    /// the first accelerator that initializes successfully wins.
    const fn hw_accel_auto_priority() -> &'static [HardwareAccel] {
        // Priority order: NVDEC, QSV, VideoToolbox, VA-API, AMF
        &[
            HardwareAccel::Nvdec,
            HardwareAccel::Qsv,
            HardwareAccel::VideoToolbox,
            HardwareAccel::Vaapi,
            HardwareAccel::Amf,
        ]
    }
295
296    /// Attempts to initialize hardware acceleration.
297    ///
298    /// # Arguments
299    ///
300    /// * `codec_ctx` - The codec context to configure
301    /// * `accel` - Requested hardware acceleration mode
302    ///
303    /// # Returns
304    ///
305    /// Returns `Ok((hw_device_ctx, active_accel))` if hardware acceleration was initialized,
306    /// or `Ok((None, HardwareAccel::None))` if software decoding should be used.
307    ///
308    /// # Errors
309    ///
310    /// Returns an error only if a specific hardware accelerator was requested but failed to initialize.
311    unsafe fn init_hardware_accel(
312        codec_ctx: *mut AVCodecContext,
313        accel: HardwareAccel,
314    ) -> Result<(Option<*mut AVBufferRef>, HardwareAccel), DecodeError> {
315        match accel {
316            HardwareAccel::Auto => {
317                // Try hardware accelerators in priority order
318                for &hw_type in Self::hw_accel_auto_priority() {
319                    // SAFETY: Caller ensures codec_ctx is valid and not yet configured with hardware
320                    if let Ok((Some(ctx), active)) =
321                        unsafe { Self::try_init_hw_device(codec_ctx, hw_type) }
322                    {
323                        return Ok((Some(ctx), active));
324                    }
325                    // Ignore errors in Auto mode and try the next one
326                }
327                // All hardware accelerators failed, fall back to software
328                Ok((None, HardwareAccel::None))
329            }
330            HardwareAccel::None => {
331                // Software decoding explicitly requested
332                Ok((None, HardwareAccel::None))
333            }
334            _ => {
335                // Specific hardware accelerator requested
336                // SAFETY: Caller ensures codec_ctx is valid and not yet configured with hardware
337                unsafe { Self::try_init_hw_device(codec_ctx, accel) }
338            }
339        }
340    }
341
342    /// Tries to initialize a specific hardware device.
343    ///
344    /// # Safety
345    ///
346    /// Caller must ensure `codec_ctx` is valid and not yet configured with a hardware device.
347    unsafe fn try_init_hw_device(
348        codec_ctx: *mut AVCodecContext,
349        accel: HardwareAccel,
350    ) -> Result<(Option<*mut AVBufferRef>, HardwareAccel), DecodeError> {
351        // Get the FFmpeg device type
352        let Some(device_type) = Self::hw_accel_to_device_type(accel) else {
353            return Ok((None, HardwareAccel::None));
354        };
355
356        // Create hardware device context
357        // SAFETY: FFmpeg is initialized, device_type is valid
358        let mut hw_device_ctx: *mut AVBufferRef = ptr::null_mut();
359        let ret = unsafe {
360            ff_sys::av_hwdevice_ctx_create(
361                ptr::addr_of_mut!(hw_device_ctx),
362                device_type,
363                ptr::null(),     // device: null for default device
364                ptr::null_mut(), // opts: null for default options
365                0,               // flags: currently unused by FFmpeg
366            )
367        };
368
369        if ret < 0 {
370            // Hardware device creation failed
371            return Err(DecodeError::HwAccelUnavailable { accel });
372        }
373
374        // Assign hardware device context to codec context
375        // We transfer ownership of the reference to codec_ctx
376        // SAFETY: codec_ctx and hw_device_ctx are valid
377        unsafe {
378            (*codec_ctx).hw_device_ctx = hw_device_ctx;
379        }
380
381        // We keep our own reference for cleanup in Drop
382        // SAFETY: hw_device_ctx is valid
383        let our_ref = unsafe { ff_sys::av_buffer_ref(hw_device_ctx) };
384        if our_ref.is_null() {
385            // Failed to create our reference
386            // codec_ctx still owns the original, so we don't need to clean it up here
387            return Err(DecodeError::HwAccelUnavailable { accel });
388        }
389
390        Ok((Some(our_ref), accel))
391    }
392
    /// Returns the currently active hardware acceleration mode.
    ///
    /// This reflects what was actually initialized (e.g. the accelerator chosen
    /// in Auto mode, or `HardwareAccel::None` after software fallback), not
    /// necessarily the mode originally requested by the caller.
    pub(crate) fn hardware_accel(&self) -> HardwareAccel {
        self.active_hw_accel
    }
397
398    /// Checks if a pixel format is a hardware format.
399    ///
400    /// Hardware formats include: D3D11, CUDA, VAAPI, VideoToolbox, QSV, etc.
401    const fn is_hardware_format(format: AVPixelFormat) -> bool {
402        matches!(
403            format,
404            ff_sys::AVPixelFormat_AV_PIX_FMT_D3D11
405                | ff_sys::AVPixelFormat_AV_PIX_FMT_CUDA
406                | ff_sys::AVPixelFormat_AV_PIX_FMT_VAAPI
407                | ff_sys::AVPixelFormat_AV_PIX_FMT_VIDEOTOOLBOX
408                | ff_sys::AVPixelFormat_AV_PIX_FMT_QSV
409                | ff_sys::AVPixelFormat_AV_PIX_FMT_VDPAU
410                | ff_sys::AVPixelFormat_AV_PIX_FMT_DXVA2_VLD
411                | ff_sys::AVPixelFormat_AV_PIX_FMT_OPENCL
412                | ff_sys::AVPixelFormat_AV_PIX_FMT_MEDIACODEC
413                | ff_sys::AVPixelFormat_AV_PIX_FMT_VULKAN
414        )
415    }
416
    /// Transfers a hardware frame to CPU memory if needed.
    ///
    /// If `self.frame` is a hardware frame, creates a new software frame
    /// and transfers the data from GPU to CPU memory, then swaps it into
    /// `self.frame` in place. Software frames pass through untouched.
    ///
    /// # Safety
    ///
    /// Caller must ensure `self.frame` contains a valid decoded frame.
    unsafe fn transfer_hardware_frame_if_needed(&mut self) -> Result<(), DecodeError> {
        // SAFETY: self.frame is valid and owned by this instance
        let frame_format = unsafe { (*self.frame).format };

        if !Self::is_hardware_format(frame_format) {
            // Not a hardware frame, no transfer needed
            return Ok(());
        }

        // Create a temporary software frame for transfer
        // SAFETY: FFmpeg is initialized
        let sw_frame = unsafe { ff_sys::av_frame_alloc() };
        if sw_frame.is_null() {
            return Err(DecodeError::Ffmpeg {
                code: 0,
                message: "Failed to allocate software frame for hardware transfer".to_string(),
            });
        }

        // Transfer data from hardware frame to software frame
        // SAFETY: self.frame and sw_frame are valid
        let ret = unsafe {
            ff_sys::av_hwframe_transfer_data(
                sw_frame, self.frame, 0, // flags: currently unused
            )
        };

        if ret < 0 {
            // Transfer failed: free the temporary frame before reporting
            unsafe {
                ff_sys::av_frame_free(&mut (sw_frame as *mut _));
            }
            return Err(DecodeError::Ffmpeg {
                code: ret,
                message: format!(
                    "Failed to transfer hardware frame to CPU memory: {}",
                    ff_sys::av_error_string(ret)
                ),
            });
        }

        // Copy metadata (pts, duration, etc.) from hardware frame to software frame.
        // NOTE(review): only pts/pkt_dts/duration/time_base are copied here;
        // other frame properties (e.g. color metadata) are not — confirm whether
        // av_frame_copy_props should be used instead.
        // SAFETY: Both frames are valid
        unsafe {
            (*sw_frame).pts = (*self.frame).pts;
            (*sw_frame).pkt_dts = (*self.frame).pkt_dts;
            (*sw_frame).duration = (*self.frame).duration;
            (*sw_frame).time_base = (*self.frame).time_base;
        }

        // Replace self.frame with the software frame: unref the hardware data,
        // move the software frame's buffers in, then free the now-empty shell.
        // SAFETY: self.frame is valid and owned by this instance
        unsafe {
            ff_sys::av_frame_unref(self.frame);
            ff_sys::av_frame_move_ref(self.frame, sw_frame);
            ff_sys::av_frame_free(&mut (sw_frame as *mut _));
        }

        Ok(())
    }
485
    /// Opens a media file and initializes the decoder.
    ///
    /// Cleanup on every error path is handled by RAII guards; raw-pointer
    /// ownership is transferred to the returned `VideoDecoderInner` only after
    /// all initialization steps have succeeded.
    ///
    /// # Arguments
    ///
    /// * `path` - Path to the media file
    /// * `output_format` - Optional target pixel format for conversion
    /// * `hardware_accel` - Hardware acceleration mode
    /// * `thread_count` - Number of decoding threads (0 = auto)
    /// * `frame_pool` - Optional frame pool for memory reuse
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - The file cannot be opened
    /// - No video stream is found
    /// - The codec is not supported
    /// - Decoder initialization fails
    pub(crate) fn new(
        path: &Path,
        output_format: Option<PixelFormat>,
        hardware_accel: HardwareAccel,
        thread_count: usize,
        frame_pool: Option<Arc<dyn FramePool>>,
    ) -> Result<(Self, VideoStreamInfo), DecodeError> {
        // Ensure FFmpeg is initialized (thread-safe and idempotent)
        ff_sys::ensure_initialized();

        // Open the input file (with RAII guard)
        // SAFETY: Path is valid, AvFormatContextGuard ensures cleanup
        let format_ctx_guard = unsafe { AvFormatContextGuard::new(path)? };
        let format_ctx = format_ctx_guard.as_ptr();

        // Read stream information
        // SAFETY: format_ctx is valid and owned by guard
        unsafe {
            ff_sys::avformat::find_stream_info(format_ctx).map_err(|e| DecodeError::Ffmpeg {
                code: e,
                message: format!("Failed to find stream info: {}", ff_sys::av_error_string(e)),
            })?;
        }

        // Find the video stream
        // SAFETY: format_ctx is valid
        let (stream_index, codec_id) =
            unsafe { Self::find_video_stream(format_ctx) }.ok_or_else(|| {
                DecodeError::NoVideoStream {
                    path: path.to_path_buf(),
                }
            })?;

        // Find the decoder for this codec
        // SAFETY: codec_id is valid from FFmpeg
        let codec = unsafe {
            ff_sys::avcodec::find_decoder(codec_id).ok_or_else(|| {
                DecodeError::UnsupportedCodec {
                    codec: format!("codec_id={codec_id:?}"),
                }
            })?
        };

        // Allocate codec context (with RAII guard)
        // SAFETY: codec pointer is valid, AvCodecContextGuard ensures cleanup
        let codec_ctx_guard = unsafe { AvCodecContextGuard::new(codec)? };
        let codec_ctx = codec_ctx_guard.as_ptr();

        // Copy codec parameters from stream to context
        // SAFETY: format_ctx and codec_ctx are valid, stream_index is valid
        unsafe {
            let stream = (*format_ctx).streams.add(stream_index as usize);
            let codecpar = (*(*stream)).codecpar;
            ff_sys::avcodec::parameters_to_context(codec_ctx, codecpar).map_err(|e| {
                DecodeError::Ffmpeg {
                    code: e,
                    message: format!(
                        "Failed to copy codec parameters: {}",
                        ff_sys::av_error_string(e)
                    ),
                }
            })?;

            // Set thread count (0 means keep FFmpeg's automatic choice)
            if thread_count > 0 {
                (*codec_ctx).thread_count = thread_count as i32;
            }
        }

        // Initialize hardware acceleration if requested.
        // Must happen before avcodec open so the device context is in place.
        // SAFETY: codec_ctx is valid and not yet opened
        let (hw_device_ctx, active_hw_accel) =
            unsafe { Self::init_hardware_accel(codec_ctx, hardware_accel)? };

        // Open the codec
        // SAFETY: codec_ctx and codec are valid, hardware device context is set if requested
        unsafe {
            ff_sys::avcodec::open2(codec_ctx, codec, ptr::null_mut()).map_err(|e| {
                // If codec opening failed, we still own our reference to hw_device_ctx
                // but it will be cleaned up when codec_ctx is freed (which happens
                // when codec_ctx_guard is dropped)
                // Our reference in hw_device_ctx will be cleaned up here
                if let Some(hw_ctx) = hw_device_ctx {
                    ff_sys::av_buffer_unref(&mut (hw_ctx as *mut _));
                }
                DecodeError::Ffmpeg {
                    code: e,
                    message: format!("Failed to open codec: {}", ff_sys::av_error_string(e)),
                }
            })?;
        }

        // Extract stream information
        // SAFETY: All pointers are valid
        let stream_info =
            unsafe { Self::extract_stream_info(format_ctx, stream_index as i32, codec_ctx)? };

        // Allocate packet and frame (with RAII guards)
        // SAFETY: FFmpeg is initialized, guards ensure cleanup
        let packet_guard = unsafe { AvPacketGuard::new()? };
        let frame_guard = unsafe { AvFrameGuard::new()? };

        // All initialization successful - transfer ownership to VideoDecoderInner
        // (into_raw disarms each guard so nothing is double-freed)
        Ok((
            Self {
                format_ctx: format_ctx_guard.into_raw(),
                codec_ctx: codec_ctx_guard.into_raw(),
                stream_index: stream_index as i32,
                sws_ctx: None,
                output_format,
                eof: false,
                position: Duration::ZERO,
                packet: packet_guard.into_raw(),
                frame: frame_guard.into_raw(),
                thumbnail_sws_ctx: None,
                thumbnail_cache_key: None,
                hw_device_ctx,
                active_hw_accel,
                frame_pool,
            },
            stream_info,
        ))
    }
625
626    /// Finds the first video stream in the format context.
627    ///
628    /// # Returns
629    ///
630    /// Returns `Some((index, codec_id))` if a video stream is found, `None` otherwise.
631    ///
632    /// # Safety
633    ///
634    /// Caller must ensure `format_ctx` is valid and initialized.
635    unsafe fn find_video_stream(format_ctx: *mut AVFormatContext) -> Option<(usize, AVCodecID)> {
636        // SAFETY: Caller ensures format_ctx is valid
637        unsafe {
638            let nb_streams = (*format_ctx).nb_streams as usize;
639
640            for i in 0..nb_streams {
641                let stream = (*format_ctx).streams.add(i);
642                let codecpar = (*(*stream)).codecpar;
643
644                if (*codecpar).codec_type == AVMediaType_AVMEDIA_TYPE_VIDEO {
645                    return Some((i, (*codecpar).codec_id));
646                }
647            }
648
649            None
650        }
651    }
652
    /// Extracts video stream information from FFmpeg structures.
    ///
    /// Reads dimensions, frame rate, duration, pixel format, color metadata,
    /// and codec from the stream/codec parameters and packages them into a
    /// `VideoStreamInfo`. Missing or invalid values fall back with a warning
    /// (see the individual `convert_*` helpers).
    unsafe fn extract_stream_info(
        format_ctx: *mut AVFormatContext,
        stream_index: i32,
        codec_ctx: *mut AVCodecContext,
    ) -> Result<VideoStreamInfo, DecodeError> {
        // Read all raw fields in one unsafe region, then convert outside it.
        // SAFETY: Caller ensures all pointers are valid
        let (
            width,
            height,
            fps_rational,
            duration_val,
            pix_fmt,
            color_space_val,
            color_range_val,
            color_primaries_val,
            codec_id,
        ) = unsafe {
            let stream = (*format_ctx).streams.add(stream_index as usize);
            let codecpar = (*(*stream)).codecpar;

            (
                (*codecpar).width as u32,
                (*codecpar).height as u32,
                (*(*stream)).avg_frame_rate,
                (*format_ctx).duration,
                (*codec_ctx).pix_fmt,
                (*codecpar).color_space,
                (*codecpar).color_range,
                (*codecpar).color_primaries,
                (*codecpar).codec_id,
            )
        };

        // Extract frame rate; a zero denominator means FFmpeg could not
        // determine an average rate, so fall back to 30fps.
        let frame_rate = if fps_rational.den != 0 {
            Rational::new(fps_rational.num as i32, fps_rational.den as i32)
        } else {
            log::warn!(
                "invalid frame rate, falling back to 30fps num={} den=0 fallback=30/1",
                fps_rational.num
            );
            Rational::new(30, 1)
        };

        // Extract duration: container duration is in microseconds (divided by
        // 1e6 here — presumably AV_TIME_BASE; confirm against ff_sys bindings).
        // Non-positive values mean "unknown".
        let duration = if duration_val > 0 {
            let duration_secs = duration_val as f64 / 1_000_000.0;
            Some(Duration::from_secs_f64(duration_secs))
        } else {
            None
        };

        // Extract pixel format
        let pixel_format = Self::convert_pixel_format(pix_fmt);

        // Extract color information
        let color_space = Self::convert_color_space(color_space_val);
        let color_range = Self::convert_color_range(color_range_val);
        let color_primaries = Self::convert_color_primaries(color_primaries_val);

        // Extract codec
        let codec = Self::convert_codec(codec_id);

        // Build stream info
        let mut builder = VideoStreamInfo::builder()
            .index(stream_index as u32)
            .codec(codec)
            .width(width)
            .height(height)
            .frame_rate(frame_rate)
            .pixel_format(pixel_format)
            .color_space(color_space)
            .color_range(color_range)
            .color_primaries(color_primaries);

        // Duration is optional on the builder; only set it when known.
        if let Some(d) = duration {
            builder = builder.duration(d);
        }

        Ok(builder.build())
    }
735
736    /// Converts FFmpeg pixel format to our PixelFormat enum.
737    fn convert_pixel_format(fmt: AVPixelFormat) -> PixelFormat {
738        if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P {
739            PixelFormat::Yuv420p
740        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P {
741            PixelFormat::Yuv422p
742        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P {
743            PixelFormat::Yuv444p
744        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24 {
745            PixelFormat::Rgb24
746        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24 {
747            PixelFormat::Bgr24
748        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA {
749            PixelFormat::Rgba
750        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA {
751            PixelFormat::Bgra
752        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8 {
753            PixelFormat::Gray8
754        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_NV12 {
755            PixelFormat::Nv12
756        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_NV21 {
757            PixelFormat::Nv21
758        } else {
759            log::warn!(
760                "pixel_format unsupported, falling back to Yuv420p requested={fmt} fallback=Yuv420p"
761            );
762            PixelFormat::Yuv420p
763        }
764    }
765
766    /// Converts FFmpeg color space to our ColorSpace enum.
767    fn convert_color_space(space: AVColorSpace) -> ColorSpace {
768        if space == ff_sys::AVColorSpace_AVCOL_SPC_BT709 {
769            ColorSpace::Bt709
770        } else if space == ff_sys::AVColorSpace_AVCOL_SPC_BT470BG
771            || space == ff_sys::AVColorSpace_AVCOL_SPC_SMPTE170M
772        {
773            ColorSpace::Bt601
774        } else if space == ff_sys::AVColorSpace_AVCOL_SPC_BT2020_NCL {
775            ColorSpace::Bt2020
776        } else {
777            log::warn!(
778                "color_space unsupported, falling back to Bt709 requested={space} fallback=Bt709"
779            );
780            ColorSpace::Bt709
781        }
782    }
783
784    /// Converts FFmpeg color range to our ColorRange enum.
785    fn convert_color_range(range: AVColorRange) -> ColorRange {
786        if range == ff_sys::AVColorRange_AVCOL_RANGE_JPEG {
787            ColorRange::Full
788        } else if range == ff_sys::AVColorRange_AVCOL_RANGE_MPEG {
789            ColorRange::Limited
790        } else {
791            log::warn!(
792                "color_range unsupported, falling back to Limited requested={range} fallback=Limited"
793            );
794            ColorRange::Limited
795        }
796    }
797
798    /// Converts FFmpeg color primaries to our ColorPrimaries enum.
799    fn convert_color_primaries(primaries: AVColorPrimaries) -> ColorPrimaries {
800        if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT709 {
801            ColorPrimaries::Bt709
802        } else if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT470BG
803            || primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_SMPTE170M
804        {
805            ColorPrimaries::Bt601
806        } else if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT2020 {
807            ColorPrimaries::Bt2020
808        } else {
809            log::warn!(
810                "color_primaries unsupported, falling back to Bt709 requested={primaries} fallback=Bt709"
811            );
812            ColorPrimaries::Bt709
813        }
814    }
815
816    /// Converts FFmpeg codec ID to our VideoCodec enum.
817    fn convert_codec(codec_id: AVCodecID) -> VideoCodec {
818        if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_H264 {
819            VideoCodec::H264
820        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_HEVC {
821            VideoCodec::H265
822        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_VP8 {
823            VideoCodec::Vp8
824        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_VP9 {
825            VideoCodec::Vp9
826        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_AV1 {
827            VideoCodec::Av1
828        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_MPEG4 {
829            VideoCodec::Mpeg4
830        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_PRORES {
831            VideoCodec::ProRes
832        } else {
833            log::warn!(
834                "video codec unsupported, falling back to H264 codec_id={codec_id} fallback=H264"
835            );
836            VideoCodec::H264
837        }
838    }
839
    /// Decodes the next video frame.
    ///
    /// Drives the standard FFmpeg send/receive loop: packets are read from
    /// the container and fed to the decoder until a frame becomes available,
    /// the stream ends, or an error occurs. Also updates `self.position`
    /// from the decoded frame's PTS.
    ///
    /// # Returns
    ///
    /// - `Ok(Some(frame))` - Successfully decoded a frame
    /// - `Ok(None)` - End of stream reached
    /// - `Err(_)` - Decoding error occurred
    pub(crate) fn decode_one(&mut self) -> Result<Option<VideoFrame>, DecodeError> {
        // Once EOF has been observed, keep returning None without touching FFmpeg.
        if self.eof {
            return Ok(None);
        }

        // SAFETY: codec_ctx, frame, packet, and format_ctx are owned by this
        // struct and allocated/validated in the constructor (see the SAFETY
        // notes on `seek` for the same invariants).
        unsafe {
            loop {
                // Try to receive a frame from the decoder
                let ret = ff_sys::avcodec_receive_frame(self.codec_ctx, self.frame);

                if ret == 0 {
                    // Successfully received a frame
                    // Check if this is a hardware frame and transfer to CPU memory if needed
                    self.transfer_hardware_frame_if_needed()?;

                    let video_frame = self.convert_frame_to_video_frame()?;

                    // Update position based on frame timestamp: PTS is in the
                    // stream's time_base units, so scale by num/den to seconds.
                    let pts = (*self.frame).pts;
                    if pts != ff_sys::AV_NOPTS_VALUE {
                        let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
                        let time_base = (*(*stream)).time_base;
                        let timestamp_secs =
                            pts as f64 * time_base.num as f64 / time_base.den as f64;
                        self.position = Duration::from_secs_f64(timestamp_secs);
                    }

                    return Ok(Some(video_frame));
                } else if ret == ff_sys::error_codes::EAGAIN {
                    // Decoder needs more input before it can emit a frame.
                    // Read the next packet from the container.
                    let read_ret = ff_sys::av_read_frame(self.format_ctx, self.packet);

                    if read_ret == ff_sys::error_codes::EOF {
                        // End of file - send a null packet to put the decoder
                        // into draining mode, then loop to receive any frames
                        // still buffered inside the decoder.
                        // NOTE(review): the return value of this send is
                        // ignored - confirm a failure here cannot leave the
                        // decoder in a non-draining state.
                        ff_sys::avcodec_send_packet(self.codec_ctx, ptr::null());
                        self.eof = true;
                        continue;
                    } else if read_ret < 0 {
                        return Err(DecodeError::Ffmpeg {
                            code: read_ret,
                            message: format!(
                                "Failed to read frame: {}",
                                ff_sys::av_error_string(read_ret)
                            ),
                        });
                    }

                    // Check if this packet belongs to the video stream
                    if (*self.packet).stream_index == self.stream_index {
                        // Send the packet to the decoder
                        let send_ret = ff_sys::avcodec_send_packet(self.codec_ctx, self.packet);
                        ff_sys::av_packet_unref(self.packet);

                        // NOTE(review): EAGAIN from send is treated as benign,
                        // but the packet has already been unreffed above, so if
                        // it ever occurred the packet would be dropped. The
                        // receive-then-send pattern here should make that
                        // unreachable - confirm against FFmpeg's API contract.
                        if send_ret < 0 && send_ret != ff_sys::error_codes::EAGAIN {
                            return Err(DecodeError::Ffmpeg {
                                code: send_ret,
                                message: format!(
                                    "Failed to send packet: {}",
                                    ff_sys::av_error_string(send_ret)
                                ),
                            });
                        }
                    } else {
                        // Not our stream, unref and continue
                        ff_sys::av_packet_unref(self.packet);
                    }
                } else if ret == ff_sys::error_codes::EOF {
                    // Decoder has been fully flushed - nothing left to drain.
                    self.eof = true;
                    return Ok(None);
                } else {
                    // Any other receive error is a hard decoding failure.
                    return Err(DecodeError::DecodingFailed {
                        timestamp: Some(self.position),
                        reason: ff_sys::av_error_string(ret),
                    });
                }
            }
        }
    }
927
928    /// Converts an AVFrame to a VideoFrame, applying pixel format conversion if needed.
929    unsafe fn convert_frame_to_video_frame(&mut self) -> Result<VideoFrame, DecodeError> {
930        // SAFETY: Caller ensures self.frame is valid
931        unsafe {
932            let width = (*self.frame).width as u32;
933            let height = (*self.frame).height as u32;
934            let src_format = (*self.frame).format;
935
936            // Determine output format
937            let dst_format = if let Some(fmt) = self.output_format {
938                Self::pixel_format_to_av(fmt)
939            } else {
940                src_format
941            };
942
943            // Check if conversion is needed
944            let needs_conversion = src_format != dst_format;
945
946            if needs_conversion {
947                self.convert_with_sws(width, height, src_format, dst_format)
948            } else {
949                self.av_frame_to_video_frame(self.frame)
950            }
951        }
952    }
953
    /// Converts pixel format using SwScale.
    ///
    /// Lazily creates (and then caches) an `SwsContext` for the
    /// `src_format -> dst_format` conversion at the given dimensions, scales
    /// the current frame into a freshly allocated destination frame, and
    /// converts that into a [`VideoFrame`].
    ///
    /// NOTE(review): the cached `SwsContext` is created from the first
    /// frame's dimensions/formats and never invalidated - if the stream could
    /// change resolution or pixel format mid-stream, a stale context would be
    /// reused. Confirm upstream guarantees fixed parameters per decoder.
    unsafe fn convert_with_sws(
        &mut self,
        width: u32,
        height: u32,
        src_format: i32,
        dst_format: i32,
    ) -> Result<VideoFrame, DecodeError> {
        // SAFETY: Caller ensures frame and context pointers are valid
        unsafe {
            // Get or create SwScale context (same source and destination
            // dimensions: this is a format conversion, not a resize).
            if self.sws_ctx.is_none() {
                let ctx = ff_sys::swscale::get_context(
                    width as i32,
                    height as i32,
                    src_format,
                    width as i32,
                    height as i32,
                    dst_format,
                    ff_sys::swscale::scale_flags::BILINEAR,
                )
                .map_err(|e| DecodeError::Ffmpeg {
                    code: 0,
                    message: format!("Failed to create sws context: {e}"),
                })?;

                self.sws_ctx = Some(ctx);
            }

            // Defensive re-check; unreachable after the branch above, but kept
            // so the code never unwraps.
            let Some(sws_ctx) = self.sws_ctx else {
                return Err(DecodeError::Ffmpeg {
                    code: 0,
                    message: "SwsContext not initialized".to_string(),
                });
            };

            // Allocate destination frame (with RAII guard so it is freed on
            // every early-return path below)
            let dst_frame_guard = AvFrameGuard::new()?;
            let dst_frame = dst_frame_guard.as_ptr();

            (*dst_frame).width = width as i32;
            (*dst_frame).height = height as i32;
            (*dst_frame).format = dst_format;

            // Allocate buffer for destination frame (alignment 0 = FFmpeg default)
            let buffer_ret = ff_sys::av_frame_get_buffer(dst_frame, 0);
            if buffer_ret < 0 {
                return Err(DecodeError::Ffmpeg {
                    code: buffer_ret,
                    message: format!(
                        "Failed to allocate frame buffer: {}",
                        ff_sys::av_error_string(buffer_ret)
                    ),
                });
            }

            // Perform conversion over the full slice (rows 0..height)
            ff_sys::swscale::scale(
                sws_ctx,
                (*self.frame).data.as_ptr() as *const *const u8,
                (*self.frame).linesize.as_ptr(),
                0,
                height as i32,
                (*dst_frame).data.as_ptr() as *const *mut u8,
                (*dst_frame).linesize.as_ptr(),
            )
            .map_err(|e| DecodeError::Ffmpeg {
                code: 0,
                message: format!("Failed to scale frame: {e}"),
            })?;

            // Copy timestamp so the converted frame keeps the source PTS
            (*dst_frame).pts = (*self.frame).pts;

            // Convert to VideoFrame (copies plane data out of dst_frame)
            let video_frame = self.av_frame_to_video_frame(dst_frame)?;

            // dst_frame is automatically freed when guard drops

            Ok(video_frame)
        }
    }
1036
1037    /// Converts an AVFrame to a VideoFrame.
1038    unsafe fn av_frame_to_video_frame(
1039        &self,
1040        frame: *const AVFrame,
1041    ) -> Result<VideoFrame, DecodeError> {
1042        // SAFETY: Caller ensures frame and format_ctx are valid
1043        unsafe {
1044            let width = (*frame).width as u32;
1045            let height = (*frame).height as u32;
1046            let format = Self::convert_pixel_format((*frame).format);
1047
1048            // Extract timestamp
1049            let pts = (*frame).pts;
1050            let timestamp = if pts != ff_sys::AV_NOPTS_VALUE {
1051                let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
1052                let time_base = (*(*stream)).time_base;
1053                Timestamp::new(
1054                    pts as i64,
1055                    Rational::new(time_base.num as i32, time_base.den as i32),
1056                )
1057            } else {
1058                Timestamp::default()
1059            };
1060
1061            // Convert frame to planes and strides
1062            let (planes, strides) =
1063                self.extract_planes_and_strides(frame, width, height, format)?;
1064
1065            VideoFrame::new(planes, strides, width, height, format, timestamp, false).map_err(|e| {
1066                DecodeError::Ffmpeg {
1067                    code: 0,
1068                    message: format!("Failed to create VideoFrame: {e}"),
1069                }
1070            })
1071        }
1072    }
1073
    /// Allocates a buffer for decoded frame data.
    ///
    /// If a frame pool is configured and can satisfy the request, the buffer
    /// comes from the pool and is returned to it automatically when dropped.
    /// Otherwise a standalone zero-filled heap allocation is used.
    fn allocate_buffer(&self, size: usize) -> PooledBuffer {
        if let Some(ref pool) = self.frame_pool
            && let Some(pooled_buffer) = pool.acquire(size)
        {
            // Pool hit: ownership transfers to the caller; dropping the
            // buffer (with the VideoFrame) returns the memory to the pool.
            return pooled_buffer;
        }

        // Pool not configured or exhausted - allocate a standalone buffer
        PooledBuffer::standalone(vec![0u8; size])
    }
1095
1096    /// Extracts planes and strides from an AVFrame.
1097    unsafe fn extract_planes_and_strides(
1098        &self,
1099        frame: *const AVFrame,
1100        width: u32,
1101        height: u32,
1102        format: PixelFormat,
1103    ) -> Result<(Vec<PooledBuffer>, Vec<usize>), DecodeError> {
1104        // Bytes per pixel constants for different pixel formats
1105        const BYTES_PER_PIXEL_RGBA: usize = 4;
1106        const BYTES_PER_PIXEL_RGB24: usize = 3;
1107
1108        // SAFETY: Caller ensures frame is valid and format matches actual frame format
1109        unsafe {
1110            let mut planes = Vec::new();
1111            let mut strides = Vec::new();
1112
1113            #[allow(clippy::match_same_arms)]
1114            match format {
1115                PixelFormat::Rgba | PixelFormat::Bgra | PixelFormat::Rgb24 | PixelFormat::Bgr24 => {
1116                    // Packed formats - single plane
1117                    let stride = (*frame).linesize[0] as usize;
1118                    let bytes_per_pixel = if matches!(format, PixelFormat::Rgba | PixelFormat::Bgra)
1119                    {
1120                        BYTES_PER_PIXEL_RGBA
1121                    } else {
1122                        BYTES_PER_PIXEL_RGB24
1123                    };
1124                    let row_size = (width as usize) * bytes_per_pixel;
1125                    let buffer_size = row_size * height as usize;
1126                    let mut plane_data = self.allocate_buffer(buffer_size);
1127
1128                    for y in 0..height as usize {
1129                        let src_offset = y * stride;
1130                        let dst_offset = y * row_size;
1131                        let src_ptr = (*frame).data[0].add(src_offset);
1132                        let plane_slice = plane_data.as_mut();
1133                        // SAFETY: We copy exactly `row_size` bytes per row. The source pointer
1134                        // is valid (from FFmpeg frame data), destination has sufficient capacity
1135                        // (allocated with height * row_size), and ranges don't overlap.
1136                        std::ptr::copy_nonoverlapping(
1137                            src_ptr,
1138                            plane_slice[dst_offset..].as_mut_ptr(),
1139                            row_size,
1140                        );
1141                    }
1142
1143                    planes.push(plane_data);
1144                    strides.push(row_size);
1145                }
1146                PixelFormat::Yuv420p | PixelFormat::Yuv422p | PixelFormat::Yuv444p => {
1147                    // Planar YUV formats
1148                    let (chroma_width, chroma_height) = match format {
1149                        PixelFormat::Yuv420p => (width / 2, height / 2),
1150                        PixelFormat::Yuv422p => (width / 2, height),
1151                        PixelFormat::Yuv444p => (width, height),
1152                        _ => unreachable!(),
1153                    };
1154
1155                    // Y plane
1156                    let y_stride = width as usize;
1157                    let y_size = y_stride * height as usize;
1158                    let mut y_data = self.allocate_buffer(y_size);
1159                    for y in 0..height as usize {
1160                        let src_offset = y * (*frame).linesize[0] as usize;
1161                        let dst_offset = y * y_stride;
1162                        let src_ptr = (*frame).data[0].add(src_offset);
1163                        let y_slice = y_data.as_mut();
1164                        // SAFETY: Copying Y plane row-by-row. Source is valid FFmpeg data,
1165                        // destination has sufficient capacity, no overlap.
1166                        std::ptr::copy_nonoverlapping(
1167                            src_ptr,
1168                            y_slice[dst_offset..].as_mut_ptr(),
1169                            width as usize,
1170                        );
1171                    }
1172                    planes.push(y_data);
1173                    strides.push(y_stride);
1174
1175                    // U plane
1176                    let u_stride = chroma_width as usize;
1177                    let u_size = u_stride * chroma_height as usize;
1178                    let mut u_data = self.allocate_buffer(u_size);
1179                    for y in 0..chroma_height as usize {
1180                        let src_offset = y * (*frame).linesize[1] as usize;
1181                        let dst_offset = y * u_stride;
1182                        let src_ptr = (*frame).data[1].add(src_offset);
1183                        let u_slice = u_data.as_mut();
1184                        // SAFETY: Copying U (chroma) plane row-by-row. Valid source,
1185                        // sufficient destination capacity, no overlap.
1186                        std::ptr::copy_nonoverlapping(
1187                            src_ptr,
1188                            u_slice[dst_offset..].as_mut_ptr(),
1189                            chroma_width as usize,
1190                        );
1191                    }
1192                    planes.push(u_data);
1193                    strides.push(u_stride);
1194
1195                    // V plane
1196                    let v_stride = chroma_width as usize;
1197                    let v_size = v_stride * chroma_height as usize;
1198                    let mut v_data = self.allocate_buffer(v_size);
1199                    for y in 0..chroma_height as usize {
1200                        let src_offset = y * (*frame).linesize[2] as usize;
1201                        let dst_offset = y * v_stride;
1202                        let src_ptr = (*frame).data[2].add(src_offset);
1203                        let v_slice = v_data.as_mut();
1204                        // SAFETY: Copying V (chroma) plane row-by-row. Valid source,
1205                        // sufficient destination capacity, no overlap.
1206                        std::ptr::copy_nonoverlapping(
1207                            src_ptr,
1208                            v_slice[dst_offset..].as_mut_ptr(),
1209                            chroma_width as usize,
1210                        );
1211                    }
1212                    planes.push(v_data);
1213                    strides.push(v_stride);
1214                }
1215                PixelFormat::Gray8 => {
1216                    // Single plane grayscale
1217                    let stride = width as usize;
1218                    let mut plane_data = self.allocate_buffer(stride * height as usize);
1219
1220                    for y in 0..height as usize {
1221                        let src_offset = y * (*frame).linesize[0] as usize;
1222                        let dst_offset = y * stride;
1223                        let src_ptr = (*frame).data[0].add(src_offset);
1224                        let plane_slice = plane_data.as_mut();
1225                        // SAFETY: Copying grayscale plane row-by-row. Valid source,
1226                        // sufficient destination capacity, no overlap.
1227                        std::ptr::copy_nonoverlapping(
1228                            src_ptr,
1229                            plane_slice[dst_offset..].as_mut_ptr(),
1230                            width as usize,
1231                        );
1232                    }
1233
1234                    planes.push(plane_data);
1235                    strides.push(stride);
1236                }
1237                PixelFormat::Nv12 | PixelFormat::Nv21 => {
1238                    // Semi-planar formats
1239                    let uv_height = height / 2;
1240
1241                    // Y plane
1242                    let y_stride = width as usize;
1243                    let mut y_data = self.allocate_buffer(y_stride * height as usize);
1244                    for y in 0..height as usize {
1245                        let src_offset = y * (*frame).linesize[0] as usize;
1246                        let dst_offset = y * y_stride;
1247                        let src_ptr = (*frame).data[0].add(src_offset);
1248                        let y_slice = y_data.as_mut();
1249                        // SAFETY: Copying Y plane (semi-planar) row-by-row. Valid source,
1250                        // sufficient destination capacity, no overlap.
1251                        std::ptr::copy_nonoverlapping(
1252                            src_ptr,
1253                            y_slice[dst_offset..].as_mut_ptr(),
1254                            width as usize,
1255                        );
1256                    }
1257                    planes.push(y_data);
1258                    strides.push(y_stride);
1259
1260                    // UV plane
1261                    let uv_stride = width as usize;
1262                    let mut uv_data = self.allocate_buffer(uv_stride * uv_height as usize);
1263                    for y in 0..uv_height as usize {
1264                        let src_offset = y * (*frame).linesize[1] as usize;
1265                        let dst_offset = y * uv_stride;
1266                        let src_ptr = (*frame).data[1].add(src_offset);
1267                        let uv_slice = uv_data.as_mut();
1268                        // SAFETY: Copying interleaved UV plane (semi-planar) row-by-row.
1269                        // Valid source, sufficient destination capacity, no overlap.
1270                        std::ptr::copy_nonoverlapping(
1271                            src_ptr,
1272                            uv_slice[dst_offset..].as_mut_ptr(),
1273                            width as usize,
1274                        );
1275                    }
1276                    planes.push(uv_data);
1277                    strides.push(uv_stride);
1278                }
1279                _ => {
1280                    return Err(DecodeError::Ffmpeg {
1281                        code: 0,
1282                        message: format!("Unsupported pixel format: {format:?}"),
1283                    });
1284                }
1285            }
1286
1287            Ok((planes, strides))
1288        }
1289    }
1290
1291    /// Converts our `PixelFormat` to FFmpeg `AVPixelFormat`.
1292    fn pixel_format_to_av(format: PixelFormat) -> AVPixelFormat {
1293        match format {
1294            PixelFormat::Yuv420p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P,
1295            PixelFormat::Yuv422p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P,
1296            PixelFormat::Yuv444p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P,
1297            PixelFormat::Rgb24 => ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24,
1298            PixelFormat::Bgr24 => ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24,
1299            PixelFormat::Rgba => ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA,
1300            PixelFormat::Bgra => ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA,
1301            PixelFormat::Gray8 => ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8,
1302            PixelFormat::Nv12 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV12,
1303            PixelFormat::Nv21 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV21,
1304            _ => {
1305                log::warn!(
1306                    "pixel_format has no AV mapping, falling back to Yuv420p format={format:?} fallback=AV_PIX_FMT_YUV420P"
1307                );
1308                ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P
1309            }
1310        }
1311    }
1312
    /// Returns the current playback position.
    ///
    /// This reflects the PTS of the most recently decoded frame (updated by
    /// `decode_one`), not a seek target.
    pub(crate) fn position(&self) -> Duration {
        self.position
    }
1317
    /// Returns whether end of file has been reached.
    ///
    /// Set once the decoder has been fully drained; cleared again by `seek`.
    pub(crate) fn is_eof(&self) -> bool {
        self.eof
    }
1322
1323    /// Converts a `Duration` to a presentation timestamp (PTS) in stream time_base units.
1324    ///
1325    /// # Arguments
1326    ///
1327    /// * `duration` - The duration to convert.
1328    ///
1329    /// # Returns
1330    ///
1331    /// The timestamp in stream time_base units.
1332    ///
1333    /// # Note
1334    ///
1335    /// av_seek_frame expects timestamps in stream time_base units when using a specific stream_index.
1336    fn duration_to_pts(&self, duration: Duration) -> i64 {
1337        // Convert duration to stream time_base units for seeking
1338        // SAFETY:
1339        // - format_ctx is valid: owned by VideoDecoderInner, initialized in constructor via avformat_open_input
1340        // - stream_index is valid: validated during decoder creation (find_stream_info + codec opening)
1341        // - streams array access is valid: guaranteed by FFmpeg after successful avformat_open_input
1342        let time_base = unsafe {
1343            let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
1344            (*(*stream)).time_base
1345        };
1346
1347        // Convert: duration (seconds) * (time_base.den / time_base.num) = PTS
1348        let time_base_f64 = time_base.den as f64 / time_base.num as f64;
1349        (duration.as_secs_f64() * time_base_f64) as i64
1350    }
1351
1352    /// Converts a presentation timestamp (PTS) to a `Duration`.
1353    ///
1354    /// # Arguments
1355    ///
1356    /// * `pts` - The presentation timestamp in stream time base units.
1357    ///
1358    /// # Returns
1359    ///
1360    /// The duration corresponding to the PTS.
1361    ///
1362    /// # Safety
1363    ///
1364    /// Caller must ensure that `format_ctx` and `stream_index` are valid.
1365    ///
1366    /// # Note
1367    ///
1368    /// Currently unused but kept for potential future use in more advanced seeking scenarios.
1369    #[allow(dead_code)]
1370    fn pts_to_duration(&self, pts: i64) -> Duration {
1371        // SAFETY: Caller ensures format_ctx and stream_index are valid
1372        unsafe {
1373            let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
1374            let time_base = (*(*stream)).time_base;
1375
1376            // Convert PTS to duration
1377            let duration_secs = pts as f64 * time_base.num as f64 / time_base.den as f64;
1378            Duration::from_secs_f64(duration_secs)
1379        }
1380    }
1381
    /// Seeks to a specified position in the video stream.
    ///
    /// This method performs efficient seeking without reopening the file.
    /// It uses `av_seek_frame` internally and flushes the decoder buffers.
    /// The steps below (unref pending data, seek, flush, drain, skip-forward)
    /// are order-dependent and must not be rearranged.
    ///
    /// # Performance Characteristics
    ///
    /// - **Keyframe seek**: 5-10ms for typical GOP sizes (1-2 seconds)
    /// - **Exact seek**: Proportional to distance from nearest keyframe
    /// - **Large GOP videos**: May require sequential decoding from distant keyframe
    ///
    /// For videos with sparse keyframes (GOP > 2 seconds), the method will
    /// decode frames sequentially from the nearest keyframe to reach the target.
    /// This ensures correct frame data but may take longer (10-50ms for very large GOPs).
    ///
    /// # Arguments
    ///
    /// * `position` - Target position to seek to.
    /// * `mode` - Seek mode (Keyframe, Exact, or Backward).
    ///
    /// # Errors
    ///
    /// Returns [`DecodeError::SeekFailed`] if the seek operation fails.
    pub(crate) fn seek(
        &mut self,
        position: Duration,
        mode: crate::SeekMode,
    ) -> Result<(), DecodeError> {
        use crate::SeekMode;

        let timestamp = self.duration_to_pts(position);

        // All seek modes use BACKWARD flag to find the nearest keyframe at or before target.
        // The difference between modes is in the post-seek processing below.
        let flags = ff_sys::avformat::seek_flags::BACKWARD;

        // 1. Clear any pending packet and frame to avoid reading stale data after seek
        // SAFETY:
        // - packet is valid: allocated in constructor, owned by VideoDecoderInner
        // - frame is valid: allocated in constructor, owned by VideoDecoderInner
        unsafe {
            ff_sys::av_packet_unref(self.packet);
            ff_sys::av_frame_unref(self.frame);
        }

        // 2. Seek in the format context (file is NOT reopened)
        // Use av_seek_frame with the stream index and timestamp in stream time_base units
        // SAFETY:
        // - format_ctx is valid: owned by VideoDecoderInner, initialized via avformat_open_input
        // - stream_index is valid: validated during decoder creation
        // - timestamp is valid: converted from Duration using stream's time_base
        unsafe {
            ff_sys::avformat::seek_frame(
                self.format_ctx,
                self.stream_index as i32,
                timestamp,
                flags,
            )
            .map_err(|e| DecodeError::SeekFailed {
                target: position,
                reason: ff_sys::av_error_string(e),
            })?;
        }

        // 3. Flush decoder buffers to clear any cached frames
        // SAFETY: codec_ctx is valid: owned by VideoDecoderInner, initialized via avcodec_open2
        unsafe {
            ff_sys::avcodec::flush_buffers(self.codec_ctx);
        }

        // 4. Drain any remaining frames from the decoder after flush
        // This ensures no stale frames are returned after the seek
        // SAFETY:
        // - codec_ctx is valid: owned by VideoDecoderInner, initialized via avcodec_open2
        // - frame is valid: allocated in constructor, owned by VideoDecoderInner
        unsafe {
            loop {
                let ret = ff_sys::avcodec_receive_frame(self.codec_ctx, self.frame);
                if ret == ff_sys::error_codes::EAGAIN || ret == ff_sys::error_codes::EOF {
                    // No more frames in the decoder buffer
                    break;
                } else if ret == 0 {
                    // Got a frame, unref it and continue draining
                    ff_sys::av_frame_unref(self.frame);
                } else {
                    // Other error, break out; draining is best-effort, so the
                    // error is deliberately not propagated here.
                    break;
                }
            }
        }

        // 5. Reset internal state
        self.eof = false;
        // Note: We don't update self.position here because it will be updated
        // when the next frame is decoded. This ensures position reflects actual decoded position.

        // 6. Skip forward to the target position
        //
        // Context: av_seek_frame with BACKWARD flag seeks to the nearest keyframe *at or before*
        // the target timestamp. For videos with sparse keyframes (large GOP size), this may
        // land far from the target (e.g., at the first keyframe for GOP=entire video).
        //
        // Solution: Decode frames sequentially from the keyframe until reaching the target.
        // This is necessary because H.264/H.265 P-frames and B-frames depend on previous
        // frames for reconstruction, so we must decode all intermediate frames.
        //
        // NOTE(review): the frame that first satisfies the threshold is itself
        // decoded and discarded, so the caller's next decode_one() returns the
        // frame *after* it - confirm this off-by-one is the intended contract.
        //
        // Performance Impact:
        // - Typical GOP (1-2s): 30-60 frames to skip, ~5-10ms overhead
        // - Large GOP (5-10s): 150-300 frames to skip, ~20-50ms overhead
        // - Worst case (single keyframe): May decode entire video, ~100ms-1s
        if mode == SeekMode::Exact {
            // For exact mode, decode until we reach or pass the exact target
            self.skip_to_exact(position)?;
        } else {
            // For keyframe/backward modes, decode until we're reasonably close to the target
            // Rationale: Balances accuracy with performance for common use cases
            let tolerance = Duration::from_secs(KEYFRAME_SEEK_TOLERANCE_SECS);
            let min_position = position.saturating_sub(tolerance);

            while let Some(frame) = self.decode_one()? {
                let frame_time = frame.timestamp().as_duration();
                if frame_time >= min_position {
                    // We're close enough to the target
                    break;
                }
                // Continue decoding to get closer (frames are automatically dropped)
            }
        }

        Ok(())
    }
1513
1514    /// Skips frames until reaching the exact target position.
1515    ///
1516    /// This is used by [`Self::seek`] when `SeekMode::Exact` is specified.
1517    /// It decodes and discards frames from the nearest keyframe until
1518    /// reaching the target position.
1519    ///
1520    /// # Performance
1521    ///
1522    /// Time complexity is O(n) where n is the number of frames between the
1523    /// keyframe and target. For a 30fps video with 2-second GOP:
1524    /// - Worst case: ~60 frames to decode, ~10-20ms
1525    /// - Average case: ~30 frames to decode, ~5-10ms
1526    ///
1527    /// # Arguments
1528    ///
1529    /// * `target` - The exact target position.
1530    ///
1531    /// # Errors
1532    ///
1533    /// Returns [`DecodeError::SeekFailed`] if EOF is reached before the target position.
1534    fn skip_to_exact(&mut self, target: Duration) -> Result<(), DecodeError> {
1535        loop {
1536            match self.decode_one()? {
1537                Some(frame) => {
1538                    let frame_time = frame.timestamp().as_duration();
1539                    if frame_time >= target {
1540                        // Reached or passed the target frame
1541                        // Position will be updated by decode_one() which was just called
1542                        break;
1543                    }
1544                    // Continue decoding (frame is automatically dropped)
1545                }
1546                None => {
1547                    // Reached EOF before finding target frame
1548                    return Err(DecodeError::SeekFailed {
1549                        target,
1550                        reason: "Reached end of stream before target position".to_string(),
1551                    });
1552                }
1553            }
1554        }
1555        Ok(())
1556    }
1557
    /// Flushes the decoder's internal buffers.
    ///
    /// This clears any cached frames and resets the decoder state.
    /// The decoder is ready to receive new packets after flushing.
    ///
    /// Also clears the internal EOF flag so decoding can resume (e.g. after
    /// a seek back from the end of the stream).
    pub(crate) fn flush(&mut self) {
        // SAFETY: codec_ctx is valid and owned by this instance
        unsafe {
            ff_sys::avcodec::flush_buffers(self.codec_ctx);
        }
        self.eof = false;
    }
1569
1570    /// Scales a video frame to the specified dimensions while preserving aspect ratio.
1571    ///
1572    /// This method uses SwScale to resize frames efficiently using a "fit-within"
1573    /// strategy that preserves the original aspect ratio.
1574    ///
1575    /// # Aspect Ratio Preservation
1576    ///
1577    /// The frame is scaled to fit within `(target_width, target_height)` while
1578    /// maintaining its original aspect ratio. The output dimensions will be at most
1579    /// the target size, with at least one dimension matching the target. No letterboxing
1580    /// or pillarboxing is applied - the frame is simply scaled down to fit.
1581    ///
1582    /// # Arguments
1583    ///
1584    /// * `frame` - The source frame to scale.
1585    /// * `target_width` - Desired width in pixels.
1586    /// * `target_height` - Desired height in pixels.
1587    ///
1588    /// # Returns
1589    ///
1590    /// A new `VideoFrame` scaled to fit within the target dimensions.
1591    ///
1592    /// # Errors
1593    ///
1594    /// Returns [`DecodeError`] if SwScale context creation or scaling fails.
1595    ///
1596    /// # Performance
1597    ///
1598    /// - Caches SwScale context for repeated calls with same dimensions
1599    /// - Context creation: ~0.1-0.5ms (only on first call or dimension change)
1600    /// - Typical scaling time: 1-3ms for 1080p → 320x180
1601    /// - Uses bilinear interpolation for quality/performance balance
1602    ///
1603    /// # Cache Behavior
1604    ///
1605    /// The SwScale context is cached based on source/target dimensions and format.
1606    /// When generating multiple thumbnails with the same size (e.g., via `thumbnails()`),
1607    /// the context is reused, eliminating the ~0.1-0.5ms creation overhead per thumbnail.
1608    pub(crate) fn scale_frame(
1609        &mut self,
1610        frame: &VideoFrame,
1611        target_width: u32,
1612        target_height: u32,
1613    ) -> Result<VideoFrame, DecodeError> {
1614        let src_width = frame.width();
1615        let src_height = frame.height();
1616        let src_format = frame.format();
1617
1618        // Calculate scaled dimensions to preserve aspect ratio (fit within target)
1619        let src_aspect = src_width as f64 / src_height as f64;
1620        let target_aspect = target_width as f64 / target_height as f64;
1621
1622        let (scaled_width, scaled_height) = if src_aspect > target_aspect {
1623            // Source is wider - fit to width
1624            let height = (target_width as f64 / src_aspect).round() as u32;
1625            (target_width, height)
1626        } else {
1627            // Source is taller or equal - fit to height
1628            let width = (target_height as f64 * src_aspect).round() as u32;
1629            (width, target_height)
1630        };
1631
1632        // Convert pixel format to FFmpeg format
1633        let av_format = Self::pixel_format_to_av(src_format);
1634
1635        // Cache key: (src_width, src_height, scaled_width, scaled_height, format)
1636        let cache_key = (
1637            src_width,
1638            src_height,
1639            scaled_width,
1640            scaled_height,
1641            av_format,
1642        );
1643
1644        // SAFETY: We're creating temporary FFmpeg objects for scaling
1645        unsafe {
1646            // Check if we can reuse the cached SwScale context
1647            let (sws_ctx, is_cached) = if let (Some(cached_ctx), Some(cached_key)) =
1648                (self.thumbnail_sws_ctx, self.thumbnail_cache_key)
1649            {
1650                if cached_key == cache_key {
1651                    // Cache hit - reuse existing context
1652                    (cached_ctx, true)
1653                } else {
1654                    // Cache miss - free old context and create new one
1655                    ff_sys::swscale::free_context(cached_ctx);
1656                    // Clear cache immediately to prevent dangling pointer
1657                    self.thumbnail_sws_ctx = None;
1658                    self.thumbnail_cache_key = None;
1659
1660                    let new_ctx = ff_sys::swscale::get_context(
1661                        src_width as i32,
1662                        src_height as i32,
1663                        av_format,
1664                        scaled_width as i32,
1665                        scaled_height as i32,
1666                        av_format,
1667                        ff_sys::swscale::scale_flags::BILINEAR,
1668                    )
1669                    .map_err(|e| DecodeError::Ffmpeg {
1670                        code: 0,
1671                        message: format!("Failed to create scaling context: {e}"),
1672                    })?;
1673
1674                    // Don't cache yet - will cache after successful scaling
1675                    (new_ctx, false)
1676                }
1677            } else {
1678                // No cache - create new context
1679                let new_ctx = ff_sys::swscale::get_context(
1680                    src_width as i32,
1681                    src_height as i32,
1682                    av_format,
1683                    scaled_width as i32,
1684                    scaled_height as i32,
1685                    av_format,
1686                    ff_sys::swscale::scale_flags::BILINEAR,
1687                )
1688                .map_err(|e| DecodeError::Ffmpeg {
1689                    code: 0,
1690                    message: format!("Failed to create scaling context: {e}"),
1691                })?;
1692
1693                // Don't cache yet - will cache after successful scaling
1694                (new_ctx, false)
1695            };
1696
1697            // Set up source frame with VideoFrame data
1698            let src_frame_guard = AvFrameGuard::new()?;
1699            let src_frame = src_frame_guard.as_ptr();
1700
1701            (*src_frame).width = src_width as i32;
1702            (*src_frame).height = src_height as i32;
1703            (*src_frame).format = av_format;
1704
1705            // Set up source frame data pointers directly from VideoFrame (no copy)
1706            let planes = frame.planes();
1707            let strides = frame.strides();
1708
1709            for (i, plane_data) in planes.iter().enumerate() {
1710                if i >= ff_sys::AV_NUM_DATA_POINTERS as usize {
1711                    break;
1712                }
1713                (*src_frame).data[i] = plane_data.as_ref().as_ptr().cast_mut();
1714                (*src_frame).linesize[i] = strides[i] as i32;
1715            }
1716
1717            // Allocate destination frame
1718            let dst_frame_guard = AvFrameGuard::new()?;
1719            let dst_frame = dst_frame_guard.as_ptr();
1720
1721            (*dst_frame).width = scaled_width as i32;
1722            (*dst_frame).height = scaled_height as i32;
1723            (*dst_frame).format = av_format;
1724
1725            // Allocate buffer for destination frame
1726            let buffer_ret = ff_sys::av_frame_get_buffer(dst_frame, 0);
1727            if buffer_ret < 0 {
1728                // Clean up context if not cached
1729                if !is_cached {
1730                    ff_sys::swscale::free_context(sws_ctx);
1731                }
1732                return Err(DecodeError::Ffmpeg {
1733                    code: buffer_ret,
1734                    message: format!(
1735                        "Failed to allocate destination frame buffer: {}",
1736                        ff_sys::av_error_string(buffer_ret)
1737                    ),
1738                });
1739            }
1740
1741            // Perform scaling
1742            let scale_result = ff_sys::swscale::scale(
1743                sws_ctx,
1744                (*src_frame).data.as_ptr() as *const *const u8,
1745                (*src_frame).linesize.as_ptr(),
1746                0,
1747                src_height as i32,
1748                (*dst_frame).data.as_ptr() as *const *mut u8,
1749                (*dst_frame).linesize.as_ptr(),
1750            );
1751
1752            if let Err(e) = scale_result {
1753                // Clean up context if not cached
1754                if !is_cached {
1755                    ff_sys::swscale::free_context(sws_ctx);
1756                }
1757                return Err(DecodeError::Ffmpeg {
1758                    code: 0,
1759                    message: format!("Failed to scale frame: {e}"),
1760                });
1761            }
1762
1763            // Scaling successful - cache the context if it's new
1764            if !is_cached {
1765                self.thumbnail_sws_ctx = Some(sws_ctx);
1766                self.thumbnail_cache_key = Some(cache_key);
1767            }
1768
1769            // Copy timestamp
1770            (*dst_frame).pts = frame.timestamp().pts();
1771
1772            // Convert destination frame to VideoFrame
1773            let video_frame = self.av_frame_to_video_frame(dst_frame)?;
1774
1775            Ok(video_frame)
1776        }
1777    }
1778}
1779
impl Drop for VideoDecoderInner {
    fn drop(&mut self) {
        // Teardown order: scaler contexts, hardware device context,
        // frame/packet, codec context, and the format context last.
        //
        // NOTE(review): each `&mut (field as *mut _)` below hands FFmpeg a
        // *temporary* pointer; the free function nulls that temporary, not the
        // struct field itself. The fields are left dangling afterwards, which
        // is sound only because this is Drop and nothing reads them again.

        // Free SwScale context if allocated
        if let Some(sws_ctx) = self.sws_ctx {
            // SAFETY: sws_ctx is valid and owned by this instance
            unsafe {
                ff_sys::swscale::free_context(sws_ctx);
            }
        }

        // Free cached thumbnail SwScale context if allocated
        if let Some(thumbnail_ctx) = self.thumbnail_sws_ctx {
            // SAFETY: thumbnail_ctx is valid and owned by this instance
            unsafe {
                ff_sys::swscale::free_context(thumbnail_ctx);
            }
        }

        // Free hardware device context if allocated
        if let Some(hw_ctx) = self.hw_device_ctx {
            // SAFETY: hw_ctx is valid and owned by this instance
            unsafe {
                ff_sys::av_buffer_unref(&mut (hw_ctx as *mut _));
            }
        }

        // Free frame and packet
        if !self.frame.is_null() {
            // SAFETY: self.frame is valid and owned by this instance
            unsafe {
                ff_sys::av_frame_free(&mut (self.frame as *mut _));
            }
        }

        if !self.packet.is_null() {
            // SAFETY: self.packet is valid and owned by this instance
            unsafe {
                ff_sys::av_packet_free(&mut (self.packet as *mut _));
            }
        }

        // Free codec context (before the format context it was opened from)
        if !self.codec_ctx.is_null() {
            // SAFETY: self.codec_ctx is valid and owned by this instance
            unsafe {
                ff_sys::avcodec::free_context(&mut (self.codec_ctx as *mut _));
            }
        }

        // Close format context
        if !self.format_ctx.is_null() {
            // SAFETY: self.format_ctx is valid and owned by this instance
            unsafe {
                ff_sys::avformat::close_input(&mut (self.format_ctx as *mut _));
            }
        }
    }
}
1838
// SAFETY: VideoDecoderInner holds raw FFmpeg pointers (format/codec contexts,
// frame, packet, scaler and hardware-device contexts — see Drop) that are
// owned exclusively by this struct and not aliased elsewhere, so moving the
// whole decoder to another thread is sound. This asserts Send only, not Sync:
// all mutation goes through `&mut self`, so the borrow rules prevent two
// threads from touching the FFmpeg state concurrently.
unsafe impl Send for VideoDecoderInner {}
1842
#[cfg(test)]
mod tests {
    use ff_format::PixelFormat;
    use ff_format::codec::VideoCodec;
    use ff_format::color::{ColorPrimaries, ColorRange, ColorSpace};

    use crate::HardwareAccel;

    use super::VideoDecoderInner;

    // Shared assertion helpers — one per conversion family under test.

    /// Asserts that an FFmpeg pixel format maps to the expected [`PixelFormat`].
    fn expect_pixel_format(av: ff_sys::AVPixelFormat, want: PixelFormat) {
        assert_eq!(VideoDecoderInner::convert_pixel_format(av), want);
    }

    /// Asserts that an FFmpeg color space maps to the expected [`ColorSpace`].
    fn expect_color_space(av: ff_sys::AVColorSpace, want: ColorSpace) {
        assert_eq!(VideoDecoderInner::convert_color_space(av), want);
    }

    /// Asserts that an FFmpeg color range maps to the expected [`ColorRange`].
    fn expect_color_range(av: ff_sys::AVColorRange, want: ColorRange) {
        assert_eq!(VideoDecoderInner::convert_color_range(av), want);
    }

    /// Asserts that FFmpeg color primaries map to the expected [`ColorPrimaries`].
    fn expect_color_primaries(av: ff_sys::AVColorPrimaries, want: ColorPrimaries) {
        assert_eq!(VideoDecoderInner::convert_color_primaries(av), want);
    }

    /// Asserts that an FFmpeg codec id maps to the expected [`VideoCodec`].
    fn expect_codec(av: ff_sys::AVCodecID, want: VideoCodec) {
        assert_eq!(VideoDecoderInner::convert_codec(av), want);
    }

    /// Asserts the FFmpeg device type chosen for a [`HardwareAccel`] variant.
    fn expect_device_type(accel: HardwareAccel, want: Option<ff_sys::AVHWDeviceType>) {
        assert_eq!(VideoDecoderInner::hw_accel_to_device_type(accel), want);
    }

    /// Asserts that converting a [`PixelFormat`] to FFmpeg and back is lossless.
    fn expect_round_trip(format: PixelFormat) {
        let av = VideoDecoderInner::pixel_format_to_av(format);
        assert_eq!(VideoDecoderInner::convert_pixel_format(av), format);
    }

    // -------------------------------------------------------------------------
    // convert_pixel_format
    // -------------------------------------------------------------------------

    #[test]
    fn pixel_format_yuv420p() {
        expect_pixel_format(
            ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P,
            PixelFormat::Yuv420p,
        );
    }

    #[test]
    fn pixel_format_yuv422p() {
        expect_pixel_format(
            ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P,
            PixelFormat::Yuv422p,
        );
    }

    #[test]
    fn pixel_format_yuv444p() {
        expect_pixel_format(
            ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P,
            PixelFormat::Yuv444p,
        );
    }

    #[test]
    fn pixel_format_rgb24() {
        expect_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24, PixelFormat::Rgb24);
    }

    #[test]
    fn pixel_format_bgr24() {
        expect_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24, PixelFormat::Bgr24);
    }

    #[test]
    fn pixel_format_rgba() {
        expect_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA, PixelFormat::Rgba);
    }

    #[test]
    fn pixel_format_bgra() {
        expect_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA, PixelFormat::Bgra);
    }

    #[test]
    fn pixel_format_gray8() {
        expect_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8, PixelFormat::Gray8);
    }

    #[test]
    fn pixel_format_nv12() {
        expect_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_NV12, PixelFormat::Nv12);
    }

    #[test]
    fn pixel_format_nv21() {
        expect_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_NV21, PixelFormat::Nv21);
    }

    #[test]
    fn pixel_format_unknown_falls_back_to_yuv420p() {
        expect_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_NONE, PixelFormat::Yuv420p);
    }

    // -------------------------------------------------------------------------
    // convert_color_space
    // -------------------------------------------------------------------------

    #[test]
    fn color_space_bt709() {
        expect_color_space(ff_sys::AVColorSpace_AVCOL_SPC_BT709, ColorSpace::Bt709);
    }

    #[test]
    fn color_space_bt470bg_yields_bt601() {
        expect_color_space(ff_sys::AVColorSpace_AVCOL_SPC_BT470BG, ColorSpace::Bt601);
    }

    #[test]
    fn color_space_smpte170m_yields_bt601() {
        expect_color_space(ff_sys::AVColorSpace_AVCOL_SPC_SMPTE170M, ColorSpace::Bt601);
    }

    #[test]
    fn color_space_bt2020_ncl() {
        expect_color_space(ff_sys::AVColorSpace_AVCOL_SPC_BT2020_NCL, ColorSpace::Bt2020);
    }

    #[test]
    fn color_space_unknown_falls_back_to_bt709() {
        expect_color_space(ff_sys::AVColorSpace_AVCOL_SPC_UNSPECIFIED, ColorSpace::Bt709);
    }

    // -------------------------------------------------------------------------
    // convert_color_range
    // -------------------------------------------------------------------------

    #[test]
    fn color_range_jpeg_yields_full() {
        expect_color_range(ff_sys::AVColorRange_AVCOL_RANGE_JPEG, ColorRange::Full);
    }

    #[test]
    fn color_range_mpeg_yields_limited() {
        expect_color_range(ff_sys::AVColorRange_AVCOL_RANGE_MPEG, ColorRange::Limited);
    }

    #[test]
    fn color_range_unknown_falls_back_to_limited() {
        expect_color_range(
            ff_sys::AVColorRange_AVCOL_RANGE_UNSPECIFIED,
            ColorRange::Limited,
        );
    }

    // -------------------------------------------------------------------------
    // convert_color_primaries
    // -------------------------------------------------------------------------

    #[test]
    fn color_primaries_bt709() {
        expect_color_primaries(
            ff_sys::AVColorPrimaries_AVCOL_PRI_BT709,
            ColorPrimaries::Bt709,
        );
    }

    #[test]
    fn color_primaries_bt470bg_yields_bt601() {
        expect_color_primaries(
            ff_sys::AVColorPrimaries_AVCOL_PRI_BT470BG,
            ColorPrimaries::Bt601,
        );
    }

    #[test]
    fn color_primaries_smpte170m_yields_bt601() {
        expect_color_primaries(
            ff_sys::AVColorPrimaries_AVCOL_PRI_SMPTE170M,
            ColorPrimaries::Bt601,
        );
    }

    #[test]
    fn color_primaries_bt2020() {
        expect_color_primaries(
            ff_sys::AVColorPrimaries_AVCOL_PRI_BT2020,
            ColorPrimaries::Bt2020,
        );
    }

    #[test]
    fn color_primaries_unknown_falls_back_to_bt709() {
        expect_color_primaries(
            ff_sys::AVColorPrimaries_AVCOL_PRI_UNSPECIFIED,
            ColorPrimaries::Bt709,
        );
    }

    // -------------------------------------------------------------------------
    // convert_codec
    // -------------------------------------------------------------------------

    #[test]
    fn codec_h264() {
        expect_codec(ff_sys::AVCodecID_AV_CODEC_ID_H264, VideoCodec::H264);
    }

    #[test]
    fn codec_hevc_yields_h265() {
        expect_codec(ff_sys::AVCodecID_AV_CODEC_ID_HEVC, VideoCodec::H265);
    }

    #[test]
    fn codec_vp8() {
        expect_codec(ff_sys::AVCodecID_AV_CODEC_ID_VP8, VideoCodec::Vp8);
    }

    #[test]
    fn codec_vp9() {
        expect_codec(ff_sys::AVCodecID_AV_CODEC_ID_VP9, VideoCodec::Vp9);
    }

    #[test]
    fn codec_av1() {
        expect_codec(ff_sys::AVCodecID_AV_CODEC_ID_AV1, VideoCodec::Av1);
    }

    #[test]
    fn codec_mpeg4() {
        expect_codec(ff_sys::AVCodecID_AV_CODEC_ID_MPEG4, VideoCodec::Mpeg4);
    }

    #[test]
    fn codec_prores() {
        expect_codec(ff_sys::AVCodecID_AV_CODEC_ID_PRORES, VideoCodec::ProRes);
    }

    #[test]
    fn codec_unknown_falls_back_to_h264() {
        expect_codec(ff_sys::AVCodecID_AV_CODEC_ID_NONE, VideoCodec::H264);
    }

    // -------------------------------------------------------------------------
    // hw_accel_to_device_type
    // -------------------------------------------------------------------------

    #[test]
    fn hw_accel_auto_yields_none() {
        expect_device_type(HardwareAccel::Auto, None);
    }

    #[test]
    fn hw_accel_none_yields_none() {
        expect_device_type(HardwareAccel::None, None);
    }

    #[test]
    fn hw_accel_nvdec_yields_cuda() {
        expect_device_type(
            HardwareAccel::Nvdec,
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_CUDA),
        );
    }

    #[test]
    fn hw_accel_qsv_yields_qsv() {
        expect_device_type(
            HardwareAccel::Qsv,
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_QSV),
        );
    }

    #[test]
    fn hw_accel_amf_yields_d3d11va() {
        expect_device_type(
            HardwareAccel::Amf,
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_D3D11VA),
        );
    }

    #[test]
    fn hw_accel_videotoolbox() {
        expect_device_type(
            HardwareAccel::VideoToolbox,
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VIDEOTOOLBOX),
        );
    }

    #[test]
    fn hw_accel_vaapi() {
        expect_device_type(
            HardwareAccel::Vaapi,
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VAAPI),
        );
    }

    // -------------------------------------------------------------------------
    // pixel_format_to_av — round-trip
    // -------------------------------------------------------------------------

    #[test]
    fn pixel_format_to_av_round_trip_yuv420p() {
        expect_round_trip(PixelFormat::Yuv420p);
    }

    #[test]
    fn pixel_format_to_av_round_trip_yuv422p() {
        expect_round_trip(PixelFormat::Yuv422p);
    }

    #[test]
    fn pixel_format_to_av_round_trip_yuv444p() {
        expect_round_trip(PixelFormat::Yuv444p);
    }

    #[test]
    fn pixel_format_to_av_round_trip_rgb24() {
        expect_round_trip(PixelFormat::Rgb24);
    }

    #[test]
    fn pixel_format_to_av_round_trip_bgr24() {
        expect_round_trip(PixelFormat::Bgr24);
    }

    #[test]
    fn pixel_format_to_av_round_trip_rgba() {
        expect_round_trip(PixelFormat::Rgba);
    }

    #[test]
    fn pixel_format_to_av_round_trip_bgra() {
        expect_round_trip(PixelFormat::Bgra);
    }

    #[test]
    fn pixel_format_to_av_round_trip_gray8() {
        expect_round_trip(PixelFormat::Gray8);
    }

    #[test]
    fn pixel_format_to_av_round_trip_nv12() {
        expect_round_trip(PixelFormat::Nv12);
    }

    #[test]
    fn pixel_format_to_av_round_trip_nv21() {
        expect_round_trip(PixelFormat::Nv21);
    }

    #[test]
    fn pixel_format_to_av_unknown_falls_back_to_yuv420p_av() {
        // Yuv420p10le has no explicit mapping in pixel_format_to_av, so it hits the _ arm
        assert_eq!(
            VideoDecoderInner::pixel_format_to_av(PixelFormat::Yuv420p10le),
            ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P
        );
    }
}