// ff_decode/video/decoder_inner.rs

1//! Internal video decoder implementation using FFmpeg.
2//!
3//! This module contains the low-level decoder logic that directly interacts
4//! with FFmpeg's C API through the ff-sys crate. It is not exposed publicly.
5
6// Allow unsafe code in this module as it's necessary for FFmpeg FFI
7#![allow(unsafe_code)]
8// Allow specific clippy lints for FFmpeg FFI code
9#![allow(clippy::similar_names)]
10#![allow(clippy::too_many_lines)]
11#![allow(clippy::cast_sign_loss)]
12#![allow(clippy::cast_possible_truncation)]
13#![allow(clippy::cast_possible_wrap)]
14#![allow(clippy::module_name_repetitions)]
15#![allow(clippy::match_same_arms)]
16#![allow(clippy::ptr_as_ptr)]
17#![allow(clippy::doc_markdown)]
18#![allow(clippy::unnecessary_cast)]
19#![allow(clippy::if_not_else)]
20#![allow(clippy::unnecessary_wraps)]
21#![allow(clippy::cast_precision_loss)]
22#![allow(clippy::if_same_then_else)]
23#![allow(clippy::cast_lossless)]
24
25use std::ffi::CStr;
26use std::path::Path;
27use std::ptr;
28use std::sync::Arc;
29use std::time::Duration;
30
31use ff_format::PooledBuffer;
32use ff_format::codec::VideoCodec;
33use ff_format::color::{ColorPrimaries, ColorRange, ColorSpace};
34use ff_format::time::{Rational, Timestamp};
35use ff_format::{PixelFormat, VideoFrame, VideoStreamInfo};
36use ff_sys::{
37    AVBufferRef, AVCodecContext, AVCodecID, AVColorPrimaries, AVColorRange, AVColorSpace,
38    AVFormatContext, AVFrame, AVHWDeviceType, AVMediaType_AVMEDIA_TYPE_VIDEO, AVPacket,
39    AVPixelFormat, SwsContext,
40};
41
42use crate::HardwareAccel;
43use crate::error::DecodeError;
44use ff_common::FramePool;
45
/// Tolerance in seconds for keyframe/backward seek modes.
///
/// When seeking in Keyframe or Backward mode, frames are skipped until we're within
/// this tolerance of the target position. This balances accuracy with performance for
/// typical GOP sizes (1-2 seconds).
// NOTE(review): the seek logic consuming this constant is outside this chunk —
// confirm it treats the value as whole seconds (e.g. via Duration::from_secs).
const KEYFRAME_SEEK_TOLERANCE_SECS: u64 = 1;
52
53/// RAII guard for `AVFormatContext` to ensure proper cleanup.
54struct AvFormatContextGuard(*mut AVFormatContext);
55
56impl AvFormatContextGuard {
57    /// Creates a new guard by opening an input file.
58    ///
59    /// # Safety
60    ///
61    /// Caller must ensure FFmpeg is initialized and path is valid.
62    unsafe fn new(path: &Path) -> Result<Self, DecodeError> {
63        // SAFETY: Caller ensures FFmpeg is initialized and path is valid
64        let format_ctx = unsafe {
65            ff_sys::avformat::open_input(path).map_err(|e| DecodeError::Ffmpeg {
66                code: e,
67                message: format!("Failed to open file: {}", ff_sys::av_error_string(e)),
68            })?
69        };
70        Ok(Self(format_ctx))
71    }
72
73    /// Returns the raw pointer.
74    const fn as_ptr(&self) -> *mut AVFormatContext {
75        self.0
76    }
77
78    /// Consumes the guard and returns the raw pointer without dropping.
79    fn into_raw(self) -> *mut AVFormatContext {
80        let ptr = self.0;
81        std::mem::forget(self);
82        ptr
83    }
84}
85
86impl Drop for AvFormatContextGuard {
87    fn drop(&mut self) {
88        if !self.0.is_null() {
89            // SAFETY: self.0 is valid and owned by this guard
90            unsafe {
91                ff_sys::avformat::close_input(&mut (self.0 as *mut _));
92            }
93        }
94    }
95}
96
97/// RAII guard for `AVCodecContext` to ensure proper cleanup.
98struct AvCodecContextGuard(*mut AVCodecContext);
99
100impl AvCodecContextGuard {
101    /// Creates a new guard by allocating a codec context.
102    ///
103    /// # Safety
104    ///
105    /// Caller must ensure codec pointer is valid.
106    unsafe fn new(codec: *const ff_sys::AVCodec) -> Result<Self, DecodeError> {
107        // SAFETY: Caller ensures codec pointer is valid
108        let codec_ctx = unsafe {
109            ff_sys::avcodec::alloc_context3(codec).map_err(|e| DecodeError::Ffmpeg {
110                code: e,
111                message: format!("Failed to allocate codec context: {e}"),
112            })?
113        };
114        Ok(Self(codec_ctx))
115    }
116
117    /// Returns the raw pointer.
118    const fn as_ptr(&self) -> *mut AVCodecContext {
119        self.0
120    }
121
122    /// Consumes the guard and returns the raw pointer without dropping.
123    fn into_raw(self) -> *mut AVCodecContext {
124        let ptr = self.0;
125        std::mem::forget(self);
126        ptr
127    }
128}
129
130impl Drop for AvCodecContextGuard {
131    fn drop(&mut self) {
132        if !self.0.is_null() {
133            // SAFETY: self.0 is valid and owned by this guard
134            unsafe {
135                ff_sys::avcodec::free_context(&mut (self.0 as *mut _));
136            }
137        }
138    }
139}
140
141/// RAII guard for `AVPacket` to ensure proper cleanup.
142struct AvPacketGuard(*mut AVPacket);
143
144impl AvPacketGuard {
145    /// Creates a new guard by allocating a packet.
146    ///
147    /// # Safety
148    ///
149    /// Must be called after FFmpeg initialization.
150    unsafe fn new() -> Result<Self, DecodeError> {
151        // SAFETY: Caller ensures FFmpeg is initialized
152        let packet = unsafe { ff_sys::av_packet_alloc() };
153        if packet.is_null() {
154            return Err(DecodeError::Ffmpeg {
155                code: 0,
156                message: "Failed to allocate packet".to_string(),
157            });
158        }
159        Ok(Self(packet))
160    }
161
162    /// Returns the raw pointer.
163    #[allow(dead_code)]
164    const fn as_ptr(&self) -> *mut AVPacket {
165        self.0
166    }
167
168    /// Consumes the guard and returns the raw pointer without dropping.
169    fn into_raw(self) -> *mut AVPacket {
170        let ptr = self.0;
171        std::mem::forget(self);
172        ptr
173    }
174}
175
176impl Drop for AvPacketGuard {
177    fn drop(&mut self) {
178        if !self.0.is_null() {
179            // SAFETY: self.0 is valid and owned by this guard
180            unsafe {
181                ff_sys::av_packet_free(&mut (self.0 as *mut _));
182            }
183        }
184    }
185}
186
187/// RAII guard for `AVFrame` to ensure proper cleanup.
188struct AvFrameGuard(*mut AVFrame);
189
190impl AvFrameGuard {
191    /// Creates a new guard by allocating a frame.
192    ///
193    /// # Safety
194    ///
195    /// Must be called after FFmpeg initialization.
196    unsafe fn new() -> Result<Self, DecodeError> {
197        // SAFETY: Caller ensures FFmpeg is initialized
198        let frame = unsafe { ff_sys::av_frame_alloc() };
199        if frame.is_null() {
200            return Err(DecodeError::Ffmpeg {
201                code: 0,
202                message: "Failed to allocate frame".to_string(),
203            });
204        }
205        Ok(Self(frame))
206    }
207
208    /// Returns the raw pointer.
209    const fn as_ptr(&self) -> *mut AVFrame {
210        self.0
211    }
212
213    /// Consumes the guard and returns the raw pointer without dropping.
214    fn into_raw(self) -> *mut AVFrame {
215        let ptr = self.0;
216        std::mem::forget(self);
217        ptr
218    }
219}
220
221impl Drop for AvFrameGuard {
222    fn drop(&mut self) {
223        if !self.0.is_null() {
224            // SAFETY: self.0 is valid and owned by this guard
225            unsafe {
226                ff_sys::av_frame_free(&mut (self.0 as *mut _));
227            }
228        }
229    }
230}
231
/// Internal decoder state holding FFmpeg contexts.
///
/// This structure manages the lifecycle of FFmpeg objects and is responsible
/// for proper cleanup when dropped. All raw pointers below are owned by this
/// struct; they are created in `new` via the RAII guards and transferred here
/// with `into_raw`.
// NOTE(review): the Drop impl is outside this chunk — confirm it frees every
// owned pointer, including the extra `hw_device_ctx` buffer reference.
pub(crate) struct VideoDecoderInner {
    /// Format context for reading the media file (owned; freed on drop)
    format_ctx: *mut AVFormatContext,
    /// Codec context for decoding video frames (owned; freed on drop)
    codec_ctx: *mut AVCodecContext,
    /// Video stream index in the format context (selected at open time)
    stream_index: i32,
    /// SwScale context for pixel format conversion (initialized to `None`;
    /// populated outside this chunk)
    sws_ctx: Option<*mut SwsContext>,
    /// Target output pixel format (if conversion is needed)
    output_format: Option<PixelFormat>,
    /// Whether end of file has been reached
    eof: bool,
    /// Current playback position
    position: Duration,
    /// Reusable packet for reading from file
    packet: *mut AVPacket,
    /// Reusable frame for decoding
    frame: *mut AVFrame,
    /// Cached SwScale context for thumbnail generation
    thumbnail_sws_ctx: Option<*mut SwsContext>,
    /// Last thumbnail dimensions (for cache invalidation)
    thumbnail_cache_key: Option<(u32, u32, u32, u32, AVPixelFormat)>,
    /// Extra reference to the hardware device context (if hardware
    /// acceleration is active); held separately from the codec context's own
    /// reference so cleanup does not depend on codec-context internals
    hw_device_ctx: Option<*mut AVBufferRef>,
    /// Active hardware acceleration mode
    active_hw_accel: HardwareAccel,
    /// Optional frame pool for memory reuse
    frame_pool: Option<Arc<dyn FramePool>>,
}
266
267impl VideoDecoderInner {
268    /// Maps our `HardwareAccel` enum to the corresponding FFmpeg `AVHWDeviceType`.
269    ///
270    /// Returns `None` for `Auto` and `None` variants as they require special handling.
271    fn hw_accel_to_device_type(accel: HardwareAccel) -> Option<AVHWDeviceType> {
272        match accel {
273            HardwareAccel::Auto => None,
274            HardwareAccel::None => None,
275            HardwareAccel::Nvdec => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_CUDA),
276            HardwareAccel::Qsv => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_QSV),
277            HardwareAccel::Amf => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_D3D11VA), // AMF uses D3D11
278            HardwareAccel::VideoToolbox => {
279                Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VIDEOTOOLBOX)
280            }
281            HardwareAccel::Vaapi => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VAAPI),
282        }
283    }
284
285    /// Returns the hardware decoders to try in priority order for Auto mode.
286    const fn hw_accel_auto_priority() -> &'static [HardwareAccel] {
287        // Priority order: NVDEC, QSV, VideoToolbox, VA-API, AMF
288        &[
289            HardwareAccel::Nvdec,
290            HardwareAccel::Qsv,
291            HardwareAccel::VideoToolbox,
292            HardwareAccel::Vaapi,
293            HardwareAccel::Amf,
294        ]
295    }
296
297    /// Attempts to initialize hardware acceleration.
298    ///
299    /// # Arguments
300    ///
301    /// * `codec_ctx` - The codec context to configure
302    /// * `accel` - Requested hardware acceleration mode
303    ///
304    /// # Returns
305    ///
306    /// Returns `Ok((hw_device_ctx, active_accel))` if hardware acceleration was initialized,
307    /// or `Ok((None, HardwareAccel::None))` if software decoding should be used.
308    ///
309    /// # Errors
310    ///
311    /// Returns an error only if a specific hardware accelerator was requested but failed to initialize.
312    unsafe fn init_hardware_accel(
313        codec_ctx: *mut AVCodecContext,
314        accel: HardwareAccel,
315    ) -> Result<(Option<*mut AVBufferRef>, HardwareAccel), DecodeError> {
316        match accel {
317            HardwareAccel::Auto => {
318                // Try hardware accelerators in priority order
319                for &hw_type in Self::hw_accel_auto_priority() {
320                    // SAFETY: Caller ensures codec_ctx is valid and not yet configured with hardware
321                    if let Ok((Some(ctx), active)) =
322                        unsafe { Self::try_init_hw_device(codec_ctx, hw_type) }
323                    {
324                        return Ok((Some(ctx), active));
325                    }
326                    // Ignore errors in Auto mode and try the next one
327                }
328                // All hardware accelerators failed, fall back to software
329                Ok((None, HardwareAccel::None))
330            }
331            HardwareAccel::None => {
332                // Software decoding explicitly requested
333                Ok((None, HardwareAccel::None))
334            }
335            _ => {
336                // Specific hardware accelerator requested
337                // SAFETY: Caller ensures codec_ctx is valid and not yet configured with hardware
338                unsafe { Self::try_init_hw_device(codec_ctx, accel) }
339            }
340        }
341    }
342
    /// Tries to initialize a specific hardware device.
    ///
    /// On success returns `(Some(our_ref), accel)` where `our_ref` is an
    /// *extra* `AVBufferRef` owned by the caller; the codec context holds its
    /// own, independent reference assigned below. The caller is responsible
    /// for eventually unreffing `our_ref`.
    ///
    /// # Safety
    ///
    /// Caller must ensure `codec_ctx` is valid and not yet configured with a hardware device.
    unsafe fn try_init_hw_device(
        codec_ctx: *mut AVCodecContext,
        accel: HardwareAccel,
    ) -> Result<(Option<*mut AVBufferRef>, HardwareAccel), DecodeError> {
        // Get the FFmpeg device type; Auto/None have no concrete device type,
        // so they resolve to "no hardware" here.
        let Some(device_type) = Self::hw_accel_to_device_type(accel) else {
            return Ok((None, HardwareAccel::None));
        };

        // Create hardware device context
        // SAFETY: FFmpeg is initialized, device_type is valid
        let mut hw_device_ctx: *mut AVBufferRef = ptr::null_mut();
        let ret = unsafe {
            ff_sys::av_hwdevice_ctx_create(
                ptr::addr_of_mut!(hw_device_ctx),
                device_type,
                ptr::null(),     // device: null for default device
                ptr::null_mut(), // opts: null for default options
                0,               // flags: currently unused by FFmpeg
            )
        };

        if ret < 0 {
            // Hardware device creation failed (e.g. driver/hardware absent)
            return Err(DecodeError::HwAccelUnavailable { accel });
        }

        // Assign hardware device context to codec context
        // We transfer ownership of the reference to codec_ctx
        // SAFETY: codec_ctx and hw_device_ctx are valid
        unsafe {
            (*codec_ctx).hw_device_ctx = hw_device_ctx;
        }

        // We keep our own reference for cleanup in Drop
        // SAFETY: hw_device_ctx is valid
        let our_ref = unsafe { ff_sys::av_buffer_ref(hw_device_ctx) };
        if our_ref.is_null() {
            // Failed to create our reference (allocation failure).
            // codec_ctx still owns the original, so we don't need to clean it up here
            return Err(DecodeError::HwAccelUnavailable { accel });
        }

        Ok((Some(our_ref), accel))
    }
393
    /// Returns the currently active hardware acceleration mode.
    ///
    /// This reflects what actually initialized (e.g. the winner of the `Auto`
    /// probe, or `None` after software fallback), not necessarily the mode
    /// originally requested by the caller.
    pub(crate) fn hardware_accel(&self) -> HardwareAccel {
        self.active_hw_accel
    }
398
    /// Checks if a pixel format is a hardware format.
    ///
    /// Hardware formats include: D3D11, CUDA, VAAPI, VideoToolbox, QSV, etc.
    /// Frames in one of these formats are copied to CPU memory by
    /// `transfer_hardware_frame_if_needed` before further processing.
    const fn is_hardware_format(format: AVPixelFormat) -> bool {
        matches!(
            format,
            ff_sys::AVPixelFormat_AV_PIX_FMT_D3D11
                | ff_sys::AVPixelFormat_AV_PIX_FMT_CUDA
                | ff_sys::AVPixelFormat_AV_PIX_FMT_VAAPI
                | ff_sys::AVPixelFormat_AV_PIX_FMT_VIDEOTOOLBOX
                | ff_sys::AVPixelFormat_AV_PIX_FMT_QSV
                | ff_sys::AVPixelFormat_AV_PIX_FMT_VDPAU
                | ff_sys::AVPixelFormat_AV_PIX_FMT_DXVA2_VLD
                | ff_sys::AVPixelFormat_AV_PIX_FMT_OPENCL
                | ff_sys::AVPixelFormat_AV_PIX_FMT_MEDIACODEC
                | ff_sys::AVPixelFormat_AV_PIX_FMT_VULKAN
        )
    }
417
    /// Transfers a hardware frame to CPU memory if needed.
    ///
    /// If `self.frame` is a hardware frame, creates a new software frame
    /// and transfers the data from GPU to CPU memory.
    ///
    /// # Safety
    ///
    /// Caller must ensure `self.frame` contains a valid decoded frame.
    unsafe fn transfer_hardware_frame_if_needed(&mut self) -> Result<(), DecodeError> {
        // SAFETY: self.frame is valid and owned by this instance
        let frame_format = unsafe { (*self.frame).format };

        if !Self::is_hardware_format(frame_format) {
            // Not a hardware frame, no transfer needed
            return Ok(());
        }

        // Create a temporary software frame for transfer
        // SAFETY: FFmpeg is initialized
        let sw_frame = unsafe { ff_sys::av_frame_alloc() };
        if sw_frame.is_null() {
            return Err(DecodeError::Ffmpeg {
                code: 0,
                message: "Failed to allocate software frame for hardware transfer".to_string(),
            });
        }

        // Transfer data from hardware frame to software frame
        // SAFETY: self.frame and sw_frame are valid
        let ret = unsafe {
            ff_sys::av_hwframe_transfer_data(
                sw_frame, self.frame, 0, // flags: currently unused
            )
        };

        if ret < 0 {
            // Transfer failed, clean up the temporary frame before bailing out
            unsafe {
                ff_sys::av_frame_free(&mut (sw_frame as *mut _));
            }
            return Err(DecodeError::Ffmpeg {
                code: ret,
                message: format!(
                    "Failed to transfer hardware frame to CPU memory: {}",
                    ff_sys::av_error_string(ret)
                ),
            });
        }

        // Copy metadata (pts, duration, etc.) from hardware frame to software frame
        // NOTE(review): only pts/pkt_dts/duration/time_base are carried over;
        // FFmpeg's av_frame_copy_props would also copy color/aspect metadata —
        // confirm downstream consumers don't need those fields.
        // SAFETY: Both frames are valid
        unsafe {
            (*sw_frame).pts = (*self.frame).pts;
            (*sw_frame).pkt_dts = (*self.frame).pkt_dts;
            (*sw_frame).duration = (*self.frame).duration;
            (*sw_frame).time_base = (*self.frame).time_base;
        }

        // Replace self.frame with the software frame: drop the hardware
        // surface, move the software frame's buffers in, then free the
        // (now empty) temporary frame shell.
        // SAFETY: self.frame is valid and owned by this instance
        unsafe {
            ff_sys::av_frame_unref(self.frame);
            ff_sys::av_frame_move_ref(self.frame, sw_frame);
            ff_sys::av_frame_free(&mut (sw_frame as *mut _));
        }

        Ok(())
    }
486
487    /// Opens a media file and initializes the decoder.
488    ///
489    /// # Arguments
490    ///
491    /// * `path` - Path to the media file
492    /// * `output_format` - Optional target pixel format for conversion
493    /// * `hardware_accel` - Hardware acceleration mode
494    /// * `thread_count` - Number of decoding threads (0 = auto)
495    ///
496    /// # Errors
497    ///
498    /// Returns an error if:
499    /// - The file cannot be opened
500    /// - No video stream is found
501    /// - The codec is not supported
502    /// - Decoder initialization fails
503    pub(crate) fn new(
504        path: &Path,
505        output_format: Option<PixelFormat>,
506        hardware_accel: HardwareAccel,
507        thread_count: usize,
508        frame_pool: Option<Arc<dyn FramePool>>,
509    ) -> Result<(Self, VideoStreamInfo), DecodeError> {
510        // Ensure FFmpeg is initialized (thread-safe and idempotent)
511        ff_sys::ensure_initialized();
512
513        // Open the input file (with RAII guard)
514        // SAFETY: Path is valid, AvFormatContextGuard ensures cleanup
515        let format_ctx_guard = unsafe { AvFormatContextGuard::new(path)? };
516        let format_ctx = format_ctx_guard.as_ptr();
517
518        // Read stream information
519        // SAFETY: format_ctx is valid and owned by guard
520        unsafe {
521            ff_sys::avformat::find_stream_info(format_ctx).map_err(|e| DecodeError::Ffmpeg {
522                code: e,
523                message: format!("Failed to find stream info: {}", ff_sys::av_error_string(e)),
524            })?;
525        }
526
527        // Find the video stream
528        // SAFETY: format_ctx is valid
529        let (stream_index, codec_id) =
530            unsafe { Self::find_video_stream(format_ctx) }.ok_or_else(|| {
531                DecodeError::NoVideoStream {
532                    path: path.to_path_buf(),
533                }
534            })?;
535
536        // Find the decoder for this codec
537        // SAFETY: codec_id is valid from FFmpeg
538        let codec_name = unsafe { Self::extract_codec_name(codec_id) };
539        let codec = unsafe {
540            ff_sys::avcodec::find_decoder(codec_id).ok_or_else(|| {
541                DecodeError::UnsupportedCodec {
542                    codec: format!("{codec_name} (codec_id={codec_id:?})"),
543                }
544            })?
545        };
546
547        // Allocate codec context (with RAII guard)
548        // SAFETY: codec pointer is valid, AvCodecContextGuard ensures cleanup
549        let codec_ctx_guard = unsafe { AvCodecContextGuard::new(codec)? };
550        let codec_ctx = codec_ctx_guard.as_ptr();
551
552        // Copy codec parameters from stream to context
553        // SAFETY: format_ctx and codec_ctx are valid, stream_index is valid
554        unsafe {
555            let stream = (*format_ctx).streams.add(stream_index as usize);
556            let codecpar = (*(*stream)).codecpar;
557            ff_sys::avcodec::parameters_to_context(codec_ctx, codecpar).map_err(|e| {
558                DecodeError::Ffmpeg {
559                    code: e,
560                    message: format!(
561                        "Failed to copy codec parameters: {}",
562                        ff_sys::av_error_string(e)
563                    ),
564                }
565            })?;
566
567            // Set thread count
568            if thread_count > 0 {
569                (*codec_ctx).thread_count = thread_count as i32;
570            }
571        }
572
573        // Initialize hardware acceleration if requested
574        // SAFETY: codec_ctx is valid and not yet opened
575        let (hw_device_ctx, active_hw_accel) =
576            unsafe { Self::init_hardware_accel(codec_ctx, hardware_accel)? };
577
578        // Open the codec
579        // SAFETY: codec_ctx and codec are valid, hardware device context is set if requested
580        unsafe {
581            ff_sys::avcodec::open2(codec_ctx, codec, ptr::null_mut()).map_err(|e| {
582                // If codec opening failed, we still own our reference to hw_device_ctx
583                // but it will be cleaned up when codec_ctx is freed (which happens
584                // when codec_ctx_guard is dropped)
585                // Our reference in hw_device_ctx will be cleaned up here
586                if let Some(hw_ctx) = hw_device_ctx {
587                    ff_sys::av_buffer_unref(&mut (hw_ctx as *mut _));
588                }
589                DecodeError::Ffmpeg {
590                    code: e,
591                    message: format!("Failed to open codec: {}", ff_sys::av_error_string(e)),
592                }
593            })?;
594        }
595
596        // Extract stream information
597        // SAFETY: All pointers are valid
598        let stream_info =
599            unsafe { Self::extract_stream_info(format_ctx, stream_index as i32, codec_ctx)? };
600
601        // Allocate packet and frame (with RAII guards)
602        // SAFETY: FFmpeg is initialized, guards ensure cleanup
603        let packet_guard = unsafe { AvPacketGuard::new()? };
604        let frame_guard = unsafe { AvFrameGuard::new()? };
605
606        // All initialization successful - transfer ownership to VideoDecoderInner
607        Ok((
608            Self {
609                format_ctx: format_ctx_guard.into_raw(),
610                codec_ctx: codec_ctx_guard.into_raw(),
611                stream_index: stream_index as i32,
612                sws_ctx: None,
613                output_format,
614                eof: false,
615                position: Duration::ZERO,
616                packet: packet_guard.into_raw(),
617                frame: frame_guard.into_raw(),
618                thumbnail_sws_ctx: None,
619                thumbnail_cache_key: None,
620                hw_device_ctx,
621                active_hw_accel,
622                frame_pool,
623            },
624            stream_info,
625        ))
626    }
627
628    /// Finds the first video stream in the format context.
629    ///
630    /// # Returns
631    ///
632    /// Returns `Some((index, codec_id))` if a video stream is found, `None` otherwise.
633    ///
634    /// # Safety
635    ///
636    /// Caller must ensure `format_ctx` is valid and initialized.
637    unsafe fn find_video_stream(format_ctx: *mut AVFormatContext) -> Option<(usize, AVCodecID)> {
638        // SAFETY: Caller ensures format_ctx is valid
639        unsafe {
640            let nb_streams = (*format_ctx).nb_streams as usize;
641
642            for i in 0..nb_streams {
643                let stream = (*format_ctx).streams.add(i);
644                let codecpar = (*(*stream)).codecpar;
645
646                if (*codecpar).codec_type == AVMediaType_AVMEDIA_TYPE_VIDEO {
647                    return Some((i, (*codecpar).codec_id));
648                }
649            }
650
651            None
652        }
653    }
654
655    /// Returns the human-readable codec name for a given `AVCodecID`.
656    unsafe fn extract_codec_name(codec_id: ff_sys::AVCodecID) -> String {
657        // SAFETY: avcodec_get_name is safe for any codec ID value
658        let name_ptr = unsafe { ff_sys::avcodec_get_name(codec_id) };
659        if name_ptr.is_null() {
660            return String::from("unknown");
661        }
662        // SAFETY: avcodec_get_name returns a valid C string with static lifetime
663        unsafe { CStr::from_ptr(name_ptr).to_string_lossy().into_owned() }
664    }
665
    /// Extracts video stream information from FFmpeg structures.
    ///
    /// All raw fields are read in one unsafe block, then converted to
    /// crate-local types with lenient fallbacks (see the `convert_*` helpers,
    /// which warn and substitute defaults rather than fail).
    ///
    /// # Safety
    ///
    /// Caller must ensure `format_ctx` and `codec_ctx` are valid and that
    /// `stream_index` refers to an existing stream of `format_ctx`.
    unsafe fn extract_stream_info(
        format_ctx: *mut AVFormatContext,
        stream_index: i32,
        codec_ctx: *mut AVCodecContext,
    ) -> Result<VideoStreamInfo, DecodeError> {
        // SAFETY: Caller ensures all pointers are valid
        let (
            width,
            height,
            fps_rational,
            duration_val,
            pix_fmt,
            color_space_val,
            color_range_val,
            color_primaries_val,
            codec_id,
        ) = unsafe {
            let stream = (*format_ctx).streams.add(stream_index as usize);
            let codecpar = (*(*stream)).codecpar;

            (
                (*codecpar).width as u32,
                (*codecpar).height as u32,
                (*(*stream)).avg_frame_rate,
                (*format_ctx).duration,
                (*codec_ctx).pix_fmt,
                (*codecpar).color_space,
                (*codecpar).color_range,
                (*codecpar).color_primaries,
                (*codecpar).codec_id,
            )
        };

        // Extract frame rate; a zero denominator would make the rational
        // undefined, so fall back to 30fps in that case.
        let frame_rate = if fps_rational.den != 0 {
            Rational::new(fps_rational.num as i32, fps_rational.den as i32)
        } else {
            log::warn!(
                "invalid frame rate, falling back to 30fps num={} den=0 fallback=30/1",
                fps_rational.num
            );
            Rational::new(30, 1)
        };

        // Extract duration; the container duration is divided by 1_000_000,
        // i.e. treated as microseconds (AV_TIME_BASE). Non-positive means unknown.
        let duration = if duration_val > 0 {
            let duration_secs = duration_val as f64 / 1_000_000.0;
            Some(Duration::from_secs_f64(duration_secs))
        } else {
            None
        };

        // Extract pixel format
        let pixel_format = Self::convert_pixel_format(pix_fmt);

        // Extract color information
        let color_space = Self::convert_color_space(color_space_val);
        let color_range = Self::convert_color_range(color_range_val);
        let color_primaries = Self::convert_color_primaries(color_primaries_val);

        // Extract codec (enum plus human-readable name)
        let codec = Self::convert_codec(codec_id);
        let codec_name = unsafe { Self::extract_codec_name(codec_id) };

        // Build stream info; duration is only set when known.
        let mut builder = VideoStreamInfo::builder()
            .index(stream_index as u32)
            .codec(codec)
            .codec_name(codec_name)
            .width(width)
            .height(height)
            .frame_rate(frame_rate)
            .pixel_format(pixel_format)
            .color_space(color_space)
            .color_range(color_range)
            .color_primaries(color_primaries);

        if let Some(d) = duration {
            builder = builder.duration(d);
        }

        Ok(builder.build())
    }
750
751    /// Converts FFmpeg pixel format to our PixelFormat enum.
752    fn convert_pixel_format(fmt: AVPixelFormat) -> PixelFormat {
753        if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P {
754            PixelFormat::Yuv420p
755        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P {
756            PixelFormat::Yuv422p
757        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P {
758            PixelFormat::Yuv444p
759        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24 {
760            PixelFormat::Rgb24
761        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24 {
762            PixelFormat::Bgr24
763        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA {
764            PixelFormat::Rgba
765        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA {
766            PixelFormat::Bgra
767        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8 {
768            PixelFormat::Gray8
769        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_NV12 {
770            PixelFormat::Nv12
771        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_NV21 {
772            PixelFormat::Nv21
773        } else {
774            log::warn!(
775                "pixel_format unsupported, falling back to Yuv420p requested={fmt} fallback=Yuv420p"
776            );
777            PixelFormat::Yuv420p
778        }
779    }
780
781    /// Converts FFmpeg color space to our ColorSpace enum.
782    fn convert_color_space(space: AVColorSpace) -> ColorSpace {
783        if space == ff_sys::AVColorSpace_AVCOL_SPC_BT709 {
784            ColorSpace::Bt709
785        } else if space == ff_sys::AVColorSpace_AVCOL_SPC_BT470BG
786            || space == ff_sys::AVColorSpace_AVCOL_SPC_SMPTE170M
787        {
788            ColorSpace::Bt601
789        } else if space == ff_sys::AVColorSpace_AVCOL_SPC_BT2020_NCL {
790            ColorSpace::Bt2020
791        } else {
792            log::warn!(
793                "color_space unsupported, falling back to Bt709 requested={space} fallback=Bt709"
794            );
795            ColorSpace::Bt709
796        }
797    }
798
799    /// Converts FFmpeg color range to our ColorRange enum.
800    fn convert_color_range(range: AVColorRange) -> ColorRange {
801        if range == ff_sys::AVColorRange_AVCOL_RANGE_JPEG {
802            ColorRange::Full
803        } else if range == ff_sys::AVColorRange_AVCOL_RANGE_MPEG {
804            ColorRange::Limited
805        } else {
806            log::warn!(
807                "color_range unsupported, falling back to Limited requested={range} fallback=Limited"
808            );
809            ColorRange::Limited
810        }
811    }
812
813    /// Converts FFmpeg color primaries to our ColorPrimaries enum.
814    fn convert_color_primaries(primaries: AVColorPrimaries) -> ColorPrimaries {
815        if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT709 {
816            ColorPrimaries::Bt709
817        } else if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT470BG
818            || primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_SMPTE170M
819        {
820            ColorPrimaries::Bt601
821        } else if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT2020 {
822            ColorPrimaries::Bt2020
823        } else {
824            log::warn!(
825                "color_primaries unsupported, falling back to Bt709 requested={primaries} fallback=Bt709"
826            );
827            ColorPrimaries::Bt709
828        }
829    }
830
831    /// Converts FFmpeg codec ID to our VideoCodec enum.
832    fn convert_codec(codec_id: AVCodecID) -> VideoCodec {
833        if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_H264 {
834            VideoCodec::H264
835        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_HEVC {
836            VideoCodec::H265
837        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_VP8 {
838            VideoCodec::Vp8
839        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_VP9 {
840            VideoCodec::Vp9
841        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_AV1 {
842            VideoCodec::Av1
843        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_MPEG4 {
844            VideoCodec::Mpeg4
845        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_PRORES {
846            VideoCodec::ProRes
847        } else {
848            log::warn!(
849                "video codec unsupported, falling back to H264 codec_id={codec_id} fallback=H264"
850            );
851            VideoCodec::H264
852        }
853    }
854
    /// Decodes the next video frame.
    ///
    /// Implements FFmpeg's send/receive decoding protocol: frames are drained
    /// from the decoder first; when the decoder reports EAGAIN (needs input),
    /// one packet is read from the demuxer and, if it belongs to the selected
    /// video stream, sent to the decoder. On demuxer EOF a null packet is sent
    /// to flush any frames still buffered inside the decoder.
    ///
    /// Side effect: `self.position` is updated from the decoded frame's PTS
    /// (converted via the stream's time_base) whenever the PTS is valid.
    ///
    /// # Returns
    ///
    /// - `Ok(Some(frame))` - Successfully decoded a frame
    /// - `Ok(None)` - End of stream reached
    /// - `Err(_)` - Decoding error occurred
    pub(crate) fn decode_one(&mut self) -> Result<Option<VideoFrame>, DecodeError> {
        // Once fully drained, keep returning None without touching FFmpeg.
        if self.eof {
            return Ok(None);
        }

        unsafe {
            loop {
                // Try to receive a frame from the decoder
                let ret = ff_sys::avcodec_receive_frame(self.codec_ctx, self.frame);

                if ret == 0 {
                    // Successfully received a frame
                    // Check if this is a hardware frame and transfer to CPU memory if needed
                    self.transfer_hardware_frame_if_needed()?;

                    let video_frame = self.convert_frame_to_video_frame()?;

                    // Update position based on frame timestamp
                    let pts = (*self.frame).pts;
                    if pts != ff_sys::AV_NOPTS_VALUE {
                        let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
                        let time_base = (*(*stream)).time_base;
                        let timestamp_secs =
                            pts as f64 * time_base.num as f64 / time_base.den as f64;
                        // NOTE(review): Duration::from_secs_f64 panics on a
                        // negative input; a stream with a negative start PTS
                        // would hit that here — confirm inputs are normalized.
                        self.position = Duration::from_secs_f64(timestamp_secs);
                    }

                    return Ok(Some(video_frame));
                } else if ret == ff_sys::error_codes::EAGAIN {
                    // Need to send more packets to the decoder
                    // Read a packet from the file
                    let read_ret = ff_sys::av_read_frame(self.format_ctx, self.packet);

                    if read_ret == ff_sys::error_codes::EOF {
                        // End of file - flush the decoder by sending a null
                        // packet. The return value is ignored: there is no
                        // actionable recovery from a failed flush request.
                        ff_sys::avcodec_send_packet(self.codec_ctx, ptr::null());
                        self.eof = true;
                        continue;
                    } else if read_ret < 0 {
                        return Err(DecodeError::Ffmpeg {
                            code: read_ret,
                            message: format!(
                                "Failed to read frame: {}",
                                ff_sys::av_error_string(read_ret)
                            ),
                        });
                    }

                    // Check if this packet belongs to the video stream
                    if (*self.packet).stream_index == self.stream_index {
                        // Send the packet to the decoder
                        let send_ret = ff_sys::avcodec_send_packet(self.codec_ctx, self.packet);
                        ff_sys::av_packet_unref(self.packet);

                        // NOTE(review): if send returned EAGAIN the packet has
                        // already been unreffed above and its data is dropped.
                        // With the receive-before-send ordering of this loop
                        // the decoder should always accept input here, making
                        // that state unreachable — confirm.
                        if send_ret < 0 && send_ret != ff_sys::error_codes::EAGAIN {
                            return Err(DecodeError::Ffmpeg {
                                code: send_ret,
                                message: format!(
                                    "Failed to send packet: {}",
                                    ff_sys::av_error_string(send_ret)
                                ),
                            });
                        }
                    } else {
                        // Not our stream, unref and continue
                        ff_sys::av_packet_unref(self.packet);
                    }
                } else if ret == ff_sys::error_codes::EOF {
                    // Decoder has been fully flushed
                    self.eof = true;
                    return Ok(None);
                } else {
                    // Any other receive error is a hard decoding failure.
                    return Err(DecodeError::DecodingFailed {
                        timestamp: Some(self.position),
                        reason: ff_sys::av_error_string(ret),
                    });
                }
            }
        }
    }
942
943    /// Converts an AVFrame to a VideoFrame, applying pixel format conversion if needed.
944    unsafe fn convert_frame_to_video_frame(&mut self) -> Result<VideoFrame, DecodeError> {
945        // SAFETY: Caller ensures self.frame is valid
946        unsafe {
947            let width = (*self.frame).width as u32;
948            let height = (*self.frame).height as u32;
949            let src_format = (*self.frame).format;
950
951            // Determine output format
952            let dst_format = if let Some(fmt) = self.output_format {
953                Self::pixel_format_to_av(fmt)
954            } else {
955                src_format
956            };
957
958            // Check if conversion is needed
959            let needs_conversion = src_format != dst_format;
960
961            if needs_conversion {
962                self.convert_with_sws(width, height, src_format, dst_format)
963            } else {
964                self.av_frame_to_video_frame(self.frame)
965            }
966        }
967    }
968
    /// Converts pixel format using SwScale.
    ///
    /// Allocates a fresh destination AVFrame (RAII-guarded), scales the
    /// decoder's current frame into it, copies the PTS over, and wraps the
    /// result as a VideoFrame.
    ///
    /// # Arguments
    ///
    /// * `width`/`height` - Source frame dimensions; output keeps them.
    /// * `src_format`/`dst_format` - FFmpeg pixel format values.
    ///
    /// # Errors
    ///
    /// Returns [`DecodeError::Ffmpeg`] if context creation, destination buffer
    /// allocation, or scaling fails.
    unsafe fn convert_with_sws(
        &mut self,
        width: u32,
        height: u32,
        src_format: i32,
        dst_format: i32,
    ) -> Result<VideoFrame, DecodeError> {
        // SAFETY: Caller ensures frame and context pointers are valid
        unsafe {
            // Get or create SwScale context (built lazily, reused per frame).
            //
            // NOTE(review): the cached context is created from the first
            // frame's width/height/formats and never invalidated. If the
            // stream changed resolution or pixel format mid-stream it would
            // scale with stale parameters — confirm the caller guarantees
            // constant frame geometry, or key the cache on these values.
            if self.sws_ctx.is_none() {
                let ctx = ff_sys::swscale::get_context(
                    width as i32,
                    height as i32,
                    src_format,
                    width as i32,
                    height as i32,
                    dst_format,
                    ff_sys::swscale::scale_flags::BILINEAR,
                )
                .map_err(|e| DecodeError::Ffmpeg {
                    code: 0,
                    message: format!("Failed to create sws context: {e}"),
                })?;

                self.sws_ctx = Some(ctx);
            }

            // Re-extract the context; unreachable in practice since it was
            // just populated above, but avoids an unwrap.
            let Some(sws_ctx) = self.sws_ctx else {
                return Err(DecodeError::Ffmpeg {
                    code: 0,
                    message: "SwsContext not initialized".to_string(),
                });
            };

            // Allocate destination frame (with RAII guard)
            let dst_frame_guard = AvFrameGuard::new()?;
            let dst_frame = dst_frame_guard.as_ptr();

            (*dst_frame).width = width as i32;
            (*dst_frame).height = height as i32;
            (*dst_frame).format = dst_format;

            // Allocate buffer for destination frame
            let buffer_ret = ff_sys::av_frame_get_buffer(dst_frame, 0);
            if buffer_ret < 0 {
                return Err(DecodeError::Ffmpeg {
                    code: buffer_ret,
                    message: format!(
                        "Failed to allocate frame buffer: {}",
                        ff_sys::av_error_string(buffer_ret)
                    ),
                });
            }

            // Perform conversion (full-height slice starting at row 0)
            ff_sys::swscale::scale(
                sws_ctx,
                (*self.frame).data.as_ptr() as *const *const u8,
                (*self.frame).linesize.as_ptr(),
                0,
                height as i32,
                (*dst_frame).data.as_ptr() as *const *mut u8,
                (*dst_frame).linesize.as_ptr(),
            )
            .map_err(|e| DecodeError::Ffmpeg {
                code: 0,
                message: format!("Failed to scale frame: {e}"),
            })?;

            // Copy timestamp so downstream timestamp extraction still works
            (*dst_frame).pts = (*self.frame).pts;

            // Convert to VideoFrame (copies plane data out of dst_frame)
            let video_frame = self.av_frame_to_video_frame(dst_frame)?;

            // dst_frame is automatically freed when guard drops

            Ok(video_frame)
        }
    }
1051
1052    /// Converts an AVFrame to a VideoFrame.
1053    unsafe fn av_frame_to_video_frame(
1054        &self,
1055        frame: *const AVFrame,
1056    ) -> Result<VideoFrame, DecodeError> {
1057        // SAFETY: Caller ensures frame and format_ctx are valid
1058        unsafe {
1059            let width = (*frame).width as u32;
1060            let height = (*frame).height as u32;
1061            let format = Self::convert_pixel_format((*frame).format);
1062
1063            // Extract timestamp
1064            let pts = (*frame).pts;
1065            let timestamp = if pts != ff_sys::AV_NOPTS_VALUE {
1066                let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
1067                let time_base = (*(*stream)).time_base;
1068                Timestamp::new(
1069                    pts as i64,
1070                    Rational::new(time_base.num as i32, time_base.den as i32),
1071                )
1072            } else {
1073                Timestamp::default()
1074            };
1075
1076            // Convert frame to planes and strides
1077            let (planes, strides) =
1078                self.extract_planes_and_strides(frame, width, height, format)?;
1079
1080            VideoFrame::new(planes, strides, width, height, format, timestamp, false).map_err(|e| {
1081                DecodeError::Ffmpeg {
1082                    code: 0,
1083                    message: format!("Failed to create VideoFrame: {e}"),
1084                }
1085            })
1086        }
1087    }
1088
1089    /// Allocates a buffer, optionally using the frame pool.
1090    ///
1091    /// If a frame pool is configured and has available buffers, uses the pool.
1092    /// Otherwise, allocates a new Vec<u8>.
1093    ///
1094    /// Allocates a buffer for decoded frame data.
1095    ///
1096    /// If a frame pool is configured, attempts to acquire a buffer from the pool.
1097    /// The returned PooledBuffer will automatically be returned to the pool when dropped.
1098    fn allocate_buffer(&self, size: usize) -> PooledBuffer {
1099        if let Some(ref pool) = self.frame_pool {
1100            if let Some(pooled_buffer) = pool.acquire(size) {
1101                return pooled_buffer;
1102            }
1103            // Pool is configured but currently empty (or has no buffer large
1104            // enough). Allocate fresh memory and attach it to the pool so
1105            // that when the VideoFrame is dropped the buffer is returned via
1106            // pool.release() and becomes available for the next frame.
1107            return PooledBuffer::new(vec![0u8; size], Arc::downgrade(pool));
1108        }
1109        PooledBuffer::standalone(vec![0u8; size])
1110    }
1111
1112    /// Extracts planes and strides from an AVFrame.
1113    unsafe fn extract_planes_and_strides(
1114        &self,
1115        frame: *const AVFrame,
1116        width: u32,
1117        height: u32,
1118        format: PixelFormat,
1119    ) -> Result<(Vec<PooledBuffer>, Vec<usize>), DecodeError> {
1120        // Bytes per pixel constants for different pixel formats
1121        const BYTES_PER_PIXEL_RGBA: usize = 4;
1122        const BYTES_PER_PIXEL_RGB24: usize = 3;
1123
1124        // SAFETY: Caller ensures frame is valid and format matches actual frame format
1125        unsafe {
1126            let mut planes = Vec::new();
1127            let mut strides = Vec::new();
1128
1129            #[allow(clippy::match_same_arms)]
1130            match format {
1131                PixelFormat::Rgba | PixelFormat::Bgra | PixelFormat::Rgb24 | PixelFormat::Bgr24 => {
1132                    // Packed formats - single plane
1133                    let stride = (*frame).linesize[0] as usize;
1134                    let bytes_per_pixel = if matches!(format, PixelFormat::Rgba | PixelFormat::Bgra)
1135                    {
1136                        BYTES_PER_PIXEL_RGBA
1137                    } else {
1138                        BYTES_PER_PIXEL_RGB24
1139                    };
1140                    let row_size = (width as usize) * bytes_per_pixel;
1141                    let buffer_size = row_size * height as usize;
1142                    let mut plane_data = self.allocate_buffer(buffer_size);
1143
1144                    for y in 0..height as usize {
1145                        let src_offset = y * stride;
1146                        let dst_offset = y * row_size;
1147                        let src_ptr = (*frame).data[0].add(src_offset);
1148                        let plane_slice = plane_data.as_mut();
1149                        // SAFETY: We copy exactly `row_size` bytes per row. The source pointer
1150                        // is valid (from FFmpeg frame data), destination has sufficient capacity
1151                        // (allocated with height * row_size), and ranges don't overlap.
1152                        std::ptr::copy_nonoverlapping(
1153                            src_ptr,
1154                            plane_slice[dst_offset..].as_mut_ptr(),
1155                            row_size,
1156                        );
1157                    }
1158
1159                    planes.push(plane_data);
1160                    strides.push(row_size);
1161                }
1162                PixelFormat::Yuv420p | PixelFormat::Yuv422p | PixelFormat::Yuv444p => {
1163                    // Planar YUV formats
1164                    let (chroma_width, chroma_height) = match format {
1165                        PixelFormat::Yuv420p => (width / 2, height / 2),
1166                        PixelFormat::Yuv422p => (width / 2, height),
1167                        PixelFormat::Yuv444p => (width, height),
1168                        _ => unreachable!(),
1169                    };
1170
1171                    // Y plane
1172                    let y_stride = width as usize;
1173                    let y_size = y_stride * height as usize;
1174                    let mut y_data = self.allocate_buffer(y_size);
1175                    for y in 0..height as usize {
1176                        let src_offset = y * (*frame).linesize[0] as usize;
1177                        let dst_offset = y * y_stride;
1178                        let src_ptr = (*frame).data[0].add(src_offset);
1179                        let y_slice = y_data.as_mut();
1180                        // SAFETY: Copying Y plane row-by-row. Source is valid FFmpeg data,
1181                        // destination has sufficient capacity, no overlap.
1182                        std::ptr::copy_nonoverlapping(
1183                            src_ptr,
1184                            y_slice[dst_offset..].as_mut_ptr(),
1185                            width as usize,
1186                        );
1187                    }
1188                    planes.push(y_data);
1189                    strides.push(y_stride);
1190
1191                    // U plane
1192                    let u_stride = chroma_width as usize;
1193                    let u_size = u_stride * chroma_height as usize;
1194                    let mut u_data = self.allocate_buffer(u_size);
1195                    for y in 0..chroma_height as usize {
1196                        let src_offset = y * (*frame).linesize[1] as usize;
1197                        let dst_offset = y * u_stride;
1198                        let src_ptr = (*frame).data[1].add(src_offset);
1199                        let u_slice = u_data.as_mut();
1200                        // SAFETY: Copying U (chroma) plane row-by-row. Valid source,
1201                        // sufficient destination capacity, no overlap.
1202                        std::ptr::copy_nonoverlapping(
1203                            src_ptr,
1204                            u_slice[dst_offset..].as_mut_ptr(),
1205                            chroma_width as usize,
1206                        );
1207                    }
1208                    planes.push(u_data);
1209                    strides.push(u_stride);
1210
1211                    // V plane
1212                    let v_stride = chroma_width as usize;
1213                    let v_size = v_stride * chroma_height as usize;
1214                    let mut v_data = self.allocate_buffer(v_size);
1215                    for y in 0..chroma_height as usize {
1216                        let src_offset = y * (*frame).linesize[2] as usize;
1217                        let dst_offset = y * v_stride;
1218                        let src_ptr = (*frame).data[2].add(src_offset);
1219                        let v_slice = v_data.as_mut();
1220                        // SAFETY: Copying V (chroma) plane row-by-row. Valid source,
1221                        // sufficient destination capacity, no overlap.
1222                        std::ptr::copy_nonoverlapping(
1223                            src_ptr,
1224                            v_slice[dst_offset..].as_mut_ptr(),
1225                            chroma_width as usize,
1226                        );
1227                    }
1228                    planes.push(v_data);
1229                    strides.push(v_stride);
1230                }
1231                PixelFormat::Gray8 => {
1232                    // Single plane grayscale
1233                    let stride = width as usize;
1234                    let mut plane_data = self.allocate_buffer(stride * height as usize);
1235
1236                    for y in 0..height as usize {
1237                        let src_offset = y * (*frame).linesize[0] as usize;
1238                        let dst_offset = y * stride;
1239                        let src_ptr = (*frame).data[0].add(src_offset);
1240                        let plane_slice = plane_data.as_mut();
1241                        // SAFETY: Copying grayscale plane row-by-row. Valid source,
1242                        // sufficient destination capacity, no overlap.
1243                        std::ptr::copy_nonoverlapping(
1244                            src_ptr,
1245                            plane_slice[dst_offset..].as_mut_ptr(),
1246                            width as usize,
1247                        );
1248                    }
1249
1250                    planes.push(plane_data);
1251                    strides.push(stride);
1252                }
1253                PixelFormat::Nv12 | PixelFormat::Nv21 => {
1254                    // Semi-planar formats
1255                    let uv_height = height / 2;
1256
1257                    // Y plane
1258                    let y_stride = width as usize;
1259                    let mut y_data = self.allocate_buffer(y_stride * height as usize);
1260                    for y in 0..height as usize {
1261                        let src_offset = y * (*frame).linesize[0] as usize;
1262                        let dst_offset = y * y_stride;
1263                        let src_ptr = (*frame).data[0].add(src_offset);
1264                        let y_slice = y_data.as_mut();
1265                        // SAFETY: Copying Y plane (semi-planar) row-by-row. Valid source,
1266                        // sufficient destination capacity, no overlap.
1267                        std::ptr::copy_nonoverlapping(
1268                            src_ptr,
1269                            y_slice[dst_offset..].as_mut_ptr(),
1270                            width as usize,
1271                        );
1272                    }
1273                    planes.push(y_data);
1274                    strides.push(y_stride);
1275
1276                    // UV plane
1277                    let uv_stride = width as usize;
1278                    let mut uv_data = self.allocate_buffer(uv_stride * uv_height as usize);
1279                    for y in 0..uv_height as usize {
1280                        let src_offset = y * (*frame).linesize[1] as usize;
1281                        let dst_offset = y * uv_stride;
1282                        let src_ptr = (*frame).data[1].add(src_offset);
1283                        let uv_slice = uv_data.as_mut();
1284                        // SAFETY: Copying interleaved UV plane (semi-planar) row-by-row.
1285                        // Valid source, sufficient destination capacity, no overlap.
1286                        std::ptr::copy_nonoverlapping(
1287                            src_ptr,
1288                            uv_slice[dst_offset..].as_mut_ptr(),
1289                            width as usize,
1290                        );
1291                    }
1292                    planes.push(uv_data);
1293                    strides.push(uv_stride);
1294                }
1295                _ => {
1296                    return Err(DecodeError::Ffmpeg {
1297                        code: 0,
1298                        message: format!("Unsupported pixel format: {format:?}"),
1299                    });
1300                }
1301            }
1302
1303            Ok((planes, strides))
1304        }
1305    }
1306
1307    /// Converts our `PixelFormat` to FFmpeg `AVPixelFormat`.
1308    fn pixel_format_to_av(format: PixelFormat) -> AVPixelFormat {
1309        match format {
1310            PixelFormat::Yuv420p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P,
1311            PixelFormat::Yuv422p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P,
1312            PixelFormat::Yuv444p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P,
1313            PixelFormat::Rgb24 => ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24,
1314            PixelFormat::Bgr24 => ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24,
1315            PixelFormat::Rgba => ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA,
1316            PixelFormat::Bgra => ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA,
1317            PixelFormat::Gray8 => ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8,
1318            PixelFormat::Nv12 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV12,
1319            PixelFormat::Nv21 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV21,
1320            _ => {
1321                log::warn!(
1322                    "pixel_format has no AV mapping, falling back to Yuv420p format={format:?} fallback=AV_PIX_FMT_YUV420P"
1323                );
1324                ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P
1325            }
1326        }
1327    }
1328
    /// Returns the current playback position.
    ///
    /// Reflects the PTS of the most recently decoded frame (updated by
    /// `decode_one`); after a seek it stays stale until the next successful
    /// decode.
    pub(crate) fn position(&self) -> Duration {
        self.position
    }
1333
    /// Returns whether end of file has been reached.
    ///
    /// Set by `decode_one` once the demuxer hits EOF and the decoder has been
    /// fully drained; cleared again by `seek`.
    pub(crate) fn is_eof(&self) -> bool {
        self.eof
    }
1338
1339    /// Converts a `Duration` to a presentation timestamp (PTS) in stream time_base units.
1340    ///
1341    /// # Arguments
1342    ///
1343    /// * `duration` - The duration to convert.
1344    ///
1345    /// # Returns
1346    ///
1347    /// The timestamp in stream time_base units.
1348    ///
1349    /// # Note
1350    ///
1351    /// av_seek_frame expects timestamps in stream time_base units when using a specific stream_index.
1352    fn duration_to_pts(&self, duration: Duration) -> i64 {
1353        // Convert duration to stream time_base units for seeking
1354        // SAFETY:
1355        // - format_ctx is valid: owned by VideoDecoderInner, initialized in constructor via avformat_open_input
1356        // - stream_index is valid: validated during decoder creation (find_stream_info + codec opening)
1357        // - streams array access is valid: guaranteed by FFmpeg after successful avformat_open_input
1358        let time_base = unsafe {
1359            let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
1360            (*(*stream)).time_base
1361        };
1362
1363        // Convert: duration (seconds) * (time_base.den / time_base.num) = PTS
1364        let time_base_f64 = time_base.den as f64 / time_base.num as f64;
1365        (duration.as_secs_f64() * time_base_f64) as i64
1366    }
1367
1368    /// Converts a presentation timestamp (PTS) to a `Duration`.
1369    ///
1370    /// # Arguments
1371    ///
1372    /// * `pts` - The presentation timestamp in stream time base units.
1373    ///
1374    /// # Returns
1375    ///
1376    /// The duration corresponding to the PTS.
1377    ///
1378    /// # Safety
1379    ///
1380    /// Caller must ensure that `format_ctx` and `stream_index` are valid.
1381    ///
1382    /// # Note
1383    ///
1384    /// Currently unused but kept for potential future use in more advanced seeking scenarios.
1385    #[allow(dead_code)]
1386    fn pts_to_duration(&self, pts: i64) -> Duration {
1387        // SAFETY: Caller ensures format_ctx and stream_index are valid
1388        unsafe {
1389            let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
1390            let time_base = (*(*stream)).time_base;
1391
1392            // Convert PTS to duration
1393            let duration_secs = pts as f64 * time_base.num as f64 / time_base.den as f64;
1394            Duration::from_secs_f64(duration_secs)
1395        }
1396    }
1397
    /// Seeks to a specified position in the video stream.
    ///
    /// This method performs efficient seeking without reopening the file.
    /// It uses `av_seek_frame` internally and flushes the decoder buffers.
    ///
    /// # Performance Characteristics
    ///
    /// - **Keyframe seek**: 5-10ms for typical GOP sizes (1-2 seconds)
    /// - **Exact seek**: Proportional to distance from nearest keyframe
    /// - **Large GOP videos**: May require sequential decoding from distant keyframe
    ///
    /// For videos with sparse keyframes (GOP > 2 seconds), the method will
    /// decode frames sequentially from the nearest keyframe to reach the target.
    /// This ensures correct frame data but may take longer (10-50ms for very large GOPs).
    ///
    /// # Arguments
    ///
    /// * `position` - Target position to seek to.
    /// * `mode` - Seek mode (Keyframe, Exact, or Backward).
    ///
    /// # Errors
    ///
    /// Returns [`DecodeError::SeekFailed`] if the seek operation fails.
    pub(crate) fn seek(
        &mut self,
        position: Duration,
        mode: crate::SeekMode,
    ) -> Result<(), DecodeError> {
        use crate::SeekMode;

        // Convert the target to stream time_base units for av_seek_frame.
        let timestamp = self.duration_to_pts(position);

        // All seek modes use BACKWARD flag to find the nearest keyframe at or before target.
        // The difference between modes is in the post-seek processing below.
        let flags = ff_sys::avformat::seek_flags::BACKWARD;

        // 1. Clear any pending packet and frame to avoid reading stale data after seek
        // SAFETY:
        // - packet is valid: allocated in constructor, owned by VideoDecoderInner
        // - frame is valid: allocated in constructor, owned by VideoDecoderInner
        unsafe {
            ff_sys::av_packet_unref(self.packet);
            ff_sys::av_frame_unref(self.frame);
        }

        // 2. Seek in the format context (file is NOT reopened)
        // Use av_seek_frame with the stream index and timestamp in stream time_base units
        // SAFETY:
        // - format_ctx is valid: owned by VideoDecoderInner, initialized via avformat_open_input
        // - stream_index is valid: validated during decoder creation
        // - timestamp is valid: converted from Duration using stream's time_base
        unsafe {
            ff_sys::avformat::seek_frame(
                self.format_ctx,
                self.stream_index as i32,
                timestamp,
                flags,
            )
            .map_err(|e| DecodeError::SeekFailed {
                target: position,
                reason: ff_sys::av_error_string(e),
            })?;
        }

        // 3. Flush decoder buffers to clear any cached frames
        // SAFETY: codec_ctx is valid: owned by VideoDecoderInner, initialized via avcodec_open2
        unsafe {
            ff_sys::avcodec::flush_buffers(self.codec_ctx);
        }

        // 4. Drain any remaining frames from the decoder after flush
        // This ensures no stale frames are returned after the seek
        // (belt-and-braces on top of step 3; receive errors other than
        // EAGAIN/EOF simply stop the drain and are surfaced by the next
        // decode instead).
        // SAFETY:
        // - codec_ctx is valid: owned by VideoDecoderInner, initialized via avcodec_open2
        // - frame is valid: allocated in constructor, owned by VideoDecoderInner
        unsafe {
            loop {
                let ret = ff_sys::avcodec_receive_frame(self.codec_ctx, self.frame);
                if ret == ff_sys::error_codes::EAGAIN || ret == ff_sys::error_codes::EOF {
                    // No more frames in the decoder buffer
                    break;
                } else if ret == 0 {
                    // Got a frame, unref it and continue draining
                    ff_sys::av_frame_unref(self.frame);
                } else {
                    // Other error, break out
                    break;
                }
            }
        }

        // 5. Reset internal state
        self.eof = false;
        // Note: We don't update self.position here because it will be updated
        // when the next frame is decoded. This ensures position reflects actual decoded position.

        // 6. Skip forward to the target position
        //
        // Context: av_seek_frame with BACKWARD flag seeks to the nearest keyframe *at or before*
        // the target timestamp. For videos with sparse keyframes (large GOP size), this may
        // land far from the target (e.g., at the first keyframe for GOP=entire video).
        //
        // Solution: Decode frames sequentially from the keyframe until reaching the target.
        // This is necessary because H.264/H.265 P-frames and B-frames depend on previous
        // frames for reconstruction, so we must decode all intermediate frames.
        //
        // Performance Impact:
        // - Typical GOP (1-2s): 30-60 frames to skip, ~5-10ms overhead
        // - Large GOP (5-10s): 150-300 frames to skip, ~20-50ms overhead
        // - Worst case (single keyframe): May decode entire video, ~100ms-1s
        if mode == SeekMode::Exact {
            // For exact mode, decode until we reach or pass the exact target
            self.skip_to_exact(position)?;
        } else {
            // For keyframe/backward modes, decode until we're reasonably close to the target
            // Rationale: Balances accuracy with performance for common use cases
            let tolerance = Duration::from_secs(KEYFRAME_SEEK_TOLERANCE_SECS);
            let min_position = position.saturating_sub(tolerance);

            while let Some(frame) = self.decode_one()? {
                let frame_time = frame.timestamp().as_duration();
                if frame_time >= min_position {
                    // We're close enough to the target
                    break;
                }
                // Continue decoding to get closer (frames are automatically dropped)
            }
        }

        Ok(())
    }
1529
1530    /// Skips frames until reaching the exact target position.
1531    ///
1532    /// This is used by [`Self::seek`] when `SeekMode::Exact` is specified.
1533    /// It decodes and discards frames from the nearest keyframe until
1534    /// reaching the target position.
1535    ///
1536    /// # Performance
1537    ///
1538    /// Time complexity is O(n) where n is the number of frames between the
1539    /// keyframe and target. For a 30fps video with 2-second GOP:
1540    /// - Worst case: ~60 frames to decode, ~10-20ms
1541    /// - Average case: ~30 frames to decode, ~5-10ms
1542    ///
1543    /// # Arguments
1544    ///
1545    /// * `target` - The exact target position.
1546    ///
1547    /// # Errors
1548    ///
1549    /// Returns [`DecodeError::SeekFailed`] if EOF is reached before the target position.
1550    fn skip_to_exact(&mut self, target: Duration) -> Result<(), DecodeError> {
1551        loop {
1552            match self.decode_one()? {
1553                Some(frame) => {
1554                    let frame_time = frame.timestamp().as_duration();
1555                    if frame_time >= target {
1556                        // Reached or passed the target frame
1557                        // Position will be updated by decode_one() which was just called
1558                        break;
1559                    }
1560                    // Continue decoding (frame is automatically dropped)
1561                }
1562                None => {
1563                    // Reached EOF before finding target frame
1564                    return Err(DecodeError::SeekFailed {
1565                        target,
1566                        reason: "Reached end of stream before target position".to_string(),
1567                    });
1568                }
1569            }
1570        }
1571        Ok(())
1572    }
1573
1574    /// Flushes the decoder's internal buffers.
1575    ///
1576    /// This clears any cached frames and resets the decoder state.
1577    /// The decoder is ready to receive new packets after flushing.
1578    pub(crate) fn flush(&mut self) {
1579        // SAFETY: codec_ctx is valid and owned by this instance
1580        unsafe {
1581            ff_sys::avcodec::flush_buffers(self.codec_ctx);
1582        }
1583        self.eof = false;
1584    }
1585
1586    /// Scales a video frame to the specified dimensions while preserving aspect ratio.
1587    ///
1588    /// This method uses SwScale to resize frames efficiently using a "fit-within"
1589    /// strategy that preserves the original aspect ratio.
1590    ///
1591    /// # Aspect Ratio Preservation
1592    ///
1593    /// The frame is scaled to fit within `(target_width, target_height)` while
1594    /// maintaining its original aspect ratio. The output dimensions will be at most
1595    /// the target size, with at least one dimension matching the target. No letterboxing
1596    /// or pillarboxing is applied - the frame is simply scaled down to fit.
1597    ///
1598    /// # Arguments
1599    ///
1600    /// * `frame` - The source frame to scale.
1601    /// * `target_width` - Desired width in pixels.
1602    /// * `target_height` - Desired height in pixels.
1603    ///
1604    /// # Returns
1605    ///
1606    /// A new `VideoFrame` scaled to fit within the target dimensions.
1607    ///
1608    /// # Errors
1609    ///
1610    /// Returns [`DecodeError`] if SwScale context creation or scaling fails.
1611    ///
1612    /// # Performance
1613    ///
1614    /// - Caches SwScale context for repeated calls with same dimensions
1615    /// - Context creation: ~0.1-0.5ms (only on first call or dimension change)
1616    /// - Typical scaling time: 1-3ms for 1080p → 320x180
1617    /// - Uses bilinear interpolation for quality/performance balance
1618    ///
1619    /// # Cache Behavior
1620    ///
1621    /// The SwScale context is cached based on source/target dimensions and format.
1622    /// When generating multiple thumbnails with the same size (e.g., via `thumbnails()`),
1623    /// the context is reused, eliminating the ~0.1-0.5ms creation overhead per thumbnail.
1624    pub(crate) fn scale_frame(
1625        &mut self,
1626        frame: &VideoFrame,
1627        target_width: u32,
1628        target_height: u32,
1629    ) -> Result<VideoFrame, DecodeError> {
1630        let src_width = frame.width();
1631        let src_height = frame.height();
1632        let src_format = frame.format();
1633
1634        // Calculate scaled dimensions to preserve aspect ratio (fit within target)
1635        let src_aspect = src_width as f64 / src_height as f64;
1636        let target_aspect = target_width as f64 / target_height as f64;
1637
1638        let (scaled_width, scaled_height) = if src_aspect > target_aspect {
1639            // Source is wider - fit to width
1640            let height = (target_width as f64 / src_aspect).round() as u32;
1641            (target_width, height)
1642        } else {
1643            // Source is taller or equal - fit to height
1644            let width = (target_height as f64 * src_aspect).round() as u32;
1645            (width, target_height)
1646        };
1647
1648        // Convert pixel format to FFmpeg format
1649        let av_format = Self::pixel_format_to_av(src_format);
1650
1651        // Cache key: (src_width, src_height, scaled_width, scaled_height, format)
1652        let cache_key = (
1653            src_width,
1654            src_height,
1655            scaled_width,
1656            scaled_height,
1657            av_format,
1658        );
1659
1660        // SAFETY: We're creating temporary FFmpeg objects for scaling
1661        unsafe {
1662            // Check if we can reuse the cached SwScale context
1663            let (sws_ctx, is_cached) = if let (Some(cached_ctx), Some(cached_key)) =
1664                (self.thumbnail_sws_ctx, self.thumbnail_cache_key)
1665            {
1666                if cached_key == cache_key {
1667                    // Cache hit - reuse existing context
1668                    (cached_ctx, true)
1669                } else {
1670                    // Cache miss - free old context and create new one
1671                    ff_sys::swscale::free_context(cached_ctx);
1672                    // Clear cache immediately to prevent dangling pointer
1673                    self.thumbnail_sws_ctx = None;
1674                    self.thumbnail_cache_key = None;
1675
1676                    let new_ctx = ff_sys::swscale::get_context(
1677                        src_width as i32,
1678                        src_height as i32,
1679                        av_format,
1680                        scaled_width as i32,
1681                        scaled_height as i32,
1682                        av_format,
1683                        ff_sys::swscale::scale_flags::BILINEAR,
1684                    )
1685                    .map_err(|e| DecodeError::Ffmpeg {
1686                        code: 0,
1687                        message: format!("Failed to create scaling context: {e}"),
1688                    })?;
1689
1690                    // Don't cache yet - will cache after successful scaling
1691                    (new_ctx, false)
1692                }
1693            } else {
1694                // No cache - create new context
1695                let new_ctx = ff_sys::swscale::get_context(
1696                    src_width as i32,
1697                    src_height as i32,
1698                    av_format,
1699                    scaled_width as i32,
1700                    scaled_height as i32,
1701                    av_format,
1702                    ff_sys::swscale::scale_flags::BILINEAR,
1703                )
1704                .map_err(|e| DecodeError::Ffmpeg {
1705                    code: 0,
1706                    message: format!("Failed to create scaling context: {e}"),
1707                })?;
1708
1709                // Don't cache yet - will cache after successful scaling
1710                (new_ctx, false)
1711            };
1712
1713            // Set up source frame with VideoFrame data
1714            let src_frame_guard = AvFrameGuard::new()?;
1715            let src_frame = src_frame_guard.as_ptr();
1716
1717            (*src_frame).width = src_width as i32;
1718            (*src_frame).height = src_height as i32;
1719            (*src_frame).format = av_format;
1720
1721            // Set up source frame data pointers directly from VideoFrame (no copy)
1722            let planes = frame.planes();
1723            let strides = frame.strides();
1724
1725            for (i, plane_data) in planes.iter().enumerate() {
1726                if i >= ff_sys::AV_NUM_DATA_POINTERS as usize {
1727                    break;
1728                }
1729                (*src_frame).data[i] = plane_data.as_ref().as_ptr().cast_mut();
1730                (*src_frame).linesize[i] = strides[i] as i32;
1731            }
1732
1733            // Allocate destination frame
1734            let dst_frame_guard = AvFrameGuard::new()?;
1735            let dst_frame = dst_frame_guard.as_ptr();
1736
1737            (*dst_frame).width = scaled_width as i32;
1738            (*dst_frame).height = scaled_height as i32;
1739            (*dst_frame).format = av_format;
1740
1741            // Allocate buffer for destination frame
1742            let buffer_ret = ff_sys::av_frame_get_buffer(dst_frame, 0);
1743            if buffer_ret < 0 {
1744                // Clean up context if not cached
1745                if !is_cached {
1746                    ff_sys::swscale::free_context(sws_ctx);
1747                }
1748                return Err(DecodeError::Ffmpeg {
1749                    code: buffer_ret,
1750                    message: format!(
1751                        "Failed to allocate destination frame buffer: {}",
1752                        ff_sys::av_error_string(buffer_ret)
1753                    ),
1754                });
1755            }
1756
1757            // Perform scaling
1758            let scale_result = ff_sys::swscale::scale(
1759                sws_ctx,
1760                (*src_frame).data.as_ptr() as *const *const u8,
1761                (*src_frame).linesize.as_ptr(),
1762                0,
1763                src_height as i32,
1764                (*dst_frame).data.as_ptr() as *const *mut u8,
1765                (*dst_frame).linesize.as_ptr(),
1766            );
1767
1768            if let Err(e) = scale_result {
1769                // Clean up context if not cached
1770                if !is_cached {
1771                    ff_sys::swscale::free_context(sws_ctx);
1772                }
1773                return Err(DecodeError::Ffmpeg {
1774                    code: 0,
1775                    message: format!("Failed to scale frame: {e}"),
1776                });
1777            }
1778
1779            // Scaling successful - cache the context if it's new
1780            if !is_cached {
1781                self.thumbnail_sws_ctx = Some(sws_ctx);
1782                self.thumbnail_cache_key = Some(cache_key);
1783            }
1784
1785            // Copy timestamp
1786            (*dst_frame).pts = frame.timestamp().pts();
1787
1788            // Convert destination frame to VideoFrame
1789            let video_frame = self.av_frame_to_video_frame(dst_frame)?;
1790
1791            Ok(video_frame)
1792        }
1793    }
1794}
1795
// Releases every FFmpeg resource owned by the decoder.
//
// Teardown order matters: scaling contexts and per-frame buffers are released
// first, the codec context next, and the format context (which owns the
// underlying I/O) last.
impl Drop for VideoDecoderInner {
    fn drop(&mut self) {
        // Free SwScale context if allocated
        if let Some(sws_ctx) = self.sws_ctx {
            // SAFETY: sws_ctx is valid and owned by this instance
            unsafe {
                ff_sys::swscale::free_context(sws_ctx);
            }
        }

        // Free cached thumbnail SwScale context if allocated
        if let Some(thumbnail_ctx) = self.thumbnail_sws_ctx {
            // SAFETY: thumbnail_ctx is valid and owned by this instance
            unsafe {
                ff_sys::swscale::free_context(thumbnail_ctx);
            }
        }

        // Free hardware device context if allocated
        if let Some(hw_ctx) = self.hw_device_ctx {
            // SAFETY: hw_ctx is valid and owned by this instance.
            // NOTE(review): av_buffer_unref nulls out the *temporary* pointer
            // produced by the cast, not `self.hw_device_ctx` itself; that is
            // harmless here because the struct is being dropped and the field
            // is never read again.
            unsafe {
                ff_sys::av_buffer_unref(&mut (hw_ctx as *mut _));
            }
        }

        // Free frame and packet (allocated in the constructor)
        if !self.frame.is_null() {
            // SAFETY: self.frame is valid and owned by this instance
            unsafe {
                ff_sys::av_frame_free(&mut (self.frame as *mut _));
            }
        }

        if !self.packet.is_null() {
            // SAFETY: self.packet is valid and owned by this instance
            unsafe {
                ff_sys::av_packet_free(&mut (self.packet as *mut _));
            }
        }

        // Free codec context (must happen before closing the format context)
        if !self.codec_ctx.is_null() {
            // SAFETY: self.codec_ctx is valid and owned by this instance
            unsafe {
                ff_sys::avcodec::free_context(&mut (self.codec_ctx as *mut _));
            }
        }

        // Close format context last; it owns the underlying input/IO state
        if !self.format_ctx.is_null() {
            // SAFETY: self.format_ctx is valid and owned by this instance
            unsafe {
                ff_sys::avformat::close_input(&mut (self.format_ctx as *mut _));
            }
        }
    }
}
1854
// SAFETY: All raw FFmpeg pointers held by VideoDecoderInner are exclusively
// owned by this struct and never aliased elsewhere, so moving the struct to
// another thread transfers sole ownership of those contexts. `Send` alone does
// not allow concurrent access from multiple threads (`Sync` is deliberately
// not implemented), which is the condition under which FFmpeg contexts are
// safe to use from any single thread.
unsafe impl Send for VideoDecoderInner {}
1858
#[cfg(test)]
mod tests {
    use ff_format::PixelFormat;
    use ff_format::codec::VideoCodec;
    use ff_format::color::{ColorPrimaries, ColorRange, ColorSpace};

    use crate::HardwareAccel;

    use super::VideoDecoderInner;

    // -------------------------------------------------------------------------
    // convert_pixel_format
    // -------------------------------------------------------------------------

    #[test]
    fn pixel_format_yuv420p() {
        let mapped =
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P);
        assert_eq!(mapped, PixelFormat::Yuv420p);
    }

    #[test]
    fn pixel_format_yuv422p() {
        let mapped =
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P);
        assert_eq!(mapped, PixelFormat::Yuv422p);
    }

    #[test]
    fn pixel_format_yuv444p() {
        let mapped =
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P);
        assert_eq!(mapped, PixelFormat::Yuv444p);
    }

    #[test]
    fn pixel_format_rgb24() {
        let mapped =
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24);
        assert_eq!(mapped, PixelFormat::Rgb24);
    }

    #[test]
    fn pixel_format_bgr24() {
        let mapped =
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24);
        assert_eq!(mapped, PixelFormat::Bgr24);
    }

    #[test]
    fn pixel_format_rgba() {
        let mapped =
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA);
        assert_eq!(mapped, PixelFormat::Rgba);
    }

    #[test]
    fn pixel_format_bgra() {
        let mapped =
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA);
        assert_eq!(mapped, PixelFormat::Bgra);
    }

    #[test]
    fn pixel_format_gray8() {
        let mapped =
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8);
        assert_eq!(mapped, PixelFormat::Gray8);
    }

    #[test]
    fn pixel_format_nv12() {
        let mapped =
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_NV12);
        assert_eq!(mapped, PixelFormat::Nv12);
    }

    #[test]
    fn pixel_format_nv21() {
        let mapped =
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_NV21);
        assert_eq!(mapped, PixelFormat::Nv21);
    }

    #[test]
    fn pixel_format_unknown_falls_back_to_yuv420p() {
        let mapped =
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_NONE);
        assert_eq!(mapped, PixelFormat::Yuv420p);
    }

    // -------------------------------------------------------------------------
    // convert_color_space
    // -------------------------------------------------------------------------

    #[test]
    fn color_space_bt709() {
        let mapped = VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_BT709);
        assert_eq!(mapped, ColorSpace::Bt709);
    }

    #[test]
    fn color_space_bt470bg_yields_bt601() {
        let mapped =
            VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_BT470BG);
        assert_eq!(mapped, ColorSpace::Bt601);
    }

    #[test]
    fn color_space_smpte170m_yields_bt601() {
        let mapped =
            VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_SMPTE170M);
        assert_eq!(mapped, ColorSpace::Bt601);
    }

    #[test]
    fn color_space_bt2020_ncl() {
        let mapped =
            VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_BT2020_NCL);
        assert_eq!(mapped, ColorSpace::Bt2020);
    }

    #[test]
    fn color_space_unknown_falls_back_to_bt709() {
        let mapped =
            VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_UNSPECIFIED);
        assert_eq!(mapped, ColorSpace::Bt709);
    }

    // -------------------------------------------------------------------------
    // convert_color_range
    // -------------------------------------------------------------------------

    #[test]
    fn color_range_jpeg_yields_full() {
        let mapped = VideoDecoderInner::convert_color_range(ff_sys::AVColorRange_AVCOL_RANGE_JPEG);
        assert_eq!(mapped, ColorRange::Full);
    }

    #[test]
    fn color_range_mpeg_yields_limited() {
        let mapped = VideoDecoderInner::convert_color_range(ff_sys::AVColorRange_AVCOL_RANGE_MPEG);
        assert_eq!(mapped, ColorRange::Limited);
    }

    #[test]
    fn color_range_unknown_falls_back_to_limited() {
        let mapped =
            VideoDecoderInner::convert_color_range(ff_sys::AVColorRange_AVCOL_RANGE_UNSPECIFIED);
        assert_eq!(mapped, ColorRange::Limited);
    }

    // -------------------------------------------------------------------------
    // convert_color_primaries
    // -------------------------------------------------------------------------

    #[test]
    fn color_primaries_bt709() {
        let mapped =
            VideoDecoderInner::convert_color_primaries(ff_sys::AVColorPrimaries_AVCOL_PRI_BT709);
        assert_eq!(mapped, ColorPrimaries::Bt709);
    }

    #[test]
    fn color_primaries_bt470bg_yields_bt601() {
        let mapped =
            VideoDecoderInner::convert_color_primaries(ff_sys::AVColorPrimaries_AVCOL_PRI_BT470BG);
        assert_eq!(mapped, ColorPrimaries::Bt601);
    }

    #[test]
    fn color_primaries_smpte170m_yields_bt601() {
        let mapped = VideoDecoderInner::convert_color_primaries(
            ff_sys::AVColorPrimaries_AVCOL_PRI_SMPTE170M,
        );
        assert_eq!(mapped, ColorPrimaries::Bt601);
    }

    #[test]
    fn color_primaries_bt2020() {
        let mapped =
            VideoDecoderInner::convert_color_primaries(ff_sys::AVColorPrimaries_AVCOL_PRI_BT2020);
        assert_eq!(mapped, ColorPrimaries::Bt2020);
    }

    #[test]
    fn color_primaries_unknown_falls_back_to_bt709() {
        let mapped = VideoDecoderInner::convert_color_primaries(
            ff_sys::AVColorPrimaries_AVCOL_PRI_UNSPECIFIED,
        );
        assert_eq!(mapped, ColorPrimaries::Bt709);
    }

    // -------------------------------------------------------------------------
    // convert_codec
    // -------------------------------------------------------------------------

    #[test]
    fn codec_h264() {
        let mapped = VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_H264);
        assert_eq!(mapped, VideoCodec::H264);
    }

    #[test]
    fn codec_hevc_yields_h265() {
        let mapped = VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_HEVC);
        assert_eq!(mapped, VideoCodec::H265);
    }

    #[test]
    fn codec_vp8() {
        let mapped = VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_VP8);
        assert_eq!(mapped, VideoCodec::Vp8);
    }

    #[test]
    fn codec_vp9() {
        let mapped = VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_VP9);
        assert_eq!(mapped, VideoCodec::Vp9);
    }

    #[test]
    fn codec_av1() {
        let mapped = VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_AV1);
        assert_eq!(mapped, VideoCodec::Av1);
    }

    #[test]
    fn codec_mpeg4() {
        let mapped = VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_MPEG4);
        assert_eq!(mapped, VideoCodec::Mpeg4);
    }

    #[test]
    fn codec_prores() {
        let mapped = VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_PRORES);
        assert_eq!(mapped, VideoCodec::ProRes);
    }

    #[test]
    fn codec_unknown_falls_back_to_h264() {
        let mapped = VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_NONE);
        assert_eq!(mapped, VideoCodec::H264);
    }

    // -------------------------------------------------------------------------
    // hw_accel_to_device_type
    // -------------------------------------------------------------------------

    #[test]
    fn hw_accel_auto_yields_none() {
        let device = VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Auto);
        assert_eq!(device, None);
    }

    #[test]
    fn hw_accel_none_yields_none() {
        let device = VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::None);
        assert_eq!(device, None);
    }

    #[test]
    fn hw_accel_nvdec_yields_cuda() {
        let device = VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Nvdec);
        assert_eq!(device, Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_CUDA));
    }

    #[test]
    fn hw_accel_qsv_yields_qsv() {
        let device = VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Qsv);
        assert_eq!(device, Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_QSV));
    }

    #[test]
    fn hw_accel_amf_yields_d3d11va() {
        let device = VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Amf);
        assert_eq!(device, Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_D3D11VA));
    }

    #[test]
    fn hw_accel_videotoolbox() {
        let device = VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::VideoToolbox);
        assert_eq!(
            device,
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VIDEOTOOLBOX)
        );
    }

    #[test]
    fn hw_accel_vaapi() {
        let device = VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Vaapi);
        assert_eq!(device, Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VAAPI));
    }

    // -------------------------------------------------------------------------
    // pixel_format_to_av — round-trip
    // -------------------------------------------------------------------------

    #[test]
    fn pixel_format_to_av_round_trip_yuv420p() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Yuv420p);
        let round_tripped = VideoDecoderInner::convert_pixel_format(av);
        assert_eq!(round_tripped, PixelFormat::Yuv420p);
    }

    #[test]
    fn pixel_format_to_av_round_trip_yuv422p() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Yuv422p);
        let round_tripped = VideoDecoderInner::convert_pixel_format(av);
        assert_eq!(round_tripped, PixelFormat::Yuv422p);
    }

    #[test]
    fn pixel_format_to_av_round_trip_yuv444p() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Yuv444p);
        let round_tripped = VideoDecoderInner::convert_pixel_format(av);
        assert_eq!(round_tripped, PixelFormat::Yuv444p);
    }

    #[test]
    fn pixel_format_to_av_round_trip_rgb24() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Rgb24);
        let round_tripped = VideoDecoderInner::convert_pixel_format(av);
        assert_eq!(round_tripped, PixelFormat::Rgb24);
    }

    #[test]
    fn pixel_format_to_av_round_trip_bgr24() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Bgr24);
        let round_tripped = VideoDecoderInner::convert_pixel_format(av);
        assert_eq!(round_tripped, PixelFormat::Bgr24);
    }

    #[test]
    fn pixel_format_to_av_round_trip_rgba() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Rgba);
        let round_tripped = VideoDecoderInner::convert_pixel_format(av);
        assert_eq!(round_tripped, PixelFormat::Rgba);
    }

    #[test]
    fn pixel_format_to_av_round_trip_bgra() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Bgra);
        let round_tripped = VideoDecoderInner::convert_pixel_format(av);
        assert_eq!(round_tripped, PixelFormat::Bgra);
    }

    #[test]
    fn pixel_format_to_av_round_trip_gray8() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Gray8);
        let round_tripped = VideoDecoderInner::convert_pixel_format(av);
        assert_eq!(round_tripped, PixelFormat::Gray8);
    }

    #[test]
    fn pixel_format_to_av_round_trip_nv12() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Nv12);
        let round_tripped = VideoDecoderInner::convert_pixel_format(av);
        assert_eq!(round_tripped, PixelFormat::Nv12);
    }

    #[test]
    fn pixel_format_to_av_round_trip_nv21() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Nv21);
        let round_tripped = VideoDecoderInner::convert_pixel_format(av);
        assert_eq!(round_tripped, PixelFormat::Nv21);
    }

    #[test]
    fn pixel_format_to_av_unknown_falls_back_to_yuv420p_av() {
        // Yuv420p10le has no explicit mapping in pixel_format_to_av, so it hits the _ arm
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Yuv420p10le);
        assert_eq!(av, ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P);
    }

    // -------------------------------------------------------------------------
    // extract_codec_name
    // -------------------------------------------------------------------------

    #[test]
    fn codec_name_should_return_h264_for_h264_codec_id() {
        let name =
            unsafe { VideoDecoderInner::extract_codec_name(ff_sys::AVCodecID_AV_CODEC_ID_H264) };
        assert_eq!(name, "h264");
    }

    #[test]
    fn codec_name_should_return_none_for_none_codec_id() {
        let name =
            unsafe { VideoDecoderInner::extract_codec_name(ff_sys::AVCodecID_AV_CODEC_ID_NONE) };
        assert_eq!(name, "none");
    }

    #[test]
    fn unsupported_codec_error_should_include_codec_name() {
        let codec_id = ff_sys::AVCodecID_AV_CODEC_ID_H264;
        let codec_name = unsafe { VideoDecoderInner::extract_codec_name(codec_id) };
        let error = crate::error::DecodeError::UnsupportedCodec {
            codec: format!("{codec_name} (codec_id={codec_id:?})"),
        };
        let msg = error.to_string();
        assert!(msg.contains("h264"), "expected codec name in error: {msg}");
        assert!(
            msg.contains("codec_id="),
            "expected codec_id in error: {msg}"
        );
    }
}