Skip to main content

ff_decode/video/
decoder_inner.rs

1//! Internal video decoder implementation using FFmpeg.
2//!
3//! This module contains the low-level decoder logic that directly interacts
4//! with FFmpeg's C API through the ff-sys crate. It is not exposed publicly.
5
6// Allow unsafe code in this module as it's necessary for FFmpeg FFI
7#![allow(unsafe_code)]
8// Allow specific clippy lints for FFmpeg FFI code
9#![allow(clippy::similar_names)]
10#![allow(clippy::too_many_lines)]
11#![allow(clippy::cast_sign_loss)]
12#![allow(clippy::cast_possible_truncation)]
13#![allow(clippy::cast_possible_wrap)]
14#![allow(clippy::module_name_repetitions)]
15#![allow(clippy::match_same_arms)]
16#![allow(clippy::ptr_as_ptr)]
17#![allow(clippy::doc_markdown)]
18#![allow(clippy::unnecessary_cast)]
19#![allow(clippy::if_not_else)]
20#![allow(clippy::unnecessary_wraps)]
21#![allow(clippy::cast_precision_loss)]
22#![allow(clippy::if_same_then_else)]
23#![allow(clippy::cast_lossless)]
24
25use std::ffi::CStr;
26use std::path::Path;
27use std::ptr;
28use std::sync::Arc;
29use std::time::Duration;
30
31use ff_format::PooledBuffer;
32use ff_format::codec::VideoCodec;
33use ff_format::color::{ColorPrimaries, ColorRange, ColorSpace};
34use ff_format::container::ContainerInfo;
35use ff_format::time::{Rational, Timestamp};
36use ff_format::{PixelFormat, VideoFrame, VideoStreamInfo};
37use ff_sys::{
38    AVBufferRef, AVCodecContext, AVCodecID, AVColorPrimaries, AVColorRange, AVColorSpace,
39    AVFormatContext, AVFrame, AVHWDeviceType, AVMediaType_AVMEDIA_TYPE_VIDEO, AVPacket,
40    AVPixelFormat, SwsContext,
41};
42
43use crate::HardwareAccel;
44use crate::error::DecodeError;
45use crate::video::builder::OutputScale;
46use ff_common::FramePool;
47
/// Tolerance in seconds for keyframe/backward seek modes.
///
/// When seeking in Keyframe or Backward mode, frames are skipped until we're within
/// this tolerance of the target position. This balances accuracy with performance for
/// typical GOP sizes (1-2 seconds): a smaller value would decode more frames per
/// seek; a larger one would stop further from the requested position.
const KEYFRAME_SEEK_TOLERANCE_SECS: u64 = 1;
54
55/// RAII guard for `AVFormatContext` to ensure proper cleanup.
56struct AvFormatContextGuard(*mut AVFormatContext);
57
58impl AvFormatContextGuard {
59    /// Creates a new guard by opening an input file.
60    ///
61    /// # Safety
62    ///
63    /// Caller must ensure FFmpeg is initialized and path is valid.
64    unsafe fn new(path: &Path) -> Result<Self, DecodeError> {
65        // SAFETY: Caller ensures FFmpeg is initialized and path is valid
66        let format_ctx = unsafe {
67            ff_sys::avformat::open_input(path).map_err(|e| DecodeError::Ffmpeg {
68                code: e,
69                message: format!("Failed to open file: {}", ff_sys::av_error_string(e)),
70            })?
71        };
72        Ok(Self(format_ctx))
73    }
74
75    /// Returns the raw pointer.
76    const fn as_ptr(&self) -> *mut AVFormatContext {
77        self.0
78    }
79
80    /// Consumes the guard and returns the raw pointer without dropping.
81    fn into_raw(self) -> *mut AVFormatContext {
82        let ptr = self.0;
83        std::mem::forget(self);
84        ptr
85    }
86
87    /// Opens an image sequence using the `image2` demuxer.
88    ///
89    /// # Safety
90    ///
91    /// Caller must ensure FFmpeg is initialized and path is valid.
92    unsafe fn new_image_sequence(path: &Path, framerate: u32) -> Result<Self, DecodeError> {
93        // SAFETY: Caller ensures FFmpeg is initialized and path is a valid image-sequence pattern
94        let format_ctx = unsafe {
95            ff_sys::avformat::open_input_image_sequence(path, framerate).map_err(|e| {
96                DecodeError::Ffmpeg {
97                    code: e,
98                    message: format!(
99                        "Failed to open image sequence: {}",
100                        ff_sys::av_error_string(e)
101                    ),
102                }
103            })?
104        };
105        Ok(Self(format_ctx))
106    }
107}
108
109impl Drop for AvFormatContextGuard {
110    fn drop(&mut self) {
111        if !self.0.is_null() {
112            // SAFETY: self.0 is valid and owned by this guard
113            unsafe {
114                ff_sys::avformat::close_input(&mut (self.0 as *mut _));
115            }
116        }
117    }
118}
119
120/// RAII guard for `AVCodecContext` to ensure proper cleanup.
121struct AvCodecContextGuard(*mut AVCodecContext);
122
123impl AvCodecContextGuard {
124    /// Creates a new guard by allocating a codec context.
125    ///
126    /// # Safety
127    ///
128    /// Caller must ensure codec pointer is valid.
129    unsafe fn new(codec: *const ff_sys::AVCodec) -> Result<Self, DecodeError> {
130        // SAFETY: Caller ensures codec pointer is valid
131        let codec_ctx = unsafe {
132            ff_sys::avcodec::alloc_context3(codec).map_err(|e| DecodeError::Ffmpeg {
133                code: e,
134                message: format!("Failed to allocate codec context: {e}"),
135            })?
136        };
137        Ok(Self(codec_ctx))
138    }
139
140    /// Returns the raw pointer.
141    const fn as_ptr(&self) -> *mut AVCodecContext {
142        self.0
143    }
144
145    /// Consumes the guard and returns the raw pointer without dropping.
146    fn into_raw(self) -> *mut AVCodecContext {
147        let ptr = self.0;
148        std::mem::forget(self);
149        ptr
150    }
151}
152
153impl Drop for AvCodecContextGuard {
154    fn drop(&mut self) {
155        if !self.0.is_null() {
156            // SAFETY: self.0 is valid and owned by this guard
157            unsafe {
158                ff_sys::avcodec::free_context(&mut (self.0 as *mut _));
159            }
160        }
161    }
162}
163
164/// RAII guard for `AVPacket` to ensure proper cleanup.
165struct AvPacketGuard(*mut AVPacket);
166
167impl AvPacketGuard {
168    /// Creates a new guard by allocating a packet.
169    ///
170    /// # Safety
171    ///
172    /// Must be called after FFmpeg initialization.
173    unsafe fn new() -> Result<Self, DecodeError> {
174        // SAFETY: Caller ensures FFmpeg is initialized
175        let packet = unsafe { ff_sys::av_packet_alloc() };
176        if packet.is_null() {
177            return Err(DecodeError::Ffmpeg {
178                code: 0,
179                message: "Failed to allocate packet".to_string(),
180            });
181        }
182        Ok(Self(packet))
183    }
184
185    /// Returns the raw pointer.
186    #[allow(dead_code)]
187    const fn as_ptr(&self) -> *mut AVPacket {
188        self.0
189    }
190
191    /// Consumes the guard and returns the raw pointer without dropping.
192    fn into_raw(self) -> *mut AVPacket {
193        let ptr = self.0;
194        std::mem::forget(self);
195        ptr
196    }
197}
198
199impl Drop for AvPacketGuard {
200    fn drop(&mut self) {
201        if !self.0.is_null() {
202            // SAFETY: self.0 is valid and owned by this guard
203            unsafe {
204                ff_sys::av_packet_free(&mut (self.0 as *mut _));
205            }
206        }
207    }
208}
209
210/// RAII guard for `AVFrame` to ensure proper cleanup.
211struct AvFrameGuard(*mut AVFrame);
212
213impl AvFrameGuard {
214    /// Creates a new guard by allocating a frame.
215    ///
216    /// # Safety
217    ///
218    /// Must be called after FFmpeg initialization.
219    unsafe fn new() -> Result<Self, DecodeError> {
220        // SAFETY: Caller ensures FFmpeg is initialized
221        let frame = unsafe { ff_sys::av_frame_alloc() };
222        if frame.is_null() {
223            return Err(DecodeError::Ffmpeg {
224                code: 0,
225                message: "Failed to allocate frame".to_string(),
226            });
227        }
228        Ok(Self(frame))
229    }
230
231    /// Returns the raw pointer.
232    const fn as_ptr(&self) -> *mut AVFrame {
233        self.0
234    }
235
236    /// Consumes the guard and returns the raw pointer without dropping.
237    fn into_raw(self) -> *mut AVFrame {
238        let ptr = self.0;
239        std::mem::forget(self);
240        ptr
241    }
242}
243
244impl Drop for AvFrameGuard {
245    fn drop(&mut self) {
246        if !self.0.is_null() {
247            // SAFETY: self.0 is valid and owned by this guard
248            unsafe {
249                ff_sys::av_frame_free(&mut (self.0 as *mut _));
250            }
251        }
252    }
253}
254
/// Internal decoder state holding FFmpeg contexts.
///
/// This structure manages the lifecycle of FFmpeg objects and is responsible
/// for proper cleanup when dropped.
///
/// All raw pointers below are owned by this struct once `new` succeeds;
/// cleanup presumably happens in a `Drop` impl elsewhere in this file —
/// NOTE(review): confirm the `Drop` impl frees every pointer field.
pub(crate) struct VideoDecoderInner {
    /// Format context for reading the media file
    format_ctx: *mut AVFormatContext,
    /// Codec context for decoding video frames
    codec_ctx: *mut AVCodecContext,
    /// Video stream index in the format context
    stream_index: i32,
    /// SwScale context for pixel format conversion and/or scaling (optional)
    sws_ctx: Option<*mut SwsContext>,
    /// Cache key for the main sws_ctx: (src_w, src_h, src_fmt, dst_w, dst_h, dst_fmt)
    sws_cache_key: Option<(u32, u32, i32, u32, u32, i32)>,
    /// Target output pixel format (if conversion is needed)
    output_format: Option<PixelFormat>,
    /// Requested output scale (if resizing is needed)
    output_scale: Option<OutputScale>,
    /// Whether end of file has been reached
    eof: bool,
    /// Current playback position
    position: Duration,
    /// Reusable packet for reading from file
    packet: *mut AVPacket,
    /// Reusable frame for decoding
    frame: *mut AVFrame,
    /// Cached SwScale context for thumbnail generation
    thumbnail_sws_ctx: Option<*mut SwsContext>,
    /// Last thumbnail dimensions (for cache invalidation)
    thumbnail_cache_key: Option<(u32, u32, u32, u32, AVPixelFormat)>,
    /// Hardware device context (if hardware acceleration is active).
    /// This is our own `AVBufferRef`, separate from the one owned by `codec_ctx`.
    hw_device_ctx: Option<*mut AVBufferRef>,
    /// Active hardware acceleration mode (what was actually initialized,
    /// not necessarily what was requested)
    active_hw_accel: HardwareAccel,
    /// Optional frame pool for memory reuse
    frame_pool: Option<Arc<dyn FramePool>>,
}
293
294impl VideoDecoderInner {
295    /// Maps our `HardwareAccel` enum to the corresponding FFmpeg `AVHWDeviceType`.
296    ///
297    /// Returns `None` for `Auto` and `None` variants as they require special handling.
298    fn hw_accel_to_device_type(accel: HardwareAccel) -> Option<AVHWDeviceType> {
299        match accel {
300            HardwareAccel::Auto => None,
301            HardwareAccel::None => None,
302            HardwareAccel::Nvdec => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_CUDA),
303            HardwareAccel::Qsv => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_QSV),
304            HardwareAccel::Amf => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_D3D11VA), // AMF uses D3D11
305            HardwareAccel::VideoToolbox => {
306                Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VIDEOTOOLBOX)
307            }
308            HardwareAccel::Vaapi => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VAAPI),
309        }
310    }
311
    /// Returns the hardware decoders to try in priority order for Auto mode.
    ///
    /// The order is significant: `init_hardware_accel` tries each entry in turn
    /// and uses the first one that initializes successfully.
    const fn hw_accel_auto_priority() -> &'static [HardwareAccel] {
        // Priority order: NVDEC, QSV, VideoToolbox, VA-API, AMF
        &[
            HardwareAccel::Nvdec,
            HardwareAccel::Qsv,
            HardwareAccel::VideoToolbox,
            HardwareAccel::Vaapi,
            HardwareAccel::Amf,
        ]
    }
323
    /// Attempts to initialize hardware acceleration.
    ///
    /// # Arguments
    ///
    /// * `codec_ctx` - The codec context to configure
    /// * `accel` - Requested hardware acceleration mode
    ///
    /// # Returns
    ///
    /// Returns `Ok((hw_device_ctx, active_accel))` if hardware acceleration was initialized,
    /// or `Ok((None, HardwareAccel::None))` if software decoding should be used.
    ///
    /// # Errors
    ///
    /// Returns an error only if a specific hardware accelerator was requested but failed to initialize.
    /// In `Auto` mode, per-device errors are deliberately swallowed and the next
    /// candidate is tried; exhaustion falls back to software decoding.
    ///
    /// # Safety
    ///
    /// Caller must ensure `codec_ctx` is valid and not yet configured with a hardware device.
    unsafe fn init_hardware_accel(
        codec_ctx: *mut AVCodecContext,
        accel: HardwareAccel,
    ) -> Result<(Option<*mut AVBufferRef>, HardwareAccel), DecodeError> {
        match accel {
            HardwareAccel::Auto => {
                // Try hardware accelerators in priority order
                for &hw_type in Self::hw_accel_auto_priority() {
                    // SAFETY: Caller ensures codec_ctx is valid and not yet configured with hardware
                    if let Ok((Some(ctx), active)) =
                        unsafe { Self::try_init_hw_device(codec_ctx, hw_type) }
                    {
                        return Ok((Some(ctx), active));
                    }
                    // Ignore errors in Auto mode and try the next one
                }
                // All hardware accelerators failed, fall back to software
                Ok((None, HardwareAccel::None))
            }
            HardwareAccel::None => {
                // Software decoding explicitly requested
                Ok((None, HardwareAccel::None))
            }
            _ => {
                // Specific hardware accelerator requested
                // SAFETY: Caller ensures codec_ctx is valid and not yet configured with hardware
                unsafe { Self::try_init_hw_device(codec_ctx, accel) }
            }
        }
    }
369
370    /// Tries to initialize a specific hardware device.
371    ///
372    /// # Safety
373    ///
374    /// Caller must ensure `codec_ctx` is valid and not yet configured with a hardware device.
375    unsafe fn try_init_hw_device(
376        codec_ctx: *mut AVCodecContext,
377        accel: HardwareAccel,
378    ) -> Result<(Option<*mut AVBufferRef>, HardwareAccel), DecodeError> {
379        // Get the FFmpeg device type
380        let Some(device_type) = Self::hw_accel_to_device_type(accel) else {
381            return Ok((None, HardwareAccel::None));
382        };
383
384        // Create hardware device context
385        // SAFETY: FFmpeg is initialized, device_type is valid
386        let mut hw_device_ctx: *mut AVBufferRef = ptr::null_mut();
387        let ret = unsafe {
388            ff_sys::av_hwdevice_ctx_create(
389                ptr::addr_of_mut!(hw_device_ctx),
390                device_type,
391                ptr::null(),     // device: null for default device
392                ptr::null_mut(), // opts: null for default options
393                0,               // flags: currently unused by FFmpeg
394            )
395        };
396
397        if ret < 0 {
398            // Hardware device creation failed
399            return Err(DecodeError::HwAccelUnavailable { accel });
400        }
401
402        // Assign hardware device context to codec context
403        // We transfer ownership of the reference to codec_ctx
404        // SAFETY: codec_ctx and hw_device_ctx are valid
405        unsafe {
406            (*codec_ctx).hw_device_ctx = hw_device_ctx;
407        }
408
409        // We keep our own reference for cleanup in Drop
410        // SAFETY: hw_device_ctx is valid
411        let our_ref = unsafe { ff_sys::av_buffer_ref(hw_device_ctx) };
412        if our_ref.is_null() {
413            // Failed to create our reference
414            // codec_ctx still owns the original, so we don't need to clean it up here
415            return Err(DecodeError::HwAccelUnavailable { accel });
416        }
417
418        Ok((Some(our_ref), accel))
419    }
420
421    /// Returns the currently active hardware acceleration mode.
422    pub(crate) fn hardware_accel(&self) -> HardwareAccel {
423        self.active_hw_accel
424    }
425
426    /// Checks if a pixel format is a hardware format.
427    ///
428    /// Hardware formats include: D3D11, CUDA, VAAPI, VideoToolbox, QSV, etc.
429    const fn is_hardware_format(format: AVPixelFormat) -> bool {
430        matches!(
431            format,
432            ff_sys::AVPixelFormat_AV_PIX_FMT_D3D11
433                | ff_sys::AVPixelFormat_AV_PIX_FMT_CUDA
434                | ff_sys::AVPixelFormat_AV_PIX_FMT_VAAPI
435                | ff_sys::AVPixelFormat_AV_PIX_FMT_VIDEOTOOLBOX
436                | ff_sys::AVPixelFormat_AV_PIX_FMT_QSV
437                | ff_sys::AVPixelFormat_AV_PIX_FMT_VDPAU
438                | ff_sys::AVPixelFormat_AV_PIX_FMT_DXVA2_VLD
439                | ff_sys::AVPixelFormat_AV_PIX_FMT_OPENCL
440                | ff_sys::AVPixelFormat_AV_PIX_FMT_MEDIACODEC
441                | ff_sys::AVPixelFormat_AV_PIX_FMT_VULKAN
442        )
443    }
444
    /// Transfers a hardware frame to CPU memory if needed.
    ///
    /// If `self.frame` is a hardware frame, creates a new software frame
    /// and transfers the data from GPU to CPU memory. Software frames pass
    /// through untouched.
    ///
    /// # Safety
    ///
    /// Caller must ensure `self.frame` contains a valid decoded frame.
    unsafe fn transfer_hardware_frame_if_needed(&mut self) -> Result<(), DecodeError> {
        // SAFETY: self.frame is valid and owned by this instance
        let frame_format = unsafe { (*self.frame).format };

        if !Self::is_hardware_format(frame_format) {
            // Not a hardware frame, no transfer needed
            return Ok(());
        }

        // Create a temporary software frame for transfer
        // SAFETY: FFmpeg is initialized
        let sw_frame = unsafe { ff_sys::av_frame_alloc() };
        if sw_frame.is_null() {
            return Err(DecodeError::Ffmpeg {
                code: 0,
                message: "Failed to allocate software frame for hardware transfer".to_string(),
            });
        }

        // Transfer data from hardware frame to software frame
        // SAFETY: self.frame and sw_frame are valid
        let ret = unsafe {
            ff_sys::av_hwframe_transfer_data(
                sw_frame, self.frame, 0, // flags: currently unused
            )
        };

        if ret < 0 {
            // Transfer failed, clean up the temporary frame before returning
            unsafe {
                ff_sys::av_frame_free(&mut (sw_frame as *mut _));
            }
            return Err(DecodeError::Ffmpeg {
                code: ret,
                message: format!(
                    "Failed to transfer hardware frame to CPU memory: {}",
                    ff_sys::av_error_string(ret)
                ),
            });
        }

        // Copy metadata (pts, duration, etc.) from hardware frame to software frame.
        // NOTE(review): only these four fields are copied; if callers rely on
        // other frame properties (color metadata, flags), consider
        // av_frame_copy_props instead — confirm against downstream usage.
        // SAFETY: Both frames are valid
        unsafe {
            (*sw_frame).pts = (*self.frame).pts;
            (*sw_frame).pkt_dts = (*self.frame).pkt_dts;
            (*sw_frame).duration = (*self.frame).duration;
            (*sw_frame).time_base = (*self.frame).time_base;
        }

        // Replace self.frame with the software frame: drop the hardware
        // buffers, move the software frame's buffers in, then free the
        // now-empty temporary. Order matters here.
        // SAFETY: self.frame is valid and owned by this instance
        unsafe {
            ff_sys::av_frame_unref(self.frame);
            ff_sys::av_frame_move_ref(self.frame, sw_frame);
            ff_sys::av_frame_free(&mut (sw_frame as *mut _));
        }

        Ok(())
    }
513
514    /// Opens a media file and initializes the decoder.
515    ///
516    /// # Arguments
517    ///
518    /// * `path` - Path to the media file
519    /// * `output_format` - Optional target pixel format for conversion
520    /// * `hardware_accel` - Hardware acceleration mode
521    /// * `thread_count` - Number of decoding threads (0 = auto)
522    ///
523    /// # Errors
524    ///
525    /// Returns an error if:
526    /// - The file cannot be opened
527    /// - No video stream is found
528    /// - The codec is not supported
529    /// - Decoder initialization fails
530    pub(crate) fn new(
531        path: &Path,
532        output_format: Option<PixelFormat>,
533        output_scale: Option<OutputScale>,
534        hardware_accel: HardwareAccel,
535        thread_count: usize,
536        frame_rate: Option<u32>,
537        frame_pool: Option<Arc<dyn FramePool>>,
538    ) -> Result<(Self, VideoStreamInfo, ContainerInfo), DecodeError> {
539        // Ensure FFmpeg is initialized (thread-safe and idempotent)
540        ff_sys::ensure_initialized();
541
542        // Open the input file (with RAII guard).
543        // Image-sequence patterns contain '%'; use the image2 demuxer in that case.
544        let is_image_sequence = path.to_str().is_some_and(|s| s.contains('%'));
545        // SAFETY: Path is valid, AvFormatContextGuard ensures cleanup
546        let format_ctx_guard = unsafe {
547            if is_image_sequence {
548                let fps = frame_rate.unwrap_or(25);
549                AvFormatContextGuard::new_image_sequence(path, fps)?
550            } else {
551                AvFormatContextGuard::new(path)?
552            }
553        };
554        let format_ctx = format_ctx_guard.as_ptr();
555
556        // Read stream information
557        // SAFETY: format_ctx is valid and owned by guard
558        unsafe {
559            ff_sys::avformat::find_stream_info(format_ctx).map_err(|e| DecodeError::Ffmpeg {
560                code: e,
561                message: format!("Failed to find stream info: {}", ff_sys::av_error_string(e)),
562            })?;
563        }
564
565        // Find the video stream
566        // SAFETY: format_ctx is valid
567        let (stream_index, codec_id) =
568            unsafe { Self::find_video_stream(format_ctx) }.ok_or_else(|| {
569                DecodeError::NoVideoStream {
570                    path: path.to_path_buf(),
571                }
572            })?;
573
574        // Find the decoder for this codec
575        // SAFETY: codec_id is valid from FFmpeg
576        let codec_name = unsafe { Self::extract_codec_name(codec_id) };
577        let codec = unsafe {
578            ff_sys::avcodec::find_decoder(codec_id).ok_or_else(|| {
579                // Distinguish between a totally unknown codec ID and a known codec
580                // whose decoder was not compiled into this FFmpeg build.
581                if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_EXR {
582                    DecodeError::DecoderUnavailable {
583                        codec: "exr".to_string(),
584                        hint: "Requires FFmpeg built with EXR support \
585                               (--enable-decoder=exr)"
586                            .to_string(),
587                    }
588                } else {
589                    DecodeError::UnsupportedCodec {
590                        codec: format!("{codec_name} (codec_id={codec_id:?})"),
591                    }
592                }
593            })?
594        };
595
596        // Allocate codec context (with RAII guard)
597        // SAFETY: codec pointer is valid, AvCodecContextGuard ensures cleanup
598        let codec_ctx_guard = unsafe { AvCodecContextGuard::new(codec)? };
599        let codec_ctx = codec_ctx_guard.as_ptr();
600
601        // Copy codec parameters from stream to context
602        // SAFETY: format_ctx and codec_ctx are valid, stream_index is valid
603        unsafe {
604            let stream = (*format_ctx).streams.add(stream_index as usize);
605            let codecpar = (*(*stream)).codecpar;
606            ff_sys::avcodec::parameters_to_context(codec_ctx, codecpar).map_err(|e| {
607                DecodeError::Ffmpeg {
608                    code: e,
609                    message: format!(
610                        "Failed to copy codec parameters: {}",
611                        ff_sys::av_error_string(e)
612                    ),
613                }
614            })?;
615
616            // Set thread count
617            if thread_count > 0 {
618                (*codec_ctx).thread_count = thread_count as i32;
619            }
620        }
621
622        // Initialize hardware acceleration if requested
623        // SAFETY: codec_ctx is valid and not yet opened
624        let (hw_device_ctx, active_hw_accel) =
625            unsafe { Self::init_hardware_accel(codec_ctx, hardware_accel)? };
626
627        // Open the codec
628        // SAFETY: codec_ctx and codec are valid, hardware device context is set if requested
629        unsafe {
630            ff_sys::avcodec::open2(codec_ctx, codec, ptr::null_mut()).map_err(|e| {
631                // If codec opening failed, we still own our reference to hw_device_ctx
632                // but it will be cleaned up when codec_ctx is freed (which happens
633                // when codec_ctx_guard is dropped)
634                // Our reference in hw_device_ctx will be cleaned up here
635                if let Some(hw_ctx) = hw_device_ctx {
636                    ff_sys::av_buffer_unref(&mut (hw_ctx as *mut _));
637                }
638                DecodeError::Ffmpeg {
639                    code: e,
640                    message: format!("Failed to open codec: {}", ff_sys::av_error_string(e)),
641                }
642            })?;
643        }
644
645        // Extract stream information
646        // SAFETY: All pointers are valid
647        let stream_info =
648            unsafe { Self::extract_stream_info(format_ctx, stream_index as i32, codec_ctx)? };
649
650        // Extract container information
651        // SAFETY: format_ctx is valid and avformat_find_stream_info has been called
652        let container_info = unsafe { Self::extract_container_info(format_ctx) };
653
654        // Allocate packet and frame (with RAII guards)
655        // SAFETY: FFmpeg is initialized, guards ensure cleanup
656        let packet_guard = unsafe { AvPacketGuard::new()? };
657        let frame_guard = unsafe { AvFrameGuard::new()? };
658
659        // All initialization successful - transfer ownership to VideoDecoderInner
660        Ok((
661            Self {
662                format_ctx: format_ctx_guard.into_raw(),
663                codec_ctx: codec_ctx_guard.into_raw(),
664                stream_index: stream_index as i32,
665                sws_ctx: None,
666                sws_cache_key: None,
667                output_format,
668                output_scale,
669                eof: false,
670                position: Duration::ZERO,
671                packet: packet_guard.into_raw(),
672                frame: frame_guard.into_raw(),
673                thumbnail_sws_ctx: None,
674                thumbnail_cache_key: None,
675                hw_device_ctx,
676                active_hw_accel,
677                frame_pool,
678            },
679            stream_info,
680            container_info,
681        ))
682    }
683
684    /// Finds the first video stream in the format context.
685    ///
686    /// # Returns
687    ///
688    /// Returns `Some((index, codec_id))` if a video stream is found, `None` otherwise.
689    ///
690    /// # Safety
691    ///
692    /// Caller must ensure `format_ctx` is valid and initialized.
693    unsafe fn find_video_stream(format_ctx: *mut AVFormatContext) -> Option<(usize, AVCodecID)> {
694        // SAFETY: Caller ensures format_ctx is valid
695        unsafe {
696            let nb_streams = (*format_ctx).nb_streams as usize;
697
698            for i in 0..nb_streams {
699                let stream = (*format_ctx).streams.add(i);
700                let codecpar = (*(*stream)).codecpar;
701
702                if (*codecpar).codec_type == AVMediaType_AVMEDIA_TYPE_VIDEO {
703                    return Some((i, (*codecpar).codec_id));
704                }
705            }
706
707            None
708        }
709    }
710
711    /// Returns the human-readable codec name for a given `AVCodecID`.
712    unsafe fn extract_codec_name(codec_id: ff_sys::AVCodecID) -> String {
713        // SAFETY: avcodec_get_name is safe for any codec ID value
714        let name_ptr = unsafe { ff_sys::avcodec_get_name(codec_id) };
715        if name_ptr.is_null() {
716            return String::from("unknown");
717        }
718        // SAFETY: avcodec_get_name returns a valid C string with static lifetime
719        unsafe { CStr::from_ptr(name_ptr).to_string_lossy().into_owned() }
720    }
721
    /// Extracts video stream information from FFmpeg structures.
    ///
    /// Reads dimensions, frame rate, duration, pixel format, color metadata,
    /// and codec identity, then assembles a `VideoStreamInfo`.
    ///
    /// # Errors
    ///
    /// Currently always returns `Ok`; the `Result` is kept for signature
    /// stability (see `clippy::unnecessary_wraps` allow at the top of file).
    ///
    /// # Safety
    ///
    /// Caller must ensure `format_ctx` and `codec_ctx` are valid and that
    /// `stream_index` is within `format_ctx`'s stream array.
    unsafe fn extract_stream_info(
        format_ctx: *mut AVFormatContext,
        stream_index: i32,
        codec_ctx: *mut AVCodecContext,
    ) -> Result<VideoStreamInfo, DecodeError> {
        // Read every raw field in a single unsafe block, then do all
        // conversion work in safe code below.
        // SAFETY: Caller ensures all pointers are valid
        let (
            width,
            height,
            fps_rational,
            duration_val,
            pix_fmt,
            color_space_val,
            color_range_val,
            color_primaries_val,
            codec_id,
        ) = unsafe {
            let stream = (*format_ctx).streams.add(stream_index as usize);
            let codecpar = (*(*stream)).codecpar;

            (
                (*codecpar).width as u32,
                (*codecpar).height as u32,
                (*(*stream)).avg_frame_rate,
                (*format_ctx).duration,
                (*codec_ctx).pix_fmt,
                (*codecpar).color_space,
                (*codecpar).color_range,
                (*codecpar).color_primaries,
                (*codecpar).codec_id,
            )
        };

        // Extract frame rate; a zero denominator means FFmpeg could not
        // determine it, so fall back to 30 fps with a warning.
        let frame_rate = if fps_rational.den != 0 {
            Rational::new(fps_rational.num as i32, fps_rational.den as i32)
        } else {
            log::warn!(
                "invalid frame rate, falling back to 30fps num={} den=0 fallback=30/1",
                fps_rational.num
            );
            Rational::new(30, 1)
        };

        // Extract duration; the raw value is treated as microseconds
        // (divided by 1_000_000). Non-positive means unknown.
        let duration = if duration_val > 0 {
            let duration_secs = duration_val as f64 / 1_000_000.0;
            Some(Duration::from_secs_f64(duration_secs))
        } else {
            None
        };

        // Extract pixel format
        let pixel_format = Self::convert_pixel_format(pix_fmt);

        // Extract color information
        let color_space = Self::convert_color_space(color_space_val);
        let color_range = Self::convert_color_range(color_range_val);
        let color_primaries = Self::convert_color_primaries(color_primaries_val);

        // Extract codec
        let codec = Self::convert_codec(codec_id);
        let codec_name = unsafe { Self::extract_codec_name(codec_id) };

        // Build stream info
        let mut builder = VideoStreamInfo::builder()
            .index(stream_index as u32)
            .codec(codec)
            .codec_name(codec_name)
            .width(width)
            .height(height)
            .frame_rate(frame_rate)
            .pixel_format(pixel_format)
            .color_space(color_space)
            .color_range(color_range)
            .color_primaries(color_primaries);

        // Duration is optional on the builder: only set when known.
        if let Some(d) = duration {
            builder = builder.duration(d);
        }

        Ok(builder.build())
    }
806
807    /// Extracts container-level information from the `AVFormatContext`.
808    ///
809    /// # Safety
810    ///
811    /// Caller must ensure `format_ctx` is valid and `avformat_find_stream_info` has been called.
812    unsafe fn extract_container_info(format_ctx: *mut AVFormatContext) -> ContainerInfo {
813        // SAFETY: Caller ensures format_ctx is valid
814        unsafe {
815            let format_name = if (*format_ctx).iformat.is_null() {
816                String::new()
817            } else {
818                let ptr = (*(*format_ctx).iformat).name;
819                if ptr.is_null() {
820                    String::new()
821                } else {
822                    CStr::from_ptr(ptr).to_string_lossy().into_owned()
823                }
824            };
825
826            let bit_rate = {
827                let br = (*format_ctx).bit_rate;
828                if br > 0 { Some(br as u64) } else { None }
829            };
830
831            let nb_streams = (*format_ctx).nb_streams as u32;
832
833            let mut builder = ContainerInfo::builder()
834                .format_name(format_name)
835                .nb_streams(nb_streams);
836            if let Some(br) = bit_rate {
837                builder = builder.bit_rate(br);
838            }
839            builder.build()
840        }
841    }
842
843    /// Converts FFmpeg pixel format to our PixelFormat enum.
844    fn convert_pixel_format(fmt: AVPixelFormat) -> PixelFormat {
845        if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P {
846            PixelFormat::Yuv420p
847        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P {
848            PixelFormat::Yuv422p
849        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P {
850            PixelFormat::Yuv444p
851        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24 {
852            PixelFormat::Rgb24
853        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24 {
854            PixelFormat::Bgr24
855        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA {
856            PixelFormat::Rgba
857        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA {
858            PixelFormat::Bgra
859        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8 {
860            PixelFormat::Gray8
861        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_NV12 {
862            PixelFormat::Nv12
863        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_NV21 {
864            PixelFormat::Nv21
865        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P10LE {
866            PixelFormat::Yuv420p10le
867        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P10LE {
868            PixelFormat::Yuv422p10le
869        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P10LE {
870            PixelFormat::Yuv444p10le
871        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_P010LE {
872            PixelFormat::P010le
873        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_GBRPF32LE {
874            PixelFormat::Gbrpf32le
875        } else {
876            log::warn!(
877                "pixel_format unsupported, falling back to Yuv420p requested={fmt} fallback=Yuv420p"
878            );
879            PixelFormat::Yuv420p
880        }
881    }
882
883    /// Converts FFmpeg color space to our ColorSpace enum.
884    fn convert_color_space(space: AVColorSpace) -> ColorSpace {
885        if space == ff_sys::AVColorSpace_AVCOL_SPC_BT709 {
886            ColorSpace::Bt709
887        } else if space == ff_sys::AVColorSpace_AVCOL_SPC_BT470BG
888            || space == ff_sys::AVColorSpace_AVCOL_SPC_SMPTE170M
889        {
890            ColorSpace::Bt601
891        } else if space == ff_sys::AVColorSpace_AVCOL_SPC_BT2020_NCL {
892            ColorSpace::Bt2020
893        } else {
894            log::warn!(
895                "color_space unsupported, falling back to Bt709 requested={space} fallback=Bt709"
896            );
897            ColorSpace::Bt709
898        }
899    }
900
901    /// Converts FFmpeg color range to our ColorRange enum.
902    fn convert_color_range(range: AVColorRange) -> ColorRange {
903        if range == ff_sys::AVColorRange_AVCOL_RANGE_JPEG {
904            ColorRange::Full
905        } else if range == ff_sys::AVColorRange_AVCOL_RANGE_MPEG {
906            ColorRange::Limited
907        } else {
908            log::warn!(
909                "color_range unsupported, falling back to Limited requested={range} fallback=Limited"
910            );
911            ColorRange::Limited
912        }
913    }
914
915    /// Converts FFmpeg color primaries to our ColorPrimaries enum.
916    fn convert_color_primaries(primaries: AVColorPrimaries) -> ColorPrimaries {
917        if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT709 {
918            ColorPrimaries::Bt709
919        } else if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT470BG
920            || primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_SMPTE170M
921        {
922            ColorPrimaries::Bt601
923        } else if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT2020 {
924            ColorPrimaries::Bt2020
925        } else {
926            log::warn!(
927                "color_primaries unsupported, falling back to Bt709 requested={primaries} fallback=Bt709"
928            );
929            ColorPrimaries::Bt709
930        }
931    }
932
933    /// Converts FFmpeg codec ID to our VideoCodec enum.
934    fn convert_codec(codec_id: AVCodecID) -> VideoCodec {
935        if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_H264 {
936            VideoCodec::H264
937        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_HEVC {
938            VideoCodec::H265
939        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_VP8 {
940            VideoCodec::Vp8
941        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_VP9 {
942            VideoCodec::Vp9
943        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_AV1 {
944            VideoCodec::Av1
945        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_MPEG4 {
946            VideoCodec::Mpeg4
947        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_PRORES {
948            VideoCodec::ProRes
949        } else {
950            log::warn!(
951                "video codec unsupported, falling back to H264 codec_id={codec_id} fallback=H264"
952            );
953            VideoCodec::H264
954        }
955    }
956
957    /// Decodes the next video frame.
958    ///
959    /// # Returns
960    ///
961    /// - `Ok(Some(frame))` - Successfully decoded a frame
962    /// - `Ok(None)` - End of stream reached
963    /// - `Err(_)` - Decoding error occurred
964    pub(crate) fn decode_one(&mut self) -> Result<Option<VideoFrame>, DecodeError> {
965        if self.eof {
966            return Ok(None);
967        }
968
969        unsafe {
970            loop {
971                // Try to receive a frame from the decoder
972                let ret = ff_sys::avcodec_receive_frame(self.codec_ctx, self.frame);
973
974                if ret == 0 {
975                    // Successfully received a frame
976                    // Check if this is a hardware frame and transfer to CPU memory if needed
977                    self.transfer_hardware_frame_if_needed()?;
978
979                    let video_frame = self.convert_frame_to_video_frame()?;
980
981                    // Update position based on frame timestamp
982                    let pts = (*self.frame).pts;
983                    if pts != ff_sys::AV_NOPTS_VALUE {
984                        let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
985                        let time_base = (*(*stream)).time_base;
986                        let timestamp_secs =
987                            pts as f64 * time_base.num as f64 / time_base.den as f64;
988                        self.position = Duration::from_secs_f64(timestamp_secs);
989                    }
990
991                    return Ok(Some(video_frame));
992                } else if ret == ff_sys::error_codes::EAGAIN {
993                    // Need to send more packets to the decoder
994                    // Read a packet from the file
995                    let read_ret = ff_sys::av_read_frame(self.format_ctx, self.packet);
996
997                    if read_ret == ff_sys::error_codes::EOF {
998                        // End of file - flush the decoder
999                        ff_sys::avcodec_send_packet(self.codec_ctx, ptr::null());
1000                        self.eof = true;
1001                        continue;
1002                    } else if read_ret < 0 {
1003                        return Err(DecodeError::Ffmpeg {
1004                            code: read_ret,
1005                            message: format!(
1006                                "Failed to read frame: {}",
1007                                ff_sys::av_error_string(read_ret)
1008                            ),
1009                        });
1010                    }
1011
1012                    // Check if this packet belongs to the video stream
1013                    if (*self.packet).stream_index == self.stream_index {
1014                        // Send the packet to the decoder
1015                        let send_ret = ff_sys::avcodec_send_packet(self.codec_ctx, self.packet);
1016                        ff_sys::av_packet_unref(self.packet);
1017
1018                        if send_ret < 0 && send_ret != ff_sys::error_codes::EAGAIN {
1019                            return Err(DecodeError::Ffmpeg {
1020                                code: send_ret,
1021                                message: format!(
1022                                    "Failed to send packet: {}",
1023                                    ff_sys::av_error_string(send_ret)
1024                                ),
1025                            });
1026                        }
1027                    } else {
1028                        // Not our stream, unref and continue
1029                        ff_sys::av_packet_unref(self.packet);
1030                    }
1031                } else if ret == ff_sys::error_codes::EOF {
1032                    // Decoder has been fully flushed
1033                    self.eof = true;
1034                    return Ok(None);
1035                } else {
1036                    return Err(DecodeError::DecodingFailed {
1037                        timestamp: Some(self.position),
1038                        reason: ff_sys::av_error_string(ret),
1039                    });
1040                }
1041            }
1042        }
1043    }
1044
1045    /// Converts an AVFrame to a VideoFrame, applying pixel format conversion if needed.
1046    unsafe fn convert_frame_to_video_frame(&mut self) -> Result<VideoFrame, DecodeError> {
1047        // SAFETY: Caller ensures self.frame is valid
1048        unsafe {
1049            let src_width = (*self.frame).width as u32;
1050            let src_height = (*self.frame).height as u32;
1051            let src_format = (*self.frame).format;
1052
1053            // Determine output format
1054            let dst_format = if let Some(fmt) = self.output_format {
1055                Self::pixel_format_to_av(fmt)
1056            } else {
1057                src_format
1058            };
1059
1060            // Determine output dimensions
1061            let (dst_width, dst_height) = self.resolve_output_dims(src_width, src_height);
1062
1063            // Check if conversion or scaling is needed
1064            let needs_conversion =
1065                src_format != dst_format || dst_width != src_width || dst_height != src_height;
1066
1067            if needs_conversion {
1068                self.convert_with_sws(
1069                    src_width, src_height, src_format, dst_width, dst_height, dst_format,
1070                )
1071            } else {
1072                self.av_frame_to_video_frame(self.frame)
1073            }
1074        }
1075    }
1076
1077    /// Computes the destination (width, height) from `output_scale` and source dimensions.
1078    ///
1079    /// Returns `(src_width, src_height)` when no scale is set.
1080    /// All returned dimensions are rounded up to the nearest even number.
1081    fn resolve_output_dims(&self, src_width: u32, src_height: u32) -> (u32, u32) {
1082        let round_even = |n: u32| (n + 1) & !1;
1083
1084        match self.output_scale {
1085            None => (src_width, src_height),
1086            Some(OutputScale::Exact { width, height }) => (round_even(width), round_even(height)),
1087            Some(OutputScale::FitWidth(target_w)) => {
1088                let target_w = round_even(target_w);
1089                if src_width == 0 {
1090                    return (target_w, target_w);
1091                }
1092                let h = (target_w as u64 * src_height as u64 / src_width as u64) as u32;
1093                (target_w, round_even(h.max(2)))
1094            }
1095            Some(OutputScale::FitHeight(target_h)) => {
1096                let target_h = round_even(target_h);
1097                if src_height == 0 {
1098                    return (target_h, target_h);
1099                }
1100                let w = (target_h as u64 * src_width as u64 / src_height as u64) as u32;
1101                (round_even(w.max(2)), target_h)
1102            }
1103        }
1104    }
1105
    /// Converts pixel format and/or scales a frame using `libswscale`.
    ///
    /// The `sws_ctx` is cached and recreated only when the source/destination
    /// parameters change (cache key: `(src_w, src_h, src_fmt, dst_w, dst_h, dst_fmt)`).
    ///
    /// The destination frame is allocated fresh on each call (guarded by
    /// `AvFrameGuard`, so it is freed even on early error returns) and the
    /// source frame's PTS is propagated to the converted output.
    ///
    /// # Errors
    ///
    /// Returns `DecodeError::Ffmpeg` if the swscale context cannot be created,
    /// the destination frame buffer cannot be allocated, or the scale call fails.
    unsafe fn convert_with_sws(
        &mut self,
        src_width: u32,
        src_height: u32,
        src_format: i32,
        dst_width: u32,
        dst_height: u32,
        dst_format: i32,
    ) -> Result<VideoFrame, DecodeError> {
        // SAFETY: Caller ensures frame and context pointers are valid
        unsafe {
            // Get or create SwScale context, invalidating cache when parameters change.
            let cache_key = (
                src_width, src_height, src_format, dst_width, dst_height, dst_format,
            );
            if self.sws_cache_key != Some(cache_key) {
                // Free the old context if it exists (take() also clears the slot,
                // so a failure below cannot leave a dangling pointer cached).
                if let Some(old_ctx) = self.sws_ctx.take() {
                    ff_sys::swscale::free_context(old_ctx);
                }

                let ctx = ff_sys::swscale::get_context(
                    src_width as i32,
                    src_height as i32,
                    src_format,
                    dst_width as i32,
                    dst_height as i32,
                    dst_format,
                    ff_sys::swscale::scale_flags::BILINEAR,
                )
                .map_err(|e| DecodeError::Ffmpeg {
                    code: 0,
                    message: format!("Failed to create sws context: {e}"),
                })?;

                self.sws_ctx = Some(ctx);
                self.sws_cache_key = Some(cache_key);
            }

            // Defensive re-check; after the block above this should always be Some.
            let Some(sws_ctx) = self.sws_ctx else {
                return Err(DecodeError::Ffmpeg {
                    code: 0,
                    message: "SwsContext not initialized".to_string(),
                });
            };

            // Allocate destination frame (with RAII guard)
            let dst_frame_guard = AvFrameGuard::new()?;
            let dst_frame = dst_frame_guard.as_ptr();

            // Geometry/format must be set before av_frame_get_buffer can size the planes.
            (*dst_frame).width = dst_width as i32;
            (*dst_frame).height = dst_height as i32;
            (*dst_frame).format = dst_format;

            // Allocate buffer for destination frame (alignment 0 = let FFmpeg choose)
            let buffer_ret = ff_sys::av_frame_get_buffer(dst_frame, 0);
            if buffer_ret < 0 {
                return Err(DecodeError::Ffmpeg {
                    code: buffer_ret,
                    message: format!(
                        "Failed to allocate frame buffer: {}",
                        ff_sys::av_error_string(buffer_ret)
                    ),
                });
            }

            // Perform conversion/scaling (src_height is the number of input rows to process)
            ff_sys::swscale::scale(
                sws_ctx,
                (*self.frame).data.as_ptr() as *const *const u8,
                (*self.frame).linesize.as_ptr(),
                0,
                src_height as i32,
                (*dst_frame).data.as_ptr() as *const *mut u8,
                (*dst_frame).linesize.as_ptr(),
            )
            .map_err(|e| DecodeError::Ffmpeg {
                code: 0,
                message: format!("Failed to scale frame: {e}"),
            })?;

            // Copy timestamp so the converted frame keeps the source presentation time
            (*dst_frame).pts = (*self.frame).pts;

            // Convert to VideoFrame
            let video_frame = self.av_frame_to_video_frame(dst_frame)?;

            // dst_frame is automatically freed when guard drops

            Ok(video_frame)
        }
    }
1202
1203    /// Converts an AVFrame to a VideoFrame.
1204    unsafe fn av_frame_to_video_frame(
1205        &self,
1206        frame: *const AVFrame,
1207    ) -> Result<VideoFrame, DecodeError> {
1208        // SAFETY: Caller ensures frame and format_ctx are valid
1209        unsafe {
1210            let width = (*frame).width as u32;
1211            let height = (*frame).height as u32;
1212            let format = Self::convert_pixel_format((*frame).format);
1213
1214            // Extract timestamp
1215            let pts = (*frame).pts;
1216            let timestamp = if pts != ff_sys::AV_NOPTS_VALUE {
1217                let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
1218                let time_base = (*(*stream)).time_base;
1219                Timestamp::new(
1220                    pts as i64,
1221                    Rational::new(time_base.num as i32, time_base.den as i32),
1222                )
1223            } else {
1224                Timestamp::default()
1225            };
1226
1227            // Convert frame to planes and strides
1228            let (planes, strides) =
1229                self.extract_planes_and_strides(frame, width, height, format)?;
1230
1231            VideoFrame::new(planes, strides, width, height, format, timestamp, false).map_err(|e| {
1232                DecodeError::Ffmpeg {
1233                    code: 0,
1234                    message: format!("Failed to create VideoFrame: {e}"),
1235                }
1236            })
1237        }
1238    }
1239
1240    /// Allocates a buffer, optionally using the frame pool.
1241    ///
1242    /// If a frame pool is configured and has available buffers, uses the pool.
1243    /// Otherwise, allocates a new Vec<u8>.
1244    ///
1245    /// Allocates a buffer for decoded frame data.
1246    ///
1247    /// If a frame pool is configured, attempts to acquire a buffer from the pool.
1248    /// The returned PooledBuffer will automatically be returned to the pool when dropped.
1249    fn allocate_buffer(&self, size: usize) -> PooledBuffer {
1250        if let Some(ref pool) = self.frame_pool {
1251            if let Some(pooled_buffer) = pool.acquire(size) {
1252                return pooled_buffer;
1253            }
1254            // Pool is configured but currently empty (or has no buffer large
1255            // enough). Allocate fresh memory and attach it to the pool so
1256            // that when the VideoFrame is dropped the buffer is returned via
1257            // pool.release() and becomes available for the next frame.
1258            return PooledBuffer::new(vec![0u8; size], Arc::downgrade(pool));
1259        }
1260        PooledBuffer::standalone(vec![0u8; size])
1261    }
1262
1263    /// Extracts planes and strides from an AVFrame.
1264    unsafe fn extract_planes_and_strides(
1265        &self,
1266        frame: *const AVFrame,
1267        width: u32,
1268        height: u32,
1269        format: PixelFormat,
1270    ) -> Result<(Vec<PooledBuffer>, Vec<usize>), DecodeError> {
1271        // Bytes per pixel constants for different pixel formats
1272        const BYTES_PER_PIXEL_RGBA: usize = 4;
1273        const BYTES_PER_PIXEL_RGB24: usize = 3;
1274
1275        // SAFETY: Caller ensures frame is valid and format matches actual frame format
1276        unsafe {
1277            let mut planes = Vec::new();
1278            let mut strides = Vec::new();
1279
1280            #[allow(clippy::match_same_arms)]
1281            match format {
1282                PixelFormat::Rgba | PixelFormat::Bgra | PixelFormat::Rgb24 | PixelFormat::Bgr24 => {
1283                    // Packed formats - single plane
1284                    let stride = (*frame).linesize[0] as usize;
1285                    let bytes_per_pixel = if matches!(format, PixelFormat::Rgba | PixelFormat::Bgra)
1286                    {
1287                        BYTES_PER_PIXEL_RGBA
1288                    } else {
1289                        BYTES_PER_PIXEL_RGB24
1290                    };
1291                    let row_size = (width as usize) * bytes_per_pixel;
1292                    let buffer_size = row_size * height as usize;
1293                    let mut plane_data = self.allocate_buffer(buffer_size);
1294
1295                    for y in 0..height as usize {
1296                        let src_offset = y * stride;
1297                        let dst_offset = y * row_size;
1298                        let src_ptr = (*frame).data[0].add(src_offset);
1299                        let plane_slice = plane_data.as_mut();
1300                        // SAFETY: We copy exactly `row_size` bytes per row. The source pointer
1301                        // is valid (from FFmpeg frame data), destination has sufficient capacity
1302                        // (allocated with height * row_size), and ranges don't overlap.
1303                        std::ptr::copy_nonoverlapping(
1304                            src_ptr,
1305                            plane_slice[dst_offset..].as_mut_ptr(),
1306                            row_size,
1307                        );
1308                    }
1309
1310                    planes.push(plane_data);
1311                    strides.push(row_size);
1312                }
1313                PixelFormat::Yuv420p | PixelFormat::Yuv422p | PixelFormat::Yuv444p => {
1314                    // Planar YUV formats
1315                    let (chroma_width, chroma_height) = match format {
1316                        PixelFormat::Yuv420p => (width / 2, height / 2),
1317                        PixelFormat::Yuv422p => (width / 2, height),
1318                        PixelFormat::Yuv444p => (width, height),
1319                        _ => unreachable!(),
1320                    };
1321
1322                    // Y plane
1323                    let y_stride = width as usize;
1324                    let y_size = y_stride * height as usize;
1325                    let mut y_data = self.allocate_buffer(y_size);
1326                    for y in 0..height as usize {
1327                        let src_offset = y * (*frame).linesize[0] as usize;
1328                        let dst_offset = y * y_stride;
1329                        let src_ptr = (*frame).data[0].add(src_offset);
1330                        let y_slice = y_data.as_mut();
1331                        // SAFETY: Copying Y plane row-by-row. Source is valid FFmpeg data,
1332                        // destination has sufficient capacity, no overlap.
1333                        std::ptr::copy_nonoverlapping(
1334                            src_ptr,
1335                            y_slice[dst_offset..].as_mut_ptr(),
1336                            width as usize,
1337                        );
1338                    }
1339                    planes.push(y_data);
1340                    strides.push(y_stride);
1341
1342                    // U plane
1343                    let u_stride = chroma_width as usize;
1344                    let u_size = u_stride * chroma_height as usize;
1345                    let mut u_data = self.allocate_buffer(u_size);
1346                    for y in 0..chroma_height as usize {
1347                        let src_offset = y * (*frame).linesize[1] as usize;
1348                        let dst_offset = y * u_stride;
1349                        let src_ptr = (*frame).data[1].add(src_offset);
1350                        let u_slice = u_data.as_mut();
1351                        // SAFETY: Copying U (chroma) plane row-by-row. Valid source,
1352                        // sufficient destination capacity, no overlap.
1353                        std::ptr::copy_nonoverlapping(
1354                            src_ptr,
1355                            u_slice[dst_offset..].as_mut_ptr(),
1356                            chroma_width as usize,
1357                        );
1358                    }
1359                    planes.push(u_data);
1360                    strides.push(u_stride);
1361
1362                    // V plane
1363                    let v_stride = chroma_width as usize;
1364                    let v_size = v_stride * chroma_height as usize;
1365                    let mut v_data = self.allocate_buffer(v_size);
1366                    for y in 0..chroma_height as usize {
1367                        let src_offset = y * (*frame).linesize[2] as usize;
1368                        let dst_offset = y * v_stride;
1369                        let src_ptr = (*frame).data[2].add(src_offset);
1370                        let v_slice = v_data.as_mut();
1371                        // SAFETY: Copying V (chroma) plane row-by-row. Valid source,
1372                        // sufficient destination capacity, no overlap.
1373                        std::ptr::copy_nonoverlapping(
1374                            src_ptr,
1375                            v_slice[dst_offset..].as_mut_ptr(),
1376                            chroma_width as usize,
1377                        );
1378                    }
1379                    planes.push(v_data);
1380                    strides.push(v_stride);
1381                }
1382                PixelFormat::Gray8 => {
1383                    // Single plane grayscale
1384                    let stride = width as usize;
1385                    let mut plane_data = self.allocate_buffer(stride * height as usize);
1386
1387                    for y in 0..height as usize {
1388                        let src_offset = y * (*frame).linesize[0] as usize;
1389                        let dst_offset = y * stride;
1390                        let src_ptr = (*frame).data[0].add(src_offset);
1391                        let plane_slice = plane_data.as_mut();
1392                        // SAFETY: Copying grayscale plane row-by-row. Valid source,
1393                        // sufficient destination capacity, no overlap.
1394                        std::ptr::copy_nonoverlapping(
1395                            src_ptr,
1396                            plane_slice[dst_offset..].as_mut_ptr(),
1397                            width as usize,
1398                        );
1399                    }
1400
1401                    planes.push(plane_data);
1402                    strides.push(stride);
1403                }
1404                PixelFormat::Nv12 | PixelFormat::Nv21 => {
1405                    // Semi-planar formats
1406                    let uv_height = height / 2;
1407
1408                    // Y plane
1409                    let y_stride = width as usize;
1410                    let mut y_data = self.allocate_buffer(y_stride * height as usize);
1411                    for y in 0..height as usize {
1412                        let src_offset = y * (*frame).linesize[0] as usize;
1413                        let dst_offset = y * y_stride;
1414                        let src_ptr = (*frame).data[0].add(src_offset);
1415                        let y_slice = y_data.as_mut();
1416                        // SAFETY: Copying Y plane (semi-planar) row-by-row. Valid source,
1417                        // sufficient destination capacity, no overlap.
1418                        std::ptr::copy_nonoverlapping(
1419                            src_ptr,
1420                            y_slice[dst_offset..].as_mut_ptr(),
1421                            width as usize,
1422                        );
1423                    }
1424                    planes.push(y_data);
1425                    strides.push(y_stride);
1426
1427                    // UV plane
1428                    let uv_stride = width as usize;
1429                    let mut uv_data = self.allocate_buffer(uv_stride * uv_height as usize);
1430                    for y in 0..uv_height as usize {
1431                        let src_offset = y * (*frame).linesize[1] as usize;
1432                        let dst_offset = y * uv_stride;
1433                        let src_ptr = (*frame).data[1].add(src_offset);
1434                        let uv_slice = uv_data.as_mut();
1435                        // SAFETY: Copying interleaved UV plane (semi-planar) row-by-row.
1436                        // Valid source, sufficient destination capacity, no overlap.
1437                        std::ptr::copy_nonoverlapping(
1438                            src_ptr,
1439                            uv_slice[dst_offset..].as_mut_ptr(),
1440                            width as usize,
1441                        );
1442                    }
1443                    planes.push(uv_data);
1444                    strides.push(uv_stride);
1445                }
1446                PixelFormat::Gbrpf32le => {
1447                    // Planar GBR float: 3 full-resolution planes, 4 bytes per sample (f32)
1448                    const BYTES_PER_SAMPLE: usize = 4;
1449                    let row_size = width as usize * BYTES_PER_SAMPLE;
1450                    let size = row_size * height as usize;
1451
1452                    for plane_idx in 0..3usize {
1453                        let src_linesize = (*frame).linesize[plane_idx] as usize;
1454                        let mut plane_data = self.allocate_buffer(size);
1455                        for y in 0..height as usize {
1456                            let src_offset = y * src_linesize;
1457                            let dst_offset = y * row_size;
1458                            let src_ptr = (*frame).data[plane_idx].add(src_offset);
1459                            let dst_slice = plane_data.as_mut();
1460                            // SAFETY: Copying one row of a planar float plane. Source is valid
1461                            // FFmpeg frame data, destination has sufficient capacity, no overlap.
1462                            std::ptr::copy_nonoverlapping(
1463                                src_ptr,
1464                                dst_slice[dst_offset..].as_mut_ptr(),
1465                                row_size,
1466                            );
1467                        }
1468                        planes.push(plane_data);
1469                        strides.push(row_size);
1470                    }
1471                }
1472                _ => {
1473                    return Err(DecodeError::Ffmpeg {
1474                        code: 0,
1475                        message: format!("Unsupported pixel format: {format:?}"),
1476                    });
1477                }
1478            }
1479
1480            Ok((planes, strides))
1481        }
1482    }
1483
1484    /// Converts our `PixelFormat` to FFmpeg `AVPixelFormat`.
1485    fn pixel_format_to_av(format: PixelFormat) -> AVPixelFormat {
1486        match format {
1487            PixelFormat::Yuv420p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P,
1488            PixelFormat::Yuv422p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P,
1489            PixelFormat::Yuv444p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P,
1490            PixelFormat::Rgb24 => ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24,
1491            PixelFormat::Bgr24 => ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24,
1492            PixelFormat::Rgba => ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA,
1493            PixelFormat::Bgra => ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA,
1494            PixelFormat::Gray8 => ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8,
1495            PixelFormat::Nv12 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV12,
1496            PixelFormat::Nv21 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV21,
1497            PixelFormat::Yuv420p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P10LE,
1498            PixelFormat::Yuv422p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P10LE,
1499            PixelFormat::Yuv444p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P10LE,
1500            PixelFormat::Yuva444p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUVA444P10LE,
1501            PixelFormat::P010le => ff_sys::AVPixelFormat_AV_PIX_FMT_P010LE,
1502            PixelFormat::Gbrpf32le => ff_sys::AVPixelFormat_AV_PIX_FMT_GBRPF32LE,
1503            _ => {
1504                log::warn!(
1505                    "pixel_format has no AV mapping, falling back to Yuv420p format={format:?} fallback=AV_PIX_FMT_YUV420P"
1506                );
1507                ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P
1508            }
1509        }
1510    }
1511
    /// Returns the current playback position.
    ///
    /// The position tracks decoding progress: it is updated when frames are
    /// decoded (not when `seek` is called), so it reflects the last decoded frame.
    pub(crate) fn position(&self) -> Duration {
        self.position
    }
1516
    /// Returns whether end of file has been reached.
    ///
    /// The flag is cleared again by `seek` and `flush`, so decoding can resume
    /// after either operation.
    pub(crate) fn is_eof(&self) -> bool {
        self.eof
    }
1521
1522    /// Converts a `Duration` to a presentation timestamp (PTS) in stream time_base units.
1523    ///
1524    /// # Arguments
1525    ///
1526    /// * `duration` - The duration to convert.
1527    ///
1528    /// # Returns
1529    ///
1530    /// The timestamp in stream time_base units.
1531    ///
1532    /// # Note
1533    ///
1534    /// av_seek_frame expects timestamps in stream time_base units when using a specific stream_index.
1535    fn duration_to_pts(&self, duration: Duration) -> i64 {
1536        // Convert duration to stream time_base units for seeking
1537        // SAFETY:
1538        // - format_ctx is valid: owned by VideoDecoderInner, initialized in constructor via avformat_open_input
1539        // - stream_index is valid: validated during decoder creation (find_stream_info + codec opening)
1540        // - streams array access is valid: guaranteed by FFmpeg after successful avformat_open_input
1541        let time_base = unsafe {
1542            let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
1543            (*(*stream)).time_base
1544        };
1545
1546        // Convert: duration (seconds) * (time_base.den / time_base.num) = PTS
1547        let time_base_f64 = time_base.den as f64 / time_base.num as f64;
1548        (duration.as_secs_f64() * time_base_f64) as i64
1549    }
1550
1551    /// Converts a presentation timestamp (PTS) to a `Duration`.
1552    ///
1553    /// # Arguments
1554    ///
1555    /// * `pts` - The presentation timestamp in stream time base units.
1556    ///
1557    /// # Returns
1558    ///
1559    /// The duration corresponding to the PTS.
1560    ///
1561    /// # Safety
1562    ///
1563    /// Caller must ensure that `format_ctx` and `stream_index` are valid.
1564    ///
1565    /// # Note
1566    ///
1567    /// Currently unused but kept for potential future use in more advanced seeking scenarios.
1568    #[allow(dead_code)]
1569    fn pts_to_duration(&self, pts: i64) -> Duration {
1570        // SAFETY: Caller ensures format_ctx and stream_index are valid
1571        unsafe {
1572            let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
1573            let time_base = (*(*stream)).time_base;
1574
1575            // Convert PTS to duration
1576            let duration_secs = pts as f64 * time_base.num as f64 / time_base.den as f64;
1577            Duration::from_secs_f64(duration_secs)
1578        }
1579    }
1580
    /// Seeks to a specified position in the video stream.
    ///
    /// This method performs efficient seeking without reopening the file.
    /// It uses `av_seek_frame` internally and flushes the decoder buffers.
    ///
    /// # Performance Characteristics
    ///
    /// - **Keyframe seek**: 5-10ms for typical GOP sizes (1-2 seconds)
    /// - **Exact seek**: Proportional to distance from nearest keyframe
    /// - **Large GOP videos**: May require sequential decoding from distant keyframe
    ///
    /// For videos with sparse keyframes (GOP > 2 seconds), the method will
    /// decode frames sequentially from the nearest keyframe to reach the target.
    /// This ensures correct frame data but may take longer (10-50ms for very large GOPs).
    ///
    /// # Arguments
    ///
    /// * `position` - Target position to seek to.
    /// * `mode` - Seek mode (Keyframe, Exact, or Backward).
    ///
    /// # Errors
    ///
    /// Returns [`DecodeError::SeekFailed`] if the seek operation fails.
    pub(crate) fn seek(
        &mut self,
        position: Duration,
        mode: crate::SeekMode,
    ) -> Result<(), DecodeError> {
        use crate::SeekMode;

        let timestamp = self.duration_to_pts(position);

        // All seek modes use BACKWARD flag to find the nearest keyframe at or before target.
        // The difference between modes is in the post-seek processing below.
        let flags = ff_sys::avformat::seek_flags::BACKWARD;

        // 1. Clear any pending packet and frame to avoid reading stale data after seek
        // SAFETY:
        // - packet is valid: allocated in constructor, owned by VideoDecoderInner
        // - frame is valid: allocated in constructor, owned by VideoDecoderInner
        unsafe {
            ff_sys::av_packet_unref(self.packet);
            ff_sys::av_frame_unref(self.frame);
        }

        // 2. Seek in the format context (file is NOT reopened)
        // Use av_seek_frame with the stream index and timestamp in stream time_base units
        // SAFETY:
        // - format_ctx is valid: owned by VideoDecoderInner, initialized via avformat_open_input
        // - stream_index is valid: validated during decoder creation
        // - timestamp is valid: converted from Duration using stream's time_base
        unsafe {
            ff_sys::avformat::seek_frame(
                self.format_ctx,
                self.stream_index as i32,
                timestamp,
                flags,
            )
            .map_err(|e| DecodeError::SeekFailed {
                target: position,
                reason: ff_sys::av_error_string(e),
            })?;
        }

        // 3. Flush decoder buffers to clear any cached frames
        // SAFETY: codec_ctx is valid: owned by VideoDecoderInner, initialized via avcodec_open2
        unsafe {
            ff_sys::avcodec::flush_buffers(self.codec_ctx);
        }

        // 4. Drain any remaining frames from the decoder after flush
        // This ensures no stale frames are returned after the seek
        // SAFETY:
        // - codec_ctx is valid: owned by VideoDecoderInner, initialized via avcodec_open2
        // - frame is valid: allocated in constructor, owned by VideoDecoderInner
        unsafe {
            loop {
                let ret = ff_sys::avcodec_receive_frame(self.codec_ctx, self.frame);
                if ret == ff_sys::error_codes::EAGAIN || ret == ff_sys::error_codes::EOF {
                    // No more frames in the decoder buffer
                    break;
                } else if ret == 0 {
                    // Got a frame, unref it and continue draining
                    ff_sys::av_frame_unref(self.frame);
                } else {
                    // Other error, break out
                    // (deliberately swallowed: seek itself already succeeded and
                    // the next decode call will surface any persistent error)
                    break;
                }
            }
        }

        // 5. Reset internal state
        self.eof = false;
        // Note: We don't update self.position here because it will be updated
        // when the next frame is decoded. This ensures position reflects actual decoded position.

        // 6. Skip forward to the target position
        //
        // Context: av_seek_frame with BACKWARD flag seeks to the nearest keyframe *at or before*
        // the target timestamp. For videos with sparse keyframes (large GOP size), this may
        // land far from the target (e.g., at the first keyframe for GOP=entire video).
        //
        // Solution: Decode frames sequentially from the keyframe until reaching the target.
        // This is necessary because H.264/H.265 P-frames and B-frames depend on previous
        // frames for reconstruction, so we must decode all intermediate frames.
        //
        // Performance Impact:
        // - Typical GOP (1-2s): 30-60 frames to skip, ~5-10ms overhead
        // - Large GOP (5-10s): 150-300 frames to skip, ~20-50ms overhead
        // - Worst case (single keyframe): May decode entire video, ~100ms-1s
        if mode == SeekMode::Exact {
            // For exact mode, decode until we reach or pass the exact target
            self.skip_to_exact(position)?;
        } else {
            // For keyframe/backward modes, decode until we're reasonably close to the target
            // Rationale: Balances accuracy with performance for common use cases
            let tolerance = Duration::from_secs(KEYFRAME_SEEK_TOLERANCE_SECS);
            let min_position = position.saturating_sub(tolerance);

            while let Some(frame) = self.decode_one()? {
                let frame_time = frame.timestamp().as_duration();
                if frame_time >= min_position {
                    // We're close enough to the target
                    // (this frame is dropped as well; the caller's next decode
                    // returns the frame after it)
                    break;
                }
                // Continue decoding to get closer (frames are automatically dropped)
            }
            // Note: unlike Exact mode, reaching EOF before min_position is not an
            // error here; the loop simply ends and Ok(()) is returned.
        }

        Ok(())
    }
1712
1713    /// Skips frames until reaching the exact target position.
1714    ///
1715    /// This is used by [`Self::seek`] when `SeekMode::Exact` is specified.
1716    /// It decodes and discards frames from the nearest keyframe until
1717    /// reaching the target position.
1718    ///
1719    /// # Performance
1720    ///
1721    /// Time complexity is O(n) where n is the number of frames between the
1722    /// keyframe and target. For a 30fps video with 2-second GOP:
1723    /// - Worst case: ~60 frames to decode, ~10-20ms
1724    /// - Average case: ~30 frames to decode, ~5-10ms
1725    ///
1726    /// # Arguments
1727    ///
1728    /// * `target` - The exact target position.
1729    ///
1730    /// # Errors
1731    ///
1732    /// Returns [`DecodeError::SeekFailed`] if EOF is reached before the target position.
1733    fn skip_to_exact(&mut self, target: Duration) -> Result<(), DecodeError> {
1734        loop {
1735            match self.decode_one()? {
1736                Some(frame) => {
1737                    let frame_time = frame.timestamp().as_duration();
1738                    if frame_time >= target {
1739                        // Reached or passed the target frame
1740                        // Position will be updated by decode_one() which was just called
1741                        break;
1742                    }
1743                    // Continue decoding (frame is automatically dropped)
1744                }
1745                None => {
1746                    // Reached EOF before finding target frame
1747                    return Err(DecodeError::SeekFailed {
1748                        target,
1749                        reason: "Reached end of stream before target position".to_string(),
1750                    });
1751                }
1752            }
1753        }
1754        Ok(())
1755    }
1756
    /// Flushes the decoder's internal buffers.
    ///
    /// This clears any cached frames and resets the decoder state.
    /// The decoder is ready to receive new packets after flushing.
    pub(crate) fn flush(&mut self) {
        // SAFETY: codec_ctx is valid and owned by this instance
        unsafe {
            ff_sys::avcodec::flush_buffers(self.codec_ctx);
        }
        // Clearing EOF lets decoding resume after the flush (e.g. post-seek).
        self.eof = false;
    }
1768
1769    /// Scales a video frame to the specified dimensions while preserving aspect ratio.
1770    ///
1771    /// This method uses SwScale to resize frames efficiently using a "fit-within"
1772    /// strategy that preserves the original aspect ratio.
1773    ///
1774    /// # Aspect Ratio Preservation
1775    ///
1776    /// The frame is scaled to fit within `(target_width, target_height)` while
1777    /// maintaining its original aspect ratio. The output dimensions will be at most
1778    /// the target size, with at least one dimension matching the target. No letterboxing
1779    /// or pillarboxing is applied - the frame is simply scaled down to fit.
1780    ///
1781    /// # Arguments
1782    ///
1783    /// * `frame` - The source frame to scale.
1784    /// * `target_width` - Desired width in pixels.
1785    /// * `target_height` - Desired height in pixels.
1786    ///
1787    /// # Returns
1788    ///
1789    /// A new `VideoFrame` scaled to fit within the target dimensions.
1790    ///
1791    /// # Errors
1792    ///
1793    /// Returns [`DecodeError`] if SwScale context creation or scaling fails.
1794    ///
1795    /// # Performance
1796    ///
1797    /// - Caches SwScale context for repeated calls with same dimensions
1798    /// - Context creation: ~0.1-0.5ms (only on first call or dimension change)
1799    /// - Typical scaling time: 1-3ms for 1080p → 320x180
1800    /// - Uses bilinear interpolation for quality/performance balance
1801    ///
1802    /// # Cache Behavior
1803    ///
1804    /// The SwScale context is cached based on source/target dimensions and format.
1805    /// When generating multiple thumbnails with the same size (e.g., via `thumbnails()`),
1806    /// the context is reused, eliminating the ~0.1-0.5ms creation overhead per thumbnail.
1807    pub(crate) fn scale_frame(
1808        &mut self,
1809        frame: &VideoFrame,
1810        target_width: u32,
1811        target_height: u32,
1812    ) -> Result<VideoFrame, DecodeError> {
1813        let src_width = frame.width();
1814        let src_height = frame.height();
1815        let src_format = frame.format();
1816
1817        // Calculate scaled dimensions to preserve aspect ratio (fit within target)
1818        let src_aspect = src_width as f64 / src_height as f64;
1819        let target_aspect = target_width as f64 / target_height as f64;
1820
1821        let (scaled_width, scaled_height) = if src_aspect > target_aspect {
1822            // Source is wider - fit to width
1823            let height = (target_width as f64 / src_aspect).round() as u32;
1824            (target_width, height)
1825        } else {
1826            // Source is taller or equal - fit to height
1827            let width = (target_height as f64 * src_aspect).round() as u32;
1828            (width, target_height)
1829        };
1830
1831        // Convert pixel format to FFmpeg format
1832        let av_format = Self::pixel_format_to_av(src_format);
1833
1834        // Cache key: (src_width, src_height, scaled_width, scaled_height, format)
1835        let cache_key = (
1836            src_width,
1837            src_height,
1838            scaled_width,
1839            scaled_height,
1840            av_format,
1841        );
1842
1843        // SAFETY: We're creating temporary FFmpeg objects for scaling
1844        unsafe {
1845            // Check if we can reuse the cached SwScale context
1846            let (sws_ctx, is_cached) = if let (Some(cached_ctx), Some(cached_key)) =
1847                (self.thumbnail_sws_ctx, self.thumbnail_cache_key)
1848            {
1849                if cached_key == cache_key {
1850                    // Cache hit - reuse existing context
1851                    (cached_ctx, true)
1852                } else {
1853                    // Cache miss - free old context and create new one
1854                    ff_sys::swscale::free_context(cached_ctx);
1855                    // Clear cache immediately to prevent dangling pointer
1856                    self.thumbnail_sws_ctx = None;
1857                    self.thumbnail_cache_key = None;
1858
1859                    let new_ctx = ff_sys::swscale::get_context(
1860                        src_width as i32,
1861                        src_height as i32,
1862                        av_format,
1863                        scaled_width as i32,
1864                        scaled_height as i32,
1865                        av_format,
1866                        ff_sys::swscale::scale_flags::BILINEAR,
1867                    )
1868                    .map_err(|e| DecodeError::Ffmpeg {
1869                        code: 0,
1870                        message: format!("Failed to create scaling context: {e}"),
1871                    })?;
1872
1873                    // Don't cache yet - will cache after successful scaling
1874                    (new_ctx, false)
1875                }
1876            } else {
1877                // No cache - create new context
1878                let new_ctx = ff_sys::swscale::get_context(
1879                    src_width as i32,
1880                    src_height as i32,
1881                    av_format,
1882                    scaled_width as i32,
1883                    scaled_height as i32,
1884                    av_format,
1885                    ff_sys::swscale::scale_flags::BILINEAR,
1886                )
1887                .map_err(|e| DecodeError::Ffmpeg {
1888                    code: 0,
1889                    message: format!("Failed to create scaling context: {e}"),
1890                })?;
1891
1892                // Don't cache yet - will cache after successful scaling
1893                (new_ctx, false)
1894            };
1895
1896            // Set up source frame with VideoFrame data
1897            let src_frame_guard = AvFrameGuard::new()?;
1898            let src_frame = src_frame_guard.as_ptr();
1899
1900            (*src_frame).width = src_width as i32;
1901            (*src_frame).height = src_height as i32;
1902            (*src_frame).format = av_format;
1903
1904            // Set up source frame data pointers directly from VideoFrame (no copy)
1905            let planes = frame.planes();
1906            let strides = frame.strides();
1907
1908            for (i, plane_data) in planes.iter().enumerate() {
1909                if i >= ff_sys::AV_NUM_DATA_POINTERS as usize {
1910                    break;
1911                }
1912                (*src_frame).data[i] = plane_data.as_ref().as_ptr().cast_mut();
1913                (*src_frame).linesize[i] = strides[i] as i32;
1914            }
1915
1916            // Allocate destination frame
1917            let dst_frame_guard = AvFrameGuard::new()?;
1918            let dst_frame = dst_frame_guard.as_ptr();
1919
1920            (*dst_frame).width = scaled_width as i32;
1921            (*dst_frame).height = scaled_height as i32;
1922            (*dst_frame).format = av_format;
1923
1924            // Allocate buffer for destination frame
1925            let buffer_ret = ff_sys::av_frame_get_buffer(dst_frame, 0);
1926            if buffer_ret < 0 {
1927                // Clean up context if not cached
1928                if !is_cached {
1929                    ff_sys::swscale::free_context(sws_ctx);
1930                }
1931                return Err(DecodeError::Ffmpeg {
1932                    code: buffer_ret,
1933                    message: format!(
1934                        "Failed to allocate destination frame buffer: {}",
1935                        ff_sys::av_error_string(buffer_ret)
1936                    ),
1937                });
1938            }
1939
1940            // Perform scaling
1941            let scale_result = ff_sys::swscale::scale(
1942                sws_ctx,
1943                (*src_frame).data.as_ptr() as *const *const u8,
1944                (*src_frame).linesize.as_ptr(),
1945                0,
1946                src_height as i32,
1947                (*dst_frame).data.as_ptr() as *const *mut u8,
1948                (*dst_frame).linesize.as_ptr(),
1949            );
1950
1951            if let Err(e) = scale_result {
1952                // Clean up context if not cached
1953                if !is_cached {
1954                    ff_sys::swscale::free_context(sws_ctx);
1955                }
1956                return Err(DecodeError::Ffmpeg {
1957                    code: 0,
1958                    message: format!("Failed to scale frame: {e}"),
1959                });
1960            }
1961
1962            // Scaling successful - cache the context if it's new
1963            if !is_cached {
1964                self.thumbnail_sws_ctx = Some(sws_ctx);
1965                self.thumbnail_cache_key = Some(cache_key);
1966            }
1967
1968            // Copy timestamp
1969            (*dst_frame).pts = frame.timestamp().pts();
1970
1971            // Convert destination frame to VideoFrame
1972            let video_frame = self.av_frame_to_video_frame(dst_frame)?;
1973
1974            Ok(video_frame)
1975        }
1976    }
1977}
1978
impl Drop for VideoDecoderInner {
    /// Releases all FFmpeg resources owned by the decoder, in order:
    /// scaling contexts and hardware device context first, then the
    /// frame/packet, then the codec context, and the format context last.
    ///
    /// NOTE(review): the `&mut (x as *mut _)` pattern below passes a *temporary*
    /// copy of the stored pointer to FFmpeg's free functions, so FFmpeg nulls
    /// the temporary rather than the struct field. That is fine here because
    /// `self` is being dropped and the fields are never read again.
    fn drop(&mut self) {
        // Free SwScale context if allocated
        if let Some(sws_ctx) = self.sws_ctx {
            // SAFETY: sws_ctx is valid and owned by this instance
            unsafe {
                ff_sys::swscale::free_context(sws_ctx);
            }
        }

        // Free cached thumbnail SwScale context if allocated
        if let Some(thumbnail_ctx) = self.thumbnail_sws_ctx {
            // SAFETY: thumbnail_ctx is valid and owned by this instance
            unsafe {
                ff_sys::swscale::free_context(thumbnail_ctx);
            }
        }

        // Free hardware device context if allocated
        if let Some(hw_ctx) = self.hw_device_ctx {
            // SAFETY: hw_ctx is valid and owned by this instance
            unsafe {
                ff_sys::av_buffer_unref(&mut (hw_ctx as *mut _));
            }
        }

        // Free frame and packet
        if !self.frame.is_null() {
            // SAFETY: self.frame is valid and owned by this instance
            unsafe {
                ff_sys::av_frame_free(&mut (self.frame as *mut _));
            }
        }

        if !self.packet.is_null() {
            // SAFETY: self.packet is valid and owned by this instance
            unsafe {
                ff_sys::av_packet_free(&mut (self.packet as *mut _));
            }
        }

        // Free codec context
        if !self.codec_ctx.is_null() {
            // SAFETY: self.codec_ctx is valid and owned by this instance
            unsafe {
                ff_sys::avcodec::free_context(&mut (self.codec_ctx as *mut _));
            }
        }

        // Close format context
        if !self.format_ctx.is_null() {
            // SAFETY: self.format_ctx is valid and owned by this instance
            unsafe {
                ff_sys::avformat::close_input(&mut (self.format_ctx as *mut _));
            }
        }
    }
}
2037
// SAFETY: VideoDecoderInner manages FFmpeg contexts which are thread-safe when not shared.
// We don't expose mutable access across threads, so Send is safe.
// Note: only Send is implemented (moving the decoder to another thread); no Sync
// impl appears here, so shared cross-thread access remains disallowed by the type system.
unsafe impl Send for VideoDecoderInner {}
2041
2042#[cfg(test)]
2043mod tests {
2044    use ff_format::PixelFormat;
2045    use ff_format::codec::VideoCodec;
2046    use ff_format::color::{ColorPrimaries, ColorRange, ColorSpace};
2047
2048    use crate::HardwareAccel;
2049
2050    use super::VideoDecoderInner;
2051
2052    // -------------------------------------------------------------------------
2053    // convert_pixel_format
2054    // -------------------------------------------------------------------------
2055
    // Each test below feeds one FFmpeg AVPixelFormat constant to
    // convert_pixel_format and asserts the expected crate-level variant.
    #[test]
    fn pixel_format_yuv420p() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P),
            PixelFormat::Yuv420p
        );
    }

    #[test]
    fn pixel_format_yuv422p() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P),
            PixelFormat::Yuv422p
        );
    }

    #[test]
    fn pixel_format_yuv444p() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P),
            PixelFormat::Yuv444p
        );
    }

    #[test]
    fn pixel_format_rgb24() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24),
            PixelFormat::Rgb24
        );
    }

    #[test]
    fn pixel_format_bgr24() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24),
            PixelFormat::Bgr24
        );
    }

    #[test]
    fn pixel_format_rgba() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA),
            PixelFormat::Rgba
        );
    }

    #[test]
    fn pixel_format_bgra() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA),
            PixelFormat::Bgra
        );
    }

    #[test]
    fn pixel_format_gray8() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8),
            PixelFormat::Gray8
        );
    }

    #[test]
    fn pixel_format_nv12() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_NV12),
            PixelFormat::Nv12
        );
    }

    #[test]
    fn pixel_format_nv21() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_NV21),
            PixelFormat::Nv21
        );
    }

    #[test]
    fn pixel_format_yuv420p10le_should_return_yuv420p10le() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P10LE),
            PixelFormat::Yuv420p10le
        );
    }

    #[test]
    fn pixel_format_yuv422p10le_should_return_yuv422p10le() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P10LE),
            PixelFormat::Yuv422p10le
        );
    }

    #[test]
    fn pixel_format_yuv444p10le_should_return_yuv444p10le() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P10LE),
            PixelFormat::Yuv444p10le
        );
    }

    #[test]
    fn pixel_format_p010le_should_return_p010le() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_P010LE),
            PixelFormat::P010le
        );
    }

    // Unrecognized formats (here: AV_PIX_FMT_NONE) fall back to Yuv420p
    // rather than erroring.
    #[test]
    fn pixel_format_unknown_falls_back_to_yuv420p() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_NONE),
            PixelFormat::Yuv420p
        );
    }
2175
2176    // -------------------------------------------------------------------------
2177    // convert_color_space
2178    // -------------------------------------------------------------------------
2179
    // convert_color_space mapping: BT.470BG and SMPTE 170M both map to Bt601;
    // unspecified input falls back to Bt709.
    #[test]
    fn color_space_bt709() {
        assert_eq!(
            VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_BT709),
            ColorSpace::Bt709
        );
    }

    #[test]
    fn color_space_bt470bg_yields_bt601() {
        assert_eq!(
            VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_BT470BG),
            ColorSpace::Bt601
        );
    }

    #[test]
    fn color_space_smpte170m_yields_bt601() {
        assert_eq!(
            VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_SMPTE170M),
            ColorSpace::Bt601
        );
    }

    #[test]
    fn color_space_bt2020_ncl() {
        assert_eq!(
            VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_BT2020_NCL),
            ColorSpace::Bt2020
        );
    }

    #[test]
    fn color_space_unknown_falls_back_to_bt709() {
        assert_eq!(
            VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_UNSPECIFIED),
            ColorSpace::Bt709
        );
    }
2219
2220    // -------------------------------------------------------------------------
2221    // convert_color_range
2222    // -------------------------------------------------------------------------
2223
    // convert_color_range mapping: JPEG -> Full, MPEG -> Limited;
    // unspecified input falls back to Limited.
    #[test]
    fn color_range_jpeg_yields_full() {
        assert_eq!(
            VideoDecoderInner::convert_color_range(ff_sys::AVColorRange_AVCOL_RANGE_JPEG),
            ColorRange::Full
        );
    }

    #[test]
    fn color_range_mpeg_yields_limited() {
        assert_eq!(
            VideoDecoderInner::convert_color_range(ff_sys::AVColorRange_AVCOL_RANGE_MPEG),
            ColorRange::Limited
        );
    }

    #[test]
    fn color_range_unknown_falls_back_to_limited() {
        assert_eq!(
            VideoDecoderInner::convert_color_range(ff_sys::AVColorRange_AVCOL_RANGE_UNSPECIFIED),
            ColorRange::Limited
        );
    }
2247
2248    // -------------------------------------------------------------------------
2249    // convert_color_primaries
2250    // -------------------------------------------------------------------------
2251
    // convert_color_primaries mapping mirrors the color-space tests above:
    // BT.470BG and SMPTE 170M map to Bt601; unspecified falls back to Bt709.
    #[test]
    fn color_primaries_bt709() {
        assert_eq!(
            VideoDecoderInner::convert_color_primaries(ff_sys::AVColorPrimaries_AVCOL_PRI_BT709),
            ColorPrimaries::Bt709
        );
    }

    #[test]
    fn color_primaries_bt470bg_yields_bt601() {
        assert_eq!(
            VideoDecoderInner::convert_color_primaries(ff_sys::AVColorPrimaries_AVCOL_PRI_BT470BG),
            ColorPrimaries::Bt601
        );
    }

    #[test]
    fn color_primaries_smpte170m_yields_bt601() {
        assert_eq!(
            VideoDecoderInner::convert_color_primaries(
                ff_sys::AVColorPrimaries_AVCOL_PRI_SMPTE170M
            ),
            ColorPrimaries::Bt601
        );
    }

    #[test]
    fn color_primaries_bt2020() {
        assert_eq!(
            VideoDecoderInner::convert_color_primaries(ff_sys::AVColorPrimaries_AVCOL_PRI_BT2020),
            ColorPrimaries::Bt2020
        );
    }

    #[test]
    fn color_primaries_unknown_falls_back_to_bt709() {
        assert_eq!(
            VideoDecoderInner::convert_color_primaries(
                ff_sys::AVColorPrimaries_AVCOL_PRI_UNSPECIFIED
            ),
            ColorPrimaries::Bt709
        );
    }
2295
2296    // -------------------------------------------------------------------------
2297    // convert_codec
2298    // -------------------------------------------------------------------------
2299
2300    #[test]
2301    fn codec_h264() {
2302        assert_eq!(
2303            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_H264),
2304            VideoCodec::H264
2305        );
2306    }
2307
2308    #[test]
2309    fn codec_hevc_yields_h265() {
2310        assert_eq!(
2311            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_HEVC),
2312            VideoCodec::H265
2313        );
2314    }
2315
2316    #[test]
2317    fn codec_vp8() {
2318        assert_eq!(
2319            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_VP8),
2320            VideoCodec::Vp8
2321        );
2322    }
2323
2324    #[test]
2325    fn codec_vp9() {
2326        assert_eq!(
2327            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_VP9),
2328            VideoCodec::Vp9
2329        );
2330    }
2331
2332    #[test]
2333    fn codec_av1() {
2334        assert_eq!(
2335            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_AV1),
2336            VideoCodec::Av1
2337        );
2338    }
2339
2340    #[test]
2341    fn codec_mpeg4() {
2342        assert_eq!(
2343            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_MPEG4),
2344            VideoCodec::Mpeg4
2345        );
2346    }
2347
2348    #[test]
2349    fn codec_prores() {
2350        assert_eq!(
2351            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_PRORES),
2352            VideoCodec::ProRes
2353        );
2354    }
2355
2356    #[test]
2357    fn codec_unknown_falls_back_to_h264() {
2358        assert_eq!(
2359            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_NONE),
2360            VideoCodec::H264
2361        );
2362    }
2363
2364    // -------------------------------------------------------------------------
2365    // hw_accel_to_device_type
2366    // -------------------------------------------------------------------------
2367
2368    #[test]
2369    fn hw_accel_auto_yields_none() {
2370        assert_eq!(
2371            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Auto),
2372            None
2373        );
2374    }
2375
2376    #[test]
2377    fn hw_accel_none_yields_none() {
2378        assert_eq!(
2379            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::None),
2380            None
2381        );
2382    }
2383
2384    #[test]
2385    fn hw_accel_nvdec_yields_cuda() {
2386        assert_eq!(
2387            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Nvdec),
2388            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_CUDA)
2389        );
2390    }
2391
2392    #[test]
2393    fn hw_accel_qsv_yields_qsv() {
2394        assert_eq!(
2395            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Qsv),
2396            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_QSV)
2397        );
2398    }
2399
2400    #[test]
2401    fn hw_accel_amf_yields_d3d11va() {
2402        assert_eq!(
2403            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Amf),
2404            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_D3D11VA)
2405        );
2406    }
2407
2408    #[test]
2409    fn hw_accel_videotoolbox() {
2410        assert_eq!(
2411            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::VideoToolbox),
2412            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VIDEOTOOLBOX)
2413        );
2414    }
2415
2416    #[test]
2417    fn hw_accel_vaapi() {
2418        assert_eq!(
2419            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Vaapi),
2420            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VAAPI)
2421        );
2422    }
2423
2424    // -------------------------------------------------------------------------
2425    // pixel_format_to_av — round-trip
2426    // -------------------------------------------------------------------------
2427
2428    #[test]
2429    fn pixel_format_to_av_round_trip_yuv420p() {
2430        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Yuv420p);
2431        assert_eq!(
2432            VideoDecoderInner::convert_pixel_format(av),
2433            PixelFormat::Yuv420p
2434        );
2435    }
2436
2437    #[test]
2438    fn pixel_format_to_av_round_trip_yuv422p() {
2439        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Yuv422p);
2440        assert_eq!(
2441            VideoDecoderInner::convert_pixel_format(av),
2442            PixelFormat::Yuv422p
2443        );
2444    }
2445
2446    #[test]
2447    fn pixel_format_to_av_round_trip_yuv444p() {
2448        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Yuv444p);
2449        assert_eq!(
2450            VideoDecoderInner::convert_pixel_format(av),
2451            PixelFormat::Yuv444p
2452        );
2453    }
2454
2455    #[test]
2456    fn pixel_format_to_av_round_trip_rgb24() {
2457        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Rgb24);
2458        assert_eq!(
2459            VideoDecoderInner::convert_pixel_format(av),
2460            PixelFormat::Rgb24
2461        );
2462    }
2463
2464    #[test]
2465    fn pixel_format_to_av_round_trip_bgr24() {
2466        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Bgr24);
2467        assert_eq!(
2468            VideoDecoderInner::convert_pixel_format(av),
2469            PixelFormat::Bgr24
2470        );
2471    }
2472
2473    #[test]
2474    fn pixel_format_to_av_round_trip_rgba() {
2475        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Rgba);
2476        assert_eq!(
2477            VideoDecoderInner::convert_pixel_format(av),
2478            PixelFormat::Rgba
2479        );
2480    }
2481
2482    #[test]
2483    fn pixel_format_to_av_round_trip_bgra() {
2484        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Bgra);
2485        assert_eq!(
2486            VideoDecoderInner::convert_pixel_format(av),
2487            PixelFormat::Bgra
2488        );
2489    }
2490
2491    #[test]
2492    fn pixel_format_to_av_round_trip_gray8() {
2493        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Gray8);
2494        assert_eq!(
2495            VideoDecoderInner::convert_pixel_format(av),
2496            PixelFormat::Gray8
2497        );
2498    }
2499
2500    #[test]
2501    fn pixel_format_to_av_round_trip_nv12() {
2502        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Nv12);
2503        assert_eq!(
2504            VideoDecoderInner::convert_pixel_format(av),
2505            PixelFormat::Nv12
2506        );
2507    }
2508
2509    #[test]
2510    fn pixel_format_to_av_round_trip_nv21() {
2511        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Nv21);
2512        assert_eq!(
2513            VideoDecoderInner::convert_pixel_format(av),
2514            PixelFormat::Nv21
2515        );
2516    }
2517
2518    #[test]
2519    fn pixel_format_to_av_unknown_falls_back_to_yuv420p_av() {
2520        // Other(999) has no explicit mapping and hits the _ fallback arm.
2521        assert_eq!(
2522            VideoDecoderInner::pixel_format_to_av(PixelFormat::Other(999)),
2523            ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P
2524        );
2525    }
2526
2527    // -------------------------------------------------------------------------
2528    // extract_codec_name
2529    // -------------------------------------------------------------------------
2530
2531    #[test]
2532    fn codec_name_should_return_h264_for_h264_codec_id() {
2533        let name =
2534            unsafe { VideoDecoderInner::extract_codec_name(ff_sys::AVCodecID_AV_CODEC_ID_H264) };
2535        assert_eq!(name, "h264");
2536    }
2537
2538    #[test]
2539    fn codec_name_should_return_none_for_none_codec_id() {
2540        let name =
2541            unsafe { VideoDecoderInner::extract_codec_name(ff_sys::AVCodecID_AV_CODEC_ID_NONE) };
2542        assert_eq!(name, "none");
2543    }
2544
2545    #[test]
2546    fn convert_pixel_format_should_map_gbrpf32le() {
2547        assert_eq!(
2548            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_GBRPF32LE),
2549            PixelFormat::Gbrpf32le
2550        );
2551    }
2552
2553    #[test]
2554    fn unsupported_codec_error_should_include_codec_name() {
2555        let codec_id = ff_sys::AVCodecID_AV_CODEC_ID_H264;
2556        let codec_name = unsafe { VideoDecoderInner::extract_codec_name(codec_id) };
2557        let error = crate::error::DecodeError::UnsupportedCodec {
2558            codec: format!("{codec_name} (codec_id={codec_id:?})"),
2559        };
2560        let msg = error.to_string();
2561        assert!(msg.contains("h264"), "expected codec name in error: {msg}");
2562        assert!(
2563            msg.contains("codec_id="),
2564            "expected codec_id in error: {msg}"
2565        );
2566    }
2567}