// ff_decode/video/decoder_inner.rs

1//! Internal video decoder implementation using FFmpeg.
2//!
3//! This module contains the low-level decoder logic that directly interacts
4//! with FFmpeg's C API through the ff-sys crate. It is not exposed publicly.
5
6// Allow unsafe code in this module as it's necessary for FFmpeg FFI
7#![allow(unsafe_code)]
8// Allow specific clippy lints for FFmpeg FFI code
9#![allow(clippy::similar_names)]
10#![allow(clippy::too_many_lines)]
11#![allow(clippy::cast_sign_loss)]
12#![allow(clippy::cast_possible_truncation)]
13#![allow(clippy::cast_possible_wrap)]
14#![allow(clippy::module_name_repetitions)]
15#![allow(clippy::match_same_arms)]
16#![allow(clippy::ptr_as_ptr)]
17#![allow(clippy::doc_markdown)]
18#![allow(clippy::unnecessary_cast)]
19#![allow(clippy::if_not_else)]
20#![allow(clippy::unnecessary_wraps)]
21#![allow(clippy::cast_precision_loss)]
22#![allow(clippy::if_same_then_else)]
23#![allow(clippy::cast_lossless)]
24
25use std::ffi::CStr;
26use std::path::Path;
27use std::ptr;
28use std::sync::Arc;
29use std::time::Duration;
30
31use ff_format::NetworkOptions;
32
33use ff_format::PooledBuffer;
34use ff_format::codec::VideoCodec;
35use ff_format::color::{ColorPrimaries, ColorRange, ColorSpace};
36use ff_format::container::ContainerInfo;
37use ff_format::time::{Rational, Timestamp};
38use ff_format::{PixelFormat, VideoFrame, VideoStreamInfo};
39use ff_sys::{
40    AVBufferRef, AVCodecContext, AVCodecID, AVColorPrimaries, AVColorRange, AVColorSpace,
41    AVFormatContext, AVFrame, AVHWDeviceType, AVMediaType_AVMEDIA_TYPE_VIDEO, AVPacket,
42    AVPixelFormat, SwsContext,
43};
44
45use crate::HardwareAccel;
46use crate::error::DecodeError;
47use crate::video::builder::OutputScale;
48use ff_common::FramePool;
49
/// Tolerance in seconds for keyframe/backward seek modes.
///
/// When seeking in Keyframe or Backward mode, frames are skipped until we're within
/// this tolerance of the target position. This balances accuracy with performance for
/// typical GOP sizes (1-2 seconds).
///
/// NOTE(review): the seek logic that consumes this lives outside this chunk —
/// confirm it compares at whole-second granularity before changing the unit.
const KEYFRAME_SEEK_TOLERANCE_SECS: u64 = 1;
56
57/// RAII guard for `AVFormatContext` to ensure proper cleanup.
58struct AvFormatContextGuard(*mut AVFormatContext);
59
60impl AvFormatContextGuard {
61    /// Creates a new guard by opening an input file.
62    ///
63    /// # Safety
64    ///
65    /// Caller must ensure FFmpeg is initialized and path is valid.
66    unsafe fn new(path: &Path) -> Result<Self, DecodeError> {
67        // SAFETY: Caller ensures FFmpeg is initialized and path is valid
68        let format_ctx = unsafe {
69            ff_sys::avformat::open_input(path).map_err(|e| DecodeError::Ffmpeg {
70                code: e,
71                message: format!("Failed to open file: {}", ff_sys::av_error_string(e)),
72            })?
73        };
74        Ok(Self(format_ctx))
75    }
76
77    /// Returns the raw pointer.
78    const fn as_ptr(&self) -> *mut AVFormatContext {
79        self.0
80    }
81
82    /// Consumes the guard and returns the raw pointer without dropping.
83    fn into_raw(self) -> *mut AVFormatContext {
84        let ptr = self.0;
85        std::mem::forget(self);
86        ptr
87    }
88
89    /// Opens an image sequence using the `image2` demuxer.
90    ///
91    /// # Safety
92    ///
93    /// Caller must ensure FFmpeg is initialized and path is valid.
94    unsafe fn new_image_sequence(path: &Path, framerate: u32) -> Result<Self, DecodeError> {
95        // SAFETY: Caller ensures FFmpeg is initialized and path is a valid image-sequence pattern
96        let format_ctx = unsafe {
97            ff_sys::avformat::open_input_image_sequence(path, framerate).map_err(|e| {
98                DecodeError::Ffmpeg {
99                    code: e,
100                    message: format!(
101                        "Failed to open image sequence: {}",
102                        ff_sys::av_error_string(e)
103                    ),
104                }
105            })?
106        };
107        Ok(Self(format_ctx))
108    }
109
110    /// Opens a network URL with connect/read timeouts from `NetworkOptions`.
111    ///
112    /// # Safety
113    ///
114    /// Caller must ensure `FFmpeg` is initialized and `url` is a valid URL string.
115    unsafe fn new_url(url: &str, network: &NetworkOptions) -> Result<Self, DecodeError> {
116        // SAFETY: Caller ensures FFmpeg is initialized; url is a valid network URL.
117        let format_ctx = unsafe {
118            ff_sys::avformat::open_input_url(url, network.connect_timeout, network.read_timeout)
119                .map_err(|e| {
120                    crate::network::map_network_error(e, crate::network::sanitize_url(url))
121                })?
122        };
123        Ok(Self(format_ctx))
124    }
125}
126
127impl Drop for AvFormatContextGuard {
128    fn drop(&mut self) {
129        if !self.0.is_null() {
130            // SAFETY: self.0 is valid and owned by this guard
131            unsafe {
132                ff_sys::avformat::close_input(&mut (self.0 as *mut _));
133            }
134        }
135    }
136}
137
138/// RAII guard for `AVCodecContext` to ensure proper cleanup.
139struct AvCodecContextGuard(*mut AVCodecContext);
140
141impl AvCodecContextGuard {
142    /// Creates a new guard by allocating a codec context.
143    ///
144    /// # Safety
145    ///
146    /// Caller must ensure codec pointer is valid.
147    unsafe fn new(codec: *const ff_sys::AVCodec) -> Result<Self, DecodeError> {
148        // SAFETY: Caller ensures codec pointer is valid
149        let codec_ctx = unsafe {
150            ff_sys::avcodec::alloc_context3(codec).map_err(|e| DecodeError::Ffmpeg {
151                code: e,
152                message: format!("Failed to allocate codec context: {e}"),
153            })?
154        };
155        Ok(Self(codec_ctx))
156    }
157
158    /// Returns the raw pointer.
159    const fn as_ptr(&self) -> *mut AVCodecContext {
160        self.0
161    }
162
163    /// Consumes the guard and returns the raw pointer without dropping.
164    fn into_raw(self) -> *mut AVCodecContext {
165        let ptr = self.0;
166        std::mem::forget(self);
167        ptr
168    }
169}
170
171impl Drop for AvCodecContextGuard {
172    fn drop(&mut self) {
173        if !self.0.is_null() {
174            // SAFETY: self.0 is valid and owned by this guard
175            unsafe {
176                ff_sys::avcodec::free_context(&mut (self.0 as *mut _));
177            }
178        }
179    }
180}
181
182/// RAII guard for `AVPacket` to ensure proper cleanup.
183struct AvPacketGuard(*mut AVPacket);
184
185impl AvPacketGuard {
186    /// Creates a new guard by allocating a packet.
187    ///
188    /// # Safety
189    ///
190    /// Must be called after FFmpeg initialization.
191    unsafe fn new() -> Result<Self, DecodeError> {
192        // SAFETY: Caller ensures FFmpeg is initialized
193        let packet = unsafe { ff_sys::av_packet_alloc() };
194        if packet.is_null() {
195            return Err(DecodeError::Ffmpeg {
196                code: 0,
197                message: "Failed to allocate packet".to_string(),
198            });
199        }
200        Ok(Self(packet))
201    }
202
203    /// Returns the raw pointer.
204    #[allow(dead_code)]
205    const fn as_ptr(&self) -> *mut AVPacket {
206        self.0
207    }
208
209    /// Consumes the guard and returns the raw pointer without dropping.
210    fn into_raw(self) -> *mut AVPacket {
211        let ptr = self.0;
212        std::mem::forget(self);
213        ptr
214    }
215}
216
217impl Drop for AvPacketGuard {
218    fn drop(&mut self) {
219        if !self.0.is_null() {
220            // SAFETY: self.0 is valid and owned by this guard
221            unsafe {
222                ff_sys::av_packet_free(&mut (self.0 as *mut _));
223            }
224        }
225    }
226}
227
228/// RAII guard for `AVFrame` to ensure proper cleanup.
229struct AvFrameGuard(*mut AVFrame);
230
231impl AvFrameGuard {
232    /// Creates a new guard by allocating a frame.
233    ///
234    /// # Safety
235    ///
236    /// Must be called after FFmpeg initialization.
237    unsafe fn new() -> Result<Self, DecodeError> {
238        // SAFETY: Caller ensures FFmpeg is initialized
239        let frame = unsafe { ff_sys::av_frame_alloc() };
240        if frame.is_null() {
241            return Err(DecodeError::Ffmpeg {
242                code: 0,
243                message: "Failed to allocate frame".to_string(),
244            });
245        }
246        Ok(Self(frame))
247    }
248
249    /// Returns the raw pointer.
250    const fn as_ptr(&self) -> *mut AVFrame {
251        self.0
252    }
253
254    /// Consumes the guard and returns the raw pointer without dropping.
255    fn into_raw(self) -> *mut AVFrame {
256        let ptr = self.0;
257        std::mem::forget(self);
258        ptr
259    }
260}
261
262impl Drop for AvFrameGuard {
263    fn drop(&mut self) {
264        if !self.0.is_null() {
265            // SAFETY: self.0 is valid and owned by this guard
266            unsafe {
267                ff_sys::av_frame_free(&mut (self.0 as *mut _));
268            }
269        }
270    }
271}
272
/// Internal decoder state holding FFmpeg contexts.
///
/// This structure manages the lifecycle of FFmpeg objects and is responsible
/// for proper cleanup when dropped.
///
/// NOTE(review): the raw pointers here are owned by this struct; the `Drop`
/// impl is outside this chunk — confirm it frees `format_ctx`, `codec_ctx`,
/// `packet`, `frame`, both sws contexts, and `hw_device_ctx`.
pub(crate) struct VideoDecoderInner {
    /// Format context for reading the media file
    format_ctx: *mut AVFormatContext,
    /// Codec context for decoding video frames
    codec_ctx: *mut AVCodecContext,
    /// Video stream index in the format context
    stream_index: i32,
    /// SwScale context for pixel format conversion and/or scaling (optional)
    sws_ctx: Option<*mut SwsContext>,
    /// Cache key for the main sws_ctx: (src_w, src_h, src_fmt, dst_w, dst_h, dst_fmt)
    sws_cache_key: Option<(u32, u32, i32, u32, u32, i32)>,
    /// Target output pixel format (if conversion is needed)
    output_format: Option<PixelFormat>,
    /// Requested output scale (if resizing is needed)
    output_scale: Option<OutputScale>,
    /// Whether the source is a live/streaming input (seeking is not supported).
    /// Derived from the demuxer's AVFMT_TS_DISCONT flag at open time.
    is_live: bool,
    /// Whether end of file has been reached
    eof: bool,
    /// Current playback position
    position: Duration,
    /// Reusable packet for reading from file
    packet: *mut AVPacket,
    /// Reusable frame for decoding
    frame: *mut AVFrame,
    /// Cached SwScale context for thumbnail generation
    thumbnail_sws_ctx: Option<*mut SwsContext>,
    /// Last thumbnail dimensions (for cache invalidation)
    thumbnail_cache_key: Option<(u32, u32, u32, u32, AVPixelFormat)>,
    /// Hardware device context (if hardware acceleration is active).
    /// This is our own `AVBufferRef`; the codec context holds a separate ref.
    hw_device_ctx: Option<*mut AVBufferRef>,
    /// Active hardware acceleration mode (what actually initialized, not what
    /// was requested — `Auto` resolves to a concrete backend or `None`)
    active_hw_accel: HardwareAccel,
    /// Optional frame pool for memory reuse
    frame_pool: Option<Arc<dyn FramePool>>,
    /// URL used to open this source — `None` for file-path and image-sequence sources.
    url: Option<String>,
    /// Network options used for the initial open (timeouts, reconnect config).
    network_opts: NetworkOptions,
    /// Number of successful reconnects so far (for logging).
    reconnect_count: u32,
}
319
320impl VideoDecoderInner {
321    /// Maps our `HardwareAccel` enum to the corresponding FFmpeg `AVHWDeviceType`.
322    ///
323    /// Returns `None` for `Auto` and `None` variants as they require special handling.
324    fn hw_accel_to_device_type(accel: HardwareAccel) -> Option<AVHWDeviceType> {
325        match accel {
326            HardwareAccel::Auto => None,
327            HardwareAccel::None => None,
328            HardwareAccel::Nvdec => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_CUDA),
329            HardwareAccel::Qsv => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_QSV),
330            HardwareAccel::Amf => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_D3D11VA), // AMF uses D3D11
331            HardwareAccel::VideoToolbox => {
332                Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VIDEOTOOLBOX)
333            }
334            HardwareAccel::Vaapi => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VAAPI),
335        }
336    }
337
    /// Returns the hardware decoders to try in priority order for Auto mode.
    ///
    /// Consumed by `init_hardware_accel`, which tries each entry in turn and
    /// stops at the first one that initializes successfully.
    const fn hw_accel_auto_priority() -> &'static [HardwareAccel] {
        // Priority order: NVDEC, QSV, VideoToolbox, VA-API, AMF
        &[
            HardwareAccel::Nvdec,
            HardwareAccel::Qsv,
            HardwareAccel::VideoToolbox,
            HardwareAccel::Vaapi,
            HardwareAccel::Amf,
        ]
    }
349
350    /// Attempts to initialize hardware acceleration.
351    ///
352    /// # Arguments
353    ///
354    /// * `codec_ctx` - The codec context to configure
355    /// * `accel` - Requested hardware acceleration mode
356    ///
357    /// # Returns
358    ///
359    /// Returns `Ok((hw_device_ctx, active_accel))` if hardware acceleration was initialized,
360    /// or `Ok((None, HardwareAccel::None))` if software decoding should be used.
361    ///
362    /// # Errors
363    ///
364    /// Returns an error only if a specific hardware accelerator was requested but failed to initialize.
365    unsafe fn init_hardware_accel(
366        codec_ctx: *mut AVCodecContext,
367        accel: HardwareAccel,
368    ) -> Result<(Option<*mut AVBufferRef>, HardwareAccel), DecodeError> {
369        match accel {
370            HardwareAccel::Auto => {
371                // Try hardware accelerators in priority order
372                for &hw_type in Self::hw_accel_auto_priority() {
373                    // SAFETY: Caller ensures codec_ctx is valid and not yet configured with hardware
374                    if let Ok((Some(ctx), active)) =
375                        unsafe { Self::try_init_hw_device(codec_ctx, hw_type) }
376                    {
377                        return Ok((Some(ctx), active));
378                    }
379                    // Ignore errors in Auto mode and try the next one
380                }
381                // All hardware accelerators failed, fall back to software
382                Ok((None, HardwareAccel::None))
383            }
384            HardwareAccel::None => {
385                // Software decoding explicitly requested
386                Ok((None, HardwareAccel::None))
387            }
388            _ => {
389                // Specific hardware accelerator requested
390                // SAFETY: Caller ensures codec_ctx is valid and not yet configured with hardware
391                unsafe { Self::try_init_hw_device(codec_ctx, accel) }
392            }
393        }
394    }
395
396    /// Tries to initialize a specific hardware device.
397    ///
398    /// # Safety
399    ///
400    /// Caller must ensure `codec_ctx` is valid and not yet configured with a hardware device.
401    unsafe fn try_init_hw_device(
402        codec_ctx: *mut AVCodecContext,
403        accel: HardwareAccel,
404    ) -> Result<(Option<*mut AVBufferRef>, HardwareAccel), DecodeError> {
405        // Get the FFmpeg device type
406        let Some(device_type) = Self::hw_accel_to_device_type(accel) else {
407            return Ok((None, HardwareAccel::None));
408        };
409
410        // Create hardware device context
411        // SAFETY: FFmpeg is initialized, device_type is valid
412        let mut hw_device_ctx: *mut AVBufferRef = ptr::null_mut();
413        let ret = unsafe {
414            ff_sys::av_hwdevice_ctx_create(
415                ptr::addr_of_mut!(hw_device_ctx),
416                device_type,
417                ptr::null(),     // device: null for default device
418                ptr::null_mut(), // opts: null for default options
419                0,               // flags: currently unused by FFmpeg
420            )
421        };
422
423        if ret < 0 {
424            // Hardware device creation failed
425            return Err(DecodeError::HwAccelUnavailable { accel });
426        }
427
428        // Assign hardware device context to codec context
429        // We transfer ownership of the reference to codec_ctx
430        // SAFETY: codec_ctx and hw_device_ctx are valid
431        unsafe {
432            (*codec_ctx).hw_device_ctx = hw_device_ctx;
433        }
434
435        // We keep our own reference for cleanup in Drop
436        // SAFETY: hw_device_ctx is valid
437        let our_ref = unsafe { ff_sys::av_buffer_ref(hw_device_ctx) };
438        if our_ref.is_null() {
439            // Failed to create our reference
440            // codec_ctx still owns the original, so we don't need to clean it up here
441            return Err(DecodeError::HwAccelUnavailable { accel });
442        }
443
444        Ok((Some(our_ref), accel))
445    }
446
    /// Returns the currently active hardware acceleration mode.
    ///
    /// This reflects what was actually initialized rather than what was
    /// requested: an `Auto` request resolves to the concrete backend that
    /// succeeded, or `HardwareAccel::None` when software decoding is in use.
    pub(crate) fn hardware_accel(&self) -> HardwareAccel {
        self.active_hw_accel
    }
451
452    /// Checks if a pixel format is a hardware format.
453    ///
454    /// Hardware formats include: D3D11, CUDA, VAAPI, VideoToolbox, QSV, etc.
455    const fn is_hardware_format(format: AVPixelFormat) -> bool {
456        matches!(
457            format,
458            ff_sys::AVPixelFormat_AV_PIX_FMT_D3D11
459                | ff_sys::AVPixelFormat_AV_PIX_FMT_CUDA
460                | ff_sys::AVPixelFormat_AV_PIX_FMT_VAAPI
461                | ff_sys::AVPixelFormat_AV_PIX_FMT_VIDEOTOOLBOX
462                | ff_sys::AVPixelFormat_AV_PIX_FMT_QSV
463                | ff_sys::AVPixelFormat_AV_PIX_FMT_VDPAU
464                | ff_sys::AVPixelFormat_AV_PIX_FMT_DXVA2_VLD
465                | ff_sys::AVPixelFormat_AV_PIX_FMT_OPENCL
466                | ff_sys::AVPixelFormat_AV_PIX_FMT_MEDIACODEC
467                | ff_sys::AVPixelFormat_AV_PIX_FMT_VULKAN
468        )
469    }
470
    /// Transfers a hardware frame to CPU memory if needed.
    ///
    /// If `self.frame` is a hardware frame, creates a new software frame
    /// and transfers the data from GPU to CPU memory. `self.frame` keeps the
    /// same pointer identity; only its contents are replaced.
    ///
    /// # Safety
    ///
    /// Caller must ensure `self.frame` contains a valid decoded frame.
    unsafe fn transfer_hardware_frame_if_needed(&mut self) -> Result<(), DecodeError> {
        // SAFETY: self.frame is valid and owned by this instance
        let frame_format = unsafe { (*self.frame).format };

        if !Self::is_hardware_format(frame_format) {
            // Not a hardware frame, no transfer needed
            return Ok(());
        }

        // Create a temporary software frame for transfer
        // SAFETY: FFmpeg is initialized
        let sw_frame = unsafe { ff_sys::av_frame_alloc() };
        if sw_frame.is_null() {
            return Err(DecodeError::Ffmpeg {
                code: 0,
                message: "Failed to allocate software frame for hardware transfer".to_string(),
            });
        }

        // Transfer data from hardware frame to software frame
        // SAFETY: self.frame and sw_frame are valid
        let ret = unsafe {
            ff_sys::av_hwframe_transfer_data(
                sw_frame, self.frame, 0, // flags: currently unused
            )
        };

        if ret < 0 {
            // Transfer failed, clean up the temporary frame before bailing out
            unsafe {
                ff_sys::av_frame_free(&mut (sw_frame as *mut _));
            }
            return Err(DecodeError::Ffmpeg {
                code: ret,
                message: format!(
                    "Failed to transfer hardware frame to CPU memory: {}",
                    ff_sys::av_error_string(ret)
                ),
            });
        }

        // Copy metadata (pts, duration, etc.) from hardware frame to software frame.
        // NOTE(review): these timing fields are copied by hand — presumably
        // av_hwframe_transfer_data only moves pixel data; confirm against the
        // FFmpeg hwcontext docs before removing.
        // SAFETY: Both frames are valid
        unsafe {
            (*sw_frame).pts = (*self.frame).pts;
            (*sw_frame).pkt_dts = (*self.frame).pkt_dts;
            (*sw_frame).duration = (*self.frame).duration;
            (*sw_frame).time_base = (*self.frame).time_base;
        }

        // Replace self.frame's contents with the software frame:
        // unref releases the hardware surface, move_ref steals sw_frame's
        // buffers (leaving sw_frame empty), then the now-empty shell is freed.
        // SAFETY: self.frame is valid and owned by this instance
        unsafe {
            ff_sys::av_frame_unref(self.frame);
            ff_sys::av_frame_move_ref(self.frame, sw_frame);
            ff_sys::av_frame_free(&mut (sw_frame as *mut _));
        }

        Ok(())
    }
539
    /// Opens a media file and initializes the decoder.
    ///
    /// # Arguments
    ///
    /// * `path` - Path to the media file, an image-sequence pattern (contains
    ///   `%`), or a network URL
    /// * `output_format` - Optional target pixel format for conversion
    /// * `output_scale` - Optional output resize request
    /// * `hardware_accel` - Hardware acceleration mode
    /// * `thread_count` - Number of decoding threads (0 = auto)
    /// * `frame_rate` - Frame rate for image sequences (defaults to 25 if `None`)
    /// * `frame_pool` - Optional pool for frame-buffer reuse
    /// * `network_opts` - Timeouts/reconnect settings used for network URLs
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - The file cannot be opened
    /// - No video stream is found
    /// - The codec is not supported
    /// - Decoder initialization fails
    #[allow(clippy::too_many_arguments)]
    pub(crate) fn new(
        path: &Path,
        output_format: Option<PixelFormat>,
        output_scale: Option<OutputScale>,
        hardware_accel: HardwareAccel,
        thread_count: usize,
        frame_rate: Option<u32>,
        frame_pool: Option<Arc<dyn FramePool>>,
        network_opts: Option<NetworkOptions>,
    ) -> Result<(Self, VideoStreamInfo, ContainerInfo), DecodeError> {
        // Ensure FFmpeg is initialized (thread-safe and idempotent)
        ff_sys::ensure_initialized();

        // A non-UTF-8 path yields "" here, which matches neither the pattern
        // nor the URL branch and falls through to the plain file-open path.
        let path_str = path.to_str().unwrap_or("");
        let is_image_sequence = path_str.contains('%');
        let is_network_url = crate::network::is_url(path_str);

        let url = if is_network_url {
            Some(path_str.to_owned())
        } else {
            None
        };
        // Cloned before `network_opts` is consumed by the open branch below.
        let stored_network_opts = network_opts.clone().unwrap_or_default();

        // Verify SRT availability before attempting to open (feature + runtime check).
        if is_network_url {
            crate::network::check_srt_url(path_str)?;
        }

        // Open the input (with RAII guard for cleanup on error).
        // SAFETY: Path/URL is valid; AvFormatContextGuard ensures cleanup.
        let format_ctx_guard = unsafe {
            if is_network_url {
                let network = network_opts.unwrap_or_default();
                log::info!(
                    "opening network source url={} connect_timeout_ms={} read_timeout_ms={}",
                    crate::network::sanitize_url(path_str),
                    network.connect_timeout.as_millis(),
                    network.read_timeout.as_millis(),
                );
                AvFormatContextGuard::new_url(path_str, &network)?
            } else if is_image_sequence {
                let fps = frame_rate.unwrap_or(25);
                AvFormatContextGuard::new_image_sequence(path, fps)?
            } else {
                AvFormatContextGuard::new(path)?
            }
        };
        let format_ctx = format_ctx_guard.as_ptr();

        // Read stream information
        // SAFETY: format_ctx is valid and owned by guard
        unsafe {
            ff_sys::avformat::find_stream_info(format_ctx).map_err(|e| DecodeError::Ffmpeg {
                code: e,
                message: format!("Failed to find stream info: {}", ff_sys::av_error_string(e)),
            })?;
        }

        // Detect live/streaming source via the AVFMT_TS_DISCONT flag on AVInputFormat.
        // SAFETY: format_ctx is valid and non-null; iformat is set by avformat_open_input
        //         and is non-null for all successfully opened formats.
        let is_live = unsafe {
            let iformat = (*format_ctx).iformat;
            !iformat.is_null() && ((*iformat).flags & ff_sys::AVFMT_TS_DISCONT) != 0
        };

        // Find the video stream
        // SAFETY: format_ctx is valid
        let (stream_index, codec_id) =
            unsafe { Self::find_video_stream(format_ctx) }.ok_or_else(|| {
                DecodeError::NoVideoStream {
                    path: path.to_path_buf(),
                }
            })?;

        // Find the decoder for this codec
        // SAFETY: codec_id is valid from FFmpeg
        let codec_name = unsafe { Self::extract_codec_name(codec_id) };
        let codec = unsafe {
            ff_sys::avcodec::find_decoder(codec_id).ok_or_else(|| {
                // Distinguish between a totally unknown codec ID and a known codec
                // whose decoder was not compiled into this FFmpeg build.
                if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_EXR {
                    DecodeError::DecoderUnavailable {
                        codec: "exr".to_string(),
                        hint: "Requires FFmpeg built with EXR support \
                               (--enable-decoder=exr)"
                            .to_string(),
                    }
                } else {
                    DecodeError::UnsupportedCodec {
                        codec: format!("{codec_name} (codec_id={codec_id:?})"),
                    }
                }
            })?
        };

        // Allocate codec context (with RAII guard)
        // SAFETY: codec pointer is valid, AvCodecContextGuard ensures cleanup
        let codec_ctx_guard = unsafe { AvCodecContextGuard::new(codec)? };
        let codec_ctx = codec_ctx_guard.as_ptr();

        // Copy codec parameters from stream to context
        // SAFETY: format_ctx and codec_ctx are valid, stream_index is valid
        unsafe {
            let stream = (*format_ctx).streams.add(stream_index as usize);
            let codecpar = (*(*stream)).codecpar;
            ff_sys::avcodec::parameters_to_context(codec_ctx, codecpar).map_err(|e| {
                DecodeError::Ffmpeg {
                    code: e,
                    message: format!(
                        "Failed to copy codec parameters: {}",
                        ff_sys::av_error_string(e)
                    ),
                }
            })?;

            // Set thread count (0 is left as FFmpeg's default/auto)
            if thread_count > 0 {
                (*codec_ctx).thread_count = thread_count as i32;
            }
        }

        // Initialize hardware acceleration if requested.
        // Must happen before open2 so the device is visible to the codec.
        // SAFETY: codec_ctx is valid and not yet opened
        let (hw_device_ctx, active_hw_accel) =
            unsafe { Self::init_hardware_accel(codec_ctx, hardware_accel)? };

        // Open the codec
        // SAFETY: codec_ctx and codec are valid, hardware device context is set if requested
        unsafe {
            ff_sys::avcodec::open2(codec_ctx, codec, ptr::null_mut()).map_err(|e| {
                // If codec opening failed, we still own our reference to hw_device_ctx
                // but it will be cleaned up when codec_ctx is freed (which happens
                // when codec_ctx_guard is dropped)
                // Our reference in hw_device_ctx will be cleaned up here
                if let Some(hw_ctx) = hw_device_ctx {
                    ff_sys::av_buffer_unref(&mut (hw_ctx as *mut _));
                }
                DecodeError::Ffmpeg {
                    code: e,
                    message: format!("Failed to open codec: {}", ff_sys::av_error_string(e)),
                }
            })?;
        }

        // Extract stream information
        // SAFETY: All pointers are valid
        let stream_info =
            unsafe { Self::extract_stream_info(format_ctx, stream_index as i32, codec_ctx)? };

        // Extract container information
        // SAFETY: format_ctx is valid and avformat_find_stream_info has been called
        let container_info = unsafe { Self::extract_container_info(format_ctx) };

        // Allocate packet and frame (with RAII guards)
        // SAFETY: FFmpeg is initialized, guards ensure cleanup
        let packet_guard = unsafe { AvPacketGuard::new()? };
        let frame_guard = unsafe { AvFrameGuard::new()? };

        // All initialization successful - transfer ownership to VideoDecoderInner
        // (into_raw disarms each guard so nothing is double-freed).
        Ok((
            Self {
                format_ctx: format_ctx_guard.into_raw(),
                codec_ctx: codec_ctx_guard.into_raw(),
                stream_index: stream_index as i32,
                sws_ctx: None,
                sws_cache_key: None,
                output_format,
                output_scale,
                is_live,
                eof: false,
                position: Duration::ZERO,
                packet: packet_guard.into_raw(),
                frame: frame_guard.into_raw(),
                thumbnail_sws_ctx: None,
                thumbnail_cache_key: None,
                hw_device_ctx,
                active_hw_accel,
                frame_pool,
                url,
                network_opts: stored_network_opts,
                reconnect_count: 0,
            },
            stream_info,
            container_info,
        ))
    }
746
747    /// Finds the first video stream in the format context.
748    ///
749    /// # Returns
750    ///
751    /// Returns `Some((index, codec_id))` if a video stream is found, `None` otherwise.
752    ///
753    /// # Safety
754    ///
755    /// Caller must ensure `format_ctx` is valid and initialized.
756    unsafe fn find_video_stream(format_ctx: *mut AVFormatContext) -> Option<(usize, AVCodecID)> {
757        // SAFETY: Caller ensures format_ctx is valid
758        unsafe {
759            let nb_streams = (*format_ctx).nb_streams as usize;
760
761            for i in 0..nb_streams {
762                let stream = (*format_ctx).streams.add(i);
763                let codecpar = (*(*stream)).codecpar;
764
765                if (*codecpar).codec_type == AVMediaType_AVMEDIA_TYPE_VIDEO {
766                    return Some((i, (*codecpar).codec_id));
767                }
768            }
769
770            None
771        }
772    }
773
774    /// Returns the human-readable codec name for a given `AVCodecID`.
775    unsafe fn extract_codec_name(codec_id: ff_sys::AVCodecID) -> String {
776        // SAFETY: avcodec_get_name is safe for any codec ID value
777        let name_ptr = unsafe { ff_sys::avcodec_get_name(codec_id) };
778        if name_ptr.is_null() {
779            return String::from("unknown");
780        }
781        // SAFETY: avcodec_get_name returns a valid C string with static lifetime
782        unsafe { CStr::from_ptr(name_ptr).to_string_lossy().into_owned() }
783    }
784
785    /// Extracts video stream information from FFmpeg structures.
786    unsafe fn extract_stream_info(
787        format_ctx: *mut AVFormatContext,
788        stream_index: i32,
789        codec_ctx: *mut AVCodecContext,
790    ) -> Result<VideoStreamInfo, DecodeError> {
791        // SAFETY: Caller ensures all pointers are valid
792        let (
793            width,
794            height,
795            fps_rational,
796            duration_val,
797            pix_fmt,
798            color_space_val,
799            color_range_val,
800            color_primaries_val,
801            codec_id,
802        ) = unsafe {
803            let stream = (*format_ctx).streams.add(stream_index as usize);
804            let codecpar = (*(*stream)).codecpar;
805
806            (
807                (*codecpar).width as u32,
808                (*codecpar).height as u32,
809                (*(*stream)).avg_frame_rate,
810                (*format_ctx).duration,
811                (*codec_ctx).pix_fmt,
812                (*codecpar).color_space,
813                (*codecpar).color_range,
814                (*codecpar).color_primaries,
815                (*codecpar).codec_id,
816            )
817        };
818
819        // Extract frame rate
820        let frame_rate = if fps_rational.den != 0 {
821            Rational::new(fps_rational.num as i32, fps_rational.den as i32)
822        } else {
823            log::warn!(
824                "invalid frame rate, falling back to 30fps num={} den=0 fallback=30/1",
825                fps_rational.num
826            );
827            Rational::new(30, 1)
828        };
829
830        // Extract duration
831        let duration = if duration_val > 0 {
832            let duration_secs = duration_val as f64 / 1_000_000.0;
833            Some(Duration::from_secs_f64(duration_secs))
834        } else {
835            None
836        };
837
838        // Extract pixel format
839        let pixel_format = Self::convert_pixel_format(pix_fmt);
840
841        // Extract color information
842        let color_space = Self::convert_color_space(color_space_val);
843        let color_range = Self::convert_color_range(color_range_val);
844        let color_primaries = Self::convert_color_primaries(color_primaries_val);
845
846        // Extract codec
847        let codec = Self::convert_codec(codec_id);
848        let codec_name = unsafe { Self::extract_codec_name(codec_id) };
849
850        // Build stream info
851        let mut builder = VideoStreamInfo::builder()
852            .index(stream_index as u32)
853            .codec(codec)
854            .codec_name(codec_name)
855            .width(width)
856            .height(height)
857            .frame_rate(frame_rate)
858            .pixel_format(pixel_format)
859            .color_space(color_space)
860            .color_range(color_range)
861            .color_primaries(color_primaries);
862
863        if let Some(d) = duration {
864            builder = builder.duration(d);
865        }
866
867        Ok(builder.build())
868    }
869
870    /// Extracts container-level information from the `AVFormatContext`.
871    ///
872    /// # Safety
873    ///
874    /// Caller must ensure `format_ctx` is valid and `avformat_find_stream_info` has been called.
875    unsafe fn extract_container_info(format_ctx: *mut AVFormatContext) -> ContainerInfo {
876        // SAFETY: Caller ensures format_ctx is valid
877        unsafe {
878            let format_name = if (*format_ctx).iformat.is_null() {
879                String::new()
880            } else {
881                let ptr = (*(*format_ctx).iformat).name;
882                if ptr.is_null() {
883                    String::new()
884                } else {
885                    CStr::from_ptr(ptr).to_string_lossy().into_owned()
886                }
887            };
888
889            let bit_rate = {
890                let br = (*format_ctx).bit_rate;
891                if br > 0 { Some(br as u64) } else { None }
892            };
893
894            let nb_streams = (*format_ctx).nb_streams as u32;
895
896            let mut builder = ContainerInfo::builder()
897                .format_name(format_name)
898                .nb_streams(nb_streams);
899            if let Some(br) = bit_rate {
900                builder = builder.bit_rate(br);
901            }
902            builder.build()
903        }
904    }
905
906    /// Converts FFmpeg pixel format to our PixelFormat enum.
907    fn convert_pixel_format(fmt: AVPixelFormat) -> PixelFormat {
908        if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P {
909            PixelFormat::Yuv420p
910        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P {
911            PixelFormat::Yuv422p
912        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P {
913            PixelFormat::Yuv444p
914        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24 {
915            PixelFormat::Rgb24
916        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24 {
917            PixelFormat::Bgr24
918        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA {
919            PixelFormat::Rgba
920        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA {
921            PixelFormat::Bgra
922        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8 {
923            PixelFormat::Gray8
924        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_NV12 {
925            PixelFormat::Nv12
926        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_NV21 {
927            PixelFormat::Nv21
928        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P10LE {
929            PixelFormat::Yuv420p10le
930        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P10LE {
931            PixelFormat::Yuv422p10le
932        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P10LE {
933            PixelFormat::Yuv444p10le
934        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_P010LE {
935            PixelFormat::P010le
936        } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_GBRPF32LE {
937            PixelFormat::Gbrpf32le
938        } else {
939            log::warn!(
940                "pixel_format unsupported, falling back to Yuv420p requested={fmt} fallback=Yuv420p"
941            );
942            PixelFormat::Yuv420p
943        }
944    }
945
946    /// Converts FFmpeg color space to our ColorSpace enum.
947    fn convert_color_space(space: AVColorSpace) -> ColorSpace {
948        if space == ff_sys::AVColorSpace_AVCOL_SPC_BT709 {
949            ColorSpace::Bt709
950        } else if space == ff_sys::AVColorSpace_AVCOL_SPC_BT470BG
951            || space == ff_sys::AVColorSpace_AVCOL_SPC_SMPTE170M
952        {
953            ColorSpace::Bt601
954        } else if space == ff_sys::AVColorSpace_AVCOL_SPC_BT2020_NCL {
955            ColorSpace::Bt2020
956        } else {
957            log::warn!(
958                "color_space unsupported, falling back to Bt709 requested={space} fallback=Bt709"
959            );
960            ColorSpace::Bt709
961        }
962    }
963
964    /// Converts FFmpeg color range to our ColorRange enum.
965    fn convert_color_range(range: AVColorRange) -> ColorRange {
966        if range == ff_sys::AVColorRange_AVCOL_RANGE_JPEG {
967            ColorRange::Full
968        } else if range == ff_sys::AVColorRange_AVCOL_RANGE_MPEG {
969            ColorRange::Limited
970        } else {
971            log::warn!(
972                "color_range unsupported, falling back to Limited requested={range} fallback=Limited"
973            );
974            ColorRange::Limited
975        }
976    }
977
978    /// Converts FFmpeg color primaries to our ColorPrimaries enum.
979    fn convert_color_primaries(primaries: AVColorPrimaries) -> ColorPrimaries {
980        if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT709 {
981            ColorPrimaries::Bt709
982        } else if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT470BG
983            || primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_SMPTE170M
984        {
985            ColorPrimaries::Bt601
986        } else if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT2020 {
987            ColorPrimaries::Bt2020
988        } else {
989            log::warn!(
990                "color_primaries unsupported, falling back to Bt709 requested={primaries} fallback=Bt709"
991            );
992            ColorPrimaries::Bt709
993        }
994    }
995
996    /// Converts FFmpeg codec ID to our VideoCodec enum.
997    fn convert_codec(codec_id: AVCodecID) -> VideoCodec {
998        if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_H264 {
999            VideoCodec::H264
1000        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_HEVC {
1001            VideoCodec::H265
1002        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_VP8 {
1003            VideoCodec::Vp8
1004        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_VP9 {
1005            VideoCodec::Vp9
1006        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_AV1 {
1007            VideoCodec::Av1
1008        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_MPEG4 {
1009            VideoCodec::Mpeg4
1010        } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_PRORES {
1011            VideoCodec::ProRes
1012        } else {
1013            log::warn!(
1014                "video codec unsupported, falling back to H264 codec_id={codec_id} fallback=H264"
1015            );
1016            VideoCodec::H264
1017        }
1018    }
1019
1020    /// Decodes the next video frame.
1021    ///
1022    /// Transparently reconnects on `StreamInterrupted` when
1023    /// `NetworkOptions::reconnect_on_error` is enabled.
1024    ///
1025    /// # Returns
1026    ///
1027    /// - `Ok(Some(frame))` - Successfully decoded a frame
1028    /// - `Ok(None)` - End of stream reached
1029    /// - `Err(_)` - Decoding error occurred
1030    pub(crate) fn decode_one(&mut self) -> Result<Option<VideoFrame>, DecodeError> {
1031        loop {
1032            match self.decode_one_inner() {
1033                Ok(frame) => return Ok(frame),
1034                Err(DecodeError::StreamInterrupted { .. })
1035                    if self.url.is_some() && self.network_opts.reconnect_on_error =>
1036                {
1037                    self.attempt_reconnect()?;
1038                }
1039                Err(e) => return Err(e),
1040            }
1041        }
1042    }
1043
1044    fn decode_one_inner(&mut self) -> Result<Option<VideoFrame>, DecodeError> {
1045        if self.eof {
1046            return Ok(None);
1047        }
1048
1049        unsafe {
1050            loop {
1051                // Try to receive a frame from the decoder
1052                let ret = ff_sys::avcodec_receive_frame(self.codec_ctx, self.frame);
1053
1054                if ret == 0 {
1055                    // Successfully received a frame
1056                    // Check if this is a hardware frame and transfer to CPU memory if needed
1057                    self.transfer_hardware_frame_if_needed()?;
1058
1059                    let video_frame = self.convert_frame_to_video_frame()?;
1060
1061                    // Update position based on frame timestamp
1062                    let pts = (*self.frame).pts;
1063                    if pts != ff_sys::AV_NOPTS_VALUE {
1064                        let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
1065                        let time_base = (*(*stream)).time_base;
1066                        let timestamp_secs =
1067                            pts as f64 * time_base.num as f64 / time_base.den as f64;
1068                        self.position = Duration::from_secs_f64(timestamp_secs);
1069                    }
1070
1071                    return Ok(Some(video_frame));
1072                } else if ret == ff_sys::error_codes::EAGAIN {
1073                    // Need to send more packets to the decoder
1074                    // Read a packet from the file
1075                    let read_ret = ff_sys::av_read_frame(self.format_ctx, self.packet);
1076
1077                    if read_ret == ff_sys::error_codes::EOF {
1078                        // End of file - flush the decoder
1079                        ff_sys::avcodec_send_packet(self.codec_ctx, ptr::null());
1080                        self.eof = true;
1081                        continue;
1082                    } else if read_ret < 0 {
1083                        return Err(if let Some(url) = &self.url {
1084                            // Network source: map to typed variant so reconnect can detect it.
1085                            crate::network::map_network_error(
1086                                read_ret,
1087                                crate::network::sanitize_url(url),
1088                            )
1089                        } else {
1090                            DecodeError::Ffmpeg {
1091                                code: read_ret,
1092                                message: format!(
1093                                    "Failed to read frame: {}",
1094                                    ff_sys::av_error_string(read_ret)
1095                                ),
1096                            }
1097                        });
1098                    }
1099
1100                    // Check if this packet belongs to the video stream
1101                    if (*self.packet).stream_index == self.stream_index {
1102                        // Send the packet to the decoder
1103                        let send_ret = ff_sys::avcodec_send_packet(self.codec_ctx, self.packet);
1104                        ff_sys::av_packet_unref(self.packet);
1105
1106                        if send_ret < 0 && send_ret != ff_sys::error_codes::EAGAIN {
1107                            return Err(DecodeError::Ffmpeg {
1108                                code: send_ret,
1109                                message: format!(
1110                                    "Failed to send packet: {}",
1111                                    ff_sys::av_error_string(send_ret)
1112                                ),
1113                            });
1114                        }
1115                    } else {
1116                        // Not our stream, unref and continue
1117                        ff_sys::av_packet_unref(self.packet);
1118                    }
1119                } else if ret == ff_sys::error_codes::EOF {
1120                    // Decoder has been fully flushed
1121                    self.eof = true;
1122                    return Ok(None);
1123                } else {
1124                    return Err(DecodeError::DecodingFailed {
1125                        timestamp: Some(self.position),
1126                        reason: ff_sys::av_error_string(ret),
1127                    });
1128                }
1129            }
1130        }
1131    }
1132
1133    /// Converts an AVFrame to a VideoFrame, applying pixel format conversion if needed.
1134    unsafe fn convert_frame_to_video_frame(&mut self) -> Result<VideoFrame, DecodeError> {
1135        // SAFETY: Caller ensures self.frame is valid
1136        unsafe {
1137            let src_width = (*self.frame).width as u32;
1138            let src_height = (*self.frame).height as u32;
1139            let src_format = (*self.frame).format;
1140
1141            // Determine output format
1142            let dst_format = if let Some(fmt) = self.output_format {
1143                Self::pixel_format_to_av(fmt)
1144            } else {
1145                src_format
1146            };
1147
1148            // Determine output dimensions
1149            let (dst_width, dst_height) = self.resolve_output_dims(src_width, src_height);
1150
1151            // Check if conversion or scaling is needed
1152            let needs_conversion =
1153                src_format != dst_format || dst_width != src_width || dst_height != src_height;
1154
1155            if needs_conversion {
1156                self.convert_with_sws(
1157                    src_width, src_height, src_format, dst_width, dst_height, dst_format,
1158                )
1159            } else {
1160                self.av_frame_to_video_frame(self.frame)
1161            }
1162        }
1163    }
1164
1165    /// Computes the destination (width, height) from `output_scale` and source dimensions.
1166    ///
1167    /// Returns `(src_width, src_height)` when no scale is set.
1168    /// All returned dimensions are rounded up to the nearest even number.
1169    fn resolve_output_dims(&self, src_width: u32, src_height: u32) -> (u32, u32) {
1170        let round_even = |n: u32| (n + 1) & !1;
1171
1172        match self.output_scale {
1173            None => (src_width, src_height),
1174            Some(OutputScale::Exact { width, height }) => (round_even(width), round_even(height)),
1175            Some(OutputScale::FitWidth(target_w)) => {
1176                let target_w = round_even(target_w);
1177                if src_width == 0 {
1178                    return (target_w, target_w);
1179                }
1180                let h = (target_w as u64 * src_height as u64 / src_width as u64) as u32;
1181                (target_w, round_even(h.max(2)))
1182            }
1183            Some(OutputScale::FitHeight(target_h)) => {
1184                let target_h = round_even(target_h);
1185                if src_height == 0 {
1186                    return (target_h, target_h);
1187                }
1188                let w = (target_h as u64 * src_width as u64 / src_height as u64) as u32;
1189                (round_even(w.max(2)), target_h)
1190            }
1191        }
1192    }
1193
    /// Converts pixel format and/or scales a frame using `libswscale`.
    ///
    /// The `sws_ctx` is cached and recreated only when the source/destination
    /// parameters change (cache key: `(src_w, src_h, src_fmt, dst_w, dst_h, dst_fmt)`).
    ///
    /// # Safety
    ///
    /// Caller must ensure `self.frame` holds a valid decoded frame whose actual
    /// dimensions/format match `src_width`/`src_height`/`src_format`.
    unsafe fn convert_with_sws(
        &mut self,
        src_width: u32,
        src_height: u32,
        src_format: i32,
        dst_width: u32,
        dst_height: u32,
        dst_format: i32,
    ) -> Result<VideoFrame, DecodeError> {
        // SAFETY: Caller ensures frame and context pointers are valid
        unsafe {
            // Get or create SwScale context, invalidating cache when parameters change.
            let cache_key = (
                src_width, src_height, src_format, dst_width, dst_height, dst_format,
            );
            if self.sws_cache_key != Some(cache_key) {
                // Free the old context if it exists.
                if let Some(old_ctx) = self.sws_ctx.take() {
                    ff_sys::swscale::free_context(old_ctx);
                }

                let ctx = ff_sys::swscale::get_context(
                    src_width as i32,
                    src_height as i32,
                    src_format,
                    dst_width as i32,
                    dst_height as i32,
                    dst_format,
                    ff_sys::swscale::scale_flags::BILINEAR,
                )
                .map_err(|e| DecodeError::Ffmpeg {
                    code: 0,
                    message: format!("Failed to create sws context: {e}"),
                })?;

                self.sws_ctx = Some(ctx);
                self.sws_cache_key = Some(cache_key);
            }

            // Defensive re-check: the cache logic above guarantees Some, but a
            // typed error beats an unwrap panic if that invariant ever breaks.
            let Some(sws_ctx) = self.sws_ctx else {
                return Err(DecodeError::Ffmpeg {
                    code: 0,
                    message: "SwsContext not initialized".to_string(),
                });
            };

            // Allocate destination frame (with RAII guard)
            let dst_frame_guard = AvFrameGuard::new()?;
            let dst_frame = dst_frame_guard.as_ptr();

            (*dst_frame).width = dst_width as i32;
            (*dst_frame).height = dst_height as i32;
            (*dst_frame).format = dst_format;

            // Allocate buffer for destination frame
            let buffer_ret = ff_sys::av_frame_get_buffer(dst_frame, 0);
            if buffer_ret < 0 {
                return Err(DecodeError::Ffmpeg {
                    code: buffer_ret,
                    message: format!(
                        "Failed to allocate frame buffer: {}",
                        ff_sys::av_error_string(buffer_ret)
                    ),
                });
            }

            // Perform conversion/scaling (src_height is the number of input rows to process)
            ff_sys::swscale::scale(
                sws_ctx,
                (*self.frame).data.as_ptr() as *const *const u8,
                (*self.frame).linesize.as_ptr(),
                0,
                src_height as i32,
                (*dst_frame).data.as_ptr() as *const *mut u8,
                (*dst_frame).linesize.as_ptr(),
            )
            .map_err(|e| DecodeError::Ffmpeg {
                code: 0,
                message: format!("Failed to scale frame: {e}"),
            })?;

            // Copy timestamp so downstream position/timestamp logic still works.
            (*dst_frame).pts = (*self.frame).pts;

            // Convert to VideoFrame (copies the pixel data out of dst_frame).
            let video_frame = self.av_frame_to_video_frame(dst_frame)?;

            // dst_frame is automatically freed when guard drops

            Ok(video_frame)
        }
    }
1290
1291    /// Converts an AVFrame to a VideoFrame.
1292    unsafe fn av_frame_to_video_frame(
1293        &self,
1294        frame: *const AVFrame,
1295    ) -> Result<VideoFrame, DecodeError> {
1296        // SAFETY: Caller ensures frame and format_ctx are valid
1297        unsafe {
1298            let width = (*frame).width as u32;
1299            let height = (*frame).height as u32;
1300            let format = Self::convert_pixel_format((*frame).format);
1301
1302            // Extract timestamp
1303            let pts = (*frame).pts;
1304            let timestamp = if pts != ff_sys::AV_NOPTS_VALUE {
1305                let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
1306                let time_base = (*(*stream)).time_base;
1307                Timestamp::new(
1308                    pts as i64,
1309                    Rational::new(time_base.num as i32, time_base.den as i32),
1310                )
1311            } else {
1312                Timestamp::default()
1313            };
1314
1315            // Convert frame to planes and strides
1316            let (planes, strides) =
1317                self.extract_planes_and_strides(frame, width, height, format)?;
1318
1319            VideoFrame::new(planes, strides, width, height, format, timestamp, false).map_err(|e| {
1320                DecodeError::Ffmpeg {
1321                    code: 0,
1322                    message: format!("Failed to create VideoFrame: {e}"),
1323                }
1324            })
1325        }
1326    }
1327
1328    /// Allocates a buffer, optionally using the frame pool.
1329    ///
1330    /// If a frame pool is configured and has available buffers, uses the pool.
1331    /// Otherwise, allocates a new Vec<u8>.
1332    ///
1333    /// Allocates a buffer for decoded frame data.
1334    ///
1335    /// If a frame pool is configured, attempts to acquire a buffer from the pool.
1336    /// The returned PooledBuffer will automatically be returned to the pool when dropped.
1337    fn allocate_buffer(&self, size: usize) -> PooledBuffer {
1338        if let Some(ref pool) = self.frame_pool {
1339            if let Some(pooled_buffer) = pool.acquire(size) {
1340                return pooled_buffer;
1341            }
1342            // Pool is configured but currently empty (or has no buffer large
1343            // enough). Allocate fresh memory and attach it to the pool so
1344            // that when the VideoFrame is dropped the buffer is returned via
1345            // pool.release() and becomes available for the next frame.
1346            return PooledBuffer::new(vec![0u8; size], Arc::downgrade(pool));
1347        }
1348        PooledBuffer::standalone(vec![0u8; size])
1349    }
1350
1351    /// Extracts planes and strides from an AVFrame.
1352    unsafe fn extract_planes_and_strides(
1353        &self,
1354        frame: *const AVFrame,
1355        width: u32,
1356        height: u32,
1357        format: PixelFormat,
1358    ) -> Result<(Vec<PooledBuffer>, Vec<usize>), DecodeError> {
1359        // Bytes per pixel constants for different pixel formats
1360        const BYTES_PER_PIXEL_RGBA: usize = 4;
1361        const BYTES_PER_PIXEL_RGB24: usize = 3;
1362
1363        // SAFETY: Caller ensures frame is valid and format matches actual frame format
1364        unsafe {
1365            let mut planes = Vec::new();
1366            let mut strides = Vec::new();
1367
1368            #[allow(clippy::match_same_arms)]
1369            match format {
1370                PixelFormat::Rgba | PixelFormat::Bgra | PixelFormat::Rgb24 | PixelFormat::Bgr24 => {
1371                    // Packed formats - single plane
1372                    let stride = (*frame).linesize[0] as usize;
1373                    let bytes_per_pixel = if matches!(format, PixelFormat::Rgba | PixelFormat::Bgra)
1374                    {
1375                        BYTES_PER_PIXEL_RGBA
1376                    } else {
1377                        BYTES_PER_PIXEL_RGB24
1378                    };
1379                    let row_size = (width as usize) * bytes_per_pixel;
1380                    let buffer_size = row_size * height as usize;
1381                    let mut plane_data = self.allocate_buffer(buffer_size);
1382
1383                    for y in 0..height as usize {
1384                        let src_offset = y * stride;
1385                        let dst_offset = y * row_size;
1386                        let src_ptr = (*frame).data[0].add(src_offset);
1387                        let plane_slice = plane_data.as_mut();
1388                        // SAFETY: We copy exactly `row_size` bytes per row. The source pointer
1389                        // is valid (from FFmpeg frame data), destination has sufficient capacity
1390                        // (allocated with height * row_size), and ranges don't overlap.
1391                        std::ptr::copy_nonoverlapping(
1392                            src_ptr,
1393                            plane_slice[dst_offset..].as_mut_ptr(),
1394                            row_size,
1395                        );
1396                    }
1397
1398                    planes.push(plane_data);
1399                    strides.push(row_size);
1400                }
1401                PixelFormat::Yuv420p | PixelFormat::Yuv422p | PixelFormat::Yuv444p => {
1402                    // Planar YUV formats
1403                    let (chroma_width, chroma_height) = match format {
1404                        PixelFormat::Yuv420p => (width / 2, height / 2),
1405                        PixelFormat::Yuv422p => (width / 2, height),
1406                        PixelFormat::Yuv444p => (width, height),
1407                        _ => unreachable!(),
1408                    };
1409
1410                    // Y plane
1411                    let y_stride = width as usize;
1412                    let y_size = y_stride * height as usize;
1413                    let mut y_data = self.allocate_buffer(y_size);
1414                    for y in 0..height as usize {
1415                        let src_offset = y * (*frame).linesize[0] as usize;
1416                        let dst_offset = y * y_stride;
1417                        let src_ptr = (*frame).data[0].add(src_offset);
1418                        let y_slice = y_data.as_mut();
1419                        // SAFETY: Copying Y plane row-by-row. Source is valid FFmpeg data,
1420                        // destination has sufficient capacity, no overlap.
1421                        std::ptr::copy_nonoverlapping(
1422                            src_ptr,
1423                            y_slice[dst_offset..].as_mut_ptr(),
1424                            width as usize,
1425                        );
1426                    }
1427                    planes.push(y_data);
1428                    strides.push(y_stride);
1429
1430                    // U plane
1431                    let u_stride = chroma_width as usize;
1432                    let u_size = u_stride * chroma_height as usize;
1433                    let mut u_data = self.allocate_buffer(u_size);
1434                    for y in 0..chroma_height as usize {
1435                        let src_offset = y * (*frame).linesize[1] as usize;
1436                        let dst_offset = y * u_stride;
1437                        let src_ptr = (*frame).data[1].add(src_offset);
1438                        let u_slice = u_data.as_mut();
1439                        // SAFETY: Copying U (chroma) plane row-by-row. Valid source,
1440                        // sufficient destination capacity, no overlap.
1441                        std::ptr::copy_nonoverlapping(
1442                            src_ptr,
1443                            u_slice[dst_offset..].as_mut_ptr(),
1444                            chroma_width as usize,
1445                        );
1446                    }
1447                    planes.push(u_data);
1448                    strides.push(u_stride);
1449
1450                    // V plane
1451                    let v_stride = chroma_width as usize;
1452                    let v_size = v_stride * chroma_height as usize;
1453                    let mut v_data = self.allocate_buffer(v_size);
1454                    for y in 0..chroma_height as usize {
1455                        let src_offset = y * (*frame).linesize[2] as usize;
1456                        let dst_offset = y * v_stride;
1457                        let src_ptr = (*frame).data[2].add(src_offset);
1458                        let v_slice = v_data.as_mut();
1459                        // SAFETY: Copying V (chroma) plane row-by-row. Valid source,
1460                        // sufficient destination capacity, no overlap.
1461                        std::ptr::copy_nonoverlapping(
1462                            src_ptr,
1463                            v_slice[dst_offset..].as_mut_ptr(),
1464                            chroma_width as usize,
1465                        );
1466                    }
1467                    planes.push(v_data);
1468                    strides.push(v_stride);
1469                }
1470                PixelFormat::Gray8 => {
1471                    // Single plane grayscale
1472                    let stride = width as usize;
1473                    let mut plane_data = self.allocate_buffer(stride * height as usize);
1474
1475                    for y in 0..height as usize {
1476                        let src_offset = y * (*frame).linesize[0] as usize;
1477                        let dst_offset = y * stride;
1478                        let src_ptr = (*frame).data[0].add(src_offset);
1479                        let plane_slice = plane_data.as_mut();
1480                        // SAFETY: Copying grayscale plane row-by-row. Valid source,
1481                        // sufficient destination capacity, no overlap.
1482                        std::ptr::copy_nonoverlapping(
1483                            src_ptr,
1484                            plane_slice[dst_offset..].as_mut_ptr(),
1485                            width as usize,
1486                        );
1487                    }
1488
1489                    planes.push(plane_data);
1490                    strides.push(stride);
1491                }
1492                PixelFormat::Nv12 | PixelFormat::Nv21 => {
1493                    // Semi-planar formats
1494                    let uv_height = height / 2;
1495
1496                    // Y plane
1497                    let y_stride = width as usize;
1498                    let mut y_data = self.allocate_buffer(y_stride * height as usize);
1499                    for y in 0..height as usize {
1500                        let src_offset = y * (*frame).linesize[0] as usize;
1501                        let dst_offset = y * y_stride;
1502                        let src_ptr = (*frame).data[0].add(src_offset);
1503                        let y_slice = y_data.as_mut();
1504                        // SAFETY: Copying Y plane (semi-planar) row-by-row. Valid source,
1505                        // sufficient destination capacity, no overlap.
1506                        std::ptr::copy_nonoverlapping(
1507                            src_ptr,
1508                            y_slice[dst_offset..].as_mut_ptr(),
1509                            width as usize,
1510                        );
1511                    }
1512                    planes.push(y_data);
1513                    strides.push(y_stride);
1514
1515                    // UV plane
1516                    let uv_stride = width as usize;
1517                    let mut uv_data = self.allocate_buffer(uv_stride * uv_height as usize);
1518                    for y in 0..uv_height as usize {
1519                        let src_offset = y * (*frame).linesize[1] as usize;
1520                        let dst_offset = y * uv_stride;
1521                        let src_ptr = (*frame).data[1].add(src_offset);
1522                        let uv_slice = uv_data.as_mut();
1523                        // SAFETY: Copying interleaved UV plane (semi-planar) row-by-row.
1524                        // Valid source, sufficient destination capacity, no overlap.
1525                        std::ptr::copy_nonoverlapping(
1526                            src_ptr,
1527                            uv_slice[dst_offset..].as_mut_ptr(),
1528                            width as usize,
1529                        );
1530                    }
1531                    planes.push(uv_data);
1532                    strides.push(uv_stride);
1533                }
1534                PixelFormat::Gbrpf32le => {
1535                    // Planar GBR float: 3 full-resolution planes, 4 bytes per sample (f32)
1536                    const BYTES_PER_SAMPLE: usize = 4;
1537                    let row_size = width as usize * BYTES_PER_SAMPLE;
1538                    let size = row_size * height as usize;
1539
1540                    for plane_idx in 0..3usize {
1541                        let src_linesize = (*frame).linesize[plane_idx] as usize;
1542                        let mut plane_data = self.allocate_buffer(size);
1543                        for y in 0..height as usize {
1544                            let src_offset = y * src_linesize;
1545                            let dst_offset = y * row_size;
1546                            let src_ptr = (*frame).data[plane_idx].add(src_offset);
1547                            let dst_slice = plane_data.as_mut();
1548                            // SAFETY: Copying one row of a planar float plane. Source is valid
1549                            // FFmpeg frame data, destination has sufficient capacity, no overlap.
1550                            std::ptr::copy_nonoverlapping(
1551                                src_ptr,
1552                                dst_slice[dst_offset..].as_mut_ptr(),
1553                                row_size,
1554                            );
1555                        }
1556                        planes.push(plane_data);
1557                        strides.push(row_size);
1558                    }
1559                }
1560                _ => {
1561                    return Err(DecodeError::Ffmpeg {
1562                        code: 0,
1563                        message: format!("Unsupported pixel format: {format:?}"),
1564                    });
1565                }
1566            }
1567
1568            Ok((planes, strides))
1569        }
1570    }
1571
1572    /// Converts our `PixelFormat` to FFmpeg `AVPixelFormat`.
1573    fn pixel_format_to_av(format: PixelFormat) -> AVPixelFormat {
1574        match format {
1575            PixelFormat::Yuv420p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P,
1576            PixelFormat::Yuv422p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P,
1577            PixelFormat::Yuv444p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P,
1578            PixelFormat::Rgb24 => ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24,
1579            PixelFormat::Bgr24 => ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24,
1580            PixelFormat::Rgba => ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA,
1581            PixelFormat::Bgra => ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA,
1582            PixelFormat::Gray8 => ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8,
1583            PixelFormat::Nv12 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV12,
1584            PixelFormat::Nv21 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV21,
1585            PixelFormat::Yuv420p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P10LE,
1586            PixelFormat::Yuv422p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P10LE,
1587            PixelFormat::Yuv444p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P10LE,
1588            PixelFormat::Yuva444p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUVA444P10LE,
1589            PixelFormat::P010le => ff_sys::AVPixelFormat_AV_PIX_FMT_P010LE,
1590            PixelFormat::Gbrpf32le => ff_sys::AVPixelFormat_AV_PIX_FMT_GBRPF32LE,
1591            _ => {
1592                log::warn!(
1593                    "pixel_format has no AV mapping, falling back to Yuv420p format={format:?} fallback=AV_PIX_FMT_YUV420P"
1594                );
1595                ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P
1596            }
1597        }
1598    }
1599
1600    /// Returns the current playback position.
1601    pub(crate) fn position(&self) -> Duration {
1602        self.position
1603    }
1604
1605    /// Returns whether end of file has been reached.
1606    pub(crate) fn is_eof(&self) -> bool {
1607        self.eof
1608    }
1609
1610    /// Returns whether the source is a live or streaming input.
1611    ///
1612    /// Live sources have the `AVFMT_TS_DISCONT` flag set on their `AVInputFormat`.
1613    /// Seeking is not meaningful on live sources.
1614    pub(crate) fn is_live(&self) -> bool {
1615        self.is_live
1616    }
1617
1618    /// Converts a `Duration` to a presentation timestamp (PTS) in stream time_base units.
1619    ///
1620    /// # Arguments
1621    ///
1622    /// * `duration` - The duration to convert.
1623    ///
1624    /// # Returns
1625    ///
1626    /// The timestamp in stream time_base units.
1627    ///
1628    /// # Note
1629    ///
1630    /// av_seek_frame expects timestamps in stream time_base units when using a specific stream_index.
1631    fn duration_to_pts(&self, duration: Duration) -> i64 {
1632        // Convert duration to stream time_base units for seeking
1633        // SAFETY:
1634        // - format_ctx is valid: owned by VideoDecoderInner, initialized in constructor via avformat_open_input
1635        // - stream_index is valid: validated during decoder creation (find_stream_info + codec opening)
1636        // - streams array access is valid: guaranteed by FFmpeg after successful avformat_open_input
1637        let time_base = unsafe {
1638            let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
1639            (*(*stream)).time_base
1640        };
1641
1642        // Convert: duration (seconds) * (time_base.den / time_base.num) = PTS
1643        let time_base_f64 = time_base.den as f64 / time_base.num as f64;
1644        (duration.as_secs_f64() * time_base_f64) as i64
1645    }
1646
1647    /// Converts a presentation timestamp (PTS) to a `Duration`.
1648    ///
1649    /// # Arguments
1650    ///
1651    /// * `pts` - The presentation timestamp in stream time base units.
1652    ///
1653    /// # Returns
1654    ///
1655    /// The duration corresponding to the PTS.
1656    ///
1657    /// # Safety
1658    ///
1659    /// Caller must ensure that `format_ctx` and `stream_index` are valid.
1660    ///
1661    /// # Note
1662    ///
1663    /// Currently unused but kept for potential future use in more advanced seeking scenarios.
1664    #[allow(dead_code)]
1665    fn pts_to_duration(&self, pts: i64) -> Duration {
1666        // SAFETY: Caller ensures format_ctx and stream_index are valid
1667        unsafe {
1668            let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
1669            let time_base = (*(*stream)).time_base;
1670
1671            // Convert PTS to duration
1672            let duration_secs = pts as f64 * time_base.num as f64 / time_base.den as f64;
1673            Duration::from_secs_f64(duration_secs)
1674        }
1675    }
1676
    /// Seeks to a specified position in the video stream.
    ///
    /// This method performs efficient seeking without reopening the file.
    /// It uses `av_seek_frame` internally and flushes the decoder buffers.
    ///
    /// # Performance Characteristics
    ///
    /// - **Keyframe seek**: 5-10ms for typical GOP sizes (1-2 seconds)
    /// - **Exact seek**: Proportional to distance from nearest keyframe
    /// - **Large GOP videos**: May require sequential decoding from distant keyframe
    ///
    /// For videos with sparse keyframes (GOP > 2 seconds), the method will
    /// decode frames sequentially from the nearest keyframe to reach the target.
    /// This ensures correct frame data but may take longer (10-50ms for very large GOPs).
    ///
    /// # Arguments
    ///
    /// * `position` - Target position to seek to.
    /// * `mode` - Seek mode (Keyframe, Exact, or Backward).
    ///
    /// # Errors
    ///
    /// Returns [`DecodeError::SeekFailed`] if the seek operation fails.
    pub(crate) fn seek(
        &mut self,
        position: Duration,
        mode: crate::SeekMode,
    ) -> Result<(), DecodeError> {
        use crate::SeekMode;

        // Target in the stream's own time_base units, as av_seek_frame expects.
        let timestamp = self.duration_to_pts(position);

        // All seek modes use BACKWARD flag to find the nearest keyframe at or before target.
        // The difference between modes is in the post-seek processing below.
        let flags = ff_sys::avformat::seek_flags::BACKWARD;

        // 1. Clear any pending packet and frame to avoid reading stale data after seek
        // SAFETY:
        // - packet is valid: allocated in constructor, owned by VideoDecoderInner
        // - frame is valid: allocated in constructor, owned by VideoDecoderInner
        unsafe {
            ff_sys::av_packet_unref(self.packet);
            ff_sys::av_frame_unref(self.frame);
        }

        // 2. Seek in the format context (file is NOT reopened)
        // Use av_seek_frame with the stream index and timestamp in stream time_base units
        // SAFETY:
        // - format_ctx is valid: owned by VideoDecoderInner, initialized via avformat_open_input
        // - stream_index is valid: validated during decoder creation
        // - timestamp is valid: converted from Duration using stream's time_base
        unsafe {
            ff_sys::avformat::seek_frame(
                self.format_ctx,
                self.stream_index as i32,
                timestamp,
                flags,
            )
            .map_err(|e| DecodeError::SeekFailed {
                target: position,
                reason: ff_sys::av_error_string(e),
            })?;
        }

        // 3. Flush decoder buffers to clear any cached frames
        // SAFETY: codec_ctx is valid: owned by VideoDecoderInner, initialized via avcodec_open2
        unsafe {
            ff_sys::avcodec::flush_buffers(self.codec_ctx);
        }

        // 4. Drain any remaining frames from the decoder after flush
        // This ensures no stale frames are returned after the seek.
        // Belt-and-braces: flush_buffers should already have discarded internal
        // state, so this loop normally terminates immediately on EAGAIN.
        // SAFETY:
        // - codec_ctx is valid: owned by VideoDecoderInner, initialized via avcodec_open2
        // - frame is valid: allocated in constructor, owned by VideoDecoderInner
        unsafe {
            loop {
                let ret = ff_sys::avcodec_receive_frame(self.codec_ctx, self.frame);
                if ret == ff_sys::error_codes::EAGAIN || ret == ff_sys::error_codes::EOF {
                    // No more frames in the decoder buffer
                    break;
                } else if ret == 0 {
                    // Got a frame, unref it and continue draining
                    ff_sys::av_frame_unref(self.frame);
                } else {
                    // Other error, break out; the next decode call will surface it
                    break;
                }
            }
        }

        // 5. Reset internal state
        self.eof = false;
        // Note: We don't update self.position here because it will be updated
        // when the next frame is decoded. This ensures position reflects actual decoded position.

        // 6. Skip forward to the target position
        //
        // Context: av_seek_frame with BACKWARD flag seeks to the nearest keyframe *at or before*
        // the target timestamp. For videos with sparse keyframes (large GOP size), this may
        // land far from the target (e.g., at the first keyframe for GOP=entire video).
        //
        // Solution: Decode frames sequentially from the keyframe until reaching the target.
        // This is necessary because H.264/H.265 P-frames and B-frames depend on previous
        // frames for reconstruction, so we must decode all intermediate frames.
        //
        // Performance Impact:
        // - Typical GOP (1-2s): 30-60 frames to skip, ~5-10ms overhead
        // - Large GOP (5-10s): 150-300 frames to skip, ~20-50ms overhead
        // - Worst case (single keyframe): May decode entire video, ~100ms-1s
        if mode == SeekMode::Exact {
            // For exact mode, decode until we reach or pass the exact target
            self.skip_to_exact(position)?;
        } else {
            // For keyframe/backward modes, decode until we're reasonably close to the target
            // Rationale: Balances accuracy with performance for common use cases
            let tolerance = Duration::from_secs(KEYFRAME_SEEK_TOLERANCE_SECS);
            let min_position = position.saturating_sub(tolerance);

            // Drop frames until one lands within `tolerance` before the target.
            // If decode_one() hits EOF (returns None) the loop simply ends:
            // keyframe-mode seeks past the end are not treated as errors.
            while let Some(frame) = self.decode_one()? {
                let frame_time = frame.timestamp().as_duration();
                if frame_time >= min_position {
                    // We're close enough to the target
                    break;
                }
                // Continue decoding to get closer (frames are automatically dropped)
            }
        }

        Ok(())
    }
1808
1809    /// Skips frames until reaching the exact target position.
1810    ///
1811    /// This is used by [`Self::seek`] when `SeekMode::Exact` is specified.
1812    /// It decodes and discards frames from the nearest keyframe until
1813    /// reaching the target position.
1814    ///
1815    /// # Performance
1816    ///
1817    /// Time complexity is O(n) where n is the number of frames between the
1818    /// keyframe and target. For a 30fps video with 2-second GOP:
1819    /// - Worst case: ~60 frames to decode, ~10-20ms
1820    /// - Average case: ~30 frames to decode, ~5-10ms
1821    ///
1822    /// # Arguments
1823    ///
1824    /// * `target` - The exact target position.
1825    ///
1826    /// # Errors
1827    ///
1828    /// Returns [`DecodeError::SeekFailed`] if EOF is reached before the target position.
1829    fn skip_to_exact(&mut self, target: Duration) -> Result<(), DecodeError> {
1830        loop {
1831            match self.decode_one()? {
1832                Some(frame) => {
1833                    let frame_time = frame.timestamp().as_duration();
1834                    if frame_time >= target {
1835                        // Reached or passed the target frame
1836                        // Position will be updated by decode_one() which was just called
1837                        break;
1838                    }
1839                    // Continue decoding (frame is automatically dropped)
1840                }
1841                None => {
1842                    // Reached EOF before finding target frame
1843                    return Err(DecodeError::SeekFailed {
1844                        target,
1845                        reason: "Reached end of stream before target position".to_string(),
1846                    });
1847                }
1848            }
1849        }
1850        Ok(())
1851    }
1852
1853    /// Flushes the decoder's internal buffers.
1854    ///
1855    /// This clears any cached frames and resets the decoder state.
1856    /// The decoder is ready to receive new packets after flushing.
1857    pub(crate) fn flush(&mut self) {
1858        // SAFETY: codec_ctx is valid and owned by this instance
1859        unsafe {
1860            ff_sys::avcodec::flush_buffers(self.codec_ctx);
1861        }
1862        self.eof = false;
1863    }
1864
    /// Scales a video frame to the specified dimensions while preserving aspect ratio.
    ///
    /// This method uses SwScale to resize frames efficiently using a "fit-within"
    /// strategy that preserves the original aspect ratio.
    ///
    /// # Aspect Ratio Preservation
    ///
    /// The frame is scaled to fit within `(target_width, target_height)` while
    /// maintaining its original aspect ratio. The output dimensions will be at most
    /// the target size, with at least one dimension matching the target. No letterboxing
    /// or pillarboxing is applied - the frame is simply scaled down to fit.
    ///
    /// # Arguments
    ///
    /// * `frame` - The source frame to scale.
    /// * `target_width` - Desired width in pixels.
    /// * `target_height` - Desired height in pixels.
    ///
    /// # Returns
    ///
    /// A new `VideoFrame` scaled to fit within the target dimensions.
    ///
    /// # Errors
    ///
    /// Returns [`DecodeError`] if SwScale context creation or scaling fails.
    ///
    /// # Performance
    ///
    /// - Caches SwScale context for repeated calls with same dimensions
    /// - Context creation: ~0.1-0.5ms (only on first call or dimension change)
    /// - Typical scaling time: 1-3ms for 1080p → 320x180
    /// - Uses bilinear interpolation for quality/performance balance
    ///
    /// # Cache Behavior
    ///
    /// The SwScale context is cached based on source/target dimensions and format.
    /// When generating multiple thumbnails with the same size (e.g., via `thumbnails()`),
    /// the context is reused, eliminating the ~0.1-0.5ms creation overhead per thumbnail.
    pub(crate) fn scale_frame(
        &mut self,
        frame: &VideoFrame,
        target_width: u32,
        target_height: u32,
    ) -> Result<VideoFrame, DecodeError> {
        let src_width = frame.width();
        let src_height = frame.height();
        let src_format = frame.format();

        // Calculate scaled dimensions to preserve aspect ratio (fit within target).
        // NOTE(review): assumes src_height and target_height are nonzero —
        // presumably guaranteed upstream for decoded frames; confirm.
        let src_aspect = src_width as f64 / src_height as f64;
        let target_aspect = target_width as f64 / target_height as f64;

        let (scaled_width, scaled_height) = if src_aspect > target_aspect {
            // Source is wider - fit to width
            let height = (target_width as f64 / src_aspect).round() as u32;
            (target_width, height)
        } else {
            // Source is taller or equal - fit to height
            let width = (target_height as f64 * src_aspect).round() as u32;
            (width, target_height)
        };

        // Convert pixel format to FFmpeg format
        let av_format = Self::pixel_format_to_av(src_format);

        // Cache key: (src_width, src_height, scaled_width, scaled_height, format)
        let cache_key = (
            src_width,
            src_height,
            scaled_width,
            scaled_height,
            av_format,
        );

        // SAFETY: We're creating temporary FFmpeg objects for scaling
        unsafe {
            // Check if we can reuse the cached SwScale context.
            // `is_cached` tracks whether sws_ctx is already owned by the cache:
            // error paths below must free a non-cached context to avoid a leak.
            let (sws_ctx, is_cached) = if let (Some(cached_ctx), Some(cached_key)) =
                (self.thumbnail_sws_ctx, self.thumbnail_cache_key)
            {
                if cached_key == cache_key {
                    // Cache hit - reuse existing context
                    (cached_ctx, true)
                } else {
                    // Cache miss - free old context and create new one
                    ff_sys::swscale::free_context(cached_ctx);
                    // Clear cache immediately to prevent dangling pointer
                    self.thumbnail_sws_ctx = None;
                    self.thumbnail_cache_key = None;

                    let new_ctx = ff_sys::swscale::get_context(
                        src_width as i32,
                        src_height as i32,
                        av_format,
                        scaled_width as i32,
                        scaled_height as i32,
                        av_format,
                        ff_sys::swscale::scale_flags::BILINEAR,
                    )
                    .map_err(|e| DecodeError::Ffmpeg {
                        code: 0,
                        message: format!("Failed to create scaling context: {e}"),
                    })?;

                    // Don't cache yet - will cache after successful scaling
                    (new_ctx, false)
                }
            } else {
                // No cache - create new context
                let new_ctx = ff_sys::swscale::get_context(
                    src_width as i32,
                    src_height as i32,
                    av_format,
                    scaled_width as i32,
                    scaled_height as i32,
                    av_format,
                    ff_sys::swscale::scale_flags::BILINEAR,
                )
                .map_err(|e| DecodeError::Ffmpeg {
                    code: 0,
                    message: format!("Failed to create scaling context: {e}"),
                })?;

                // Don't cache yet - will cache after successful scaling
                (new_ctx, false)
            };

            // Set up source frame with VideoFrame data
            let src_frame_guard = AvFrameGuard::new()?;
            let src_frame = src_frame_guard.as_ptr();

            (*src_frame).width = src_width as i32;
            (*src_frame).height = src_height as i32;
            (*src_frame).format = av_format;

            // Set up source frame data pointers directly from VideoFrame (no copy).
            // NOTE(review): src_frame borrows plane memory owned by `frame` with
            // no AVBufferRef attached — presumably AvFrameGuard's cleanup frees
            // only the AVFrame shell and not these borrowed pointers; confirm.
            let planes = frame.planes();
            let strides = frame.strides();

            for (i, plane_data) in planes.iter().enumerate() {
                if i >= ff_sys::AV_NUM_DATA_POINTERS as usize {
                    break;
                }
                (*src_frame).data[i] = plane_data.as_ref().as_ptr().cast_mut();
                (*src_frame).linesize[i] = strides[i] as i32;
            }

            // Allocate destination frame
            let dst_frame_guard = AvFrameGuard::new()?;
            let dst_frame = dst_frame_guard.as_ptr();

            (*dst_frame).width = scaled_width as i32;
            (*dst_frame).height = scaled_height as i32;
            (*dst_frame).format = av_format;

            // Allocate buffer for destination frame
            let buffer_ret = ff_sys::av_frame_get_buffer(dst_frame, 0);
            if buffer_ret < 0 {
                // Clean up context if not cached
                if !is_cached {
                    ff_sys::swscale::free_context(sws_ctx);
                }
                return Err(DecodeError::Ffmpeg {
                    code: buffer_ret,
                    message: format!(
                        "Failed to allocate destination frame buffer: {}",
                        ff_sys::av_error_string(buffer_ret)
                    ),
                });
            }

            // Perform scaling over the full source height (slice y=0..src_height)
            let scale_result = ff_sys::swscale::scale(
                sws_ctx,
                (*src_frame).data.as_ptr() as *const *const u8,
                (*src_frame).linesize.as_ptr(),
                0,
                src_height as i32,
                (*dst_frame).data.as_ptr() as *const *mut u8,
                (*dst_frame).linesize.as_ptr(),
            );

            if let Err(e) = scale_result {
                // Clean up context if not cached
                if !is_cached {
                    ff_sys::swscale::free_context(sws_ctx);
                }
                return Err(DecodeError::Ffmpeg {
                    code: 0,
                    message: format!("Failed to scale frame: {e}"),
                });
            }

            // Scaling successful - cache the context if it's new. From here on
            // the cache owns sws_ctx, so a failure below no longer leaks it.
            if !is_cached {
                self.thumbnail_sws_ctx = Some(sws_ctx);
                self.thumbnail_cache_key = Some(cache_key);
            }

            // Copy timestamp
            (*dst_frame).pts = frame.timestamp().pts();

            // Convert destination frame to VideoFrame
            let video_frame = self.av_frame_to_video_frame(dst_frame)?;

            Ok(video_frame)
        }
    }
2073
2074    // ── Reconnect helpers ─────────────────────────────────────────────────────
2075
2076    /// Attempts to reconnect to the stream URL using exponential backoff.
2077    ///
2078    /// Called from `decode_one()` when `StreamInterrupted` is received and
2079    /// `NetworkOptions::reconnect_on_error` is `true`. After all attempts fail,
2080    /// returns a `StreamInterrupted` error.
2081    fn attempt_reconnect(&mut self) -> Result<(), DecodeError> {
2082        let url = match self.url.as_deref() {
2083            Some(u) => u.to_owned(),
2084            None => return Ok(()), // file-path source: no reconnect
2085        };
2086        let max = self.network_opts.max_reconnect_attempts;
2087
2088        for attempt in 1..=max {
2089            let backoff_ms = 100u64 * (1u64 << (attempt - 1).min(10));
2090            log::warn!(
2091                "reconnecting attempt={attempt} url={} backoff_ms={backoff_ms}",
2092                crate::network::sanitize_url(&url)
2093            );
2094            std::thread::sleep(Duration::from_millis(backoff_ms));
2095            match self.reopen(&url) {
2096                Ok(()) => {
2097                    self.reconnect_count += 1;
2098                    log::info!(
2099                        "reconnected attempt={attempt} url={} total_reconnects={}",
2100                        crate::network::sanitize_url(&url),
2101                        self.reconnect_count
2102                    );
2103                    return Ok(());
2104                }
2105                Err(e) => log::warn!("reconnect attempt={attempt} failed err={e}"),
2106            }
2107        }
2108
2109        Err(DecodeError::StreamInterrupted {
2110            code: 0,
2111            endpoint: crate::network::sanitize_url(&url),
2112            message: format!("stream did not recover after {max} attempts"),
2113        })
2114    }
2115
    /// Closes the current `AVFormatContext`, re-opens the URL, re-reads stream info,
    /// re-finds the video stream, and flushes the codec.
    ///
    /// NOTE(review): the existing codec_ctx is reused without re-negotiation —
    /// this assumes the re-opened stream has the same codec parameters as
    /// before the interruption; confirm for sources that can switch encodings.
    fn reopen(&mut self, url: &str) -> Result<(), DecodeError> {
        // Close the current format context. `avformat_close_input` sets the pointer
        // to null — this matches the null check in Drop so no double-free occurs.
        // SAFETY: self.format_ctx is valid and owned exclusively by self.
        unsafe {
            ff_sys::avformat::close_input(std::ptr::addr_of_mut!(self.format_ctx));
        }

        // Re-open the URL with the stored network timeouts.
        // SAFETY: url is a valid UTF-8 network URL string.
        self.format_ctx = unsafe {
            ff_sys::avformat::open_input_url(
                url,
                self.network_opts.connect_timeout,
                self.network_opts.read_timeout,
            )
            .map_err(|e| crate::network::map_network_error(e, crate::network::sanitize_url(url)))?
        };

        // Re-read stream information.
        // SAFETY: self.format_ctx is valid and freshly opened.
        unsafe {
            ff_sys::avformat::find_stream_info(self.format_ctx).map_err(|e| {
                DecodeError::Ffmpeg {
                    code: e,
                    message: format!(
                        "reconnect find_stream_info failed: {}",
                        ff_sys::av_error_string(e)
                    ),
                }
            })?;
        }

        // Re-find the video stream (index may differ in theory after reconnect).
        // SAFETY: self.format_ctx is valid.
        let (stream_index, _) = unsafe { Self::find_video_stream(self.format_ctx) }
            .ok_or_else(|| DecodeError::NoVideoStream { path: url.into() })?;
        self.stream_index = stream_index as i32;

        // Flush codec buffers to discard stale decoded state from before the drop.
        // SAFETY: self.codec_ctx is valid and has not been freed.
        unsafe {
            ff_sys::avcodec::flush_buffers(self.codec_ctx);
        }

        // Allow decoding to resume from the reconnected stream.
        self.eof = false;
        Ok(())
    }
2166}
2167
impl Drop for VideoDecoderInner {
    /// Releases all FFmpeg resources owned by the decoder.
    ///
    /// Teardown order: scaling contexts and the hardware device buffer first,
    /// then frame/packet scratch buffers, then the codec context, and the
    /// format context last. Null checks guard against partially-constructed
    /// instances (and against format_ctx already nulled by a failed reopen).
    ///
    /// NOTE(review): each free call is handed `&mut (ptr as *mut _)` — a
    /// temporary copy — so the struct fields themselves are not nulled. That
    /// is fine here because nothing reads them after Drop, but the pattern
    /// would be unsound anywhere else.
    fn drop(&mut self) {
        // Free SwScale context if allocated
        if let Some(sws_ctx) = self.sws_ctx {
            // SAFETY: sws_ctx is valid and owned by this instance
            unsafe {
                ff_sys::swscale::free_context(sws_ctx);
            }
        }

        // Free cached thumbnail SwScale context if allocated
        if let Some(thumbnail_ctx) = self.thumbnail_sws_ctx {
            // SAFETY: thumbnail_ctx is valid and owned by this instance
            unsafe {
                ff_sys::swscale::free_context(thumbnail_ctx);
            }
        }

        // Free hardware device context if allocated (drops our reference;
        // FFmpeg frees the device once the refcount reaches zero)
        if let Some(hw_ctx) = self.hw_device_ctx {
            // SAFETY: hw_ctx is valid and owned by this instance
            unsafe {
                ff_sys::av_buffer_unref(&mut (hw_ctx as *mut _));
            }
        }

        // Free frame and packet
        if !self.frame.is_null() {
            // SAFETY: self.frame is valid and owned by this instance
            unsafe {
                ff_sys::av_frame_free(&mut (self.frame as *mut _));
            }
        }

        if !self.packet.is_null() {
            // SAFETY: self.packet is valid and owned by this instance
            unsafe {
                ff_sys::av_packet_free(&mut (self.packet as *mut _));
            }
        }

        // Free codec context (before the format context it was opened from)
        if !self.codec_ctx.is_null() {
            // SAFETY: self.codec_ctx is valid and owned by this instance
            unsafe {
                ff_sys::avcodec::free_context(&mut (self.codec_ctx as *mut _));
            }
        }

        // Close format context
        if !self.format_ctx.is_null() {
            // SAFETY: self.format_ctx is valid and owned by this instance
            unsafe {
                ff_sys::avformat::close_input(&mut (self.format_ctx as *mut _));
            }
        }
    }
}
2226
// SAFETY: VideoDecoderInner holds raw FFmpeg context pointers that are not
// shared: the struct is moved wholesale between threads, so at any moment at
// most one thread can call into FFmpeg through it. FFmpeg contexts tolerate
// single-threaded use from any thread, which makes transferring ownership
// (Send) sound. No `Sync` impl appears in this file, so shared `&self` access
// across threads is presumably not intended — confirm before adding one.
unsafe impl Send for VideoDecoderInner {}
2230
#[cfg(test)]
mod tests {
    use ff_format::PixelFormat;
    use ff_format::codec::VideoCodec;
    use ff_format::color::{ColorPrimaries, ColorRange, ColorSpace};

    use crate::HardwareAccel;

    use super::VideoDecoderInner;

    // Small assertion helpers so each mapping test reads as one declarative
    // line: "this FFmpeg constant maps to this domain value".

    /// Asserts `convert_pixel_format(av) == want`.
    fn expect_pix(av: ff_sys::AVPixelFormat, want: PixelFormat) {
        assert_eq!(VideoDecoderInner::convert_pixel_format(av), want);
    }

    /// Asserts `convert_color_space(av) == want`.
    fn expect_space(av: ff_sys::AVColorSpace, want: ColorSpace) {
        assert_eq!(VideoDecoderInner::convert_color_space(av), want);
    }

    /// Asserts `convert_color_range(av) == want`.
    fn expect_range(av: ff_sys::AVColorRange, want: ColorRange) {
        assert_eq!(VideoDecoderInner::convert_color_range(av), want);
    }

    /// Asserts `convert_color_primaries(av) == want`.
    fn expect_primaries(av: ff_sys::AVColorPrimaries, want: ColorPrimaries) {
        assert_eq!(VideoDecoderInner::convert_color_primaries(av), want);
    }

    /// Asserts `convert_codec(id) == want`.
    fn expect_codec(id: ff_sys::AVCodecID, want: VideoCodec) {
        assert_eq!(VideoDecoderInner::convert_codec(id), want);
    }

    /// Asserts `hw_accel_to_device_type(accel) == want`.
    fn expect_device(accel: HardwareAccel, want: Option<ff_sys::AVHWDeviceType>) {
        assert_eq!(VideoDecoderInner::hw_accel_to_device_type(accel), want);
    }

    /// Converts `src` to an AV pixel format and back, asserting we land on `want`.
    fn round_trip(src: PixelFormat, want: PixelFormat) {
        let av = VideoDecoderInner::pixel_format_to_av(src);
        assert_eq!(VideoDecoderInner::convert_pixel_format(av), want);
    }

    // -------------------------------------------------------------------------
    // convert_pixel_format
    // -------------------------------------------------------------------------

    #[test]
    fn pixel_format_yuv420p() {
        expect_pix(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P, PixelFormat::Yuv420p);
    }

    #[test]
    fn pixel_format_yuv422p() {
        expect_pix(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P, PixelFormat::Yuv422p);
    }

    #[test]
    fn pixel_format_yuv444p() {
        expect_pix(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P, PixelFormat::Yuv444p);
    }

    #[test]
    fn pixel_format_rgb24() {
        expect_pix(ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24, PixelFormat::Rgb24);
    }

    #[test]
    fn pixel_format_bgr24() {
        expect_pix(ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24, PixelFormat::Bgr24);
    }

    #[test]
    fn pixel_format_rgba() {
        expect_pix(ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA, PixelFormat::Rgba);
    }

    #[test]
    fn pixel_format_bgra() {
        expect_pix(ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA, PixelFormat::Bgra);
    }

    #[test]
    fn pixel_format_gray8() {
        expect_pix(ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8, PixelFormat::Gray8);
    }

    #[test]
    fn pixel_format_nv12() {
        expect_pix(ff_sys::AVPixelFormat_AV_PIX_FMT_NV12, PixelFormat::Nv12);
    }

    #[test]
    fn pixel_format_nv21() {
        expect_pix(ff_sys::AVPixelFormat_AV_PIX_FMT_NV21, PixelFormat::Nv21);
    }

    #[test]
    fn pixel_format_yuv420p10le_should_return_yuv420p10le() {
        expect_pix(
            ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P10LE,
            PixelFormat::Yuv420p10le,
        );
    }

    #[test]
    fn pixel_format_yuv422p10le_should_return_yuv422p10le() {
        expect_pix(
            ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P10LE,
            PixelFormat::Yuv422p10le,
        );
    }

    #[test]
    fn pixel_format_yuv444p10le_should_return_yuv444p10le() {
        expect_pix(
            ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P10LE,
            PixelFormat::Yuv444p10le,
        );
    }

    #[test]
    fn pixel_format_p010le_should_return_p010le() {
        expect_pix(ff_sys::AVPixelFormat_AV_PIX_FMT_P010LE, PixelFormat::P010le);
    }

    #[test]
    fn pixel_format_unknown_falls_back_to_yuv420p() {
        expect_pix(ff_sys::AVPixelFormat_AV_PIX_FMT_NONE, PixelFormat::Yuv420p);
    }

    // -------------------------------------------------------------------------
    // convert_color_space
    // -------------------------------------------------------------------------

    #[test]
    fn color_space_bt709() {
        expect_space(ff_sys::AVColorSpace_AVCOL_SPC_BT709, ColorSpace::Bt709);
    }

    #[test]
    fn color_space_bt470bg_yields_bt601() {
        expect_space(ff_sys::AVColorSpace_AVCOL_SPC_BT470BG, ColorSpace::Bt601);
    }

    #[test]
    fn color_space_smpte170m_yields_bt601() {
        expect_space(ff_sys::AVColorSpace_AVCOL_SPC_SMPTE170M, ColorSpace::Bt601);
    }

    #[test]
    fn color_space_bt2020_ncl() {
        expect_space(ff_sys::AVColorSpace_AVCOL_SPC_BT2020_NCL, ColorSpace::Bt2020);
    }

    #[test]
    fn color_space_unknown_falls_back_to_bt709() {
        expect_space(ff_sys::AVColorSpace_AVCOL_SPC_UNSPECIFIED, ColorSpace::Bt709);
    }

    // -------------------------------------------------------------------------
    // convert_color_range
    // -------------------------------------------------------------------------

    #[test]
    fn color_range_jpeg_yields_full() {
        expect_range(ff_sys::AVColorRange_AVCOL_RANGE_JPEG, ColorRange::Full);
    }

    #[test]
    fn color_range_mpeg_yields_limited() {
        expect_range(ff_sys::AVColorRange_AVCOL_RANGE_MPEG, ColorRange::Limited);
    }

    #[test]
    fn color_range_unknown_falls_back_to_limited() {
        expect_range(
            ff_sys::AVColorRange_AVCOL_RANGE_UNSPECIFIED,
            ColorRange::Limited,
        );
    }

    // -------------------------------------------------------------------------
    // convert_color_primaries
    // -------------------------------------------------------------------------

    #[test]
    fn color_primaries_bt709() {
        expect_primaries(
            ff_sys::AVColorPrimaries_AVCOL_PRI_BT709,
            ColorPrimaries::Bt709,
        );
    }

    #[test]
    fn color_primaries_bt470bg_yields_bt601() {
        expect_primaries(
            ff_sys::AVColorPrimaries_AVCOL_PRI_BT470BG,
            ColorPrimaries::Bt601,
        );
    }

    #[test]
    fn color_primaries_smpte170m_yields_bt601() {
        expect_primaries(
            ff_sys::AVColorPrimaries_AVCOL_PRI_SMPTE170M,
            ColorPrimaries::Bt601,
        );
    }

    #[test]
    fn color_primaries_bt2020() {
        expect_primaries(
            ff_sys::AVColorPrimaries_AVCOL_PRI_BT2020,
            ColorPrimaries::Bt2020,
        );
    }

    #[test]
    fn color_primaries_unknown_falls_back_to_bt709() {
        expect_primaries(
            ff_sys::AVColorPrimaries_AVCOL_PRI_UNSPECIFIED,
            ColorPrimaries::Bt709,
        );
    }

    // -------------------------------------------------------------------------
    // convert_codec
    // -------------------------------------------------------------------------

    #[test]
    fn codec_h264() {
        expect_codec(ff_sys::AVCodecID_AV_CODEC_ID_H264, VideoCodec::H264);
    }

    #[test]
    fn codec_hevc_yields_h265() {
        expect_codec(ff_sys::AVCodecID_AV_CODEC_ID_HEVC, VideoCodec::H265);
    }

    #[test]
    fn codec_vp8() {
        expect_codec(ff_sys::AVCodecID_AV_CODEC_ID_VP8, VideoCodec::Vp8);
    }

    #[test]
    fn codec_vp9() {
        expect_codec(ff_sys::AVCodecID_AV_CODEC_ID_VP9, VideoCodec::Vp9);
    }

    #[test]
    fn codec_av1() {
        expect_codec(ff_sys::AVCodecID_AV_CODEC_ID_AV1, VideoCodec::Av1);
    }

    #[test]
    fn codec_mpeg4() {
        expect_codec(ff_sys::AVCodecID_AV_CODEC_ID_MPEG4, VideoCodec::Mpeg4);
    }

    #[test]
    fn codec_prores() {
        expect_codec(ff_sys::AVCodecID_AV_CODEC_ID_PRORES, VideoCodec::ProRes);
    }

    #[test]
    fn codec_unknown_falls_back_to_h264() {
        expect_codec(ff_sys::AVCodecID_AV_CODEC_ID_NONE, VideoCodec::H264);
    }

    // -------------------------------------------------------------------------
    // hw_accel_to_device_type
    // -------------------------------------------------------------------------

    #[test]
    fn hw_accel_auto_yields_none() {
        expect_device(HardwareAccel::Auto, None);
    }

    #[test]
    fn hw_accel_none_yields_none() {
        expect_device(HardwareAccel::None, None);
    }

    #[test]
    fn hw_accel_nvdec_yields_cuda() {
        expect_device(
            HardwareAccel::Nvdec,
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_CUDA),
        );
    }

    #[test]
    fn hw_accel_qsv_yields_qsv() {
        expect_device(
            HardwareAccel::Qsv,
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_QSV),
        );
    }

    #[test]
    fn hw_accel_amf_yields_d3d11va() {
        expect_device(
            HardwareAccel::Amf,
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_D3D11VA),
        );
    }

    #[test]
    fn hw_accel_videotoolbox() {
        expect_device(
            HardwareAccel::VideoToolbox,
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VIDEOTOOLBOX),
        );
    }

    #[test]
    fn hw_accel_vaapi() {
        expect_device(
            HardwareAccel::Vaapi,
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VAAPI),
        );
    }

    // -------------------------------------------------------------------------
    // pixel_format_to_av — round-trip
    // -------------------------------------------------------------------------

    #[test]
    fn pixel_format_to_av_round_trip_yuv420p() {
        round_trip(PixelFormat::Yuv420p, PixelFormat::Yuv420p);
    }

    #[test]
    fn pixel_format_to_av_round_trip_yuv422p() {
        round_trip(PixelFormat::Yuv422p, PixelFormat::Yuv422p);
    }

    #[test]
    fn pixel_format_to_av_round_trip_yuv444p() {
        round_trip(PixelFormat::Yuv444p, PixelFormat::Yuv444p);
    }

    #[test]
    fn pixel_format_to_av_round_trip_rgb24() {
        round_trip(PixelFormat::Rgb24, PixelFormat::Rgb24);
    }

    #[test]
    fn pixel_format_to_av_round_trip_bgr24() {
        round_trip(PixelFormat::Bgr24, PixelFormat::Bgr24);
    }

    #[test]
    fn pixel_format_to_av_round_trip_rgba() {
        round_trip(PixelFormat::Rgba, PixelFormat::Rgba);
    }

    #[test]
    fn pixel_format_to_av_round_trip_bgra() {
        round_trip(PixelFormat::Bgra, PixelFormat::Bgra);
    }

    #[test]
    fn pixel_format_to_av_round_trip_gray8() {
        round_trip(PixelFormat::Gray8, PixelFormat::Gray8);
    }

    #[test]
    fn pixel_format_to_av_round_trip_nv12() {
        round_trip(PixelFormat::Nv12, PixelFormat::Nv12);
    }

    #[test]
    fn pixel_format_to_av_round_trip_nv21() {
        round_trip(PixelFormat::Nv21, PixelFormat::Nv21);
    }

    #[test]
    fn pixel_format_to_av_unknown_falls_back_to_yuv420p_av() {
        // Other(999) has no explicit mapping and hits the _ fallback arm.
        assert_eq!(
            VideoDecoderInner::pixel_format_to_av(PixelFormat::Other(999)),
            ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P
        );
    }

    // -------------------------------------------------------------------------
    // extract_codec_name
    // -------------------------------------------------------------------------

    #[test]
    fn codec_name_should_return_h264_for_h264_codec_id() {
        // SAFETY: called with a valid AVCodecID constant, matching the
        // function's expected input.
        let resolved =
            unsafe { VideoDecoderInner::extract_codec_name(ff_sys::AVCodecID_AV_CODEC_ID_H264) };
        assert_eq!(resolved, "h264");
    }

    #[test]
    fn codec_name_should_return_none_for_none_codec_id() {
        // SAFETY: called with a valid AVCodecID constant, matching the
        // function's expected input.
        let resolved =
            unsafe { VideoDecoderInner::extract_codec_name(ff_sys::AVCodecID_AV_CODEC_ID_NONE) };
        assert_eq!(resolved, "none");
    }

    #[test]
    fn convert_pixel_format_should_map_gbrpf32le() {
        expect_pix(
            ff_sys::AVPixelFormat_AV_PIX_FMT_GBRPF32LE,
            PixelFormat::Gbrpf32le,
        );
    }

    #[test]
    fn unsupported_codec_error_should_include_codec_name() {
        let codec_id = ff_sys::AVCodecID_AV_CODEC_ID_H264;
        // SAFETY: called with a valid AVCodecID constant, matching the
        // function's expected input.
        let codec_name = unsafe { VideoDecoderInner::extract_codec_name(codec_id) };
        let error = crate::error::DecodeError::UnsupportedCodec {
            codec: format!("{codec_name} (codec_id={codec_id:?})"),
        };
        let msg = error.to_string();
        assert!(msg.contains("h264"), "expected codec name in error: {msg}");
        assert!(
            msg.contains("codec_id="),
            "expected codec_id in error: {msg}"
        );
    }
}