1#![allow(unsafe_code)]
8#![allow(clippy::similar_names)]
10#![allow(clippy::too_many_lines)]
11#![allow(clippy::cast_sign_loss)]
12#![allow(clippy::cast_possible_truncation)]
13#![allow(clippy::cast_possible_wrap)]
14#![allow(clippy::module_name_repetitions)]
15#![allow(clippy::match_same_arms)]
16#![allow(clippy::ptr_as_ptr)]
17#![allow(clippy::doc_markdown)]
18#![allow(clippy::unnecessary_cast)]
19#![allow(clippy::if_not_else)]
20#![allow(clippy::unnecessary_wraps)]
21#![allow(clippy::cast_precision_loss)]
22#![allow(clippy::if_same_then_else)]
23#![allow(clippy::cast_lossless)]
24
25use std::ffi::CStr;
26use std::path::Path;
27use std::ptr;
28use std::sync::Arc;
29use std::time::Duration;
30
31use ff_format::NetworkOptions;
32
33use ff_format::PooledBuffer;
34use ff_format::codec::VideoCodec;
35use ff_format::color::{ColorPrimaries, ColorRange, ColorSpace};
36use ff_format::container::ContainerInfo;
37use ff_format::time::{Rational, Timestamp};
38use ff_format::{PixelFormat, VideoFrame, VideoStreamInfo};
39use ff_sys::{
40 AVBufferRef, AVCodecContext, AVCodecID, AVColorPrimaries, AVColorRange, AVColorSpace,
41 AVFormatContext, AVFrame, AVHWDeviceType, AVMediaType_AVMEDIA_TYPE_VIDEO, AVPacket,
42 AVPixelFormat, SwsContext,
43};
44
45use crate::HardwareAccel;
46use crate::error::DecodeError;
47use crate::video::builder::OutputScale;
48use ff_common::FramePool;
49
/// Tolerance window (seconds) applied when a seek lands on a keyframe near,
/// but not exactly at, the requested position. Consumed by seek logic outside
/// this chunk — TODO confirm exact usage at the call site.
const KEYFRAME_SEEK_TOLERANCE_SECS: u64 = 1;
56
/// RAII guard owning a raw `AVFormatContext`.
///
/// Closes the input on drop unless ownership is released via `into_raw`.
struct AvFormatContextGuard(*mut AVFormatContext);

impl AvFormatContextGuard {
    /// Opens a local media file and wraps the resulting format context.
    ///
    /// # Safety
    /// FFmpeg must have been initialized before calling.
    unsafe fn new(path: &Path) -> Result<Self, DecodeError> {
        let format_ctx = unsafe {
            ff_sys::avformat::open_input(path).map_err(|e| DecodeError::Ffmpeg {
                code: e,
                message: format!("Failed to open file: {}", ff_sys::av_error_string(e)),
            })?
        };
        Ok(Self(format_ctx))
    }

    /// Returns the raw pointer without transferring ownership.
    const fn as_ptr(&self) -> *mut AVFormatContext {
        self.0
    }

    /// Releases ownership; the caller becomes responsible for closing the
    /// input. `mem::forget` suppresses this guard's Drop.
    fn into_raw(self) -> *mut AVFormatContext {
        let ptr = self.0;
        std::mem::forget(self);
        ptr
    }

    /// Opens a printf-style image sequence (e.g. `frame_%04d.png`) at the
    /// given frame rate.
    ///
    /// # Safety
    /// FFmpeg must have been initialized before calling.
    unsafe fn new_image_sequence(path: &Path, framerate: u32) -> Result<Self, DecodeError> {
        let format_ctx = unsafe {
            ff_sys::avformat::open_input_image_sequence(path, framerate).map_err(|e| {
                DecodeError::Ffmpeg {
                    code: e,
                    message: format!(
                        "Failed to open image sequence: {}",
                        ff_sys::av_error_string(e)
                    ),
                }
            })?
        };
        Ok(Self(format_ctx))
    }

    /// Opens a network URL with the configured connect/read timeouts; errors
    /// are mapped to network-specific variants with the URL sanitized.
    ///
    /// # Safety
    /// FFmpeg must have been initialized before calling.
    unsafe fn new_url(url: &str, network: &NetworkOptions) -> Result<Self, DecodeError> {
        let format_ctx = unsafe {
            ff_sys::avformat::open_input_url(url, network.connect_timeout, network.read_timeout)
                .map_err(|e| {
                    crate::network::map_network_error(e, crate::network::sanitize_url(url))
                })?
        };
        Ok(Self(format_ctx))
    }
}

impl Drop for AvFormatContextGuard {
    fn drop(&mut self) {
        if !self.0.is_null() {
            unsafe {
                // close_input nulls the temporary copy of the pointer, not
                // self.0 itself; safe because Drop runs at most once.
                ff_sys::avformat::close_input(&mut (self.0 as *mut _));
            }
        }
    }
}
137
/// RAII guard owning a raw `AVCodecContext`.
///
/// Frees the context on drop unless ownership is released via `into_raw`.
struct AvCodecContextGuard(*mut AVCodecContext);

impl AvCodecContextGuard {
    /// Allocates a codec context for `codec`.
    ///
    /// # Safety
    /// `codec` must be a valid decoder pointer (or null, per FFmpeg rules).
    unsafe fn new(codec: *const ff_sys::AVCodec) -> Result<Self, DecodeError> {
        let codec_ctx = unsafe {
            ff_sys::avcodec::alloc_context3(codec).map_err(|e| DecodeError::Ffmpeg {
                code: e,
                message: format!("Failed to allocate codec context: {e}"),
            })?
        };
        Ok(Self(codec_ctx))
    }

    /// Returns the raw pointer without transferring ownership.
    const fn as_ptr(&self) -> *mut AVCodecContext {
        self.0
    }

    /// Releases ownership; the caller must free the context.
    fn into_raw(self) -> *mut AVCodecContext {
        let ptr = self.0;
        std::mem::forget(self);
        ptr
    }
}

impl Drop for AvCodecContextGuard {
    fn drop(&mut self) {
        if !self.0.is_null() {
            unsafe {
                // free_context nulls the temporary copy, not self.0; fine
                // because Drop runs at most once.
                ff_sys::avcodec::free_context(&mut (self.0 as *mut _));
            }
        }
    }
}
181
/// RAII guard owning a raw `AVPacket`.
///
/// Frees the packet on drop unless ownership is released via `into_raw`.
struct AvPacketGuard(*mut AVPacket);

impl AvPacketGuard {
    /// Allocates an empty packet.
    ///
    /// # Safety
    /// FFmpeg must have been initialized before calling.
    unsafe fn new() -> Result<Self, DecodeError> {
        let packet = unsafe { ff_sys::av_packet_alloc() };
        if packet.is_null() {
            // av_packet_alloc returns null only on allocation failure and
            // provides no error code, hence code: 0.
            return Err(DecodeError::Ffmpeg {
                code: 0,
                message: "Failed to allocate packet".to_string(),
            });
        }
        Ok(Self(packet))
    }

    /// Returns the raw pointer without transferring ownership.
    #[allow(dead_code)]
    const fn as_ptr(&self) -> *mut AVPacket {
        self.0
    }

    /// Releases ownership; the caller must free the packet.
    fn into_raw(self) -> *mut AVPacket {
        let ptr = self.0;
        std::mem::forget(self);
        ptr
    }
}

impl Drop for AvPacketGuard {
    fn drop(&mut self) {
        if !self.0.is_null() {
            unsafe {
                ff_sys::av_packet_free(&mut (self.0 as *mut _));
            }
        }
    }
}
227
/// RAII guard owning a raw `AVFrame`.
///
/// Frees the frame on drop unless ownership is released via `into_raw`.
struct AvFrameGuard(*mut AVFrame);

impl AvFrameGuard {
    /// Allocates an empty frame (no buffers attached yet).
    ///
    /// # Safety
    /// FFmpeg must have been initialized before calling.
    unsafe fn new() -> Result<Self, DecodeError> {
        let frame = unsafe { ff_sys::av_frame_alloc() };
        if frame.is_null() {
            // No FFmpeg error code is available on allocation failure.
            return Err(DecodeError::Ffmpeg {
                code: 0,
                message: "Failed to allocate frame".to_string(),
            });
        }
        Ok(Self(frame))
    }

    /// Returns the raw pointer without transferring ownership.
    const fn as_ptr(&self) -> *mut AVFrame {
        self.0
    }

    /// Releases ownership; the caller must free the frame.
    fn into_raw(self) -> *mut AVFrame {
        let ptr = self.0;
        std::mem::forget(self);
        ptr
    }
}

impl Drop for AvFrameGuard {
    fn drop(&mut self) {
        if !self.0.is_null() {
            unsafe {
                ff_sys::av_frame_free(&mut (self.0 as *mut _));
            }
        }
    }
}
272
/// Internal decoder state: owns the raw FFmpeg contexts and the scratch
/// packet/frame reused across decode calls. Cleanup presumably happens in a
/// Drop impl outside this chunk — TODO confirm.
pub(crate) struct VideoDecoderInner {
    // Demuxer context (ownership taken from the guard via into_raw).
    format_ctx: *mut AVFormatContext,
    // Decoder context (ownership taken from the guard via into_raw).
    codec_ctx: *mut AVCodecContext,
    // Index of the video stream being decoded within format_ctx.
    stream_index: i32,
    // Lazily-created scaler for output format/size conversion.
    sws_ctx: Option<*mut SwsContext>,
    // (src_w, src_h, src_fmt, dst_w, dst_h, dst_fmt) used to detect when
    // sws_ctx must be rebuilt.
    sws_cache_key: Option<(u32, u32, i32, u32, u32, i32)>,
    // Requested output pixel format; None = keep the source format.
    output_format: Option<PixelFormat>,
    // Requested output scaling policy; None = keep the source dimensions.
    output_scale: Option<OutputScale>,
    // True when the demuxer reports discontinuous timestamps (live-ish source).
    is_live: bool,
    // Set once the decoder is fully drained; decode calls then return None.
    eof: bool,
    // Position of the most recently decoded frame.
    position: Duration,
    // Reusable packet for the demux/decode loop.
    packet: *mut AVPacket,
    // Reusable frame receiving decoder output.
    frame: *mut AVFrame,
    // Separate scaler reserved for thumbnail generation (used outside this chunk).
    thumbnail_sws_ctx: Option<*mut SwsContext>,
    thumbnail_cache_key: Option<(u32, u32, u32, u32, AVPixelFormat)>,
    // Our own reference to the hardware device context, if HW decode is active.
    hw_device_ctx: Option<*mut AVBufferRef>,
    // The acceleration backend actually in use (after Auto resolution/fallback).
    active_hw_accel: HardwareAccel,
    // Optional buffer pool for frame plane allocations.
    frame_pool: Option<Arc<dyn FramePool>>,
    // Original URL when the source is a network stream (used for reconnects).
    url: Option<String>,
    // Network options applied when (re)connecting.
    network_opts: NetworkOptions,
    // Number of reconnect attempts performed so far.
    reconnect_count: u32,
}
319
320impl VideoDecoderInner {
321 fn hw_accel_to_device_type(accel: HardwareAccel) -> Option<AVHWDeviceType> {
325 match accel {
326 HardwareAccel::Auto => None,
327 HardwareAccel::None => None,
328 HardwareAccel::Nvdec => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_CUDA),
329 HardwareAccel::Qsv => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_QSV),
330 HardwareAccel::Amf => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_D3D11VA), HardwareAccel::VideoToolbox => {
332 Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VIDEOTOOLBOX)
333 }
334 HardwareAccel::Vaapi => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VAAPI),
335 }
336 }
337
    /// Probe order used by `HardwareAccel::Auto`: backends are tried in this
    /// sequence and the first that initializes successfully wins.
    const fn hw_accel_auto_priority() -> &'static [HardwareAccel] {
        &[
            HardwareAccel::Nvdec,
            HardwareAccel::Qsv,
            HardwareAccel::VideoToolbox,
            HardwareAccel::Vaapi,
            HardwareAccel::Amf,
        ]
    }
349
    /// Initializes hardware acceleration on `codec_ctx` according to `accel`.
    ///
    /// * `Auto` — probes backends in priority order; falls back to software
    ///   (`None`) if every probe fails.
    /// * `None` — software decoding, no device created.
    /// * anything else — tries exactly that backend and propagates its error.
    ///
    /// Returns our own reference to the device context (if any) plus the
    /// backend actually activated.
    ///
    /// # Safety
    /// `codec_ctx` must be a valid, not-yet-opened codec context.
    unsafe fn init_hardware_accel(
        codec_ctx: *mut AVCodecContext,
        accel: HardwareAccel,
    ) -> Result<(Option<*mut AVBufferRef>, HardwareAccel), DecodeError> {
        match accel {
            HardwareAccel::Auto => {
                for &hw_type in Self::hw_accel_auto_priority() {
                    // Only a fully successful init (device created AND
                    // referenced) is accepted; failures move to the next backend.
                    if let Ok((Some(ctx), active)) =
                        unsafe { Self::try_init_hw_device(codec_ctx, hw_type) }
                    {
                        return Ok((Some(ctx), active));
                    }
                }
                // Every backend failed: decode in software.
                Ok((None, HardwareAccel::None))
            }
            HardwareAccel::None => {
                Ok((None, HardwareAccel::None))
            }
            _ => {
                // Explicit backend request: propagate failure to the caller.
                unsafe { Self::try_init_hw_device(codec_ctx, accel) }
            }
        }
    }
395
396 unsafe fn try_init_hw_device(
402 codec_ctx: *mut AVCodecContext,
403 accel: HardwareAccel,
404 ) -> Result<(Option<*mut AVBufferRef>, HardwareAccel), DecodeError> {
405 let Some(device_type) = Self::hw_accel_to_device_type(accel) else {
407 return Ok((None, HardwareAccel::None));
408 };
409
410 let mut hw_device_ctx: *mut AVBufferRef = ptr::null_mut();
413 let ret = unsafe {
414 ff_sys::av_hwdevice_ctx_create(
415 ptr::addr_of_mut!(hw_device_ctx),
416 device_type,
417 ptr::null(), ptr::null_mut(), 0, )
421 };
422
423 if ret < 0 {
424 return Err(DecodeError::HwAccelUnavailable { accel });
426 }
427
428 unsafe {
432 (*codec_ctx).hw_device_ctx = hw_device_ctx;
433 }
434
435 let our_ref = unsafe { ff_sys::av_buffer_ref(hw_device_ctx) };
438 if our_ref.is_null() {
439 return Err(DecodeError::HwAccelUnavailable { accel });
442 }
443
444 Ok((Some(our_ref), accel))
445 }
446
    /// Returns the hardware acceleration backend actually in use
    /// (`HardwareAccel::None` when decoding fell back to software).
    pub(crate) fn hardware_accel(&self) -> HardwareAccel {
        self.active_hw_accel
    }
451
452 const fn is_hardware_format(format: AVPixelFormat) -> bool {
456 matches!(
457 format,
458 ff_sys::AVPixelFormat_AV_PIX_FMT_D3D11
459 | ff_sys::AVPixelFormat_AV_PIX_FMT_CUDA
460 | ff_sys::AVPixelFormat_AV_PIX_FMT_VAAPI
461 | ff_sys::AVPixelFormat_AV_PIX_FMT_VIDEOTOOLBOX
462 | ff_sys::AVPixelFormat_AV_PIX_FMT_QSV
463 | ff_sys::AVPixelFormat_AV_PIX_FMT_VDPAU
464 | ff_sys::AVPixelFormat_AV_PIX_FMT_DXVA2_VLD
465 | ff_sys::AVPixelFormat_AV_PIX_FMT_OPENCL
466 | ff_sys::AVPixelFormat_AV_PIX_FMT_MEDIACODEC
467 | ff_sys::AVPixelFormat_AV_PIX_FMT_VULKAN
468 )
469 }
470
    /// If `self.frame` holds a GPU-resident frame, copies it into CPU memory
    /// in place (preserving timing metadata); no-op for software frames.
    ///
    /// # Errors
    /// Fails when the transfer frame cannot be allocated or the transfer
    /// itself fails.
    ///
    /// # Safety
    /// `self.frame` must hold a valid decoded frame.
    unsafe fn transfer_hardware_frame_if_needed(&mut self) -> Result<(), DecodeError> {
        let frame_format = unsafe { (*self.frame).format };

        if !Self::is_hardware_format(frame_format) {
            return Ok(());
        }

        let sw_frame = unsafe { ff_sys::av_frame_alloc() };
        if sw_frame.is_null() {
            return Err(DecodeError::Ffmpeg {
                code: 0,
                message: "Failed to allocate software frame for hardware transfer".to_string(),
            });
        }

        // Download the frame data from the device into sw_frame.
        let ret = unsafe {
            ff_sys::av_hwframe_transfer_data(
                sw_frame, self.frame, 0, )
        };

        if ret < 0 {
            // Transfer failed: free the temporary frame before reporting.
            unsafe {
                ff_sys::av_frame_free(&mut (sw_frame as *mut _));
            }
            return Err(DecodeError::Ffmpeg {
                code: ret,
                message: format!(
                    "Failed to transfer hardware frame to CPU memory: {}",
                    ff_sys::av_error_string(ret)
                ),
            });
        }

        // av_hwframe_transfer_data does not copy timing fields; carry them over.
        unsafe {
            (*sw_frame).pts = (*self.frame).pts;
            (*sw_frame).pkt_dts = (*self.frame).pkt_dts;
            (*sw_frame).duration = (*self.frame).duration;
            (*sw_frame).time_base = (*self.frame).time_base;
        }

        // Replace self.frame's contents with the CPU copy: unref the GPU
        // buffers, move sw_frame's buffers in, then free the (now empty) shell.
        unsafe {
            ff_sys::av_frame_unref(self.frame);
            ff_sys::av_frame_move_ref(self.frame, sw_frame);
            ff_sys::av_frame_free(&mut (sw_frame as *mut _));
        }

        Ok(())
    }
539
    /// Opens `path` — a local file, a printf-style image sequence (path
    /// contains `%`), or a network URL — and builds the decoder together with
    /// the video stream and container metadata.
    ///
    /// RAII guards own every FFmpeg object until construction is complete;
    /// only then is ownership moved into `Self` via `into_raw`, so any early
    /// `?` return cleans up automatically.
    ///
    /// # Errors
    /// Fails when the source cannot be opened, no video stream exists, the
    /// codec is unavailable, or FFmpeg setup calls fail.
    #[allow(clippy::too_many_arguments)]
    pub(crate) fn new(
        path: &Path,
        output_format: Option<PixelFormat>,
        output_scale: Option<OutputScale>,
        hardware_accel: HardwareAccel,
        thread_count: usize,
        frame_rate: Option<u32>,
        frame_pool: Option<Arc<dyn FramePool>>,
        network_opts: Option<NetworkOptions>,
    ) -> Result<(Self, VideoStreamInfo, ContainerInfo), DecodeError> {
        ff_sys::ensure_initialized();

        // Classify the source by its path shape.
        let path_str = path.to_str().unwrap_or("");
        let is_image_sequence = path_str.contains('%');
        let is_network_url = crate::network::is_url(path_str);

        let url = if is_network_url {
            Some(path_str.to_owned())
        } else {
            None
        };
        // Keep a copy for reconnects; network_opts itself is consumed below.
        let stored_network_opts = network_opts.clone().unwrap_or_default();

        if is_network_url {
            crate::network::check_srt_url(path_str)?;
        }

        let format_ctx_guard = unsafe {
            if is_network_url {
                let network = network_opts.unwrap_or_default();
                log::info!(
                    "opening network source url={} connect_timeout_ms={} read_timeout_ms={}",
                    crate::network::sanitize_url(path_str),
                    network.connect_timeout.as_millis(),
                    network.read_timeout.as_millis(),
                );
                AvFormatContextGuard::new_url(path_str, &network)?
            } else if is_image_sequence {
                // Default image-sequence rate when none was requested.
                let fps = frame_rate.unwrap_or(25);
                AvFormatContextGuard::new_image_sequence(path, fps)?
            } else {
                AvFormatContextGuard::new(path)?
            }
        };
        let format_ctx = format_ctx_guard.as_ptr();

        unsafe {
            ff_sys::avformat::find_stream_info(format_ctx).map_err(|e| DecodeError::Ffmpeg {
                code: e,
                message: format!("Failed to find stream info: {}", ff_sys::av_error_string(e)),
            })?;
        }

        // Treat sources whose demuxer allows timestamp discontinuities as
        // live-ish (affects behavior elsewhere — TODO confirm consumers).
        let is_live = unsafe {
            let iformat = (*format_ctx).iformat;
            !iformat.is_null() && ((*iformat).flags & ff_sys::AVFMT_TS_DISCONT) != 0
        };

        let (stream_index, codec_id) =
            unsafe { Self::find_video_stream(format_ctx) }.ok_or_else(|| {
                DecodeError::NoVideoStream {
                    path: path.to_path_buf(),
                }
            })?;

        let codec_name = unsafe { Self::extract_codec_name(codec_id) };
        let codec = unsafe {
            ff_sys::avcodec::find_decoder(codec_id).ok_or_else(|| {
                // EXR gets a dedicated error with a build hint.
                if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_EXR {
                    DecodeError::DecoderUnavailable {
                        codec: "exr".to_string(),
                        hint: "Requires FFmpeg built with EXR support \
                            (--enable-decoder=exr)"
                            .to_string(),
                    }
                } else {
                    DecodeError::UnsupportedCodec {
                        codec: format!("{codec_name} (codec_id={codec_id:?})"),
                    }
                }
            })?
        };

        let codec_ctx_guard = unsafe { AvCodecContextGuard::new(codec)? };
        let codec_ctx = codec_ctx_guard.as_ptr();

        unsafe {
            // Copy the stream's codec parameters into the decoder context.
            let stream = (*format_ctx).streams.add(stream_index as usize);
            let codecpar = (*(*stream)).codecpar;
            ff_sys::avcodec::parameters_to_context(codec_ctx, codecpar).map_err(|e| {
                DecodeError::Ffmpeg {
                    code: e,
                    message: format!(
                        "Failed to copy codec parameters: {}",
                        ff_sys::av_error_string(e)
                    ),
                }
            })?;

            // 0 means "let FFmpeg pick" — only override when explicit.
            if thread_count > 0 {
                (*codec_ctx).thread_count = thread_count as i32;
            }
        }

        // Must run before open2 so the codec opens with the device attached.
        let (hw_device_ctx, active_hw_accel) =
            unsafe { Self::init_hardware_accel(codec_ctx, hardware_accel)? };

        unsafe {
            ff_sys::avcodec::open2(codec_ctx, codec, ptr::null_mut()).map_err(|e| {
                // Release our own device reference on failure; the codec
                // context's reference is freed with the context guard.
                if let Some(hw_ctx) = hw_device_ctx {
                    ff_sys::av_buffer_unref(&mut (hw_ctx as *mut _));
                }
                DecodeError::Ffmpeg {
                    code: e,
                    message: format!("Failed to open codec: {}", ff_sys::av_error_string(e)),
                }
            })?;
        }

        let stream_info =
            unsafe { Self::extract_stream_info(format_ctx, stream_index as i32, codec_ctx)? };

        let container_info = unsafe { Self::extract_container_info(format_ctx) };

        let packet_guard = unsafe { AvPacketGuard::new()? };
        let frame_guard = unsafe { AvFrameGuard::new()? };

        // Everything succeeded: transfer ownership out of the guards.
        Ok((
            Self {
                format_ctx: format_ctx_guard.into_raw(),
                codec_ctx: codec_ctx_guard.into_raw(),
                stream_index: stream_index as i32,
                sws_ctx: None,
                sws_cache_key: None,
                output_format,
                output_scale,
                is_live,
                eof: false,
                position: Duration::ZERO,
                packet: packet_guard.into_raw(),
                frame: frame_guard.into_raw(),
                thumbnail_sws_ctx: None,
                thumbnail_cache_key: None,
                hw_device_ctx,
                active_hw_accel,
                frame_pool,
                url,
                network_opts: stored_network_opts,
                reconnect_count: 0,
            },
            stream_info,
            container_info,
        ))
    }
746
747 unsafe fn find_video_stream(format_ctx: *mut AVFormatContext) -> Option<(usize, AVCodecID)> {
757 unsafe {
759 let nb_streams = (*format_ctx).nb_streams as usize;
760
761 for i in 0..nb_streams {
762 let stream = (*format_ctx).streams.add(i);
763 let codecpar = (*(*stream)).codecpar;
764
765 if (*codecpar).codec_type == AVMediaType_AVMEDIA_TYPE_VIDEO {
766 return Some((i, (*codecpar).codec_id));
767 }
768 }
769
770 None
771 }
772 }
773
774 unsafe fn extract_codec_name(codec_id: ff_sys::AVCodecID) -> String {
776 let name_ptr = unsafe { ff_sys::avcodec_get_name(codec_id) };
778 if name_ptr.is_null() {
779 return String::from("unknown");
780 }
781 unsafe { CStr::from_ptr(name_ptr).to_string_lossy().into_owned() }
783 }
784
    /// Builds a `VideoStreamInfo` from the stream's codec parameters and the
    /// opened codec context.
    ///
    /// Falls back to 30 fps when the stream reports an invalid frame rate,
    /// and omits duration when the container reports none.
    ///
    /// # Safety
    /// `format_ctx` and `codec_ctx` must be valid, and `stream_index` must be
    /// in range.
    unsafe fn extract_stream_info(
        format_ctx: *mut AVFormatContext,
        stream_index: i32,
        codec_ctx: *mut AVCodecContext,
    ) -> Result<VideoStreamInfo, DecodeError> {
        // Read every raw field in one unsafe block, then convert safely below.
        let (
            width,
            height,
            fps_rational,
            duration_val,
            pix_fmt,
            color_space_val,
            color_range_val,
            color_primaries_val,
            codec_id,
        ) = unsafe {
            let stream = (*format_ctx).streams.add(stream_index as usize);
            let codecpar = (*(*stream)).codecpar;

            (
                (*codecpar).width as u32,
                (*codecpar).height as u32,
                (*(*stream)).avg_frame_rate,
                (*format_ctx).duration,
                // pix_fmt comes from the codec context, not codecpar —
                // NOTE(review): presumably to reflect the opened decoder's
                // actual output format; confirm.
                (*codec_ctx).pix_fmt,
                (*codecpar).color_space,
                (*codecpar).color_range,
                (*codecpar).color_primaries,
                (*codecpar).codec_id,
            )
        };

        // A zero denominator means the container did not report a usable rate.
        let frame_rate = if fps_rational.den != 0 {
            Rational::new(fps_rational.num as i32, fps_rational.den as i32)
        } else {
            log::warn!(
                "invalid frame rate, falling back to 30fps num={} den=0 fallback=30/1",
                fps_rational.num
            );
            Rational::new(30, 1)
        };

        // Container duration is in AV_TIME_BASE units (microseconds).
        let duration = if duration_val > 0 {
            let duration_secs = duration_val as f64 / 1_000_000.0;
            Some(Duration::from_secs_f64(duration_secs))
        } else {
            None
        };

        let pixel_format = Self::convert_pixel_format(pix_fmt);

        let color_space = Self::convert_color_space(color_space_val);
        let color_range = Self::convert_color_range(color_range_val);
        let color_primaries = Self::convert_color_primaries(color_primaries_val);

        let codec = Self::convert_codec(codec_id);
        let codec_name = unsafe { Self::extract_codec_name(codec_id) };

        let mut builder = VideoStreamInfo::builder()
            .index(stream_index as u32)
            .codec(codec)
            .codec_name(codec_name)
            .width(width)
            .height(height)
            .frame_rate(frame_rate)
            .pixel_format(pixel_format)
            .color_space(color_space)
            .color_range(color_range)
            .color_primaries(color_primaries);

        if let Some(d) = duration {
            builder = builder.duration(d);
        }

        Ok(builder.build())
    }
869
870 unsafe fn extract_container_info(format_ctx: *mut AVFormatContext) -> ContainerInfo {
876 unsafe {
878 let format_name = if (*format_ctx).iformat.is_null() {
879 String::new()
880 } else {
881 let ptr = (*(*format_ctx).iformat).name;
882 if ptr.is_null() {
883 String::new()
884 } else {
885 CStr::from_ptr(ptr).to_string_lossy().into_owned()
886 }
887 };
888
889 let bit_rate = {
890 let br = (*format_ctx).bit_rate;
891 if br > 0 { Some(br as u64) } else { None }
892 };
893
894 let nb_streams = (*format_ctx).nb_streams as u32;
895
896 let mut builder = ContainerInfo::builder()
897 .format_name(format_name)
898 .nb_streams(nb_streams);
899 if let Some(br) = bit_rate {
900 builder = builder.bit_rate(br);
901 }
902 builder.build()
903 }
904 }
905
906 fn convert_pixel_format(fmt: AVPixelFormat) -> PixelFormat {
908 if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P {
909 PixelFormat::Yuv420p
910 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P {
911 PixelFormat::Yuv422p
912 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P {
913 PixelFormat::Yuv444p
914 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24 {
915 PixelFormat::Rgb24
916 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24 {
917 PixelFormat::Bgr24
918 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA {
919 PixelFormat::Rgba
920 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA {
921 PixelFormat::Bgra
922 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8 {
923 PixelFormat::Gray8
924 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_NV12 {
925 PixelFormat::Nv12
926 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_NV21 {
927 PixelFormat::Nv21
928 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P10LE {
929 PixelFormat::Yuv420p10le
930 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P10LE {
931 PixelFormat::Yuv422p10le
932 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P10LE {
933 PixelFormat::Yuv444p10le
934 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_P010LE {
935 PixelFormat::P010le
936 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_GBRPF32LE {
937 PixelFormat::Gbrpf32le
938 } else {
939 log::warn!(
940 "pixel_format unsupported, falling back to Yuv420p requested={fmt} fallback=Yuv420p"
941 );
942 PixelFormat::Yuv420p
943 }
944 }
945
946 fn convert_color_space(space: AVColorSpace) -> ColorSpace {
948 if space == ff_sys::AVColorSpace_AVCOL_SPC_BT709 {
949 ColorSpace::Bt709
950 } else if space == ff_sys::AVColorSpace_AVCOL_SPC_BT470BG
951 || space == ff_sys::AVColorSpace_AVCOL_SPC_SMPTE170M
952 {
953 ColorSpace::Bt601
954 } else if space == ff_sys::AVColorSpace_AVCOL_SPC_BT2020_NCL {
955 ColorSpace::Bt2020
956 } else {
957 log::warn!(
958 "color_space unsupported, falling back to Bt709 requested={space} fallback=Bt709"
959 );
960 ColorSpace::Bt709
961 }
962 }
963
964 fn convert_color_range(range: AVColorRange) -> ColorRange {
966 if range == ff_sys::AVColorRange_AVCOL_RANGE_JPEG {
967 ColorRange::Full
968 } else if range == ff_sys::AVColorRange_AVCOL_RANGE_MPEG {
969 ColorRange::Limited
970 } else {
971 log::warn!(
972 "color_range unsupported, falling back to Limited requested={range} fallback=Limited"
973 );
974 ColorRange::Limited
975 }
976 }
977
978 fn convert_color_primaries(primaries: AVColorPrimaries) -> ColorPrimaries {
980 if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT709 {
981 ColorPrimaries::Bt709
982 } else if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT470BG
983 || primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_SMPTE170M
984 {
985 ColorPrimaries::Bt601
986 } else if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT2020 {
987 ColorPrimaries::Bt2020
988 } else {
989 log::warn!(
990 "color_primaries unsupported, falling back to Bt709 requested={primaries} fallback=Bt709"
991 );
992 ColorPrimaries::Bt709
993 }
994 }
995
996 fn convert_codec(codec_id: AVCodecID) -> VideoCodec {
998 if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_H264 {
999 VideoCodec::H264
1000 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_HEVC {
1001 VideoCodec::H265
1002 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_VP8 {
1003 VideoCodec::Vp8
1004 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_VP9 {
1005 VideoCodec::Vp9
1006 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_AV1 {
1007 VideoCodec::Av1
1008 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_MPEG4 {
1009 VideoCodec::Mpeg4
1010 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_PRORES {
1011 VideoCodec::ProRes
1012 } else {
1013 log::warn!(
1014 "video codec unsupported, falling back to H264 codec_id={codec_id} fallback=H264"
1015 );
1016 VideoCodec::H264
1017 }
1018 }
1019
    /// Decodes and returns the next frame, transparently reconnecting when a
    /// network source is interrupted and reconnection is enabled.
    ///
    /// Returns `Ok(None)` once the stream is exhausted.
    ///
    /// # Errors
    /// Propagates decode errors, and reconnect failures from
    /// `attempt_reconnect`.
    pub(crate) fn decode_one(&mut self) -> Result<Option<VideoFrame>, DecodeError> {
        loop {
            match self.decode_one_inner() {
                Ok(frame) => return Ok(frame),
                // Retry only for network sources that opted into reconnects;
                // all other interruptions surface to the caller.
                Err(DecodeError::StreamInterrupted { .. })
                    if self.url.is_some() && self.network_opts.reconnect_on_error =>
                {
                    self.attempt_reconnect()?;
                }
                Err(e) => return Err(e),
            }
        }
    }
1043
1044 fn decode_one_inner(&mut self) -> Result<Option<VideoFrame>, DecodeError> {
1045 if self.eof {
1046 return Ok(None);
1047 }
1048
1049 unsafe {
1050 loop {
1051 let ret = ff_sys::avcodec_receive_frame(self.codec_ctx, self.frame);
1053
1054 if ret == 0 {
1055 self.transfer_hardware_frame_if_needed()?;
1058
1059 let video_frame = self.convert_frame_to_video_frame()?;
1060
1061 let pts = (*self.frame).pts;
1063 if pts != ff_sys::AV_NOPTS_VALUE {
1064 let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
1065 let time_base = (*(*stream)).time_base;
1066 let timestamp_secs =
1067 pts as f64 * time_base.num as f64 / time_base.den as f64;
1068 self.position = Duration::from_secs_f64(timestamp_secs);
1069 }
1070
1071 return Ok(Some(video_frame));
1072 } else if ret == ff_sys::error_codes::EAGAIN {
1073 let read_ret = ff_sys::av_read_frame(self.format_ctx, self.packet);
1076
1077 if read_ret == ff_sys::error_codes::EOF {
1078 ff_sys::avcodec_send_packet(self.codec_ctx, ptr::null());
1080 self.eof = true;
1081 continue;
1082 } else if read_ret < 0 {
1083 return Err(if let Some(url) = &self.url {
1084 crate::network::map_network_error(
1086 read_ret,
1087 crate::network::sanitize_url(url),
1088 )
1089 } else {
1090 DecodeError::Ffmpeg {
1091 code: read_ret,
1092 message: format!(
1093 "Failed to read frame: {}",
1094 ff_sys::av_error_string(read_ret)
1095 ),
1096 }
1097 });
1098 }
1099
1100 if (*self.packet).stream_index == self.stream_index {
1102 let send_ret = ff_sys::avcodec_send_packet(self.codec_ctx, self.packet);
1104 ff_sys::av_packet_unref(self.packet);
1105
1106 if send_ret < 0 && send_ret != ff_sys::error_codes::EAGAIN {
1107 return Err(DecodeError::Ffmpeg {
1108 code: send_ret,
1109 message: format!(
1110 "Failed to send packet: {}",
1111 ff_sys::av_error_string(send_ret)
1112 ),
1113 });
1114 }
1115 } else {
1116 ff_sys::av_packet_unref(self.packet);
1118 }
1119 } else if ret == ff_sys::error_codes::EOF {
1120 self.eof = true;
1122 return Ok(None);
1123 } else {
1124 return Err(DecodeError::DecodingFailed {
1125 timestamp: Some(self.position),
1126 reason: ff_sys::av_error_string(ret),
1127 });
1128 }
1129 }
1130 }
1131 }
1132
    /// Converts the decoder's current frame into a `VideoFrame`, applying the
    /// configured output format/scale via swscale only when the source frame
    /// does not already match.
    ///
    /// # Safety
    /// `self.frame` must hold a valid decoded (CPU-resident) frame.
    unsafe fn convert_frame_to_video_frame(&mut self) -> Result<VideoFrame, DecodeError> {
        unsafe {
            let src_width = (*self.frame).width as u32;
            let src_height = (*self.frame).height as u32;
            let src_format = (*self.frame).format;

            // No requested output format means "keep the source format".
            let dst_format = if let Some(fmt) = self.output_format {
                Self::pixel_format_to_av(fmt)
            } else {
                src_format
            };

            let (dst_width, dst_height) = self.resolve_output_dims(src_width, src_height);

            // Skip the swscale pass entirely when nothing changes.
            let needs_conversion =
                src_format != dst_format || dst_width != src_width || dst_height != src_height;

            if needs_conversion {
                self.convert_with_sws(
                    src_width, src_height, src_format, dst_width, dst_height, dst_format,
                )
            } else {
                self.av_frame_to_video_frame(self.frame)
            }
        }
    }
1164
1165 fn resolve_output_dims(&self, src_width: u32, src_height: u32) -> (u32, u32) {
1170 let round_even = |n: u32| (n + 1) & !1;
1171
1172 match self.output_scale {
1173 None => (src_width, src_height),
1174 Some(OutputScale::Exact { width, height }) => (round_even(width), round_even(height)),
1175 Some(OutputScale::FitWidth(target_w)) => {
1176 let target_w = round_even(target_w);
1177 if src_width == 0 {
1178 return (target_w, target_w);
1179 }
1180 let h = (target_w as u64 * src_height as u64 / src_width as u64) as u32;
1181 (target_w, round_even(h.max(2)))
1182 }
1183 Some(OutputScale::FitHeight(target_h)) => {
1184 let target_h = round_even(target_h);
1185 if src_height == 0 {
1186 return (target_h, target_h);
1187 }
1188 let w = (target_h as u64 * src_width as u64 / src_height as u64) as u32;
1189 (round_even(w.max(2)), target_h)
1190 }
1191 }
1192 }
1193
    /// Scales/converts `self.frame` to the requested dimensions and pixel
    /// format via swscale, reusing a cached `SwsContext` when the conversion
    /// parameters have not changed.
    ///
    /// # Safety
    /// `self.frame` must hold a valid decoded CPU frame whose dimensions and
    /// format match the `src_*` arguments.
    unsafe fn convert_with_sws(
        &mut self,
        src_width: u32,
        src_height: u32,
        src_format: i32,
        dst_width: u32,
        dst_height: u32,
        dst_format: i32,
    ) -> Result<VideoFrame, DecodeError> {
        unsafe {
            // Rebuild the scaler only when any conversion parameter changed.
            let cache_key = (
                src_width, src_height, src_format, dst_width, dst_height, dst_format,
            );
            if self.sws_cache_key != Some(cache_key) {
                if let Some(old_ctx) = self.sws_ctx.take() {
                    ff_sys::swscale::free_context(old_ctx);
                }

                let ctx = ff_sys::swscale::get_context(
                    src_width as i32,
                    src_height as i32,
                    src_format,
                    dst_width as i32,
                    dst_height as i32,
                    dst_format,
                    ff_sys::swscale::scale_flags::BILINEAR,
                )
                .map_err(|e| DecodeError::Ffmpeg {
                    code: 0,
                    message: format!("Failed to create sws context: {e}"),
                })?;

                self.sws_ctx = Some(ctx);
                self.sws_cache_key = Some(cache_key);
            }

            // Invariant: sws_ctx was just (re)populated above; this guards
            // against logic errors rather than an expected runtime state.
            let Some(sws_ctx) = self.sws_ctx else {
                return Err(DecodeError::Ffmpeg {
                    code: 0,
                    message: "SwsContext not initialized".to_string(),
                });
            };

            // Guard-owned destination frame: freed automatically on any
            // early return below.
            let dst_frame_guard = AvFrameGuard::new()?;
            let dst_frame = dst_frame_guard.as_ptr();

            (*dst_frame).width = dst_width as i32;
            (*dst_frame).height = dst_height as i32;
            (*dst_frame).format = dst_format;

            let buffer_ret = ff_sys::av_frame_get_buffer(dst_frame, 0);
            if buffer_ret < 0 {
                return Err(DecodeError::Ffmpeg {
                    code: buffer_ret,
                    message: format!(
                        "Failed to allocate frame buffer: {}",
                        ff_sys::av_error_string(buffer_ret)
                    ),
                });
            }

            ff_sys::swscale::scale(
                sws_ctx,
                (*self.frame).data.as_ptr() as *const *const u8,
                (*self.frame).linesize.as_ptr(),
                0,
                src_height as i32,
                (*dst_frame).data.as_ptr() as *const *mut u8,
                (*dst_frame).linesize.as_ptr(),
            )
            .map_err(|e| DecodeError::Ffmpeg {
                code: 0,
                message: format!("Failed to scale frame: {e}"),
            })?;

            // Carry the source PTS over; swscale does not copy metadata.
            (*dst_frame).pts = (*self.frame).pts;

            let video_frame = self.av_frame_to_video_frame(dst_frame)?;

            Ok(video_frame)
        }
    }
1290
    /// Copies an `AVFrame` into an owned `VideoFrame`, deriving the timestamp
    /// from the frame PTS and the stream time base (default timestamp when
    /// the PTS is unset).
    ///
    /// # Safety
    /// `frame` must be a valid CPU frame; `self.format_ctx` and
    /// `self.stream_index` must reference a valid stream.
    unsafe fn av_frame_to_video_frame(
        &self,
        frame: *const AVFrame,
    ) -> Result<VideoFrame, DecodeError> {
        unsafe {
            let width = (*frame).width as u32;
            let height = (*frame).height as u32;
            let format = Self::convert_pixel_format((*frame).format);

            let pts = (*frame).pts;
            let timestamp = if pts != ff_sys::AV_NOPTS_VALUE {
                // Express the PTS in the stream's own time base.
                let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
                let time_base = (*(*stream)).time_base;
                Timestamp::new(
                    pts as i64,
                    Rational::new(time_base.num as i32, time_base.den as i32),
                )
            } else {
                Timestamp::default()
            };

            // Deep-copies the pixel data into (possibly pooled) buffers.
            let (planes, strides) =
                self.extract_planes_and_strides(frame, width, height, format)?;

            VideoFrame::new(planes, strides, width, height, format, timestamp, false).map_err(|e| {
                DecodeError::Ffmpeg {
                    code: 0,
                    message: format!("Failed to create VideoFrame: {e}"),
                }
            })
        }
    }
1327
1328 fn allocate_buffer(&self, size: usize) -> PooledBuffer {
1338 if let Some(ref pool) = self.frame_pool {
1339 if let Some(pooled_buffer) = pool.acquire(size) {
1340 return pooled_buffer;
1341 }
1342 return PooledBuffer::new(vec![0u8; size], Arc::downgrade(pool));
1347 }
1348 PooledBuffer::standalone(vec![0u8; size])
1349 }
1350
1351 unsafe fn extract_planes_and_strides(
1353 &self,
1354 frame: *const AVFrame,
1355 width: u32,
1356 height: u32,
1357 format: PixelFormat,
1358 ) -> Result<(Vec<PooledBuffer>, Vec<usize>), DecodeError> {
1359 const BYTES_PER_PIXEL_RGBA: usize = 4;
1361 const BYTES_PER_PIXEL_RGB24: usize = 3;
1362
1363 unsafe {
1365 let mut planes = Vec::new();
1366 let mut strides = Vec::new();
1367
1368 #[allow(clippy::match_same_arms)]
1369 match format {
1370 PixelFormat::Rgba | PixelFormat::Bgra | PixelFormat::Rgb24 | PixelFormat::Bgr24 => {
1371 let stride = (*frame).linesize[0] as usize;
1373 let bytes_per_pixel = if matches!(format, PixelFormat::Rgba | PixelFormat::Bgra)
1374 {
1375 BYTES_PER_PIXEL_RGBA
1376 } else {
1377 BYTES_PER_PIXEL_RGB24
1378 };
1379 let row_size = (width as usize) * bytes_per_pixel;
1380 let buffer_size = row_size * height as usize;
1381 let mut plane_data = self.allocate_buffer(buffer_size);
1382
1383 for y in 0..height as usize {
1384 let src_offset = y * stride;
1385 let dst_offset = y * row_size;
1386 let src_ptr = (*frame).data[0].add(src_offset);
1387 let plane_slice = plane_data.as_mut();
1388 std::ptr::copy_nonoverlapping(
1392 src_ptr,
1393 plane_slice[dst_offset..].as_mut_ptr(),
1394 row_size,
1395 );
1396 }
1397
1398 planes.push(plane_data);
1399 strides.push(row_size);
1400 }
1401 PixelFormat::Yuv420p | PixelFormat::Yuv422p | PixelFormat::Yuv444p => {
1402 let (chroma_width, chroma_height) = match format {
1404 PixelFormat::Yuv420p => (width / 2, height / 2),
1405 PixelFormat::Yuv422p => (width / 2, height),
1406 PixelFormat::Yuv444p => (width, height),
1407 _ => unreachable!(),
1408 };
1409
1410 let y_stride = width as usize;
1412 let y_size = y_stride * height as usize;
1413 let mut y_data = self.allocate_buffer(y_size);
1414 for y in 0..height as usize {
1415 let src_offset = y * (*frame).linesize[0] as usize;
1416 let dst_offset = y * y_stride;
1417 let src_ptr = (*frame).data[0].add(src_offset);
1418 let y_slice = y_data.as_mut();
1419 std::ptr::copy_nonoverlapping(
1422 src_ptr,
1423 y_slice[dst_offset..].as_mut_ptr(),
1424 width as usize,
1425 );
1426 }
1427 planes.push(y_data);
1428 strides.push(y_stride);
1429
1430 let u_stride = chroma_width as usize;
1432 let u_size = u_stride * chroma_height as usize;
1433 let mut u_data = self.allocate_buffer(u_size);
1434 for y in 0..chroma_height as usize {
1435 let src_offset = y * (*frame).linesize[1] as usize;
1436 let dst_offset = y * u_stride;
1437 let src_ptr = (*frame).data[1].add(src_offset);
1438 let u_slice = u_data.as_mut();
1439 std::ptr::copy_nonoverlapping(
1442 src_ptr,
1443 u_slice[dst_offset..].as_mut_ptr(),
1444 chroma_width as usize,
1445 );
1446 }
1447 planes.push(u_data);
1448 strides.push(u_stride);
1449
1450 let v_stride = chroma_width as usize;
1452 let v_size = v_stride * chroma_height as usize;
1453 let mut v_data = self.allocate_buffer(v_size);
1454 for y in 0..chroma_height as usize {
1455 let src_offset = y * (*frame).linesize[2] as usize;
1456 let dst_offset = y * v_stride;
1457 let src_ptr = (*frame).data[2].add(src_offset);
1458 let v_slice = v_data.as_mut();
1459 std::ptr::copy_nonoverlapping(
1462 src_ptr,
1463 v_slice[dst_offset..].as_mut_ptr(),
1464 chroma_width as usize,
1465 );
1466 }
1467 planes.push(v_data);
1468 strides.push(v_stride);
1469 }
1470 PixelFormat::Gray8 => {
1471 let stride = width as usize;
1473 let mut plane_data = self.allocate_buffer(stride * height as usize);
1474
1475 for y in 0..height as usize {
1476 let src_offset = y * (*frame).linesize[0] as usize;
1477 let dst_offset = y * stride;
1478 let src_ptr = (*frame).data[0].add(src_offset);
1479 let plane_slice = plane_data.as_mut();
1480 std::ptr::copy_nonoverlapping(
1483 src_ptr,
1484 plane_slice[dst_offset..].as_mut_ptr(),
1485 width as usize,
1486 );
1487 }
1488
1489 planes.push(plane_data);
1490 strides.push(stride);
1491 }
1492 PixelFormat::Nv12 | PixelFormat::Nv21 => {
1493 let uv_height = height / 2;
1495
1496 let y_stride = width as usize;
1498 let mut y_data = self.allocate_buffer(y_stride * height as usize);
1499 for y in 0..height as usize {
1500 let src_offset = y * (*frame).linesize[0] as usize;
1501 let dst_offset = y * y_stride;
1502 let src_ptr = (*frame).data[0].add(src_offset);
1503 let y_slice = y_data.as_mut();
1504 std::ptr::copy_nonoverlapping(
1507 src_ptr,
1508 y_slice[dst_offset..].as_mut_ptr(),
1509 width as usize,
1510 );
1511 }
1512 planes.push(y_data);
1513 strides.push(y_stride);
1514
1515 let uv_stride = width as usize;
1517 let mut uv_data = self.allocate_buffer(uv_stride * uv_height as usize);
1518 for y in 0..uv_height as usize {
1519 let src_offset = y * (*frame).linesize[1] as usize;
1520 let dst_offset = y * uv_stride;
1521 let src_ptr = (*frame).data[1].add(src_offset);
1522 let uv_slice = uv_data.as_mut();
1523 std::ptr::copy_nonoverlapping(
1526 src_ptr,
1527 uv_slice[dst_offset..].as_mut_ptr(),
1528 width as usize,
1529 );
1530 }
1531 planes.push(uv_data);
1532 strides.push(uv_stride);
1533 }
1534 PixelFormat::Gbrpf32le => {
1535 const BYTES_PER_SAMPLE: usize = 4;
1537 let row_size = width as usize * BYTES_PER_SAMPLE;
1538 let size = row_size * height as usize;
1539
1540 for plane_idx in 0..3usize {
1541 let src_linesize = (*frame).linesize[plane_idx] as usize;
1542 let mut plane_data = self.allocate_buffer(size);
1543 for y in 0..height as usize {
1544 let src_offset = y * src_linesize;
1545 let dst_offset = y * row_size;
1546 let src_ptr = (*frame).data[plane_idx].add(src_offset);
1547 let dst_slice = plane_data.as_mut();
1548 std::ptr::copy_nonoverlapping(
1551 src_ptr,
1552 dst_slice[dst_offset..].as_mut_ptr(),
1553 row_size,
1554 );
1555 }
1556 planes.push(plane_data);
1557 strides.push(row_size);
1558 }
1559 }
1560 _ => {
1561 return Err(DecodeError::Ffmpeg {
1562 code: 0,
1563 message: format!("Unsupported pixel format: {format:?}"),
1564 });
1565 }
1566 }
1567
1568 Ok((planes, strides))
1569 }
1570 }
1571
1572 fn pixel_format_to_av(format: PixelFormat) -> AVPixelFormat {
1574 match format {
1575 PixelFormat::Yuv420p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P,
1576 PixelFormat::Yuv422p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P,
1577 PixelFormat::Yuv444p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P,
1578 PixelFormat::Rgb24 => ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24,
1579 PixelFormat::Bgr24 => ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24,
1580 PixelFormat::Rgba => ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA,
1581 PixelFormat::Bgra => ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA,
1582 PixelFormat::Gray8 => ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8,
1583 PixelFormat::Nv12 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV12,
1584 PixelFormat::Nv21 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV21,
1585 PixelFormat::Yuv420p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P10LE,
1586 PixelFormat::Yuv422p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P10LE,
1587 PixelFormat::Yuv444p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P10LE,
1588 PixelFormat::Yuva444p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUVA444P10LE,
1589 PixelFormat::P010le => ff_sys::AVPixelFormat_AV_PIX_FMT_P010LE,
1590 PixelFormat::Gbrpf32le => ff_sys::AVPixelFormat_AV_PIX_FMT_GBRPF32LE,
1591 _ => {
1592 log::warn!(
1593 "pixel_format has no AV mapping, falling back to Yuv420p format={format:?} fallback=AV_PIX_FMT_YUV420P"
1594 );
1595 ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P
1596 }
1597 }
1598 }
1599
    /// Returns the decoder's current position as a [`Duration`].
    pub(crate) fn position(&self) -> Duration {
        self.position
    }
1604
    /// Returns `true` once the decoder has reached end of stream.
    pub(crate) fn is_eof(&self) -> bool {
        self.eof
    }
1609
    /// Returns `true` when the input was detected as a live source.
    pub(crate) fn is_live(&self) -> bool {
        self.is_live
    }
1617
    /// Converts a wall-clock `duration` into a pts value in the video
    /// stream's time base, truncating toward zero.
    ///
    /// NOTE(review): assumes `time_base.num != 0`. A zero numerator would
    /// make the f64 division yield infinity and the cast saturate to
    /// `i64::MAX` — confirm FFmpeg guarantees a valid time base for streams
    /// of an opened input.
    fn duration_to_pts(&self, duration: Duration) -> i64 {
        // `stream_index` is selected when the decoder is built, so indexing
        // the streams array here stays in bounds.
        let time_base = unsafe {
            let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
            (*(*stream)).time_base
        };

        // ticks-per-second = den / num; scale seconds into stream ticks.
        let time_base_f64 = time_base.den as f64 / time_base.num as f64;
        (duration.as_secs_f64() * time_base_f64) as i64
    }
1646
1647 #[allow(dead_code)]
1665 fn pts_to_duration(&self, pts: i64) -> Duration {
1666 unsafe {
1668 let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
1669 let time_base = (*(*stream)).time_base;
1670
1671 let duration_secs = pts as f64 * time_base.num as f64 / time_base.den as f64;
1673 Duration::from_secs_f64(duration_secs)
1674 }
1675 }
1676
    /// Seeks the decoder to `position`.
    ///
    /// Performs a backward keyframe seek at the container level, resets
    /// packet/frame/codec state, then decodes forward: in `SeekMode::Exact`
    /// until a frame at or after `position` is produced, otherwise until a
    /// frame within the keyframe tolerance window before `position`.
    ///
    /// # Errors
    /// Returns [`DecodeError::SeekFailed`] when the container seek fails, or
    /// (exact mode) when the stream ends before the target position.
    pub(crate) fn seek(
        &mut self,
        position: Duration,
        mode: crate::SeekMode,
    ) -> Result<(), DecodeError> {
        use crate::SeekMode;

        let timestamp = self.duration_to_pts(position);

        // Always seek backward to a keyframe so decoding can resume from a
        // valid reference frame.
        let flags = ff_sys::avformat::seek_flags::BACKWARD;

        // Drop any packet/frame data buffered from before the seek.
        unsafe {
            ff_sys::av_packet_unref(self.packet);
            ff_sys::av_frame_unref(self.frame);
        }

        unsafe {
            ff_sys::avformat::seek_frame(
                self.format_ctx,
                self.stream_index as i32,
                timestamp,
                flags,
            )
            .map_err(|e| DecodeError::SeekFailed {
                target: position,
                reason: ff_sys::av_error_string(e),
            })?;
        }

        // Reset the codec's internal buffers so pre-seek state is discarded.
        unsafe {
            ff_sys::avcodec::flush_buffers(self.codec_ctx);
        }

        // Drain any frames the codec still reports after the flush.
        // NOTE(review): after avcodec_flush_buffers this loop is expected to
        // hit EAGAIN immediately; kept as a defensive safeguard — confirm.
        unsafe {
            loop {
                let ret = ff_sys::avcodec_receive_frame(self.codec_ctx, self.frame);
                if ret == ff_sys::error_codes::EAGAIN || ret == ff_sys::error_codes::EOF {
                    break;
                } else if ret == 0 {
                    ff_sys::av_frame_unref(self.frame);
                } else {
                    break;
                }
            }
        }

        self.eof = false;
        if mode == SeekMode::Exact {
            // Decode (and discard) frames until the exact target is reached.
            self.skip_to_exact(position)?;
        } else {
            // Keyframe mode: stop at the first frame within the tolerance
            // window before the target instead of decoding all the way up.
            let tolerance = Duration::from_secs(KEYFRAME_SEEK_TOLERANCE_SECS);
            let min_position = position.saturating_sub(tolerance);

            while let Some(frame) = self.decode_one()? {
                let frame_time = frame.timestamp().as_duration();
                if frame_time >= min_position {
                    break;
                }
            }
        }

        Ok(())
    }
1808
1809 fn skip_to_exact(&mut self, target: Duration) -> Result<(), DecodeError> {
1830 loop {
1831 match self.decode_one()? {
1832 Some(frame) => {
1833 let frame_time = frame.timestamp().as_duration();
1834 if frame_time >= target {
1835 break;
1838 }
1839 }
1841 None => {
1842 return Err(DecodeError::SeekFailed {
1844 target,
1845 reason: "Reached end of stream before target position".to_string(),
1846 });
1847 }
1848 }
1849 }
1850 Ok(())
1851 }
1852
    /// Clears the codec's buffered state and resets the EOF flag so decoding
    /// can continue from fresh input.
    pub(crate) fn flush(&mut self) {
        unsafe {
            ff_sys::avcodec::flush_buffers(self.codec_ctx);
        }
        self.eof = false;
    }
1864
1865 pub(crate) fn scale_frame(
1904 &mut self,
1905 frame: &VideoFrame,
1906 target_width: u32,
1907 target_height: u32,
1908 ) -> Result<VideoFrame, DecodeError> {
1909 let src_width = frame.width();
1910 let src_height = frame.height();
1911 let src_format = frame.format();
1912
1913 let src_aspect = src_width as f64 / src_height as f64;
1915 let target_aspect = target_width as f64 / target_height as f64;
1916
1917 let (scaled_width, scaled_height) = if src_aspect > target_aspect {
1918 let height = (target_width as f64 / src_aspect).round() as u32;
1920 (target_width, height)
1921 } else {
1922 let width = (target_height as f64 * src_aspect).round() as u32;
1924 (width, target_height)
1925 };
1926
1927 let av_format = Self::pixel_format_to_av(src_format);
1929
1930 let cache_key = (
1932 src_width,
1933 src_height,
1934 scaled_width,
1935 scaled_height,
1936 av_format,
1937 );
1938
1939 unsafe {
1941 let (sws_ctx, is_cached) = if let (Some(cached_ctx), Some(cached_key)) =
1943 (self.thumbnail_sws_ctx, self.thumbnail_cache_key)
1944 {
1945 if cached_key == cache_key {
1946 (cached_ctx, true)
1948 } else {
1949 ff_sys::swscale::free_context(cached_ctx);
1951 self.thumbnail_sws_ctx = None;
1953 self.thumbnail_cache_key = None;
1954
1955 let new_ctx = ff_sys::swscale::get_context(
1956 src_width as i32,
1957 src_height as i32,
1958 av_format,
1959 scaled_width as i32,
1960 scaled_height as i32,
1961 av_format,
1962 ff_sys::swscale::scale_flags::BILINEAR,
1963 )
1964 .map_err(|e| DecodeError::Ffmpeg {
1965 code: 0,
1966 message: format!("Failed to create scaling context: {e}"),
1967 })?;
1968
1969 (new_ctx, false)
1971 }
1972 } else {
1973 let new_ctx = ff_sys::swscale::get_context(
1975 src_width as i32,
1976 src_height as i32,
1977 av_format,
1978 scaled_width as i32,
1979 scaled_height as i32,
1980 av_format,
1981 ff_sys::swscale::scale_flags::BILINEAR,
1982 )
1983 .map_err(|e| DecodeError::Ffmpeg {
1984 code: 0,
1985 message: format!("Failed to create scaling context: {e}"),
1986 })?;
1987
1988 (new_ctx, false)
1990 };
1991
1992 let src_frame_guard = AvFrameGuard::new()?;
1994 let src_frame = src_frame_guard.as_ptr();
1995
1996 (*src_frame).width = src_width as i32;
1997 (*src_frame).height = src_height as i32;
1998 (*src_frame).format = av_format;
1999
2000 let planes = frame.planes();
2002 let strides = frame.strides();
2003
2004 for (i, plane_data) in planes.iter().enumerate() {
2005 if i >= ff_sys::AV_NUM_DATA_POINTERS as usize {
2006 break;
2007 }
2008 (*src_frame).data[i] = plane_data.as_ref().as_ptr().cast_mut();
2009 (*src_frame).linesize[i] = strides[i] as i32;
2010 }
2011
2012 let dst_frame_guard = AvFrameGuard::new()?;
2014 let dst_frame = dst_frame_guard.as_ptr();
2015
2016 (*dst_frame).width = scaled_width as i32;
2017 (*dst_frame).height = scaled_height as i32;
2018 (*dst_frame).format = av_format;
2019
2020 let buffer_ret = ff_sys::av_frame_get_buffer(dst_frame, 0);
2022 if buffer_ret < 0 {
2023 if !is_cached {
2025 ff_sys::swscale::free_context(sws_ctx);
2026 }
2027 return Err(DecodeError::Ffmpeg {
2028 code: buffer_ret,
2029 message: format!(
2030 "Failed to allocate destination frame buffer: {}",
2031 ff_sys::av_error_string(buffer_ret)
2032 ),
2033 });
2034 }
2035
2036 let scale_result = ff_sys::swscale::scale(
2038 sws_ctx,
2039 (*src_frame).data.as_ptr() as *const *const u8,
2040 (*src_frame).linesize.as_ptr(),
2041 0,
2042 src_height as i32,
2043 (*dst_frame).data.as_ptr() as *const *mut u8,
2044 (*dst_frame).linesize.as_ptr(),
2045 );
2046
2047 if let Err(e) = scale_result {
2048 if !is_cached {
2050 ff_sys::swscale::free_context(sws_ctx);
2051 }
2052 return Err(DecodeError::Ffmpeg {
2053 code: 0,
2054 message: format!("Failed to scale frame: {e}"),
2055 });
2056 }
2057
2058 if !is_cached {
2060 self.thumbnail_sws_ctx = Some(sws_ctx);
2061 self.thumbnail_cache_key = Some(cache_key);
2062 }
2063
2064 (*dst_frame).pts = frame.timestamp().pts();
2066
2067 let video_frame = self.av_frame_to_video_frame(dst_frame)?;
2069
2070 Ok(video_frame)
2071 }
2072 }
2073
2074 fn attempt_reconnect(&mut self) -> Result<(), DecodeError> {
2082 let url = match self.url.as_deref() {
2083 Some(u) => u.to_owned(),
2084 None => return Ok(()), };
2086 let max = self.network_opts.max_reconnect_attempts;
2087
2088 for attempt in 1..=max {
2089 let backoff_ms = 100u64 * (1u64 << (attempt - 1).min(10));
2090 log::warn!(
2091 "reconnecting attempt={attempt} url={} backoff_ms={backoff_ms}",
2092 crate::network::sanitize_url(&url)
2093 );
2094 std::thread::sleep(Duration::from_millis(backoff_ms));
2095 match self.reopen(&url) {
2096 Ok(()) => {
2097 self.reconnect_count += 1;
2098 log::info!(
2099 "reconnected attempt={attempt} url={} total_reconnects={}",
2100 crate::network::sanitize_url(&url),
2101 self.reconnect_count
2102 );
2103 return Ok(());
2104 }
2105 Err(e) => log::warn!("reconnect attempt={attempt} failed err={e}"),
2106 }
2107 }
2108
2109 Err(DecodeError::StreamInterrupted {
2110 code: 0,
2111 endpoint: crate::network::sanitize_url(&url),
2112 message: format!("stream did not recover after {max} attempts"),
2113 })
2114 }
2115
    /// Tears down the current demuxer and re-opens the input at `url`,
    /// reusing the existing codec context.
    ///
    /// # Errors
    /// Returns an error when the URL cannot be re-opened, stream info cannot
    /// be read, or no video stream is found after reconnecting.
    fn reopen(&mut self, url: &str) -> Result<(), DecodeError> {
        // Close the old demuxer first; close_input also frees the context
        // and nulls the pointer.
        unsafe {
            ff_sys::avformat::close_input(std::ptr::addr_of_mut!(self.format_ctx));
        }

        self.format_ctx = unsafe {
            ff_sys::avformat::open_input_url(
                url,
                self.network_opts.connect_timeout,
                self.network_opts.read_timeout,
            )
            .map_err(|e| crate::network::map_network_error(e, crate::network::sanitize_url(url)))?
        };

        unsafe {
            ff_sys::avformat::find_stream_info(self.format_ctx).map_err(|e| {
                DecodeError::Ffmpeg {
                    code: e,
                    message: format!(
                        "reconnect find_stream_info failed: {}",
                        ff_sys::av_error_string(e)
                    ),
                }
            })?;
        }

        // The stream layout may differ across the reconnect, so re-discover
        // the video stream index.
        let (stream_index, _) = unsafe { Self::find_video_stream(self.format_ctx) }
            .ok_or_else(|| DecodeError::NoVideoStream { path: url.into() })?;
        self.stream_index = stream_index as i32;

        // Discard any decoder state carried over from the old connection.
        unsafe {
            ff_sys::avcodec::flush_buffers(self.codec_ctx);
        }

        self.eof = false;
        Ok(())
    }
2166}
2167
impl Drop for VideoDecoderInner {
    /// Releases all FFmpeg resources: scaler contexts and the hardware
    /// device buffer first, then frame/packet buffers, then the codec
    /// context, and the demuxer (format context) last.
    fn drop(&mut self) {
        if let Some(sws_ctx) = self.sws_ctx {
            unsafe {
                ff_sys::swscale::free_context(sws_ctx);
            }
        }

        if let Some(thumbnail_ctx) = self.thumbnail_sws_ctx {
            unsafe {
                ff_sys::swscale::free_context(thumbnail_ctx);
            }
        }

        if let Some(hw_ctx) = self.hw_device_ctx {
            unsafe {
                // av_buffer_unref nulls the temporary, not the stored field;
                // that is fine since `self` is being dropped anyway.
                ff_sys::av_buffer_unref(&mut (hw_ctx as *mut _));
            }
        }

        if !self.frame.is_null() {
            unsafe {
                ff_sys::av_frame_free(&mut (self.frame as *mut _));
            }
        }

        if !self.packet.is_null() {
            unsafe {
                ff_sys::av_packet_free(&mut (self.packet as *mut _));
            }
        }

        if !self.codec_ctx.is_null() {
            unsafe {
                ff_sys::avcodec::free_context(&mut (self.codec_ctx as *mut _));
            }
        }

        if !self.format_ctx.is_null() {
            unsafe {
                ff_sys::avformat::close_input(&mut (self.format_ctx as *mut _));
            }
        }
    }
}
2226
// SAFETY(review): the raw FFmpeg pointers appear to be owned exclusively by
// this struct and accessed only through &mut self and Drop, which presumably
// makes moving the decoder to another thread sound — TODO confirm no
// thread-affine FFmpeg state (e.g. certain hwaccel device contexts) is held.
unsafe impl Send for VideoDecoderInner {}
2230
/// Unit tests for the pure conversion helpers on `VideoDecoderInner`:
/// pixel-format, color, codec, and hardware-accel mappings, round trips,
/// and the documented fallback behavior for unknown values.
#[cfg(test)]
mod tests {
    use ff_format::PixelFormat;
    use ff_format::codec::VideoCodec;
    use ff_format::color::{ColorPrimaries, ColorRange, ColorSpace};

    use crate::HardwareAccel;

    use super::VideoDecoderInner;

    // --- AVPixelFormat -> PixelFormat mapping ---

    #[test]
    fn pixel_format_yuv420p() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P),
            PixelFormat::Yuv420p
        );
    }

    #[test]
    fn pixel_format_yuv422p() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P),
            PixelFormat::Yuv422p
        );
    }

    #[test]
    fn pixel_format_yuv444p() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P),
            PixelFormat::Yuv444p
        );
    }

    #[test]
    fn pixel_format_rgb24() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24),
            PixelFormat::Rgb24
        );
    }

    #[test]
    fn pixel_format_bgr24() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24),
            PixelFormat::Bgr24
        );
    }

    #[test]
    fn pixel_format_rgba() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA),
            PixelFormat::Rgba
        );
    }

    #[test]
    fn pixel_format_bgra() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA),
            PixelFormat::Bgra
        );
    }

    #[test]
    fn pixel_format_gray8() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8),
            PixelFormat::Gray8
        );
    }

    #[test]
    fn pixel_format_nv12() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_NV12),
            PixelFormat::Nv12
        );
    }

    #[test]
    fn pixel_format_nv21() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_NV21),
            PixelFormat::Nv21
        );
    }

    #[test]
    fn pixel_format_yuv420p10le_should_return_yuv420p10le() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P10LE),
            PixelFormat::Yuv420p10le
        );
    }

    #[test]
    fn pixel_format_yuv422p10le_should_return_yuv422p10le() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P10LE),
            PixelFormat::Yuv422p10le
        );
    }

    #[test]
    fn pixel_format_yuv444p10le_should_return_yuv444p10le() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P10LE),
            PixelFormat::Yuv444p10le
        );
    }

    #[test]
    fn pixel_format_p010le_should_return_p010le() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_P010LE),
            PixelFormat::P010le
        );
    }

    #[test]
    fn pixel_format_unknown_falls_back_to_yuv420p() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_NONE),
            PixelFormat::Yuv420p
        );
    }

    // --- AVColorSpace -> ColorSpace mapping ---

    #[test]
    fn color_space_bt709() {
        assert_eq!(
            VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_BT709),
            ColorSpace::Bt709
        );
    }

    #[test]
    fn color_space_bt470bg_yields_bt601() {
        assert_eq!(
            VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_BT470BG),
            ColorSpace::Bt601
        );
    }

    #[test]
    fn color_space_smpte170m_yields_bt601() {
        assert_eq!(
            VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_SMPTE170M),
            ColorSpace::Bt601
        );
    }

    #[test]
    fn color_space_bt2020_ncl() {
        assert_eq!(
            VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_BT2020_NCL),
            ColorSpace::Bt2020
        );
    }

    #[test]
    fn color_space_unknown_falls_back_to_bt709() {
        assert_eq!(
            VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_UNSPECIFIED),
            ColorSpace::Bt709
        );
    }

    // --- AVColorRange -> ColorRange mapping ---

    #[test]
    fn color_range_jpeg_yields_full() {
        assert_eq!(
            VideoDecoderInner::convert_color_range(ff_sys::AVColorRange_AVCOL_RANGE_JPEG),
            ColorRange::Full
        );
    }

    #[test]
    fn color_range_mpeg_yields_limited() {
        assert_eq!(
            VideoDecoderInner::convert_color_range(ff_sys::AVColorRange_AVCOL_RANGE_MPEG),
            ColorRange::Limited
        );
    }

    #[test]
    fn color_range_unknown_falls_back_to_limited() {
        assert_eq!(
            VideoDecoderInner::convert_color_range(ff_sys::AVColorRange_AVCOL_RANGE_UNSPECIFIED),
            ColorRange::Limited
        );
    }

    // --- AVColorPrimaries -> ColorPrimaries mapping ---

    #[test]
    fn color_primaries_bt709() {
        assert_eq!(
            VideoDecoderInner::convert_color_primaries(ff_sys::AVColorPrimaries_AVCOL_PRI_BT709),
            ColorPrimaries::Bt709
        );
    }

    #[test]
    fn color_primaries_bt470bg_yields_bt601() {
        assert_eq!(
            VideoDecoderInner::convert_color_primaries(ff_sys::AVColorPrimaries_AVCOL_PRI_BT470BG),
            ColorPrimaries::Bt601
        );
    }

    #[test]
    fn color_primaries_smpte170m_yields_bt601() {
        assert_eq!(
            VideoDecoderInner::convert_color_primaries(
                ff_sys::AVColorPrimaries_AVCOL_PRI_SMPTE170M
            ),
            ColorPrimaries::Bt601
        );
    }

    #[test]
    fn color_primaries_bt2020() {
        assert_eq!(
            VideoDecoderInner::convert_color_primaries(ff_sys::AVColorPrimaries_AVCOL_PRI_BT2020),
            ColorPrimaries::Bt2020
        );
    }

    #[test]
    fn color_primaries_unknown_falls_back_to_bt709() {
        assert_eq!(
            VideoDecoderInner::convert_color_primaries(
                ff_sys::AVColorPrimaries_AVCOL_PRI_UNSPECIFIED
            ),
            ColorPrimaries::Bt709
        );
    }

    // --- AVCodecID -> VideoCodec mapping ---

    #[test]
    fn codec_h264() {
        assert_eq!(
            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_H264),
            VideoCodec::H264
        );
    }

    #[test]
    fn codec_hevc_yields_h265() {
        assert_eq!(
            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_HEVC),
            VideoCodec::H265
        );
    }

    #[test]
    fn codec_vp8() {
        assert_eq!(
            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_VP8),
            VideoCodec::Vp8
        );
    }

    #[test]
    fn codec_vp9() {
        assert_eq!(
            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_VP9),
            VideoCodec::Vp9
        );
    }

    #[test]
    fn codec_av1() {
        assert_eq!(
            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_AV1),
            VideoCodec::Av1
        );
    }

    #[test]
    fn codec_mpeg4() {
        assert_eq!(
            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_MPEG4),
            VideoCodec::Mpeg4
        );
    }

    #[test]
    fn codec_prores() {
        assert_eq!(
            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_PRORES),
            VideoCodec::ProRes
        );
    }

    #[test]
    fn codec_unknown_falls_back_to_h264() {
        assert_eq!(
            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_NONE),
            VideoCodec::H264
        );
    }

    // --- HardwareAccel -> AVHWDeviceType mapping ---

    #[test]
    fn hw_accel_auto_yields_none() {
        assert_eq!(
            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Auto),
            None
        );
    }

    #[test]
    fn hw_accel_none_yields_none() {
        assert_eq!(
            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::None),
            None
        );
    }

    #[test]
    fn hw_accel_nvdec_yields_cuda() {
        assert_eq!(
            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Nvdec),
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_CUDA)
        );
    }

    #[test]
    fn hw_accel_qsv_yields_qsv() {
        assert_eq!(
            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Qsv),
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_QSV)
        );
    }

    #[test]
    fn hw_accel_amf_yields_d3d11va() {
        assert_eq!(
            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Amf),
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_D3D11VA)
        );
    }

    #[test]
    fn hw_accel_videotoolbox() {
        assert_eq!(
            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::VideoToolbox),
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VIDEOTOOLBOX)
        );
    }

    #[test]
    fn hw_accel_vaapi() {
        assert_eq!(
            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Vaapi),
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VAAPI)
        );
    }

    // --- PixelFormat -> AV -> PixelFormat round trips ---

    #[test]
    fn pixel_format_to_av_round_trip_yuv420p() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Yuv420p);
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(av),
            PixelFormat::Yuv420p
        );
    }

    #[test]
    fn pixel_format_to_av_round_trip_yuv422p() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Yuv422p);
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(av),
            PixelFormat::Yuv422p
        );
    }

    #[test]
    fn pixel_format_to_av_round_trip_yuv444p() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Yuv444p);
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(av),
            PixelFormat::Yuv444p
        );
    }

    #[test]
    fn pixel_format_to_av_round_trip_rgb24() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Rgb24);
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(av),
            PixelFormat::Rgb24
        );
    }

    #[test]
    fn pixel_format_to_av_round_trip_bgr24() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Bgr24);
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(av),
            PixelFormat::Bgr24
        );
    }

    #[test]
    fn pixel_format_to_av_round_trip_rgba() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Rgba);
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(av),
            PixelFormat::Rgba
        );
    }

    #[test]
    fn pixel_format_to_av_round_trip_bgra() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Bgra);
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(av),
            PixelFormat::Bgra
        );
    }

    #[test]
    fn pixel_format_to_av_round_trip_gray8() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Gray8);
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(av),
            PixelFormat::Gray8
        );
    }

    #[test]
    fn pixel_format_to_av_round_trip_nv12() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Nv12);
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(av),
            PixelFormat::Nv12
        );
    }

    #[test]
    fn pixel_format_to_av_round_trip_nv21() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Nv21);
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(av),
            PixelFormat::Nv21
        );
    }

    #[test]
    fn pixel_format_to_av_unknown_falls_back_to_yuv420p_av() {
        assert_eq!(
            VideoDecoderInner::pixel_format_to_av(PixelFormat::Other(999)),
            ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P
        );
    }

    // --- Codec name extraction and error formatting ---

    #[test]
    fn codec_name_should_return_h264_for_h264_codec_id() {
        let name =
            unsafe { VideoDecoderInner::extract_codec_name(ff_sys::AVCodecID_AV_CODEC_ID_H264) };
        assert_eq!(name, "h264");
    }

    #[test]
    fn codec_name_should_return_none_for_none_codec_id() {
        let name =
            unsafe { VideoDecoderInner::extract_codec_name(ff_sys::AVCodecID_AV_CODEC_ID_NONE) };
        assert_eq!(name, "none");
    }

    #[test]
    fn convert_pixel_format_should_map_gbrpf32le() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_GBRPF32LE),
            PixelFormat::Gbrpf32le
        );
    }

    #[test]
    fn unsupported_codec_error_should_include_codec_name() {
        let codec_id = ff_sys::AVCodecID_AV_CODEC_ID_H264;
        let codec_name = unsafe { VideoDecoderInner::extract_codec_name(codec_id) };
        let error = crate::error::DecodeError::UnsupportedCodec {
            codec: format!("{codec_name} (codec_id={codec_id:?})"),
        };
        let msg = error.to_string();
        assert!(msg.contains("h264"), "expected codec name in error: {msg}");
        assert!(
            msg.contains("codec_id="),
            "expected codec_id in error: {msg}"
        );
    }
}