1#![allow(unsafe_code)]
8#![allow(clippy::similar_names)]
10#![allow(clippy::too_many_lines)]
11#![allow(clippy::cast_sign_loss)]
12#![allow(clippy::cast_possible_truncation)]
13#![allow(clippy::cast_possible_wrap)]
14#![allow(clippy::module_name_repetitions)]
15#![allow(clippy::match_same_arms)]
16#![allow(clippy::ptr_as_ptr)]
17#![allow(clippy::doc_markdown)]
18#![allow(clippy::unnecessary_cast)]
19#![allow(clippy::if_not_else)]
20#![allow(clippy::unnecessary_wraps)]
21#![allow(clippy::cast_precision_loss)]
22#![allow(clippy::if_same_then_else)]
23#![allow(clippy::cast_lossless)]
24
25use std::ffi::CStr;
26use std::path::Path;
27use std::ptr;
28use std::sync::Arc;
29use std::time::Duration;
30
31use ff_format::PooledBuffer;
32use ff_format::codec::VideoCodec;
33use ff_format::color::{ColorPrimaries, ColorRange, ColorSpace};
34use ff_format::container::ContainerInfo;
35use ff_format::time::{Rational, Timestamp};
36use ff_format::{PixelFormat, VideoFrame, VideoStreamInfo};
37use ff_sys::{
38 AVBufferRef, AVCodecContext, AVCodecID, AVColorPrimaries, AVColorRange, AVColorSpace,
39 AVFormatContext, AVFrame, AVHWDeviceType, AVMediaType_AVMEDIA_TYPE_VIDEO, AVPacket,
40 AVPixelFormat, SwsContext,
41};
42
43use crate::HardwareAccel;
44use crate::error::DecodeError;
45use crate::video::builder::OutputScale;
46use ff_common::FramePool;
47
// Tolerance in whole seconds around a keyframe seek target; presumably
// consumed by seek logic outside this chunk — confirm against the seek
// implementation.
const KEYFRAME_SEEK_TOLERANCE_SECS: u64 = 1;
54
/// RAII guard over a raw `AVFormatContext`: closes the input on drop unless
/// ownership is released via `into_raw`.
struct AvFormatContextGuard(*mut AVFormatContext);
57
impl AvFormatContextGuard {
    /// Opens a media file and wraps the resulting demuxer context.
    ///
    /// # Errors
    /// `DecodeError::Ffmpeg` if FFmpeg cannot open the file.
    ///
    /// # Safety
    /// Caller must uphold the contract of `ff_sys::avformat::open_input`; the
    /// returned guard becomes the sole owner of the context.
    unsafe fn new(path: &Path) -> Result<Self, DecodeError> {
        let format_ctx = unsafe {
            ff_sys::avformat::open_input(path).map_err(|e| DecodeError::Ffmpeg {
                code: e,
                message: format!("Failed to open file: {}", ff_sys::av_error_string(e)),
            })?
        };
        Ok(Self(format_ctx))
    }

    /// Borrows the raw pointer without transferring ownership.
    const fn as_ptr(&self) -> *mut AVFormatContext {
        self.0
    }

    /// Releases ownership to the caller; the guard's destructor will not run.
    fn into_raw(self) -> *mut AVFormatContext {
        let ptr = self.0;
        std::mem::forget(self);
        ptr
    }

    /// Opens a printf-style image-sequence path (e.g. containing `%04d`) at
    /// the given frame rate.
    ///
    /// # Errors
    /// `DecodeError::Ffmpeg` if the sequence cannot be opened.
    ///
    /// # Safety
    /// Same contract as [`Self::new`].
    unsafe fn new_image_sequence(path: &Path, framerate: u32) -> Result<Self, DecodeError> {
        let format_ctx = unsafe {
            ff_sys::avformat::open_input_image_sequence(path, framerate).map_err(|e| {
                DecodeError::Ffmpeg {
                    code: e,
                    message: format!(
                        "Failed to open image sequence: {}",
                        ff_sys::av_error_string(e)
                    ),
                }
            })?
        };
        Ok(Self(format_ctx))
    }
}
108
impl Drop for AvFormatContextGuard {
    fn drop(&mut self) {
        if !self.0.is_null() {
            unsafe {
                // NOTE(review): `close_input` nulls the *temporary* produced by
                // `self.0 as *mut _`, not `self.0` itself; harmless here since
                // the guard is being destroyed and never reused.
                ff_sys::avformat::close_input(&mut (self.0 as *mut _));
            }
        }
    }
}
119
/// RAII guard over a raw `AVCodecContext`: freed on drop unless released via
/// `into_raw`.
struct AvCodecContextGuard(*mut AVCodecContext);
122
impl AvCodecContextGuard {
    /// Allocates a codec context for `codec`.
    ///
    /// # Errors
    /// `DecodeError::Ffmpeg` when allocation fails.
    ///
    /// # Safety
    /// `codec` must be a valid `AVCodec` pointer (or whatever
    /// `ff_sys::avcodec::alloc_context3` accepts).
    unsafe fn new(codec: *const ff_sys::AVCodec) -> Result<Self, DecodeError> {
        let codec_ctx = unsafe {
            ff_sys::avcodec::alloc_context3(codec).map_err(|e| DecodeError::Ffmpeg {
                code: e,
                message: format!("Failed to allocate codec context: {e}"),
            })?
        };
        Ok(Self(codec_ctx))
    }

    /// Borrows the raw pointer without transferring ownership.
    const fn as_ptr(&self) -> *mut AVCodecContext {
        self.0
    }

    /// Releases ownership to the caller; the guard's destructor will not run.
    fn into_raw(self) -> *mut AVCodecContext {
        let ptr = self.0;
        std::mem::forget(self);
        ptr
    }
}
152
impl Drop for AvCodecContextGuard {
    fn drop(&mut self) {
        if !self.0.is_null() {
            unsafe {
                // Frees the context; the null-out applies to the temporary, not
                // `self.0` — fine, as the guard is being destroyed.
                ff_sys::avcodec::free_context(&mut (self.0 as *mut _));
            }
        }
    }
}
163
/// RAII guard over a raw `AVPacket`: freed on drop unless released via
/// `into_raw`.
struct AvPacketGuard(*mut AVPacket);
166
impl AvPacketGuard {
    /// Allocates a packet, failing cleanly on out-of-memory.
    ///
    /// # Errors
    /// `DecodeError::Ffmpeg` (code 0) when `av_packet_alloc` returns null.
    ///
    /// # Safety
    /// The returned guard must remain sole owner of the packet.
    unsafe fn new() -> Result<Self, DecodeError> {
        let packet = unsafe { ff_sys::av_packet_alloc() };
        if packet.is_null() {
            return Err(DecodeError::Ffmpeg {
                code: 0,
                message: "Failed to allocate packet".to_string(),
            });
        }
        Ok(Self(packet))
    }

    /// Borrows the raw pointer without transferring ownership.
    #[allow(dead_code)]
    const fn as_ptr(&self) -> *mut AVPacket {
        self.0
    }

    /// Releases ownership to the caller; the guard's destructor will not run.
    fn into_raw(self) -> *mut AVPacket {
        let ptr = self.0;
        std::mem::forget(self);
        ptr
    }
}
198
impl Drop for AvPacketGuard {
    fn drop(&mut self) {
        if !self.0.is_null() {
            unsafe {
                // Frees the packet; the null-out applies to the temporary, not
                // `self.0` — fine, as the guard is being destroyed.
                ff_sys::av_packet_free(&mut (self.0 as *mut _));
            }
        }
    }
}
209
/// RAII guard over a raw `AVFrame`: freed on drop unless released via
/// `into_raw`.
struct AvFrameGuard(*mut AVFrame);
212
impl AvFrameGuard {
    /// Allocates a frame, failing cleanly on out-of-memory.
    ///
    /// # Errors
    /// `DecodeError::Ffmpeg` (code 0) when `av_frame_alloc` returns null.
    ///
    /// # Safety
    /// The returned guard must remain sole owner of the frame.
    unsafe fn new() -> Result<Self, DecodeError> {
        let frame = unsafe { ff_sys::av_frame_alloc() };
        if frame.is_null() {
            return Err(DecodeError::Ffmpeg {
                code: 0,
                message: "Failed to allocate frame".to_string(),
            });
        }
        Ok(Self(frame))
    }

    /// Borrows the raw pointer without transferring ownership.
    const fn as_ptr(&self) -> *mut AVFrame {
        self.0
    }

    /// Releases ownership to the caller; the guard's destructor will not run.
    fn into_raw(self) -> *mut AVFrame {
        let ptr = self.0;
        std::mem::forget(self);
        ptr
    }
}
243
impl Drop for AvFrameGuard {
    fn drop(&mut self) {
        if !self.0.is_null() {
            unsafe {
                // Frees the frame; the null-out applies to the temporary, not
                // `self.0` — fine, as the guard is being destroyed.
                ff_sys::av_frame_free(&mut (self.0 as *mut _));
            }
        }
    }
}
254
/// Internal decoder state: owns the raw FFmpeg demuxer/decoder contexts,
/// reusable scratch packet/frame, scaler caches, and optional hardware
/// acceleration and frame-pool resources.
pub(crate) struct VideoDecoderInner {
    // Demuxer context (owned raw pointer).
    format_ctx: *mut AVFormatContext,
    // Decoder context (owned raw pointer).
    codec_ctx: *mut AVCodecContext,
    // Index of the selected video stream within the container.
    stream_index: i32,
    // Cached swscale context for output conversion; rebuilt on key change.
    sws_ctx: Option<*mut SwsContext>,
    // (src_w, src_h, src_fmt, dst_w, dst_h, dst_fmt) the cached context was built for.
    sws_cache_key: Option<(u32, u32, i32, u32, u32, i32)>,
    // Requested output pixel format; `None` keeps the decoder's native format.
    output_format: Option<PixelFormat>,
    // Requested output scaling; `None` keeps the source dimensions.
    output_scale: Option<OutputScale>,
    // Set once the demuxer and decoder are fully drained.
    eof: bool,
    // Timestamp of the most recently decoded frame.
    position: Duration,
    // Reusable packet scratch allocation (owned raw pointer).
    packet: *mut AVPacket,
    // Reusable frame scratch allocation (owned raw pointer).
    frame: *mut AVFrame,
    // Separate scaler cache, presumably for thumbnail generation elsewhere in
    // this file — not used in this chunk; confirm against the rest of the file.
    thumbnail_sws_ctx: Option<*mut SwsContext>,
    thumbnail_cache_key: Option<(u32, u32, u32, u32, AVPixelFormat)>,
    // Our own reference to the hardware device context, when HW accel is active.
    hw_device_ctx: Option<*mut AVBufferRef>,
    // The acceleration backend that actually initialized (may be `None`).
    active_hw_accel: HardwareAccel,
    // Optional pool used by `allocate_buffer` for plane buffer reuse.
    frame_pool: Option<Arc<dyn FramePool>>,
}
293
294impl VideoDecoderInner {
295 fn hw_accel_to_device_type(accel: HardwareAccel) -> Option<AVHWDeviceType> {
299 match accel {
300 HardwareAccel::Auto => None,
301 HardwareAccel::None => None,
302 HardwareAccel::Nvdec => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_CUDA),
303 HardwareAccel::Qsv => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_QSV),
304 HardwareAccel::Amf => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_D3D11VA), HardwareAccel::VideoToolbox => {
306 Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VIDEOTOOLBOX)
307 }
308 HardwareAccel::Vaapi => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VAAPI),
309 }
310 }
311
    /// Probe order used when `HardwareAccel::Auto` is requested: the first
    /// backend that initializes successfully wins.
    const fn hw_accel_auto_priority() -> &'static [HardwareAccel] {
        &[
            HardwareAccel::Nvdec,
            HardwareAccel::Qsv,
            HardwareAccel::VideoToolbox,
            HardwareAccel::Vaapi,
            HardwareAccel::Amf,
        ]
    }
323
    /// Initializes hardware acceleration for `codec_ctx` according to `accel`.
    ///
    /// Returns the device-context reference we retain (if any) and the
    /// acceleration mode that actually became active. `Auto` probes the
    /// priority list and silently falls back to software when everything
    /// fails; an explicitly requested backend propagates its error instead.
    ///
    /// # Safety
    /// `codec_ctx` must be a valid, not-yet-opened codec context.
    unsafe fn init_hardware_accel(
        codec_ctx: *mut AVCodecContext,
        accel: HardwareAccel,
    ) -> Result<(Option<*mut AVBufferRef>, HardwareAccel), DecodeError> {
        match accel {
            HardwareAccel::Auto => {
                // Try each backend in priority order; first success wins.
                for &hw_type in Self::hw_accel_auto_priority() {
                    if let Ok((Some(ctx), active)) =
                        unsafe { Self::try_init_hw_device(codec_ctx, hw_type) }
                    {
                        return Ok((Some(ctx), active));
                    }
                }
                // Nothing worked: fall back to software decoding.
                Ok((None, HardwareAccel::None))
            }
            HardwareAccel::None => {
                Ok((None, HardwareAccel::None))
            }
            _ => {
                // Explicit request: surface any failure to the caller.
                unsafe { Self::try_init_hw_device(codec_ctx, accel) }
            }
        }
    }
369
    /// Attempts to create a hardware device context for `accel` and attach it
    /// to `codec_ctx`.
    ///
    /// On success, `codec_ctx` holds the original reference and a second
    /// reference is returned for the decoder to keep.
    ///
    /// # Errors
    /// `DecodeError::HwAccelUnavailable` when device creation or the extra
    /// ref-count fails.
    ///
    /// # Safety
    /// `codec_ctx` must be valid and not yet opened.
    unsafe fn try_init_hw_device(
        codec_ctx: *mut AVCodecContext,
        accel: HardwareAccel,
    ) -> Result<(Option<*mut AVBufferRef>, HardwareAccel), DecodeError> {
        // Auto/None have no device type; report "no acceleration" instead of erring.
        let Some(device_type) = Self::hw_accel_to_device_type(accel) else {
            return Ok((None, HardwareAccel::None));
        };

        let mut hw_device_ctx: *mut AVBufferRef = ptr::null_mut();
        let ret = unsafe {
            ff_sys::av_hwdevice_ctx_create(
                ptr::addr_of_mut!(hw_device_ctx),
                device_type,
                ptr::null(),     // default device
                ptr::null_mut(), // no options
                0,               // no flags
            )
        };

        if ret < 0 {
            return Err(DecodeError::HwAccelUnavailable { accel });
        }

        unsafe {
            // Hand the freshly created reference to the codec context.
            (*codec_ctx).hw_device_ctx = hw_device_ctx;
        }

        // Take an extra reference for ourselves so teardown order stays flexible.
        let our_ref = unsafe { ff_sys::av_buffer_ref(hw_device_ctx) };
        if our_ref.is_null() {
            // NOTE(review): on this path the codec context still holds the
            // original reference, so the device context should be released
            // with it — confirm against the codec-context teardown.
            return Err(DecodeError::HwAccelUnavailable { accel });
        }

        Ok((Some(our_ref), accel))
    }
420
    /// Returns the hardware acceleration backend that actually activated
    /// (may differ from the one requested, e.g. `Auto` resolves to a concrete
    /// backend or to `None`).
    pub(crate) fn hardware_accel(&self) -> HardwareAccel {
        self.active_hw_accel
    }
425
    /// Whether `format` denotes frames living in device/hardware memory and
    /// therefore needing a transfer before CPU access.
    const fn is_hardware_format(format: AVPixelFormat) -> bool {
        matches!(
            format,
            ff_sys::AVPixelFormat_AV_PIX_FMT_D3D11
                | ff_sys::AVPixelFormat_AV_PIX_FMT_CUDA
                | ff_sys::AVPixelFormat_AV_PIX_FMT_VAAPI
                | ff_sys::AVPixelFormat_AV_PIX_FMT_VIDEOTOOLBOX
                | ff_sys::AVPixelFormat_AV_PIX_FMT_QSV
                | ff_sys::AVPixelFormat_AV_PIX_FMT_VDPAU
                | ff_sys::AVPixelFormat_AV_PIX_FMT_DXVA2_VLD
                | ff_sys::AVPixelFormat_AV_PIX_FMT_OPENCL
                | ff_sys::AVPixelFormat_AV_PIX_FMT_MEDIACODEC
                | ff_sys::AVPixelFormat_AV_PIX_FMT_VULKAN
        )
    }
444
    /// If the current frame lives in hardware memory, downloads it into a
    /// software frame and swaps it into `self.frame` in place; no-op for
    /// frames already in CPU memory.
    ///
    /// # Errors
    /// `DecodeError::Ffmpeg` on allocation or transfer failure.
    ///
    /// # Safety
    /// `self.frame` must point to a valid decoded `AVFrame`.
    unsafe fn transfer_hardware_frame_if_needed(&mut self) -> Result<(), DecodeError> {
        let frame_format = unsafe { (*self.frame).format };

        if !Self::is_hardware_format(frame_format) {
            return Ok(());
        }

        let sw_frame = unsafe { ff_sys::av_frame_alloc() };
        if sw_frame.is_null() {
            return Err(DecodeError::Ffmpeg {
                code: 0,
                message: "Failed to allocate software frame for hardware transfer".to_string(),
            });
        }

        let ret = unsafe {
            // Flags = 0: default transfer behavior.
            ff_sys::av_hwframe_transfer_data(sw_frame, self.frame, 0)
        };

        if ret < 0 {
            unsafe {
                ff_sys::av_frame_free(&mut (sw_frame as *mut _));
            }
            return Err(DecodeError::Ffmpeg {
                code: ret,
                message: format!(
                    "Failed to transfer hardware frame to CPU memory: {}",
                    ff_sys::av_error_string(ret)
                ),
            });
        }

        // Carry timing fields over to the software frame before swapping.
        unsafe {
            (*sw_frame).pts = (*self.frame).pts;
            (*sw_frame).pkt_dts = (*self.frame).pkt_dts;
            (*sw_frame).duration = (*self.frame).duration;
            (*sw_frame).time_base = (*self.frame).time_base;
        }

        // Replace the hardware frame with the software copy in place.
        unsafe {
            ff_sys::av_frame_unref(self.frame);
            ff_sys::av_frame_move_ref(self.frame, sw_frame);
            ff_sys::av_frame_free(&mut (sw_frame as *mut _));
        }

        Ok(())
    }
513
    /// Opens `path`, selects the first video stream, configures the decoder
    /// (optionally hardware-accelerated), and returns the decoder state
    /// together with stream and container metadata.
    ///
    /// Paths containing `%` are treated as printf-style image sequences and
    /// opened at `frame_rate` (defaulting to 25 when unset).
    ///
    /// # Errors
    /// `DecodeError::Ffmpeg` for FFmpeg failures, `NoVideoStream`,
    /// `UnsupportedCodec` / `DecoderUnavailable` for codec lookup failures,
    /// plus errors bubbled up from `init_hardware_accel`.
    pub(crate) fn new(
        path: &Path,
        output_format: Option<PixelFormat>,
        output_scale: Option<OutputScale>,
        hardware_accel: HardwareAccel,
        thread_count: usize,
        frame_rate: Option<u32>,
        frame_pool: Option<Arc<dyn FramePool>>,
    ) -> Result<(Self, VideoStreamInfo, ContainerInfo), DecodeError> {
        ff_sys::ensure_initialized();

        // A '%' in the path marks a printf-style image-sequence pattern.
        let is_image_sequence = path.to_str().is_some_and(|s| s.contains('%'));
        let format_ctx_guard = unsafe {
            if is_image_sequence {
                let fps = frame_rate.unwrap_or(25);
                AvFormatContextGuard::new_image_sequence(path, fps)?
            } else {
                AvFormatContextGuard::new(path)?
            }
        };
        let format_ctx = format_ctx_guard.as_ptr();

        unsafe {
            ff_sys::avformat::find_stream_info(format_ctx).map_err(|e| DecodeError::Ffmpeg {
                code: e,
                message: format!("Failed to find stream info: {}", ff_sys::av_error_string(e)),
            })?;
        }

        let (stream_index, codec_id) =
            unsafe { Self::find_video_stream(format_ctx) }.ok_or_else(|| {
                DecodeError::NoVideoStream {
                    path: path.to_path_buf(),
                }
            })?;

        let codec_name = unsafe { Self::extract_codec_name(codec_id) };
        let codec = unsafe {
            ff_sys::avcodec::find_decoder(codec_id).ok_or_else(|| {
                // EXR gets a dedicated hint because it is often compiled out.
                if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_EXR {
                    DecodeError::DecoderUnavailable {
                        codec: "exr".to_string(),
                        hint: "Requires FFmpeg built with EXR support \
                               (--enable-decoder=exr)"
                            .to_string(),
                    }
                } else {
                    DecodeError::UnsupportedCodec {
                        codec: format!("{codec_name} (codec_id={codec_id:?})"),
                    }
                }
            })?
        };

        let codec_ctx_guard = unsafe { AvCodecContextGuard::new(codec)? };
        let codec_ctx = codec_ctx_guard.as_ptr();

        unsafe {
            // Copy the stream's codec parameters into the fresh context.
            let stream = (*format_ctx).streams.add(stream_index as usize);
            let codecpar = (*(*stream)).codecpar;
            ff_sys::avcodec::parameters_to_context(codec_ctx, codecpar).map_err(|e| {
                DecodeError::Ffmpeg {
                    code: e,
                    message: format!(
                        "Failed to copy codec parameters: {}",
                        ff_sys::av_error_string(e)
                    ),
                }
            })?;

            // thread_count == 0 keeps FFmpeg's own default threading choice.
            if thread_count > 0 {
                (*codec_ctx).thread_count = thread_count as i32;
            }
        }

        let (hw_device_ctx, active_hw_accel) =
            unsafe { Self::init_hardware_accel(codec_ctx, hardware_accel)? };

        unsafe {
            ff_sys::avcodec::open2(codec_ctx, codec, ptr::null_mut()).map_err(|e| {
                // Release our hardware-device reference if the codec fails to open.
                if let Some(hw_ctx) = hw_device_ctx {
                    ff_sys::av_buffer_unref(&mut (hw_ctx as *mut _));
                }
                DecodeError::Ffmpeg {
                    code: e,
                    message: format!("Failed to open codec: {}", ff_sys::av_error_string(e)),
                }
            })?;
        }

        let stream_info =
            unsafe { Self::extract_stream_info(format_ctx, stream_index as i32, codec_ctx)? };

        let container_info = unsafe { Self::extract_container_info(format_ctx) };

        let packet_guard = unsafe { AvPacketGuard::new()? };
        let frame_guard = unsafe { AvFrameGuard::new()? };

        Ok((
            Self {
                // Ownership moves out of the guards; freed by this type's own
                // teardown (Drop impl not visible in this chunk — confirm).
                format_ctx: format_ctx_guard.into_raw(),
                codec_ctx: codec_ctx_guard.into_raw(),
                stream_index: stream_index as i32,
                sws_ctx: None,
                sws_cache_key: None,
                output_format,
                output_scale,
                eof: false,
                position: Duration::ZERO,
                packet: packet_guard.into_raw(),
                frame: frame_guard.into_raw(),
                thumbnail_sws_ctx: None,
                thumbnail_cache_key: None,
                hw_device_ctx,
                active_hw_accel,
                frame_pool,
            },
            stream_info,
            container_info,
        ))
    }
683
684 unsafe fn find_video_stream(format_ctx: *mut AVFormatContext) -> Option<(usize, AVCodecID)> {
694 unsafe {
696 let nb_streams = (*format_ctx).nb_streams as usize;
697
698 for i in 0..nb_streams {
699 let stream = (*format_ctx).streams.add(i);
700 let codecpar = (*(*stream)).codecpar;
701
702 if (*codecpar).codec_type == AVMediaType_AVMEDIA_TYPE_VIDEO {
703 return Some((i, (*codecpar).codec_id));
704 }
705 }
706
707 None
708 }
709 }
710
711 unsafe fn extract_codec_name(codec_id: ff_sys::AVCodecID) -> String {
713 let name_ptr = unsafe { ff_sys::avcodec_get_name(codec_id) };
715 if name_ptr.is_null() {
716 return String::from("unknown");
717 }
718 unsafe { CStr::from_ptr(name_ptr).to_string_lossy().into_owned() }
720 }
721
    /// Reads stream-level metadata (dimensions, frame rate, duration,
    /// colorimetry, codec) and assembles a `VideoStreamInfo`.
    ///
    /// # Errors
    /// Currently always returns `Ok`; the `Result` is kept for interface
    /// stability.
    ///
    /// # Safety
    /// `format_ctx` and `codec_ctx` must be valid and `stream_index` in range.
    unsafe fn extract_stream_info(
        format_ctx: *mut AVFormatContext,
        stream_index: i32,
        codec_ctx: *mut AVCodecContext,
    ) -> Result<VideoStreamInfo, DecodeError> {
        let (
            width,
            height,
            fps_rational,
            duration_val,
            pix_fmt,
            color_space_val,
            color_range_val,
            color_primaries_val,
            codec_id,
        ) = unsafe {
            let stream = (*format_ctx).streams.add(stream_index as usize);
            let codecpar = (*(*stream)).codecpar;

            (
                (*codecpar).width as u32,
                (*codecpar).height as u32,
                (*(*stream)).avg_frame_rate,
                (*format_ctx).duration,
                (*codec_ctx).pix_fmt,
                (*codecpar).color_space,
                (*codecpar).color_range,
                (*codecpar).color_primaries,
                (*codecpar).codec_id,
            )
        };

        // Guard against a zero denominator before constructing the rational.
        let frame_rate = if fps_rational.den != 0 {
            Rational::new(fps_rational.num as i32, fps_rational.den as i32)
        } else {
            log::warn!(
                "invalid frame rate, falling back to 30fps num={} den=0 fallback=30/1",
                fps_rational.num
            );
            Rational::new(30, 1)
        };

        // Container duration is in microseconds (AV_TIME_BASE units).
        let duration = if duration_val > 0 {
            let duration_secs = duration_val as f64 / 1_000_000.0;
            Some(Duration::from_secs_f64(duration_secs))
        } else {
            None
        };

        let pixel_format = Self::convert_pixel_format(pix_fmt);

        let color_space = Self::convert_color_space(color_space_val);
        let color_range = Self::convert_color_range(color_range_val);
        let color_primaries = Self::convert_color_primaries(color_primaries_val);

        let codec = Self::convert_codec(codec_id);
        let codec_name = unsafe { Self::extract_codec_name(codec_id) };

        let mut builder = VideoStreamInfo::builder()
            .index(stream_index as u32)
            .codec(codec)
            .codec_name(codec_name)
            .width(width)
            .height(height)
            .frame_rate(frame_rate)
            .pixel_format(pixel_format)
            .color_space(color_space)
            .color_range(color_range)
            .color_primaries(color_primaries);

        // Duration is optional; only set when the container reports one.
        if let Some(d) = duration {
            builder = builder.duration(d);
        }

        Ok(builder.build())
    }
806
807 unsafe fn extract_container_info(format_ctx: *mut AVFormatContext) -> ContainerInfo {
813 unsafe {
815 let format_name = if (*format_ctx).iformat.is_null() {
816 String::new()
817 } else {
818 let ptr = (*(*format_ctx).iformat).name;
819 if ptr.is_null() {
820 String::new()
821 } else {
822 CStr::from_ptr(ptr).to_string_lossy().into_owned()
823 }
824 };
825
826 let bit_rate = {
827 let br = (*format_ctx).bit_rate;
828 if br > 0 { Some(br as u64) } else { None }
829 };
830
831 let nb_streams = (*format_ctx).nb_streams as u32;
832
833 let mut builder = ContainerInfo::builder()
834 .format_name(format_name)
835 .nb_streams(nb_streams);
836 if let Some(br) = bit_rate {
837 builder = builder.bit_rate(br);
838 }
839 builder.build()
840 }
841 }
842
843 fn convert_pixel_format(fmt: AVPixelFormat) -> PixelFormat {
845 if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P {
846 PixelFormat::Yuv420p
847 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P {
848 PixelFormat::Yuv422p
849 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P {
850 PixelFormat::Yuv444p
851 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24 {
852 PixelFormat::Rgb24
853 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24 {
854 PixelFormat::Bgr24
855 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA {
856 PixelFormat::Rgba
857 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA {
858 PixelFormat::Bgra
859 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8 {
860 PixelFormat::Gray8
861 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_NV12 {
862 PixelFormat::Nv12
863 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_NV21 {
864 PixelFormat::Nv21
865 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P10LE {
866 PixelFormat::Yuv420p10le
867 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P10LE {
868 PixelFormat::Yuv422p10le
869 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P10LE {
870 PixelFormat::Yuv444p10le
871 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_P010LE {
872 PixelFormat::P010le
873 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_GBRPF32LE {
874 PixelFormat::Gbrpf32le
875 } else {
876 log::warn!(
877 "pixel_format unsupported, falling back to Yuv420p requested={fmt} fallback=Yuv420p"
878 );
879 PixelFormat::Yuv420p
880 }
881 }
882
883 fn convert_color_space(space: AVColorSpace) -> ColorSpace {
885 if space == ff_sys::AVColorSpace_AVCOL_SPC_BT709 {
886 ColorSpace::Bt709
887 } else if space == ff_sys::AVColorSpace_AVCOL_SPC_BT470BG
888 || space == ff_sys::AVColorSpace_AVCOL_SPC_SMPTE170M
889 {
890 ColorSpace::Bt601
891 } else if space == ff_sys::AVColorSpace_AVCOL_SPC_BT2020_NCL {
892 ColorSpace::Bt2020
893 } else {
894 log::warn!(
895 "color_space unsupported, falling back to Bt709 requested={space} fallback=Bt709"
896 );
897 ColorSpace::Bt709
898 }
899 }
900
901 fn convert_color_range(range: AVColorRange) -> ColorRange {
903 if range == ff_sys::AVColorRange_AVCOL_RANGE_JPEG {
904 ColorRange::Full
905 } else if range == ff_sys::AVColorRange_AVCOL_RANGE_MPEG {
906 ColorRange::Limited
907 } else {
908 log::warn!(
909 "color_range unsupported, falling back to Limited requested={range} fallback=Limited"
910 );
911 ColorRange::Limited
912 }
913 }
914
915 fn convert_color_primaries(primaries: AVColorPrimaries) -> ColorPrimaries {
917 if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT709 {
918 ColorPrimaries::Bt709
919 } else if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT470BG
920 || primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_SMPTE170M
921 {
922 ColorPrimaries::Bt601
923 } else if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT2020 {
924 ColorPrimaries::Bt2020
925 } else {
926 log::warn!(
927 "color_primaries unsupported, falling back to Bt709 requested={primaries} fallback=Bt709"
928 );
929 ColorPrimaries::Bt709
930 }
931 }
932
933 fn convert_codec(codec_id: AVCodecID) -> VideoCodec {
935 if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_H264 {
936 VideoCodec::H264
937 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_HEVC {
938 VideoCodec::H265
939 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_VP8 {
940 VideoCodec::Vp8
941 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_VP9 {
942 VideoCodec::Vp9
943 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_AV1 {
944 VideoCodec::Av1
945 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_MPEG4 {
946 VideoCodec::Mpeg4
947 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_PRORES {
948 VideoCodec::ProRes
949 } else {
950 log::warn!(
951 "video codec unsupported, falling back to H264 codec_id={codec_id} fallback=H264"
952 );
953 VideoCodec::H264
954 }
955 }
956
    /// Decodes and returns the next video frame, or `None` once the stream is
    /// exhausted.
    ///
    /// Drives FFmpeg's send/receive loop: pulls packets from the demuxer,
    /// feeds ones belonging to the selected video stream to the decoder, and
    /// converts decoded frames into `VideoFrame`s, updating `self.position`
    /// from each frame's PTS.
    ///
    /// # Errors
    /// `DecodeError::Ffmpeg` on demux/send failures and
    /// `DecodeError::DecodingFailed` on unexpected decoder errors.
    pub(crate) fn decode_one(&mut self) -> Result<Option<VideoFrame>, DecodeError> {
        if self.eof {
            return Ok(None);
        }

        unsafe {
            loop {
                let ret = ff_sys::avcodec_receive_frame(self.codec_ctx, self.frame);

                if ret == 0 {
                    // Got a frame: download from device memory if needed, then convert.
                    self.transfer_hardware_frame_if_needed()?;

                    let video_frame = self.convert_frame_to_video_frame()?;

                    // Track stream position in seconds using the stream's time base.
                    let pts = (*self.frame).pts;
                    if pts != ff_sys::AV_NOPTS_VALUE {
                        let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
                        let time_base = (*(*stream)).time_base;
                        let timestamp_secs =
                            pts as f64 * time_base.num as f64 / time_base.den as f64;
                        self.position = Duration::from_secs_f64(timestamp_secs);
                    }

                    return Ok(Some(video_frame));
                } else if ret == ff_sys::error_codes::EAGAIN {
                    // Decoder wants more input: read the next packet from the demuxer.
                    let read_ret = ff_sys::av_read_frame(self.format_ctx, self.packet);

                    if read_ret == ff_sys::error_codes::EOF {
                        // Flush: sending a null packet drains buffered frames.
                        ff_sys::avcodec_send_packet(self.codec_ctx, ptr::null());
                        self.eof = true;
                        continue;
                    } else if read_ret < 0 {
                        return Err(DecodeError::Ffmpeg {
                            code: read_ret,
                            message: format!(
                                "Failed to read frame: {}",
                                ff_sys::av_error_string(read_ret)
                            ),
                        });
                    }

                    if (*self.packet).stream_index == self.stream_index {
                        let send_ret = ff_sys::avcodec_send_packet(self.codec_ctx, self.packet);
                        ff_sys::av_packet_unref(self.packet);

                        if send_ret < 0 && send_ret != ff_sys::error_codes::EAGAIN {
                            return Err(DecodeError::Ffmpeg {
                                code: send_ret,
                                message: format!(
                                    "Failed to send packet: {}",
                                    ff_sys::av_error_string(send_ret)
                                ),
                            });
                        }
                    } else {
                        // Packet belongs to another stream (audio, subtitles, ...): drop it.
                        ff_sys::av_packet_unref(self.packet);
                    }
                } else if ret == ff_sys::error_codes::EOF {
                    // Decoder fully drained after the flush packet.
                    self.eof = true;
                    return Ok(None);
                } else {
                    return Err(DecodeError::DecodingFailed {
                        timestamp: Some(self.position),
                        reason: ff_sys::av_error_string(ret),
                    });
                }
            }
        }
    }
1044
    /// Converts the freshly decoded `self.frame` into a `VideoFrame`,
    /// going through swscale only when the requested output format or
    /// dimensions differ from the source.
    ///
    /// # Safety
    /// `self.frame` must hold a valid decoded software frame.
    unsafe fn convert_frame_to_video_frame(&mut self) -> Result<VideoFrame, DecodeError> {
        unsafe {
            let src_width = (*self.frame).width as u32;
            let src_height = (*self.frame).height as u32;
            let src_format = (*self.frame).format;

            // No explicit output format means "keep whatever the decoder produced".
            let dst_format = if let Some(fmt) = self.output_format {
                Self::pixel_format_to_av(fmt)
            } else {
                src_format
            };

            let (dst_width, dst_height) = self.resolve_output_dims(src_width, src_height);

            let needs_conversion =
                src_format != dst_format || dst_width != src_width || dst_height != src_height;

            if needs_conversion {
                self.convert_with_sws(
                    src_width, src_height, src_format, dst_width, dst_height, dst_format,
                )
            } else {
                self.av_frame_to_video_frame(self.frame)
            }
        }
    }
1076
1077 fn resolve_output_dims(&self, src_width: u32, src_height: u32) -> (u32, u32) {
1082 let round_even = |n: u32| (n + 1) & !1;
1083
1084 match self.output_scale {
1085 None => (src_width, src_height),
1086 Some(OutputScale::Exact { width, height }) => (round_even(width), round_even(height)),
1087 Some(OutputScale::FitWidth(target_w)) => {
1088 let target_w = round_even(target_w);
1089 if src_width == 0 {
1090 return (target_w, target_w);
1091 }
1092 let h = (target_w as u64 * src_height as u64 / src_width as u64) as u32;
1093 (target_w, round_even(h.max(2)))
1094 }
1095 Some(OutputScale::FitHeight(target_h)) => {
1096 let target_h = round_even(target_h);
1097 if src_height == 0 {
1098 return (target_h, target_h);
1099 }
1100 let w = (target_h as u64 * src_width as u64 / src_height as u64) as u32;
1101 (round_even(w.max(2)), target_h)
1102 }
1103 }
1104 }
1105
1106 unsafe fn convert_with_sws(
1111 &mut self,
1112 src_width: u32,
1113 src_height: u32,
1114 src_format: i32,
1115 dst_width: u32,
1116 dst_height: u32,
1117 dst_format: i32,
1118 ) -> Result<VideoFrame, DecodeError> {
1119 unsafe {
1121 let cache_key = (
1123 src_width, src_height, src_format, dst_width, dst_height, dst_format,
1124 );
1125 if self.sws_cache_key != Some(cache_key) {
1126 if let Some(old_ctx) = self.sws_ctx.take() {
1128 ff_sys::swscale::free_context(old_ctx);
1129 }
1130
1131 let ctx = ff_sys::swscale::get_context(
1132 src_width as i32,
1133 src_height as i32,
1134 src_format,
1135 dst_width as i32,
1136 dst_height as i32,
1137 dst_format,
1138 ff_sys::swscale::scale_flags::BILINEAR,
1139 )
1140 .map_err(|e| DecodeError::Ffmpeg {
1141 code: 0,
1142 message: format!("Failed to create sws context: {e}"),
1143 })?;
1144
1145 self.sws_ctx = Some(ctx);
1146 self.sws_cache_key = Some(cache_key);
1147 }
1148
1149 let Some(sws_ctx) = self.sws_ctx else {
1150 return Err(DecodeError::Ffmpeg {
1151 code: 0,
1152 message: "SwsContext not initialized".to_string(),
1153 });
1154 };
1155
1156 let dst_frame_guard = AvFrameGuard::new()?;
1158 let dst_frame = dst_frame_guard.as_ptr();
1159
1160 (*dst_frame).width = dst_width as i32;
1161 (*dst_frame).height = dst_height as i32;
1162 (*dst_frame).format = dst_format;
1163
1164 let buffer_ret = ff_sys::av_frame_get_buffer(dst_frame, 0);
1166 if buffer_ret < 0 {
1167 return Err(DecodeError::Ffmpeg {
1168 code: buffer_ret,
1169 message: format!(
1170 "Failed to allocate frame buffer: {}",
1171 ff_sys::av_error_string(buffer_ret)
1172 ),
1173 });
1174 }
1175
1176 ff_sys::swscale::scale(
1178 sws_ctx,
1179 (*self.frame).data.as_ptr() as *const *const u8,
1180 (*self.frame).linesize.as_ptr(),
1181 0,
1182 src_height as i32,
1183 (*dst_frame).data.as_ptr() as *const *mut u8,
1184 (*dst_frame).linesize.as_ptr(),
1185 )
1186 .map_err(|e| DecodeError::Ffmpeg {
1187 code: 0,
1188 message: format!("Failed to scale frame: {e}"),
1189 })?;
1190
1191 (*dst_frame).pts = (*self.frame).pts;
1193
1194 let video_frame = self.av_frame_to_video_frame(dst_frame)?;
1196
1197 Ok(video_frame)
1200 }
1201 }
1202
    /// Copies an `AVFrame`'s planes into an owned `VideoFrame`, translating
    /// the PTS into a `Timestamp` in the stream's time base.
    ///
    /// # Safety
    /// `frame` must be a valid, fully populated software frame.
    unsafe fn av_frame_to_video_frame(
        &self,
        frame: *const AVFrame,
    ) -> Result<VideoFrame, DecodeError> {
        unsafe {
            let width = (*frame).width as u32;
            let height = (*frame).height as u32;
            let format = Self::convert_pixel_format((*frame).format);

            // A missing PTS maps to the default timestamp rather than a guess.
            let pts = (*frame).pts;
            let timestamp = if pts != ff_sys::AV_NOPTS_VALUE {
                let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
                let time_base = (*(*stream)).time_base;
                Timestamp::new(
                    pts as i64,
                    Rational::new(time_base.num as i32, time_base.den as i32),
                )
            } else {
                Timestamp::default()
            };

            let (planes, strides) =
                self.extract_planes_and_strides(frame, width, height, format)?;

            VideoFrame::new(planes, strides, width, height, format, timestamp, false).map_err(|e| {
                DecodeError::Ffmpeg {
                    code: 0,
                    message: format!("Failed to create VideoFrame: {e}"),
                }
            })
        }
    }
1239
1240 fn allocate_buffer(&self, size: usize) -> PooledBuffer {
1250 if let Some(ref pool) = self.frame_pool {
1251 if let Some(pooled_buffer) = pool.acquire(size) {
1252 return pooled_buffer;
1253 }
1254 return PooledBuffer::new(vec![0u8; size], Arc::downgrade(pool));
1259 }
1260 PooledBuffer::standalone(vec![0u8; size])
1261 }
1262
    /// Copies each plane of `frame` into tightly packed buffers
    /// (stride == row size) and returns the plane buffers with their strides.
    ///
    /// FFmpeg frames may carry padded line sizes, so every branch re-packs
    /// row by row to give downstream consumers contiguous rows.
    ///
    /// # Errors
    /// `DecodeError::Ffmpeg` for pixel formats without a copy path here.
    ///
    /// # Safety
    /// `frame` must be valid with plane pointers and linesizes consistent
    /// with `width`, `height`, and `format`.
    unsafe fn extract_planes_and_strides(
        &self,
        frame: *const AVFrame,
        width: u32,
        height: u32,
        format: PixelFormat,
    ) -> Result<(Vec<PooledBuffer>, Vec<usize>), DecodeError> {
        const BYTES_PER_PIXEL_RGBA: usize = 4;
        const BYTES_PER_PIXEL_RGB24: usize = 3;

        unsafe {
            let mut planes = Vec::new();
            let mut strides = Vec::new();

            #[allow(clippy::match_same_arms)]
            match format {
                // Single interleaved plane, 3 or 4 bytes per pixel.
                PixelFormat::Rgba | PixelFormat::Bgra | PixelFormat::Rgb24 | PixelFormat::Bgr24 => {
                    let stride = (*frame).linesize[0] as usize;
                    let bytes_per_pixel = if matches!(format, PixelFormat::Rgba | PixelFormat::Bgra)
                    {
                        BYTES_PER_PIXEL_RGBA
                    } else {
                        BYTES_PER_PIXEL_RGB24
                    };
                    let row_size = (width as usize) * bytes_per_pixel;
                    let buffer_size = row_size * height as usize;
                    let mut plane_data = self.allocate_buffer(buffer_size);

                    // Row-by-row copy drops any FFmpeg line padding.
                    for y in 0..height as usize {
                        let src_offset = y * stride;
                        let dst_offset = y * row_size;
                        let src_ptr = (*frame).data[0].add(src_offset);
                        let plane_slice = plane_data.as_mut();
                        std::ptr::copy_nonoverlapping(
                            src_ptr,
                            plane_slice[dst_offset..].as_mut_ptr(),
                            row_size,
                        );
                    }

                    planes.push(plane_data);
                    strides.push(row_size);
                }
                // Three separate planes; chroma subsampling depends on format.
                PixelFormat::Yuv420p | PixelFormat::Yuv422p | PixelFormat::Yuv444p => {
                    // NOTE(review): integer division assumes even dimensions
                    // for the subsampled formats; odd sizes would under-copy —
                    // confirm upstream guarantees even output dimensions.
                    let (chroma_width, chroma_height) = match format {
                        PixelFormat::Yuv420p => (width / 2, height / 2),
                        PixelFormat::Yuv422p => (width / 2, height),
                        PixelFormat::Yuv444p => (width, height),
                        _ => unreachable!(),
                    };

                    // Luma (Y) plane at full resolution.
                    let y_stride = width as usize;
                    let y_size = y_stride * height as usize;
                    let mut y_data = self.allocate_buffer(y_size);
                    for y in 0..height as usize {
                        let src_offset = y * (*frame).linesize[0] as usize;
                        let dst_offset = y * y_stride;
                        let src_ptr = (*frame).data[0].add(src_offset);
                        let y_slice = y_data.as_mut();
                        std::ptr::copy_nonoverlapping(
                            src_ptr,
                            y_slice[dst_offset..].as_mut_ptr(),
                            width as usize,
                        );
                    }
                    planes.push(y_data);
                    strides.push(y_stride);

                    // Chroma U plane.
                    let u_stride = chroma_width as usize;
                    let u_size = u_stride * chroma_height as usize;
                    let mut u_data = self.allocate_buffer(u_size);
                    for y in 0..chroma_height as usize {
                        let src_offset = y * (*frame).linesize[1] as usize;
                        let dst_offset = y * u_stride;
                        let src_ptr = (*frame).data[1].add(src_offset);
                        let u_slice = u_data.as_mut();
                        std::ptr::copy_nonoverlapping(
                            src_ptr,
                            u_slice[dst_offset..].as_mut_ptr(),
                            chroma_width as usize,
                        );
                    }
                    planes.push(u_data);
                    strides.push(u_stride);

                    // Chroma V plane.
                    let v_stride = chroma_width as usize;
                    let v_size = v_stride * chroma_height as usize;
                    let mut v_data = self.allocate_buffer(v_size);
                    for y in 0..chroma_height as usize {
                        let src_offset = y * (*frame).linesize[2] as usize;
                        let dst_offset = y * v_stride;
                        let src_ptr = (*frame).data[2].add(src_offset);
                        let v_slice = v_data.as_mut();
                        std::ptr::copy_nonoverlapping(
                            src_ptr,
                            v_slice[dst_offset..].as_mut_ptr(),
                            chroma_width as usize,
                        );
                    }
                    planes.push(v_data);
                    strides.push(v_stride);
                }
                // Single 8-bit plane.
                PixelFormat::Gray8 => {
                    let stride = width as usize;
                    let mut plane_data = self.allocate_buffer(stride * height as usize);

                    for y in 0..height as usize {
                        let src_offset = y * (*frame).linesize[0] as usize;
                        let dst_offset = y * stride;
                        let src_ptr = (*frame).data[0].add(src_offset);
                        let plane_slice = plane_data.as_mut();
                        std::ptr::copy_nonoverlapping(
                            src_ptr,
                            plane_slice[dst_offset..].as_mut_ptr(),
                            width as usize,
                        );
                    }

                    planes.push(plane_data);
                    strides.push(stride);
                }
                // Semi-planar: full-res Y plane plus interleaved UV at half height.
                PixelFormat::Nv12 | PixelFormat::Nv21 => {
                    let uv_height = height / 2;

                    let y_stride = width as usize;
                    let mut y_data = self.allocate_buffer(y_stride * height as usize);
                    for y in 0..height as usize {
                        let src_offset = y * (*frame).linesize[0] as usize;
                        let dst_offset = y * y_stride;
                        let src_ptr = (*frame).data[0].add(src_offset);
                        let y_slice = y_data.as_mut();
                        std::ptr::copy_nonoverlapping(
                            src_ptr,
                            y_slice[dst_offset..].as_mut_ptr(),
                            width as usize,
                        );
                    }
                    planes.push(y_data);
                    strides.push(y_stride);

                    // Interleaved UV rows are `width` bytes (width/2 pairs).
                    let uv_stride = width as usize;
                    let mut uv_data = self.allocate_buffer(uv_stride * uv_height as usize);
                    for y in 0..uv_height as usize {
                        let src_offset = y * (*frame).linesize[1] as usize;
                        let dst_offset = y * uv_stride;
                        let src_ptr = (*frame).data[1].add(src_offset);
                        let uv_slice = uv_data.as_mut();
                        std::ptr::copy_nonoverlapping(
                            src_ptr,
                            uv_slice[dst_offset..].as_mut_ptr(),
                            width as usize,
                        );
                    }
                    planes.push(uv_data);
                    strides.push(uv_stride);
                }
                // Planar float GBR: three full-resolution 32-bit planes.
                PixelFormat::Gbrpf32le => {
                    const BYTES_PER_SAMPLE: usize = 4;
                    let row_size = width as usize * BYTES_PER_SAMPLE;
                    let size = row_size * height as usize;

                    for plane_idx in 0..3usize {
                        let src_linesize = (*frame).linesize[plane_idx] as usize;
                        let mut plane_data = self.allocate_buffer(size);
                        for y in 0..height as usize {
                            let src_offset = y * src_linesize;
                            let dst_offset = y * row_size;
                            let src_ptr = (*frame).data[plane_idx].add(src_offset);
                            let dst_slice = plane_data.as_mut();
                            std::ptr::copy_nonoverlapping(
                                src_ptr,
                                dst_slice[dst_offset..].as_mut_ptr(),
                                row_size,
                            );
                        }
                        planes.push(plane_data);
                        strides.push(row_size);
                    }
                }
                _ => {
                    return Err(DecodeError::Ffmpeg {
                        code: 0,
                        message: format!("Unsupported pixel format: {format:?}"),
                    });
                }
            }

            Ok((planes, strides))
        }
    }
1483
1484 fn pixel_format_to_av(format: PixelFormat) -> AVPixelFormat {
1486 match format {
1487 PixelFormat::Yuv420p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P,
1488 PixelFormat::Yuv422p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P,
1489 PixelFormat::Yuv444p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P,
1490 PixelFormat::Rgb24 => ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24,
1491 PixelFormat::Bgr24 => ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24,
1492 PixelFormat::Rgba => ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA,
1493 PixelFormat::Bgra => ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA,
1494 PixelFormat::Gray8 => ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8,
1495 PixelFormat::Nv12 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV12,
1496 PixelFormat::Nv21 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV21,
1497 PixelFormat::Yuv420p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P10LE,
1498 PixelFormat::Yuv422p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P10LE,
1499 PixelFormat::Yuv444p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P10LE,
1500 PixelFormat::Yuva444p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUVA444P10LE,
1501 PixelFormat::P010le => ff_sys::AVPixelFormat_AV_PIX_FMT_P010LE,
1502 PixelFormat::Gbrpf32le => ff_sys::AVPixelFormat_AV_PIX_FMT_GBRPF32LE,
1503 _ => {
1504 log::warn!(
1505 "pixel_format has no AV mapping, falling back to Yuv420p format={format:?} fallback=AV_PIX_FMT_YUV420P"
1506 );
1507 ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P
1508 }
1509 }
1510 }
1511
    /// Returns the decoder's current position (the value tracked in
    /// `self.position` by the decode loop).
    pub(crate) fn position(&self) -> Duration {
        self.position
    }
1516
    /// Returns `true` once the decoder has reached end of stream.
    /// The flag is cleared again by `seek` and `flush`.
    pub(crate) fn is_eof(&self) -> bool {
        self.eof
    }
1521
    /// Converts a wall-clock offset into a PTS value expressed in the video
    /// stream's time base (`pts = seconds * den / num`).
    ///
    /// NOTE(review): if the stream reports `time_base.num == 0` the division
    /// yields infinity and the `as i64` cast saturates to `i64::MAX` — confirm
    /// FFmpeg always provides a non-zero time base for selected streams.
    fn duration_to_pts(&self, duration: Duration) -> i64 {
        // SAFETY: assumes `format_ctx` is a valid open format context and
        // `stream_index` is in bounds of its `streams` array — both are
        // established at construction, outside this view (TODO confirm).
        let time_base = unsafe {
            let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
            (*(*stream)).time_base
        };

        // Ticks per second for this stream.
        let time_base_f64 = time_base.den as f64 / time_base.num as f64;
        (duration.as_secs_f64() * time_base_f64) as i64
    }
1550
1551 #[allow(dead_code)]
1569 fn pts_to_duration(&self, pts: i64) -> Duration {
1570 unsafe {
1572 let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
1573 let time_base = (*(*stream)).time_base;
1574
1575 let duration_secs = pts as f64 * time_base.num as f64 / time_base.den as f64;
1577 Duration::from_secs_f64(duration_secs)
1578 }
1579 }
1580
    /// Seeks the stream to `position`.
    ///
    /// Always issues a backward (keyframe) demuxer seek first, then:
    /// - `SeekMode::Exact`: decodes forward until a frame at or past
    ///   `position` is reached;
    /// - otherwise: decodes forward only until within
    ///   `KEYFRAME_SEEK_TOLERANCE_SECS` before `position`.
    ///
    /// # Errors
    /// Returns `DecodeError::SeekFailed` if the demuxer seek fails or (in
    /// exact mode) the stream ends before `position`; decode errors from the
    /// forward scan propagate unchanged.
    pub(crate) fn seek(
        &mut self,
        position: Duration,
        mode: crate::SeekMode,
    ) -> Result<(), DecodeError> {
        use crate::SeekMode;

        // Seek target expressed in the stream's time base.
        let timestamp = self.duration_to_pts(position);

        // BACKWARD: land on the nearest keyframe at or before the target.
        let flags = ff_sys::avformat::seek_flags::BACKWARD;

        // Drop packet/frame payloads left over from the previous position.
        unsafe {
            ff_sys::av_packet_unref(self.packet);
            ff_sys::av_frame_unref(self.frame);
        }

        // Demuxer-level seek.
        unsafe {
            ff_sys::avformat::seek_frame(
                self.format_ctx,
                self.stream_index as i32,
                timestamp,
                flags,
            )
            .map_err(|e| DecodeError::SeekFailed {
                target: position,
                reason: ff_sys::av_error_string(e),
            })?;
        }

        // Reset decoder state so pre-seek frames are not emitted afterwards.
        unsafe {
            ff_sys::avcodec::flush_buffers(self.codec_ctx);
        }

        // Drain any frames the decoder still has buffered; they belong to the
        // pre-seek position. EAGAIN/EOF — or any other error — ends the drain.
        unsafe {
            loop {
                let ret = ff_sys::avcodec_receive_frame(self.codec_ctx, self.frame);
                if ret == ff_sys::error_codes::EAGAIN || ret == ff_sys::error_codes::EOF {
                    break;
                } else if ret == 0 {
                    ff_sys::av_frame_unref(self.frame);
                } else {
                    break;
                }
            }
        }

        // After a successful seek, decoding can always resume.
        self.eof = false;
        if mode == SeekMode::Exact {
            // Decode forward to the first frame at or past the target.
            self.skip_to_exact(position)?;
        } else {
            // Fast mode: accept the first frame within the tolerance window
            // before the target instead of decoding all the way to it.
            let tolerance = Duration::from_secs(KEYFRAME_SEEK_TOLERANCE_SECS);
            let min_position = position.saturating_sub(tolerance);

            while let Some(frame) = self.decode_one()? {
                let frame_time = frame.timestamp().as_duration();
                if frame_time >= min_position {
                    break;
                }
            }
        }

        Ok(())
    }
1712
1713 fn skip_to_exact(&mut self, target: Duration) -> Result<(), DecodeError> {
1734 loop {
1735 match self.decode_one()? {
1736 Some(frame) => {
1737 let frame_time = frame.timestamp().as_duration();
1738 if frame_time >= target {
1739 break;
1742 }
1743 }
1745 None => {
1746 return Err(DecodeError::SeekFailed {
1748 target,
1749 reason: "Reached end of stream before target position".to_string(),
1750 });
1751 }
1752 }
1753 }
1754 Ok(())
1755 }
1756
    /// Flushes the codec's internal buffers and clears the EOF flag so that
    /// decoding can resume.
    pub(crate) fn flush(&mut self) {
        // SAFETY: `codec_ctx` is this decoder's own codec context pointer,
        // checked non-null before use elsewhere in this type (freed only in
        // Drop).
        unsafe {
            ff_sys::avcodec::flush_buffers(self.codec_ctx);
        }
        self.eof = false;
    }
1768
1769 pub(crate) fn scale_frame(
1808 &mut self,
1809 frame: &VideoFrame,
1810 target_width: u32,
1811 target_height: u32,
1812 ) -> Result<VideoFrame, DecodeError> {
1813 let src_width = frame.width();
1814 let src_height = frame.height();
1815 let src_format = frame.format();
1816
1817 let src_aspect = src_width as f64 / src_height as f64;
1819 let target_aspect = target_width as f64 / target_height as f64;
1820
1821 let (scaled_width, scaled_height) = if src_aspect > target_aspect {
1822 let height = (target_width as f64 / src_aspect).round() as u32;
1824 (target_width, height)
1825 } else {
1826 let width = (target_height as f64 * src_aspect).round() as u32;
1828 (width, target_height)
1829 };
1830
1831 let av_format = Self::pixel_format_to_av(src_format);
1833
1834 let cache_key = (
1836 src_width,
1837 src_height,
1838 scaled_width,
1839 scaled_height,
1840 av_format,
1841 );
1842
1843 unsafe {
1845 let (sws_ctx, is_cached) = if let (Some(cached_ctx), Some(cached_key)) =
1847 (self.thumbnail_sws_ctx, self.thumbnail_cache_key)
1848 {
1849 if cached_key == cache_key {
1850 (cached_ctx, true)
1852 } else {
1853 ff_sys::swscale::free_context(cached_ctx);
1855 self.thumbnail_sws_ctx = None;
1857 self.thumbnail_cache_key = None;
1858
1859 let new_ctx = ff_sys::swscale::get_context(
1860 src_width as i32,
1861 src_height as i32,
1862 av_format,
1863 scaled_width as i32,
1864 scaled_height as i32,
1865 av_format,
1866 ff_sys::swscale::scale_flags::BILINEAR,
1867 )
1868 .map_err(|e| DecodeError::Ffmpeg {
1869 code: 0,
1870 message: format!("Failed to create scaling context: {e}"),
1871 })?;
1872
1873 (new_ctx, false)
1875 }
1876 } else {
1877 let new_ctx = ff_sys::swscale::get_context(
1879 src_width as i32,
1880 src_height as i32,
1881 av_format,
1882 scaled_width as i32,
1883 scaled_height as i32,
1884 av_format,
1885 ff_sys::swscale::scale_flags::BILINEAR,
1886 )
1887 .map_err(|e| DecodeError::Ffmpeg {
1888 code: 0,
1889 message: format!("Failed to create scaling context: {e}"),
1890 })?;
1891
1892 (new_ctx, false)
1894 };
1895
1896 let src_frame_guard = AvFrameGuard::new()?;
1898 let src_frame = src_frame_guard.as_ptr();
1899
1900 (*src_frame).width = src_width as i32;
1901 (*src_frame).height = src_height as i32;
1902 (*src_frame).format = av_format;
1903
1904 let planes = frame.planes();
1906 let strides = frame.strides();
1907
1908 for (i, plane_data) in planes.iter().enumerate() {
1909 if i >= ff_sys::AV_NUM_DATA_POINTERS as usize {
1910 break;
1911 }
1912 (*src_frame).data[i] = plane_data.as_ref().as_ptr().cast_mut();
1913 (*src_frame).linesize[i] = strides[i] as i32;
1914 }
1915
1916 let dst_frame_guard = AvFrameGuard::new()?;
1918 let dst_frame = dst_frame_guard.as_ptr();
1919
1920 (*dst_frame).width = scaled_width as i32;
1921 (*dst_frame).height = scaled_height as i32;
1922 (*dst_frame).format = av_format;
1923
1924 let buffer_ret = ff_sys::av_frame_get_buffer(dst_frame, 0);
1926 if buffer_ret < 0 {
1927 if !is_cached {
1929 ff_sys::swscale::free_context(sws_ctx);
1930 }
1931 return Err(DecodeError::Ffmpeg {
1932 code: buffer_ret,
1933 message: format!(
1934 "Failed to allocate destination frame buffer: {}",
1935 ff_sys::av_error_string(buffer_ret)
1936 ),
1937 });
1938 }
1939
1940 let scale_result = ff_sys::swscale::scale(
1942 sws_ctx,
1943 (*src_frame).data.as_ptr() as *const *const u8,
1944 (*src_frame).linesize.as_ptr(),
1945 0,
1946 src_height as i32,
1947 (*dst_frame).data.as_ptr() as *const *mut u8,
1948 (*dst_frame).linesize.as_ptr(),
1949 );
1950
1951 if let Err(e) = scale_result {
1952 if !is_cached {
1954 ff_sys::swscale::free_context(sws_ctx);
1955 }
1956 return Err(DecodeError::Ffmpeg {
1957 code: 0,
1958 message: format!("Failed to scale frame: {e}"),
1959 });
1960 }
1961
1962 if !is_cached {
1964 self.thumbnail_sws_ctx = Some(sws_ctx);
1965 self.thumbnail_cache_key = Some(cache_key);
1966 }
1967
1968 (*dst_frame).pts = frame.timestamp().pts();
1970
1971 let video_frame = self.av_frame_to_video_frame(dst_frame)?;
1973
1974 Ok(video_frame)
1975 }
1976 }
1977}
1978
impl Drop for VideoDecoderInner {
    /// Releases every FFmpeg resource this decoder owns. Each free is guarded
    /// by a Some/non-null check so a partially-constructed decoder drops
    /// cleanly. Frame/packet are freed before the codec context, and the
    /// format context is closed last.
    fn drop(&mut self) {
        // Free both scaling contexts (main and thumbnail), if allocated.
        if let Some(sws_ctx) = self.sws_ctx {
            unsafe {
                ff_sys::swscale::free_context(sws_ctx);
            }
        }

        if let Some(thumbnail_ctx) = self.thumbnail_sws_ctx {
            unsafe {
                ff_sys::swscale::free_context(thumbnail_ctx);
            }
        }

        // Release our reference on the hardware device context, if any.
        if let Some(hw_ctx) = self.hw_device_ctx {
            unsafe {
                // av_buffer_unref takes a *mut *mut and nulls the temporary;
                // leaving the field untouched is fine since self is being
                // dropped and the field is never read again.
                ff_sys::av_buffer_unref(&mut (hw_ctx as *mut _));
            }
        }

        if !self.frame.is_null() {
            unsafe {
                ff_sys::av_frame_free(&mut (self.frame as *mut _));
            }
        }

        if !self.packet.is_null() {
            unsafe {
                ff_sys::av_packet_free(&mut (self.packet as *mut _));
            }
        }

        if !self.codec_ctx.is_null() {
            unsafe {
                ff_sys::avcodec::free_context(&mut (self.codec_ctx as *mut _));
            }
        }

        // Closing the input also frees the demuxer-owned stream data.
        if !self.format_ctx.is_null() {
            unsafe {
                ff_sys::avformat::close_input(&mut (self.format_ctx as *mut _));
            }
        }
    }
}
2037
// SAFETY: NOTE(review) — this asserts the raw FFmpeg pointers (format/codec
// contexts, frame, packet, sws contexts, hw device ctx) are owned exclusively
// by this struct, so moving the whole decoder to another thread is sound.
// Confirm no aliased copies of these pointers escape elsewhere before relying
// on this impl; no `Sync` is claimed, so concurrent shared access stays
// disallowed.
unsafe impl Send for VideoDecoderInner {}
2041
#[cfg(test)]
mod tests {
    use ff_format::PixelFormat;
    use ff_format::codec::VideoCodec;
    use ff_format::color::{ColorPrimaries, ColorRange, ColorSpace};

    use crate::HardwareAccel;

    use super::VideoDecoderInner;

    /// AV pixel-format constants convert to the expected `PixelFormat`;
    /// unrecognized values fall back to `Yuv420p`.
    #[test]
    fn convert_pixel_format_table() {
        let cases = [
            (ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P, PixelFormat::Yuv420p),
            (ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P, PixelFormat::Yuv422p),
            (ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P, PixelFormat::Yuv444p),
            (ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24, PixelFormat::Rgb24),
            (ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24, PixelFormat::Bgr24),
            (ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA, PixelFormat::Rgba),
            (ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA, PixelFormat::Bgra),
            (ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8, PixelFormat::Gray8),
            (ff_sys::AVPixelFormat_AV_PIX_FMT_NV12, PixelFormat::Nv12),
            (ff_sys::AVPixelFormat_AV_PIX_FMT_NV21, PixelFormat::Nv21),
            (
                ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P10LE,
                PixelFormat::Yuv420p10le,
            ),
            (
                ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P10LE,
                PixelFormat::Yuv422p10le,
            ),
            (
                ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P10LE,
                PixelFormat::Yuv444p10le,
            ),
            (ff_sys::AVPixelFormat_AV_PIX_FMT_P010LE, PixelFormat::P010le),
            (
                ff_sys::AVPixelFormat_AV_PIX_FMT_GBRPF32LE,
                PixelFormat::Gbrpf32le,
            ),
            // Fallback path for unknown formats.
            (ff_sys::AVPixelFormat_AV_PIX_FMT_NONE, PixelFormat::Yuv420p),
        ];
        for (av, expected) in cases {
            assert_eq!(VideoDecoderInner::convert_pixel_format(av), expected);
        }
    }

    /// Color-space conversion, including the BT.601 aliases and the BT.709
    /// fallback for unspecified values.
    #[test]
    fn convert_color_space_table() {
        let cases = [
            (ff_sys::AVColorSpace_AVCOL_SPC_BT709, ColorSpace::Bt709),
            (ff_sys::AVColorSpace_AVCOL_SPC_BT470BG, ColorSpace::Bt601),
            (ff_sys::AVColorSpace_AVCOL_SPC_SMPTE170M, ColorSpace::Bt601),
            (ff_sys::AVColorSpace_AVCOL_SPC_BT2020_NCL, ColorSpace::Bt2020),
            (ff_sys::AVColorSpace_AVCOL_SPC_UNSPECIFIED, ColorSpace::Bt709),
        ];
        for (av, expected) in cases {
            assert_eq!(VideoDecoderInner::convert_color_space(av), expected);
        }
    }

    /// Color-range conversion; unspecified defaults to limited range.
    #[test]
    fn convert_color_range_table() {
        let cases = [
            (ff_sys::AVColorRange_AVCOL_RANGE_JPEG, ColorRange::Full),
            (ff_sys::AVColorRange_AVCOL_RANGE_MPEG, ColorRange::Limited),
            (
                ff_sys::AVColorRange_AVCOL_RANGE_UNSPECIFIED,
                ColorRange::Limited,
            ),
        ];
        for (av, expected) in cases {
            assert_eq!(VideoDecoderInner::convert_color_range(av), expected);
        }
    }

    /// Color-primaries conversion, including BT.601 aliases and the BT.709
    /// fallback.
    #[test]
    fn convert_color_primaries_table() {
        let cases = [
            (
                ff_sys::AVColorPrimaries_AVCOL_PRI_BT709,
                ColorPrimaries::Bt709,
            ),
            (
                ff_sys::AVColorPrimaries_AVCOL_PRI_BT470BG,
                ColorPrimaries::Bt601,
            ),
            (
                ff_sys::AVColorPrimaries_AVCOL_PRI_SMPTE170M,
                ColorPrimaries::Bt601,
            ),
            (
                ff_sys::AVColorPrimaries_AVCOL_PRI_BT2020,
                ColorPrimaries::Bt2020,
            ),
            (
                ff_sys::AVColorPrimaries_AVCOL_PRI_UNSPECIFIED,
                ColorPrimaries::Bt709,
            ),
        ];
        for (av, expected) in cases {
            assert_eq!(VideoDecoderInner::convert_color_primaries(av), expected);
        }
    }

    /// Codec-ID conversion; unknown codec IDs fall back to H264.
    #[test]
    fn convert_codec_table() {
        let cases = [
            (ff_sys::AVCodecID_AV_CODEC_ID_H264, VideoCodec::H264),
            (ff_sys::AVCodecID_AV_CODEC_ID_HEVC, VideoCodec::H265),
            (ff_sys::AVCodecID_AV_CODEC_ID_VP8, VideoCodec::Vp8),
            (ff_sys::AVCodecID_AV_CODEC_ID_VP9, VideoCodec::Vp9),
            (ff_sys::AVCodecID_AV_CODEC_ID_AV1, VideoCodec::Av1),
            (ff_sys::AVCodecID_AV_CODEC_ID_MPEG4, VideoCodec::Mpeg4),
            (ff_sys::AVCodecID_AV_CODEC_ID_PRORES, VideoCodec::ProRes),
            (ff_sys::AVCodecID_AV_CODEC_ID_NONE, VideoCodec::H264),
        ];
        for (av, expected) in cases {
            assert_eq!(VideoDecoderInner::convert_codec(av), expected);
        }
    }

    /// Hardware-accel selection maps to the expected `AVHWDeviceType`;
    /// `Auto` and `None` both yield no explicit device type.
    #[test]
    fn hw_accel_to_device_type_table() {
        let cases = [
            (HardwareAccel::Auto, None),
            (HardwareAccel::None, None),
            (
                HardwareAccel::Nvdec,
                Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_CUDA),
            ),
            (
                HardwareAccel::Qsv,
                Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_QSV),
            ),
            (
                HardwareAccel::Amf,
                Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_D3D11VA),
            ),
            (
                HardwareAccel::VideoToolbox,
                Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VIDEOTOOLBOX),
            ),
            (
                HardwareAccel::Vaapi,
                Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VAAPI),
            ),
        ];
        for (accel, expected) in cases {
            assert_eq!(VideoDecoderInner::hw_accel_to_device_type(accel), expected);
        }
    }

    /// Formats with a direct AV mapping survive a PixelFormat -> AV ->
    /// PixelFormat round trip unchanged.
    #[test]
    fn pixel_format_av_round_trip() {
        let formats = [
            PixelFormat::Yuv420p,
            PixelFormat::Yuv422p,
            PixelFormat::Yuv444p,
            PixelFormat::Rgb24,
            PixelFormat::Bgr24,
            PixelFormat::Rgba,
            PixelFormat::Bgra,
            PixelFormat::Gray8,
            PixelFormat::Nv12,
            PixelFormat::Nv21,
        ];
        for format in formats {
            let av = VideoDecoderInner::pixel_format_to_av(format);
            assert_eq!(VideoDecoderInner::convert_pixel_format(av), format);
        }
    }

    /// Formats without an AV mapping fall back to `AV_PIX_FMT_YUV420P`.
    #[test]
    fn pixel_format_to_av_fallback() {
        assert_eq!(
            VideoDecoderInner::pixel_format_to_av(PixelFormat::Other(999)),
            ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P
        );
    }

    /// Codec names resolve for both a real codec ID and the NONE sentinel.
    #[test]
    fn extract_codec_name_table() {
        let h264 =
            unsafe { VideoDecoderInner::extract_codec_name(ff_sys::AVCodecID_AV_CODEC_ID_H264) };
        assert_eq!(h264, "h264");

        let none =
            unsafe { VideoDecoderInner::extract_codec_name(ff_sys::AVCodecID_AV_CODEC_ID_NONE) };
        assert_eq!(none, "none");
    }

    /// The UnsupportedCodec error surfaces both the codec name and raw ID.
    #[test]
    fn unsupported_codec_error_includes_codec_name() {
        let codec_id = ff_sys::AVCodecID_AV_CODEC_ID_H264;
        let codec_name = unsafe { VideoDecoderInner::extract_codec_name(codec_id) };
        let error = crate::error::DecodeError::UnsupportedCodec {
            codec: format!("{codec_name} (codec_id={codec_id:?})"),
        };
        let msg = error.to_string();
        assert!(msg.contains("h264"), "expected codec name in error: {msg}");
        assert!(
            msg.contains("codec_id="),
            "expected codec_id in error: {msg}"
        );
    }
}