1#![allow(unsafe_code)]
8#![allow(clippy::similar_names)]
10#![allow(clippy::too_many_lines)]
11#![allow(clippy::cast_sign_loss)]
12#![allow(clippy::cast_possible_truncation)]
13#![allow(clippy::cast_possible_wrap)]
14#![allow(clippy::module_name_repetitions)]
15#![allow(clippy::match_same_arms)]
16#![allow(clippy::ptr_as_ptr)]
17#![allow(clippy::doc_markdown)]
18#![allow(clippy::unnecessary_cast)]
19#![allow(clippy::if_not_else)]
20#![allow(clippy::unnecessary_wraps)]
21#![allow(clippy::cast_precision_loss)]
22#![allow(clippy::if_same_then_else)]
23#![allow(clippy::cast_lossless)]
24
25use std::path::Path;
26use std::ptr;
27use std::sync::Arc;
28use std::time::Duration;
29
30use ff_format::PooledBuffer;
31use ff_format::codec::VideoCodec;
32use ff_format::color::{ColorPrimaries, ColorRange, ColorSpace};
33use ff_format::time::{Rational, Timestamp};
34use ff_format::{PixelFormat, VideoFrame, VideoStreamInfo};
35use ff_sys::{
36 AVBufferRef, AVCodecContext, AVCodecID, AVColorPrimaries, AVColorRange, AVColorSpace,
37 AVFormatContext, AVFrame, AVHWDeviceType, AVMediaType_AVMEDIA_TYPE_VIDEO, AVPacket,
38 AVPixelFormat, SwsContext,
39};
40
41use crate::HardwareAccel;
42use crate::error::DecodeError;
43use ff_common::FramePool;
44
/// After a keyframe (fast) seek, frames whose timestamp falls within this many
/// seconds *before* the requested position are accepted without decoding
/// further (see `seek`'s non-exact path).
const KEYFRAME_SEEK_TOLERANCE_SECS: u64 = 1;
51
/// RAII guard over a raw `AVFormatContext`: closes the input on drop unless
/// ownership is released via `into_raw`.
struct AvFormatContextGuard(*mut AVFormatContext);
54
impl AvFormatContextGuard {
    /// Opens the container at `path` and wraps the demuxer context so it is
    /// closed automatically if construction of the decoder fails partway.
    ///
    /// # Safety
    ///
    /// Relies on `ff_sys::avformat::open_input` returning a valid context
    /// pointer on success.
    ///
    /// # Errors
    ///
    /// Returns `DecodeError::Ffmpeg` with FFmpeg's error code/message when the
    /// file cannot be opened.
    unsafe fn new(path: &Path) -> Result<Self, DecodeError> {
        let format_ctx = unsafe {
            ff_sys::avformat::open_input(path).map_err(|e| DecodeError::Ffmpeg {
                code: e,
                message: format!("Failed to open file: {}", ff_sys::av_error_string(e)),
            })?
        };
        Ok(Self(format_ctx))
    }

    /// Borrows the raw context pointer without transferring ownership.
    const fn as_ptr(&self) -> *mut AVFormatContext {
        self.0
    }

    /// Releases ownership of the context to the caller and disarms the guard
    /// (its `Drop` will no longer close the input).
    fn into_raw(self) -> *mut AVFormatContext {
        let ptr = self.0;
        std::mem::forget(self);
        ptr
    }
}
84
85impl Drop for AvFormatContextGuard {
86 fn drop(&mut self) {
87 if !self.0.is_null() {
88 unsafe {
90 ff_sys::avformat::close_input(&mut (self.0 as *mut _));
91 }
92 }
93 }
94}
95
/// RAII guard over a raw `AVCodecContext`: frees the context on drop unless
/// ownership is released via `into_raw`.
struct AvCodecContextGuard(*mut AVCodecContext);
98
impl AvCodecContextGuard {
    /// Allocates a codec context for `codec`, wrapping it so it is freed
    /// automatically if later setup steps fail.
    ///
    /// # Safety
    ///
    /// `codec` must be a valid decoder pointer obtained from FFmpeg (or null,
    /// per `avcodec_alloc_context3` semantics — TODO confirm the wrapper's
    /// contract).
    ///
    /// # Errors
    ///
    /// Returns `DecodeError::Ffmpeg` when allocation fails.
    unsafe fn new(codec: *const ff_sys::AVCodec) -> Result<Self, DecodeError> {
        let codec_ctx = unsafe {
            ff_sys::avcodec::alloc_context3(codec).map_err(|e| DecodeError::Ffmpeg {
                code: e,
                message: format!("Failed to allocate codec context: {e}"),
            })?
        };
        Ok(Self(codec_ctx))
    }

    /// Borrows the raw context pointer without transferring ownership.
    const fn as_ptr(&self) -> *mut AVCodecContext {
        self.0
    }

    /// Releases ownership of the context to the caller and disarms the guard.
    fn into_raw(self) -> *mut AVCodecContext {
        let ptr = self.0;
        std::mem::forget(self);
        ptr
    }
}
128
129impl Drop for AvCodecContextGuard {
130 fn drop(&mut self) {
131 if !self.0.is_null() {
132 unsafe {
134 ff_sys::avcodec::free_context(&mut (self.0 as *mut _));
135 }
136 }
137 }
138}
139
/// RAII guard over a raw `AVPacket`: frees the packet on drop unless ownership
/// is released via `into_raw`.
struct AvPacketGuard(*mut AVPacket);
142
impl AvPacketGuard {
    /// Allocates an empty packet for demuxing.
    ///
    /// # Safety
    ///
    /// Only calls `av_packet_alloc`; safe as long as FFmpeg is initialized.
    ///
    /// # Errors
    ///
    /// Returns `DecodeError::Ffmpeg` (code 0: allocation failures carry no
    /// FFmpeg error code) when allocation fails.
    unsafe fn new() -> Result<Self, DecodeError> {
        let packet = unsafe { ff_sys::av_packet_alloc() };
        if packet.is_null() {
            return Err(DecodeError::Ffmpeg {
                code: 0,
                message: "Failed to allocate packet".to_string(),
            });
        }
        Ok(Self(packet))
    }

    /// Borrows the raw packet pointer without transferring ownership.
    #[allow(dead_code)]
    const fn as_ptr(&self) -> *mut AVPacket {
        self.0
    }

    /// Releases ownership of the packet to the caller and disarms the guard.
    fn into_raw(self) -> *mut AVPacket {
        let ptr = self.0;
        std::mem::forget(self);
        ptr
    }
}
174
175impl Drop for AvPacketGuard {
176 fn drop(&mut self) {
177 if !self.0.is_null() {
178 unsafe {
180 ff_sys::av_packet_free(&mut (self.0 as *mut _));
181 }
182 }
183 }
184}
185
/// RAII guard over a raw `AVFrame`: frees the frame on drop unless ownership
/// is released via `into_raw`.
struct AvFrameGuard(*mut AVFrame);
188
impl AvFrameGuard {
    /// Allocates an empty frame for decoding.
    ///
    /// # Safety
    ///
    /// Only calls `av_frame_alloc`; safe as long as FFmpeg is initialized.
    ///
    /// # Errors
    ///
    /// Returns `DecodeError::Ffmpeg` (code 0: allocation failures carry no
    /// FFmpeg error code) when allocation fails.
    unsafe fn new() -> Result<Self, DecodeError> {
        let frame = unsafe { ff_sys::av_frame_alloc() };
        if frame.is_null() {
            return Err(DecodeError::Ffmpeg {
                code: 0,
                message: "Failed to allocate frame".to_string(),
            });
        }
        Ok(Self(frame))
    }

    /// Borrows the raw frame pointer without transferring ownership.
    const fn as_ptr(&self) -> *mut AVFrame {
        self.0
    }

    /// Releases ownership of the frame to the caller and disarms the guard.
    fn into_raw(self) -> *mut AVFrame {
        let ptr = self.0;
        std::mem::forget(self);
        ptr
    }
}
219
220impl Drop for AvFrameGuard {
221 fn drop(&mut self) {
222 if !self.0.is_null() {
223 unsafe {
225 ff_sys::av_frame_free(&mut (self.0 as *mut _));
226 }
227 }
228 }
229}
230
/// Internal state of a single-file FFmpeg video decoder.
///
/// Holds raw FFmpeg objects; cleanup of these pointers is not visible in this
/// chunk — presumably a `Drop` impl elsewhere frees them (TODO confirm).
pub(crate) struct VideoDecoderInner {
    /// Demuxer context for the opened input file.
    format_ctx: *mut AVFormatContext,
    /// Decoder context for the selected video stream.
    codec_ctx: *mut AVCodecContext,
    /// Index of the selected video stream within the container.
    stream_index: i32,
    /// Lazily created swscale context for output pixel-format conversion.
    sws_ctx: Option<*mut SwsContext>,
    /// Requested output pixel format; `None` keeps the decoded format.
    output_format: Option<PixelFormat>,
    /// Set once both demuxer and decoder are fully drained.
    eof: bool,
    /// Presentation time of the most recently decoded frame.
    position: Duration,
    /// Reusable packet used for demuxing.
    packet: *mut AVPacket,
    /// Reusable frame used for decoding.
    frame: *mut AVFrame,
    /// Lazily created swscale context for thumbnail scaling.
    thumbnail_sws_ctx: Option<*mut SwsContext>,
    /// Cache key (src w, src h, dst w, dst h, format) for `thumbnail_sws_ctx`.
    thumbnail_cache_key: Option<(u32, u32, u32, u32, AVPixelFormat)>,
    /// Our owned reference to the hardware device context, when HW decode is active.
    hw_device_ctx: Option<*mut AVBufferRef>,
    /// Hardware acceleration backend that ended up active (or `None`).
    active_hw_accel: HardwareAccel,
    /// Optional pool used to recycle plane buffers.
    frame_pool: Option<Arc<dyn FramePool>>,
}
265
266impl VideoDecoderInner {
267 fn hw_accel_to_device_type(accel: HardwareAccel) -> Option<AVHWDeviceType> {
271 match accel {
272 HardwareAccel::Auto => None,
273 HardwareAccel::None => None,
274 HardwareAccel::Nvdec => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_CUDA),
275 HardwareAccel::Qsv => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_QSV),
276 HardwareAccel::Amf => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_D3D11VA), HardwareAccel::VideoToolbox => {
278 Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VIDEOTOOLBOX)
279 }
280 HardwareAccel::Vaapi => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VAAPI),
281 }
282 }
283
    /// Probe order used when `HardwareAccel::Auto` is requested: the first
    /// backend whose device context can be created wins.
    const fn hw_accel_auto_priority() -> &'static [HardwareAccel] {
        &[
            HardwareAccel::Nvdec,
            HardwareAccel::Qsv,
            HardwareAccel::VideoToolbox,
            HardwareAccel::Vaapi,
            HardwareAccel::Amf,
        ]
    }
295
    /// Resolves the requested hardware acceleration into a concrete device
    /// context (or software decoding).
    ///
    /// `Auto` tries each backend from `hw_accel_auto_priority` in order and
    /// silently falls back to software when none initializes; an explicit
    /// backend propagates its initialization error instead.
    ///
    /// Returns our owned `AVBufferRef` to the device context (if any) plus the
    /// backend that actually became active.
    ///
    /// # Safety
    ///
    /// `codec_ctx` must point to a valid, not-yet-opened `AVCodecContext`.
    unsafe fn init_hardware_accel(
        codec_ctx: *mut AVCodecContext,
        accel: HardwareAccel,
    ) -> Result<(Option<*mut AVBufferRef>, HardwareAccel), DecodeError> {
        match accel {
            HardwareAccel::Auto => {
                for &hw_type in Self::hw_accel_auto_priority() {
                    // Errors are intentionally ignored here: Auto is
                    // best-effort, so a failing backend just means "try next".
                    if let Ok((Some(ctx), active)) =
                        unsafe { Self::try_init_hw_device(codec_ctx, hw_type) }
                    {
                        return Ok((Some(ctx), active));
                    }
                }
                Ok((None, HardwareAccel::None))
            }
            HardwareAccel::None => {
                Ok((None, HardwareAccel::None))
            }
            _ => {
                unsafe { Self::try_init_hw_device(codec_ctx, accel) }
            }
        }
    }
341
    /// Attempts to create a hardware device context for `accel` and attach it
    /// to `codec_ctx`.
    ///
    /// Reference-count ownership: the ref produced by
    /// `av_hwdevice_ctx_create` is handed to `codec_ctx.hw_device_ctx`
    /// (released when the codec context is freed), and a second ref created
    /// via `av_buffer_ref` is returned for the decoder struct to hold.
    ///
    /// # Safety
    ///
    /// `codec_ctx` must point to a valid, not-yet-opened `AVCodecContext`.
    ///
    /// # Errors
    ///
    /// Returns `DecodeError::HwAccelUnavailable` when the device cannot be
    /// created or the extra buffer reference cannot be taken.
    unsafe fn try_init_hw_device(
        codec_ctx: *mut AVCodecContext,
        accel: HardwareAccel,
    ) -> Result<(Option<*mut AVBufferRef>, HardwareAccel), DecodeError> {
        // Auto / None have no device type; treat as "no hardware".
        let Some(device_type) = Self::hw_accel_to_device_type(accel) else {
            return Ok((None, HardwareAccel::None));
        };

        let mut hw_device_ctx: *mut AVBufferRef = ptr::null_mut();
        let ret = unsafe {
            ff_sys::av_hwdevice_ctx_create(
                ptr::addr_of_mut!(hw_device_ctx),
                device_type,
                ptr::null(),     // device: default device for the type
                ptr::null_mut(), // opts: none
                0,               // flags
            )
        };

        if ret < 0 {
            return Err(DecodeError::HwAccelUnavailable { accel });
        }

        unsafe {
            // The codec context takes over the ref created above.
            (*codec_ctx).hw_device_ctx = hw_device_ctx;
        }

        let our_ref = unsafe { ff_sys::av_buffer_ref(hw_device_ctx) };
        if our_ref.is_null() {
            // NOTE(review): on this path the ref already stored in
            // `codec_ctx.hw_device_ctx` stays owned by the codec context and
            // is presumably released when that context is freed — confirm.
            return Err(DecodeError::HwAccelUnavailable { accel });
        }

        Ok((Some(our_ref), accel))
    }
392
    /// Returns the hardware acceleration backend that is actually in use
    /// (may differ from the one requested, e.g. after `Auto` resolution).
    pub(crate) fn hardware_accel(&self) -> HardwareAccel {
        self.active_hw_accel
    }
397
398 const fn is_hardware_format(format: AVPixelFormat) -> bool {
402 matches!(
403 format,
404 ff_sys::AVPixelFormat_AV_PIX_FMT_D3D11
405 | ff_sys::AVPixelFormat_AV_PIX_FMT_CUDA
406 | ff_sys::AVPixelFormat_AV_PIX_FMT_VAAPI
407 | ff_sys::AVPixelFormat_AV_PIX_FMT_VIDEOTOOLBOX
408 | ff_sys::AVPixelFormat_AV_PIX_FMT_QSV
409 | ff_sys::AVPixelFormat_AV_PIX_FMT_VDPAU
410 | ff_sys::AVPixelFormat_AV_PIX_FMT_DXVA2_VLD
411 | ff_sys::AVPixelFormat_AV_PIX_FMT_OPENCL
412 | ff_sys::AVPixelFormat_AV_PIX_FMT_MEDIACODEC
413 | ff_sys::AVPixelFormat_AV_PIX_FMT_VULKAN
414 )
415 }
416
    /// If the current frame lives in GPU memory, downloads it into a software
    /// frame and swaps it into `self.frame`; otherwise does nothing.
    ///
    /// Timing fields (pts/dts/duration/time_base) are copied manually because
    /// `av_hwframe_transfer_data` only moves pixel data.
    ///
    /// # Safety
    ///
    /// `self.frame` must hold a valid decoded frame.
    ///
    /// # Errors
    ///
    /// Returns `DecodeError::Ffmpeg` if allocation or the transfer fails.
    unsafe fn transfer_hardware_frame_if_needed(&mut self) -> Result<(), DecodeError> {
        let frame_format = unsafe { (*self.frame).format };

        // Software frames need no transfer.
        if !Self::is_hardware_format(frame_format) {
            return Ok(());
        }

        let sw_frame = unsafe { ff_sys::av_frame_alloc() };
        if sw_frame.is_null() {
            return Err(DecodeError::Ffmpeg {
                code: 0,
                message: "Failed to allocate software frame for hardware transfer".to_string(),
            });
        }

        let ret = unsafe {
            ff_sys::av_hwframe_transfer_data(
                sw_frame, self.frame, 0, )
        };

        if ret < 0 {
            // Free the scratch frame before propagating the error.
            unsafe {
                ff_sys::av_frame_free(&mut (sw_frame as *mut _));
            }
            return Err(DecodeError::Ffmpeg {
                code: ret,
                message: format!(
                    "Failed to transfer hardware frame to CPU memory: {}",
                    ff_sys::av_error_string(ret)
                ),
            });
        }

        // Preserve timing metadata that the transfer does not copy.
        unsafe {
            (*sw_frame).pts = (*self.frame).pts;
            (*sw_frame).pkt_dts = (*self.frame).pkt_dts;
            (*sw_frame).duration = (*self.frame).duration;
            (*sw_frame).time_base = (*self.frame).time_base;
        }

        // Replace the hardware frame's contents with the software copy, then
        // free the now-empty scratch frame.
        unsafe {
            ff_sys::av_frame_unref(self.frame);
            ff_sys::av_frame_move_ref(self.frame, sw_frame);
            ff_sys::av_frame_free(&mut (sw_frame as *mut _));
        }

        Ok(())
    }
485
    /// Opens `path`, selects the first video stream, sets up the decoder
    /// (optionally hardware-accelerated), and returns the decoder state
    /// together with the stream's metadata.
    ///
    /// Intermediate FFmpeg objects are held in RAII guards so any failure
    /// mid-construction frees everything allocated so far; on success the
    /// guards are disarmed via `into_raw` and ownership moves into `Self`.
    ///
    /// # Errors
    ///
    /// `DecodeError::Ffmpeg` for FFmpeg failures, `NoVideoStream` when the
    /// container has no video stream, `UnsupportedCodec` when no decoder is
    /// found, `HwAccelUnavailable` when an explicitly requested hardware
    /// backend cannot be initialized.
    pub(crate) fn new(
        path: &Path,
        output_format: Option<PixelFormat>,
        hardware_accel: HardwareAccel,
        thread_count: usize,
        frame_pool: Option<Arc<dyn FramePool>>,
    ) -> Result<(Self, VideoStreamInfo), DecodeError> {
        ff_sys::ensure_initialized();

        let format_ctx_guard = unsafe { AvFormatContextGuard::new(path)? };
        let format_ctx = format_ctx_guard.as_ptr();

        unsafe {
            ff_sys::avformat::find_stream_info(format_ctx).map_err(|e| DecodeError::Ffmpeg {
                code: e,
                message: format!("Failed to find stream info: {}", ff_sys::av_error_string(e)),
            })?;
        }

        let (stream_index, codec_id) =
            unsafe { Self::find_video_stream(format_ctx) }.ok_or_else(|| {
                DecodeError::NoVideoStream {
                    path: path.to_path_buf(),
                }
            })?;

        let codec = unsafe {
            ff_sys::avcodec::find_decoder(codec_id).ok_or_else(|| {
                DecodeError::UnsupportedCodec {
                    codec: format!("codec_id={codec_id:?}"),
                }
            })?
        };

        let codec_ctx_guard = unsafe { AvCodecContextGuard::new(codec)? };
        let codec_ctx = codec_ctx_guard.as_ptr();

        unsafe {
            // Copy the stream's codec parameters into the decoder context.
            let stream = (*format_ctx).streams.add(stream_index as usize);
            let codecpar = (*(*stream)).codecpar;
            ff_sys::avcodec::parameters_to_context(codec_ctx, codecpar).map_err(|e| {
                DecodeError::Ffmpeg {
                    code: e,
                    message: format!(
                        "Failed to copy codec parameters: {}",
                        ff_sys::av_error_string(e)
                    ),
                }
            })?;

            // 0 keeps FFmpeg's default threading behavior.
            if thread_count > 0 {
                (*codec_ctx).thread_count = thread_count as i32;
            }
        }

        // Must happen before open2 so the codec sees the hw device context.
        let (hw_device_ctx, active_hw_accel) =
            unsafe { Self::init_hardware_accel(codec_ctx, hardware_accel)? };

        unsafe {
            ff_sys::avcodec::open2(codec_ctx, codec, ptr::null_mut()).map_err(|e| {
                // Drop our extra hw-device ref; the guards free the contexts.
                if let Some(hw_ctx) = hw_device_ctx {
                    ff_sys::av_buffer_unref(&mut (hw_ctx as *mut _));
                }
                DecodeError::Ffmpeg {
                    code: e,
                    message: format!("Failed to open codec: {}", ff_sys::av_error_string(e)),
                }
            })?;
        }

        let stream_info =
            unsafe { Self::extract_stream_info(format_ctx, stream_index as i32, codec_ctx)? };

        let packet_guard = unsafe { AvPacketGuard::new()? };
        let frame_guard = unsafe { AvFrameGuard::new()? };

        // Everything succeeded: disarm the guards and take ownership.
        Ok((
            Self {
                format_ctx: format_ctx_guard.into_raw(),
                codec_ctx: codec_ctx_guard.into_raw(),
                stream_index: stream_index as i32,
                sws_ctx: None,
                output_format,
                eof: false,
                position: Duration::ZERO,
                packet: packet_guard.into_raw(),
                frame: frame_guard.into_raw(),
                thumbnail_sws_ctx: None,
                thumbnail_cache_key: None,
                hw_device_ctx,
                active_hw_accel,
                frame_pool,
            },
            stream_info,
        ))
    }
625
626 unsafe fn find_video_stream(format_ctx: *mut AVFormatContext) -> Option<(usize, AVCodecID)> {
636 unsafe {
638 let nb_streams = (*format_ctx).nb_streams as usize;
639
640 for i in 0..nb_streams {
641 let stream = (*format_ctx).streams.add(i);
642 let codecpar = (*(*stream)).codecpar;
643
644 if (*codecpar).codec_type == AVMediaType_AVMEDIA_TYPE_VIDEO {
645 return Some((i, (*codecpar).codec_id));
646 }
647 }
648
649 None
650 }
651 }
652
    /// Gathers the selected stream's metadata (dimensions, frame rate,
    /// duration, pixel format, color description, codec) into a
    /// `VideoStreamInfo`.
    ///
    /// # Safety
    ///
    /// `format_ctx` and `codec_ctx` must be valid, and `stream_index` must
    /// refer to an existing stream in `format_ctx`.
    unsafe fn extract_stream_info(
        format_ctx: *mut AVFormatContext,
        stream_index: i32,
        codec_ctx: *mut AVCodecContext,
    ) -> Result<VideoStreamInfo, DecodeError> {
        // Read all raw FFmpeg fields in one unsafe region, then convert below.
        let (
            width,
            height,
            fps_rational,
            duration_val,
            pix_fmt,
            color_space_val,
            color_range_val,
            color_primaries_val,
            codec_id,
        ) = unsafe {
            let stream = (*format_ctx).streams.add(stream_index as usize);
            let codecpar = (*(*stream)).codecpar;

            (
                (*codecpar).width as u32,
                (*codecpar).height as u32,
                (*(*stream)).avg_frame_rate,
                (*format_ctx).duration,
                (*codec_ctx).pix_fmt,
                (*codecpar).color_space,
                (*codecpar).color_range,
                (*codecpar).color_primaries,
                (*codecpar).codec_id,
            )
        };

        // A zero denominator means the container reported no usable rate.
        let frame_rate = if fps_rational.den != 0 {
            Rational::new(fps_rational.num as i32, fps_rational.den as i32)
        } else {
            log::warn!(
                "invalid frame rate, falling back to 30fps num={} den=0 fallback=30/1",
                fps_rational.num
            );
            Rational::new(30, 1)
        };

        // Container duration is in AV_TIME_BASE (microsecond) units.
        let duration = if duration_val > 0 {
            let duration_secs = duration_val as f64 / 1_000_000.0;
            Some(Duration::from_secs_f64(duration_secs))
        } else {
            None
        };

        let pixel_format = Self::convert_pixel_format(pix_fmt);

        let color_space = Self::convert_color_space(color_space_val);
        let color_range = Self::convert_color_range(color_range_val);
        let color_primaries = Self::convert_color_primaries(color_primaries_val);

        let codec = Self::convert_codec(codec_id);

        let mut builder = VideoStreamInfo::builder()
            .index(stream_index as u32)
            .codec(codec)
            .width(width)
            .height(height)
            .frame_rate(frame_rate)
            .pixel_format(pixel_format)
            .color_space(color_space)
            .color_range(color_range)
            .color_primaries(color_primaries);

        if let Some(d) = duration {
            builder = builder.duration(d);
        }

        Ok(builder.build())
    }
735
736 fn convert_pixel_format(fmt: AVPixelFormat) -> PixelFormat {
738 if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P {
739 PixelFormat::Yuv420p
740 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P {
741 PixelFormat::Yuv422p
742 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P {
743 PixelFormat::Yuv444p
744 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24 {
745 PixelFormat::Rgb24
746 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24 {
747 PixelFormat::Bgr24
748 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA {
749 PixelFormat::Rgba
750 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA {
751 PixelFormat::Bgra
752 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8 {
753 PixelFormat::Gray8
754 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_NV12 {
755 PixelFormat::Nv12
756 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_NV21 {
757 PixelFormat::Nv21
758 } else {
759 log::warn!(
760 "pixel_format unsupported, falling back to Yuv420p requested={fmt} fallback=Yuv420p"
761 );
762 PixelFormat::Yuv420p
763 }
764 }
765
766 fn convert_color_space(space: AVColorSpace) -> ColorSpace {
768 if space == ff_sys::AVColorSpace_AVCOL_SPC_BT709 {
769 ColorSpace::Bt709
770 } else if space == ff_sys::AVColorSpace_AVCOL_SPC_BT470BG
771 || space == ff_sys::AVColorSpace_AVCOL_SPC_SMPTE170M
772 {
773 ColorSpace::Bt601
774 } else if space == ff_sys::AVColorSpace_AVCOL_SPC_BT2020_NCL {
775 ColorSpace::Bt2020
776 } else {
777 log::warn!(
778 "color_space unsupported, falling back to Bt709 requested={space} fallback=Bt709"
779 );
780 ColorSpace::Bt709
781 }
782 }
783
784 fn convert_color_range(range: AVColorRange) -> ColorRange {
786 if range == ff_sys::AVColorRange_AVCOL_RANGE_JPEG {
787 ColorRange::Full
788 } else if range == ff_sys::AVColorRange_AVCOL_RANGE_MPEG {
789 ColorRange::Limited
790 } else {
791 log::warn!(
792 "color_range unsupported, falling back to Limited requested={range} fallback=Limited"
793 );
794 ColorRange::Limited
795 }
796 }
797
798 fn convert_color_primaries(primaries: AVColorPrimaries) -> ColorPrimaries {
800 if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT709 {
801 ColorPrimaries::Bt709
802 } else if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT470BG
803 || primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_SMPTE170M
804 {
805 ColorPrimaries::Bt601
806 } else if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT2020 {
807 ColorPrimaries::Bt2020
808 } else {
809 log::warn!(
810 "color_primaries unsupported, falling back to Bt709 requested={primaries} fallback=Bt709"
811 );
812 ColorPrimaries::Bt709
813 }
814 }
815
816 fn convert_codec(codec_id: AVCodecID) -> VideoCodec {
818 if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_H264 {
819 VideoCodec::H264
820 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_HEVC {
821 VideoCodec::H265
822 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_VP8 {
823 VideoCodec::Vp8
824 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_VP9 {
825 VideoCodec::Vp9
826 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_AV1 {
827 VideoCodec::Av1
828 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_MPEG4 {
829 VideoCodec::Mpeg4
830 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_PRORES {
831 VideoCodec::ProRes
832 } else {
833 log::warn!(
834 "video codec unsupported, falling back to H264 codec_id={codec_id} fallback=H264"
835 );
836 VideoCodec::H264
837 }
838 }
839
    /// Decodes and returns the next video frame, or `Ok(None)` at end of
    /// stream.
    ///
    /// Implements FFmpeg's send/receive loop: keep asking the decoder for a
    /// frame; on `EAGAIN`, demux the next packet and feed it in (packets from
    /// other streams are discarded); at container EOF, send a flush (null)
    /// packet and keep draining until the decoder itself reports EOF.
    ///
    /// Also updates `self.position` from the decoded frame's pts.
    ///
    /// # Errors
    ///
    /// `DecodeError::Ffmpeg` for demux/send failures, `DecodingFailed` for
    /// decoder errors.
    pub(crate) fn decode_one(&mut self) -> Result<Option<VideoFrame>, DecodeError> {
        if self.eof {
            return Ok(None);
        }

        unsafe {
            loop {
                let ret = ff_sys::avcodec_receive_frame(self.codec_ctx, self.frame);

                if ret == 0 {
                    // Got a frame: download from GPU if needed, then convert.
                    self.transfer_hardware_frame_if_needed()?;

                    let video_frame = self.convert_frame_to_video_frame()?;

                    // Track playback position from the frame's pts, converted
                    // through the stream's time base.
                    let pts = (*self.frame).pts;
                    if pts != ff_sys::AV_NOPTS_VALUE {
                        let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
                        let time_base = (*(*stream)).time_base;
                        let timestamp_secs =
                            pts as f64 * time_base.num as f64 / time_base.den as f64;
                        self.position = Duration::from_secs_f64(timestamp_secs);
                    }

                    return Ok(Some(video_frame));
                } else if ret == ff_sys::error_codes::EAGAIN {
                    // Decoder needs more input: demux the next packet.
                    let read_ret = ff_sys::av_read_frame(self.format_ctx, self.packet);

                    if read_ret == ff_sys::error_codes::EOF {
                        // Container exhausted: enter drain mode with a null
                        // packet, then keep receiving buffered frames.
                        ff_sys::avcodec_send_packet(self.codec_ctx, ptr::null());
                        self.eof = true;
                        continue;
                    } else if read_ret < 0 {
                        return Err(DecodeError::Ffmpeg {
                            code: read_ret,
                            message: format!(
                                "Failed to read frame: {}",
                                ff_sys::av_error_string(read_ret)
                            ),
                        });
                    }

                    if (*self.packet).stream_index == self.stream_index {
                        let send_ret = ff_sys::avcodec_send_packet(self.codec_ctx, self.packet);
                        // Unref before error handling so the packet is
                        // released on every path.
                        ff_sys::av_packet_unref(self.packet);

                        if send_ret < 0 && send_ret != ff_sys::error_codes::EAGAIN {
                            return Err(DecodeError::Ffmpeg {
                                code: send_ret,
                                message: format!(
                                    "Failed to send packet: {}",
                                    ff_sys::av_error_string(send_ret)
                                ),
                            });
                        }
                    } else {
                        // Packet belongs to another stream (audio, subtitles …).
                        ff_sys::av_packet_unref(self.packet);
                    }
                } else if ret == ff_sys::error_codes::EOF {
                    // Decoder fully drained.
                    self.eof = true;
                    return Ok(None);
                } else {
                    return Err(DecodeError::DecodingFailed {
                        timestamp: Some(self.position),
                        reason: ff_sys::av_error_string(ret),
                    });
                }
            }
        }
    }
927
928 unsafe fn convert_frame_to_video_frame(&mut self) -> Result<VideoFrame, DecodeError> {
930 unsafe {
932 let width = (*self.frame).width as u32;
933 let height = (*self.frame).height as u32;
934 let src_format = (*self.frame).format;
935
936 let dst_format = if let Some(fmt) = self.output_format {
938 Self::pixel_format_to_av(fmt)
939 } else {
940 src_format
941 };
942
943 let needs_conversion = src_format != dst_format;
945
946 if needs_conversion {
947 self.convert_with_sws(width, height, src_format, dst_format)
948 } else {
949 self.av_frame_to_video_frame(self.frame)
950 }
951 }
952 }
953
    /// Converts the current frame from `src_format` to `dst_format` at the
    /// same dimensions via swscale, then builds a `VideoFrame` from the result.
    ///
    /// The swscale context is created on first use and cached in
    /// `self.sws_ctx`. NOTE(review): the cache is keyed on nothing — if frame
    /// dimensions or formats could change mid-stream, the stale context would
    /// be reused; confirm dimensions are fixed for a stream.
    ///
    /// # Safety
    ///
    /// `self.frame` must hold a valid software frame matching `width`,
    /// `height` and `src_format`.
    ///
    /// # Errors
    ///
    /// `DecodeError::Ffmpeg` when context creation, buffer allocation, or
    /// scaling fails.
    unsafe fn convert_with_sws(
        &mut self,
        width: u32,
        height: u32,
        src_format: i32,
        dst_format: i32,
    ) -> Result<VideoFrame, DecodeError> {
        unsafe {
            if self.sws_ctx.is_none() {
                let ctx = ff_sys::swscale::get_context(
                    width as i32,
                    height as i32,
                    src_format,
                    width as i32,
                    height as i32,
                    dst_format,
                    ff_sys::swscale::scale_flags::BILINEAR,
                )
                .map_err(|e| DecodeError::Ffmpeg {
                    code: 0,
                    message: format!("Failed to create sws context: {e}"),
                })?;

                self.sws_ctx = Some(ctx);
            }

            let Some(sws_ctx) = self.sws_ctx else {
                return Err(DecodeError::Ffmpeg {
                    code: 0,
                    message: "SwsContext not initialized".to_string(),
                });
            };

            // Guard frees the destination frame on every exit path.
            let dst_frame_guard = AvFrameGuard::new()?;
            let dst_frame = dst_frame_guard.as_ptr();

            (*dst_frame).width = width as i32;
            (*dst_frame).height = height as i32;
            (*dst_frame).format = dst_format;

            let buffer_ret = ff_sys::av_frame_get_buffer(dst_frame, 0);
            if buffer_ret < 0 {
                return Err(DecodeError::Ffmpeg {
                    code: buffer_ret,
                    message: format!(
                        "Failed to allocate frame buffer: {}",
                        ff_sys::av_error_string(buffer_ret)
                    ),
                });
            }

            ff_sys::swscale::scale(
                sws_ctx,
                (*self.frame).data.as_ptr() as *const *const u8,
                (*self.frame).linesize.as_ptr(),
                0,
                height as i32,
                (*dst_frame).data.as_ptr() as *const *mut u8,
                (*dst_frame).linesize.as_ptr(),
            )
            .map_err(|e| DecodeError::Ffmpeg {
                code: 0,
                message: format!("Failed to scale frame: {e}"),
            })?;

            // Carry the presentation timestamp over to the converted frame.
            (*dst_frame).pts = (*self.frame).pts;

            let video_frame = self.av_frame_to_video_frame(dst_frame)?;

            Ok(video_frame)
        }
    }
1036
    /// Builds an owned `VideoFrame` from a raw `AVFrame` by copying its
    /// planes into tightly packed buffers and attaching a timestamp derived
    /// from the frame's pts and the stream's time base.
    ///
    /// Frames without a pts get `Timestamp::default()`.
    ///
    /// # Safety
    ///
    /// `frame` must point to a valid decoded software frame.
    unsafe fn av_frame_to_video_frame(
        &self,
        frame: *const AVFrame,
    ) -> Result<VideoFrame, DecodeError> {
        unsafe {
            let width = (*frame).width as u32;
            let height = (*frame).height as u32;
            let format = Self::convert_pixel_format((*frame).format);

            let pts = (*frame).pts;
            let timestamp = if pts != ff_sys::AV_NOPTS_VALUE {
                let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
                let time_base = (*(*stream)).time_base;
                Timestamp::new(
                    pts as i64,
                    Rational::new(time_base.num as i32, time_base.den as i32),
                )
            } else {
                Timestamp::default()
            };

            let (planes, strides) =
                self.extract_planes_and_strides(frame, width, height, format)?;

            VideoFrame::new(planes, strides, width, height, format, timestamp, false).map_err(|e| {
                DecodeError::Ffmpeg {
                    code: 0,
                    message: format!("Failed to create VideoFrame: {e}"),
                }
            })
        }
    }
1073
1074 fn allocate_buffer(&self, size: usize) -> PooledBuffer {
1084 if let Some(ref pool) = self.frame_pool
1085 && let Some(pooled_buffer) = pool.acquire(size)
1086 {
1087 return pooled_buffer;
1090 }
1091
1092 PooledBuffer::standalone(vec![0u8; size])
1094 }
1095
1096 unsafe fn extract_planes_and_strides(
1098 &self,
1099 frame: *const AVFrame,
1100 width: u32,
1101 height: u32,
1102 format: PixelFormat,
1103 ) -> Result<(Vec<PooledBuffer>, Vec<usize>), DecodeError> {
1104 const BYTES_PER_PIXEL_RGBA: usize = 4;
1106 const BYTES_PER_PIXEL_RGB24: usize = 3;
1107
1108 unsafe {
1110 let mut planes = Vec::new();
1111 let mut strides = Vec::new();
1112
1113 #[allow(clippy::match_same_arms)]
1114 match format {
1115 PixelFormat::Rgba | PixelFormat::Bgra | PixelFormat::Rgb24 | PixelFormat::Bgr24 => {
1116 let stride = (*frame).linesize[0] as usize;
1118 let bytes_per_pixel = if matches!(format, PixelFormat::Rgba | PixelFormat::Bgra)
1119 {
1120 BYTES_PER_PIXEL_RGBA
1121 } else {
1122 BYTES_PER_PIXEL_RGB24
1123 };
1124 let row_size = (width as usize) * bytes_per_pixel;
1125 let buffer_size = row_size * height as usize;
1126 let mut plane_data = self.allocate_buffer(buffer_size);
1127
1128 for y in 0..height as usize {
1129 let src_offset = y * stride;
1130 let dst_offset = y * row_size;
1131 let src_ptr = (*frame).data[0].add(src_offset);
1132 let plane_slice = plane_data.as_mut();
1133 std::ptr::copy_nonoverlapping(
1137 src_ptr,
1138 plane_slice[dst_offset..].as_mut_ptr(),
1139 row_size,
1140 );
1141 }
1142
1143 planes.push(plane_data);
1144 strides.push(row_size);
1145 }
1146 PixelFormat::Yuv420p | PixelFormat::Yuv422p | PixelFormat::Yuv444p => {
1147 let (chroma_width, chroma_height) = match format {
1149 PixelFormat::Yuv420p => (width / 2, height / 2),
1150 PixelFormat::Yuv422p => (width / 2, height),
1151 PixelFormat::Yuv444p => (width, height),
1152 _ => unreachable!(),
1153 };
1154
1155 let y_stride = width as usize;
1157 let y_size = y_stride * height as usize;
1158 let mut y_data = self.allocate_buffer(y_size);
1159 for y in 0..height as usize {
1160 let src_offset = y * (*frame).linesize[0] as usize;
1161 let dst_offset = y * y_stride;
1162 let src_ptr = (*frame).data[0].add(src_offset);
1163 let y_slice = y_data.as_mut();
1164 std::ptr::copy_nonoverlapping(
1167 src_ptr,
1168 y_slice[dst_offset..].as_mut_ptr(),
1169 width as usize,
1170 );
1171 }
1172 planes.push(y_data);
1173 strides.push(y_stride);
1174
1175 let u_stride = chroma_width as usize;
1177 let u_size = u_stride * chroma_height as usize;
1178 let mut u_data = self.allocate_buffer(u_size);
1179 for y in 0..chroma_height as usize {
1180 let src_offset = y * (*frame).linesize[1] as usize;
1181 let dst_offset = y * u_stride;
1182 let src_ptr = (*frame).data[1].add(src_offset);
1183 let u_slice = u_data.as_mut();
1184 std::ptr::copy_nonoverlapping(
1187 src_ptr,
1188 u_slice[dst_offset..].as_mut_ptr(),
1189 chroma_width as usize,
1190 );
1191 }
1192 planes.push(u_data);
1193 strides.push(u_stride);
1194
1195 let v_stride = chroma_width as usize;
1197 let v_size = v_stride * chroma_height as usize;
1198 let mut v_data = self.allocate_buffer(v_size);
1199 for y in 0..chroma_height as usize {
1200 let src_offset = y * (*frame).linesize[2] as usize;
1201 let dst_offset = y * v_stride;
1202 let src_ptr = (*frame).data[2].add(src_offset);
1203 let v_slice = v_data.as_mut();
1204 std::ptr::copy_nonoverlapping(
1207 src_ptr,
1208 v_slice[dst_offset..].as_mut_ptr(),
1209 chroma_width as usize,
1210 );
1211 }
1212 planes.push(v_data);
1213 strides.push(v_stride);
1214 }
1215 PixelFormat::Gray8 => {
1216 let stride = width as usize;
1218 let mut plane_data = self.allocate_buffer(stride * height as usize);
1219
1220 for y in 0..height as usize {
1221 let src_offset = y * (*frame).linesize[0] as usize;
1222 let dst_offset = y * stride;
1223 let src_ptr = (*frame).data[0].add(src_offset);
1224 let plane_slice = plane_data.as_mut();
1225 std::ptr::copy_nonoverlapping(
1228 src_ptr,
1229 plane_slice[dst_offset..].as_mut_ptr(),
1230 width as usize,
1231 );
1232 }
1233
1234 planes.push(plane_data);
1235 strides.push(stride);
1236 }
1237 PixelFormat::Nv12 | PixelFormat::Nv21 => {
1238 let uv_height = height / 2;
1240
1241 let y_stride = width as usize;
1243 let mut y_data = self.allocate_buffer(y_stride * height as usize);
1244 for y in 0..height as usize {
1245 let src_offset = y * (*frame).linesize[0] as usize;
1246 let dst_offset = y * y_stride;
1247 let src_ptr = (*frame).data[0].add(src_offset);
1248 let y_slice = y_data.as_mut();
1249 std::ptr::copy_nonoverlapping(
1252 src_ptr,
1253 y_slice[dst_offset..].as_mut_ptr(),
1254 width as usize,
1255 );
1256 }
1257 planes.push(y_data);
1258 strides.push(y_stride);
1259
1260 let uv_stride = width as usize;
1262 let mut uv_data = self.allocate_buffer(uv_stride * uv_height as usize);
1263 for y in 0..uv_height as usize {
1264 let src_offset = y * (*frame).linesize[1] as usize;
1265 let dst_offset = y * uv_stride;
1266 let src_ptr = (*frame).data[1].add(src_offset);
1267 let uv_slice = uv_data.as_mut();
1268 std::ptr::copy_nonoverlapping(
1271 src_ptr,
1272 uv_slice[dst_offset..].as_mut_ptr(),
1273 width as usize,
1274 );
1275 }
1276 planes.push(uv_data);
1277 strides.push(uv_stride);
1278 }
1279 _ => {
1280 return Err(DecodeError::Ffmpeg {
1281 code: 0,
1282 message: format!("Unsupported pixel format: {format:?}"),
1283 });
1284 }
1285 }
1286
1287 Ok((planes, strides))
1288 }
1289 }
1290
1291 fn pixel_format_to_av(format: PixelFormat) -> AVPixelFormat {
1293 match format {
1294 PixelFormat::Yuv420p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P,
1295 PixelFormat::Yuv422p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P,
1296 PixelFormat::Yuv444p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P,
1297 PixelFormat::Rgb24 => ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24,
1298 PixelFormat::Bgr24 => ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24,
1299 PixelFormat::Rgba => ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA,
1300 PixelFormat::Bgra => ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA,
1301 PixelFormat::Gray8 => ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8,
1302 PixelFormat::Nv12 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV12,
1303 PixelFormat::Nv21 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV21,
1304 _ => {
1305 log::warn!(
1306 "pixel_format has no AV mapping, falling back to Yuv420p format={format:?} fallback=AV_PIX_FMT_YUV420P"
1307 );
1308 ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P
1309 }
1310 }
1311 }
1312
    /// Presentation time of the most recently decoded frame.
    pub(crate) fn position(&self) -> Duration {
        self.position
    }
1317
    /// Whether the demuxer and decoder have been fully drained.
    pub(crate) fn is_eof(&self) -> bool {
        self.eof
    }
1322
    /// Converts a wall-clock position into a pts in the stream's time base.
    ///
    /// NOTE(review): assumes `time_base.num` is non-zero; a zero numerator
    /// would produce an infinite f64 that saturates on the cast — confirm
    /// FFmpeg always provides a valid time base for selected streams.
    fn duration_to_pts(&self, duration: Duration) -> i64 {
        let time_base = unsafe {
            let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
            (*(*stream)).time_base
        };

        // pts = seconds * (den / num), i.e. seconds divided by the time base.
        let time_base_f64 = time_base.den as f64 / time_base.num as f64;
        (duration.as_secs_f64() * time_base_f64) as i64
    }
1351
1352 #[allow(dead_code)]
1370 fn pts_to_duration(&self, pts: i64) -> Duration {
1371 unsafe {
1373 let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
1374 let time_base = (*(*stream)).time_base;
1375
1376 let duration_secs = pts as f64 * time_base.num as f64 / time_base.den as f64;
1378 Duration::from_secs_f64(duration_secs)
1379 }
1380 }
1381
    /// Seeks the stream to `position`.
    ///
    /// Sequence: convert the target to a pts, drop any pending packet/frame,
    /// perform a backward keyframe seek, flush the decoder, drain any frames
    /// still buffered, then refine: `SeekMode::Exact` decodes forward until
    /// the exact target, while the keyframe mode only decodes past frames
    /// that are more than `KEYFRAME_SEEK_TOLERANCE_SECS` before the target.
    ///
    /// # Errors
    ///
    /// `DecodeError::SeekFailed` when the demuxer seek fails (or, for exact
    /// mode, when the stream ends before the target).
    pub(crate) fn seek(
        &mut self,
        position: Duration,
        mode: crate::SeekMode,
    ) -> Result<(), DecodeError> {
        use crate::SeekMode;

        let timestamp = self.duration_to_pts(position);

        // BACKWARD lands on the keyframe at or before the target.
        let flags = ff_sys::avformat::seek_flags::BACKWARD;

        // Discard any in-flight packet/frame from before the seek.
        unsafe {
            ff_sys::av_packet_unref(self.packet);
            ff_sys::av_frame_unref(self.frame);
        }

        unsafe {
            ff_sys::avformat::seek_frame(
                self.format_ctx,
                self.stream_index as i32,
                timestamp,
                flags,
            )
            .map_err(|e| DecodeError::SeekFailed {
                target: position,
                reason: ff_sys::av_error_string(e),
            })?;
        }

        // Reset decoder state so stale reference frames are not used.
        unsafe {
            ff_sys::avcodec::flush_buffers(self.codec_ctx);
        }

        // Drain any frames the decoder still has buffered from before the seek.
        unsafe {
            loop {
                let ret = ff_sys::avcodec_receive_frame(self.codec_ctx, self.frame);
                if ret == ff_sys::error_codes::EAGAIN || ret == ff_sys::error_codes::EOF {
                    break;
                } else if ret == 0 {
                    ff_sys::av_frame_unref(self.frame);
                } else {
                    break;
                }
            }
        }

        // Seeking backwards can resurrect a stream that had hit EOF.
        self.eof = false;
        if mode == SeekMode::Exact {
            self.skip_to_exact(position)?;
        } else {
            // Accept the first frame within the tolerance window before the
            // target instead of decoding all the way to it.
            let tolerance = Duration::from_secs(KEYFRAME_SEEK_TOLERANCE_SECS);
            let min_position = position.saturating_sub(tolerance);

            while let Some(frame) = self.decode_one()? {
                let frame_time = frame.timestamp().as_duration();
                if frame_time >= min_position {
                    break;
                }
            }
        }

        Ok(())
    }
1513
1514 fn skip_to_exact(&mut self, target: Duration) -> Result<(), DecodeError> {
1535 loop {
1536 match self.decode_one()? {
1537 Some(frame) => {
1538 let frame_time = frame.timestamp().as_duration();
1539 if frame_time >= target {
1540 break;
1543 }
1544 }
1546 None => {
1547 return Err(DecodeError::SeekFailed {
1549 target,
1550 reason: "Reached end of stream before target position".to_string(),
1551 });
1552 }
1553 }
1554 }
1555 Ok(())
1556 }
1557
1558 pub(crate) fn flush(&mut self) {
1563 unsafe {
1565 ff_sys::avcodec::flush_buffers(self.codec_ctx);
1566 }
1567 self.eof = false;
1568 }
1569
1570 pub(crate) fn scale_frame(
1609 &mut self,
1610 frame: &VideoFrame,
1611 target_width: u32,
1612 target_height: u32,
1613 ) -> Result<VideoFrame, DecodeError> {
1614 let src_width = frame.width();
1615 let src_height = frame.height();
1616 let src_format = frame.format();
1617
1618 let src_aspect = src_width as f64 / src_height as f64;
1620 let target_aspect = target_width as f64 / target_height as f64;
1621
1622 let (scaled_width, scaled_height) = if src_aspect > target_aspect {
1623 let height = (target_width as f64 / src_aspect).round() as u32;
1625 (target_width, height)
1626 } else {
1627 let width = (target_height as f64 * src_aspect).round() as u32;
1629 (width, target_height)
1630 };
1631
1632 let av_format = Self::pixel_format_to_av(src_format);
1634
1635 let cache_key = (
1637 src_width,
1638 src_height,
1639 scaled_width,
1640 scaled_height,
1641 av_format,
1642 );
1643
1644 unsafe {
1646 let (sws_ctx, is_cached) = if let (Some(cached_ctx), Some(cached_key)) =
1648 (self.thumbnail_sws_ctx, self.thumbnail_cache_key)
1649 {
1650 if cached_key == cache_key {
1651 (cached_ctx, true)
1653 } else {
1654 ff_sys::swscale::free_context(cached_ctx);
1656 self.thumbnail_sws_ctx = None;
1658 self.thumbnail_cache_key = None;
1659
1660 let new_ctx = ff_sys::swscale::get_context(
1661 src_width as i32,
1662 src_height as i32,
1663 av_format,
1664 scaled_width as i32,
1665 scaled_height as i32,
1666 av_format,
1667 ff_sys::swscale::scale_flags::BILINEAR,
1668 )
1669 .map_err(|e| DecodeError::Ffmpeg {
1670 code: 0,
1671 message: format!("Failed to create scaling context: {e}"),
1672 })?;
1673
1674 (new_ctx, false)
1676 }
1677 } else {
1678 let new_ctx = ff_sys::swscale::get_context(
1680 src_width as i32,
1681 src_height as i32,
1682 av_format,
1683 scaled_width as i32,
1684 scaled_height as i32,
1685 av_format,
1686 ff_sys::swscale::scale_flags::BILINEAR,
1687 )
1688 .map_err(|e| DecodeError::Ffmpeg {
1689 code: 0,
1690 message: format!("Failed to create scaling context: {e}"),
1691 })?;
1692
1693 (new_ctx, false)
1695 };
1696
1697 let src_frame_guard = AvFrameGuard::new()?;
1699 let src_frame = src_frame_guard.as_ptr();
1700
1701 (*src_frame).width = src_width as i32;
1702 (*src_frame).height = src_height as i32;
1703 (*src_frame).format = av_format;
1704
1705 let planes = frame.planes();
1707 let strides = frame.strides();
1708
1709 for (i, plane_data) in planes.iter().enumerate() {
1710 if i >= ff_sys::AV_NUM_DATA_POINTERS as usize {
1711 break;
1712 }
1713 (*src_frame).data[i] = plane_data.as_ref().as_ptr().cast_mut();
1714 (*src_frame).linesize[i] = strides[i] as i32;
1715 }
1716
1717 let dst_frame_guard = AvFrameGuard::new()?;
1719 let dst_frame = dst_frame_guard.as_ptr();
1720
1721 (*dst_frame).width = scaled_width as i32;
1722 (*dst_frame).height = scaled_height as i32;
1723 (*dst_frame).format = av_format;
1724
1725 let buffer_ret = ff_sys::av_frame_get_buffer(dst_frame, 0);
1727 if buffer_ret < 0 {
1728 if !is_cached {
1730 ff_sys::swscale::free_context(sws_ctx);
1731 }
1732 return Err(DecodeError::Ffmpeg {
1733 code: buffer_ret,
1734 message: format!(
1735 "Failed to allocate destination frame buffer: {}",
1736 ff_sys::av_error_string(buffer_ret)
1737 ),
1738 });
1739 }
1740
1741 let scale_result = ff_sys::swscale::scale(
1743 sws_ctx,
1744 (*src_frame).data.as_ptr() as *const *const u8,
1745 (*src_frame).linesize.as_ptr(),
1746 0,
1747 src_height as i32,
1748 (*dst_frame).data.as_ptr() as *const *mut u8,
1749 (*dst_frame).linesize.as_ptr(),
1750 );
1751
1752 if let Err(e) = scale_result {
1753 if !is_cached {
1755 ff_sys::swscale::free_context(sws_ctx);
1756 }
1757 return Err(DecodeError::Ffmpeg {
1758 code: 0,
1759 message: format!("Failed to scale frame: {e}"),
1760 });
1761 }
1762
1763 if !is_cached {
1765 self.thumbnail_sws_ctx = Some(sws_ctx);
1766 self.thumbnail_cache_key = Some(cache_key);
1767 }
1768
1769 (*dst_frame).pts = frame.timestamp().pts();
1771
1772 let video_frame = self.av_frame_to_video_frame(dst_frame)?;
1774
1775 Ok(video_frame)
1776 }
1777 }
1778}
1779
impl Drop for VideoDecoderInner {
    fn drop(&mut self) {
        // Release FFmpeg resources. Order matters: scaling/hardware contexts
        // and per-frame/packet objects are released before the codec context,
        // and the format context (which owns the streams) is closed last.

        if let Some(sws_ctx) = self.sws_ctx {
            unsafe {
                ff_sys::swscale::free_context(sws_ctx);
            }
        }

        // The thumbnail scaler cache (see scale_frame) has its own context.
        if let Some(thumbnail_ctx) = self.thumbnail_sws_ctx {
            unsafe {
                ff_sys::swscale::free_context(thumbnail_ctx);
            }
        }

        if let Some(hw_ctx) = self.hw_device_ctx {
            // Drops our reference; the device context itself is freed once the
            // last reference (possibly still held by the codec context) goes.
            unsafe {
                ff_sys::av_buffer_unref(&mut (hw_ctx as *mut _));
            }
        }

        if !self.frame.is_null() {
            unsafe {
                ff_sys::av_frame_free(&mut (self.frame as *mut _));
            }
        }

        if !self.packet.is_null() {
            unsafe {
                ff_sys::av_packet_free(&mut (self.packet as *mut _));
            }
        }

        if !self.codec_ctx.is_null() {
            unsafe {
                ff_sys::avcodec::free_context(&mut (self.codec_ctx as *mut _));
            }
        }

        if !self.format_ctx.is_null() {
            unsafe {
                ff_sys::avformat::close_input(&mut (self.format_ctx as *mut _));
            }
        }
    }
}
1838
// SAFETY: the raw FFmpeg pointers are owned exclusively by this struct (they
// are freed only in Drop) and all mutation goes through &mut self, so moving
// the decoder to another thread does not create aliased access.
// NOTE(review): assumes none of the FFmpeg contexts used here require thread
// affinity — confirm for the hardware-acceleration backends.
unsafe impl Send for VideoDecoderInner {}
1842
#[cfg(test)]
mod tests {
    //! Unit tests for the pure, side-effect-free conversion helpers on
    //! [`VideoDecoderInner`]: FFmpeg enum values <-> crate enums, plus the
    //! hardware-accel -> AVHWDeviceType mapping. No decoder instance or
    //! media file is needed.

    use ff_format::PixelFormat;
    use ff_format::codec::VideoCodec;
    use ff_format::color::{ColorPrimaries, ColorRange, ColorSpace};

    use crate::HardwareAccel;

    use super::VideoDecoderInner;

    // --- Pixel format: AVPixelFormat -> PixelFormat ---

    #[test]
    fn pixel_format_yuv420p() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P),
            PixelFormat::Yuv420p
        );
    }

    #[test]
    fn pixel_format_yuv422p() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P),
            PixelFormat::Yuv422p
        );
    }

    #[test]
    fn pixel_format_yuv444p() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P),
            PixelFormat::Yuv444p
        );
    }

    #[test]
    fn pixel_format_rgb24() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24),
            PixelFormat::Rgb24
        );
    }

    #[test]
    fn pixel_format_bgr24() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24),
            PixelFormat::Bgr24
        );
    }

    #[test]
    fn pixel_format_rgba() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA),
            PixelFormat::Rgba
        );
    }

    #[test]
    fn pixel_format_bgra() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA),
            PixelFormat::Bgra
        );
    }

    #[test]
    fn pixel_format_gray8() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8),
            PixelFormat::Gray8
        );
    }

    #[test]
    fn pixel_format_nv12() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_NV12),
            PixelFormat::Nv12
        );
    }

    #[test]
    fn pixel_format_nv21() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_NV21),
            PixelFormat::Nv21
        );
    }

    // Unknown inputs fall back to a conservative default rather than failing.
    #[test]
    fn pixel_format_unknown_falls_back_to_yuv420p() {
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(ff_sys::AVPixelFormat_AV_PIX_FMT_NONE),
            PixelFormat::Yuv420p
        );
    }

    // --- Color space: AVColorSpace -> ColorSpace ---

    #[test]
    fn color_space_bt709() {
        assert_eq!(
            VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_BT709),
            ColorSpace::Bt709
        );
    }

    // Both BT.470BG and SMPTE 170M map onto the crate's single Bt601 variant.
    #[test]
    fn color_space_bt470bg_yields_bt601() {
        assert_eq!(
            VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_BT470BG),
            ColorSpace::Bt601
        );
    }

    #[test]
    fn color_space_smpte170m_yields_bt601() {
        assert_eq!(
            VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_SMPTE170M),
            ColorSpace::Bt601
        );
    }

    #[test]
    fn color_space_bt2020_ncl() {
        assert_eq!(
            VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_BT2020_NCL),
            ColorSpace::Bt2020
        );
    }

    #[test]
    fn color_space_unknown_falls_back_to_bt709() {
        assert_eq!(
            VideoDecoderInner::convert_color_space(ff_sys::AVColorSpace_AVCOL_SPC_UNSPECIFIED),
            ColorSpace::Bt709
        );
    }

    // --- Color range: AVColorRange -> ColorRange ---

    #[test]
    fn color_range_jpeg_yields_full() {
        assert_eq!(
            VideoDecoderInner::convert_color_range(ff_sys::AVColorRange_AVCOL_RANGE_JPEG),
            ColorRange::Full
        );
    }

    #[test]
    fn color_range_mpeg_yields_limited() {
        assert_eq!(
            VideoDecoderInner::convert_color_range(ff_sys::AVColorRange_AVCOL_RANGE_MPEG),
            ColorRange::Limited
        );
    }

    // Unspecified range defaults to Limited (the common case for video).
    #[test]
    fn color_range_unknown_falls_back_to_limited() {
        assert_eq!(
            VideoDecoderInner::convert_color_range(ff_sys::AVColorRange_AVCOL_RANGE_UNSPECIFIED),
            ColorRange::Limited
        );
    }

    // --- Color primaries: AVColorPrimaries -> ColorPrimaries ---

    #[test]
    fn color_primaries_bt709() {
        assert_eq!(
            VideoDecoderInner::convert_color_primaries(ff_sys::AVColorPrimaries_AVCOL_PRI_BT709),
            ColorPrimaries::Bt709
        );
    }

    #[test]
    fn color_primaries_bt470bg_yields_bt601() {
        assert_eq!(
            VideoDecoderInner::convert_color_primaries(ff_sys::AVColorPrimaries_AVCOL_PRI_BT470BG),
            ColorPrimaries::Bt601
        );
    }

    #[test]
    fn color_primaries_smpte170m_yields_bt601() {
        assert_eq!(
            VideoDecoderInner::convert_color_primaries(
                ff_sys::AVColorPrimaries_AVCOL_PRI_SMPTE170M
            ),
            ColorPrimaries::Bt601
        );
    }

    #[test]
    fn color_primaries_bt2020() {
        assert_eq!(
            VideoDecoderInner::convert_color_primaries(ff_sys::AVColorPrimaries_AVCOL_PRI_BT2020),
            ColorPrimaries::Bt2020
        );
    }

    #[test]
    fn color_primaries_unknown_falls_back_to_bt709() {
        assert_eq!(
            VideoDecoderInner::convert_color_primaries(
                ff_sys::AVColorPrimaries_AVCOL_PRI_UNSPECIFIED
            ),
            ColorPrimaries::Bt709
        );
    }

    // --- Codec id: AVCodecID -> VideoCodec ---

    #[test]
    fn codec_h264() {
        assert_eq!(
            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_H264),
            VideoCodec::H264
        );
    }

    #[test]
    fn codec_hevc_yields_h265() {
        assert_eq!(
            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_HEVC),
            VideoCodec::H265
        );
    }

    #[test]
    fn codec_vp8() {
        assert_eq!(
            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_VP8),
            VideoCodec::Vp8
        );
    }

    #[test]
    fn codec_vp9() {
        assert_eq!(
            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_VP9),
            VideoCodec::Vp9
        );
    }

    #[test]
    fn codec_av1() {
        assert_eq!(
            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_AV1),
            VideoCodec::Av1
        );
    }

    #[test]
    fn codec_mpeg4() {
        assert_eq!(
            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_MPEG4),
            VideoCodec::Mpeg4
        );
    }

    #[test]
    fn codec_prores() {
        assert_eq!(
            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_PRORES),
            VideoCodec::ProRes
        );
    }

    #[test]
    fn codec_unknown_falls_back_to_h264() {
        assert_eq!(
            VideoDecoderInner::convert_codec(ff_sys::AVCodecID_AV_CODEC_ID_NONE),
            VideoCodec::H264
        );
    }

    // --- Hardware accel: HardwareAccel -> Option<AVHWDeviceType> ---
    // Auto and None both yield no explicit device type.

    #[test]
    fn hw_accel_auto_yields_none() {
        assert_eq!(
            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Auto),
            None
        );
    }

    #[test]
    fn hw_accel_none_yields_none() {
        assert_eq!(
            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::None),
            None
        );
    }

    #[test]
    fn hw_accel_nvdec_yields_cuda() {
        assert_eq!(
            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Nvdec),
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_CUDA)
        );
    }

    #[test]
    fn hw_accel_qsv_yields_qsv() {
        assert_eq!(
            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Qsv),
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_QSV)
        );
    }

    // AMF decoding is routed through D3D11VA rather than a dedicated type.
    #[test]
    fn hw_accel_amf_yields_d3d11va() {
        assert_eq!(
            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Amf),
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_D3D11VA)
        );
    }

    #[test]
    fn hw_accel_videotoolbox() {
        assert_eq!(
            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::VideoToolbox),
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VIDEOTOOLBOX)
        );
    }

    #[test]
    fn hw_accel_vaapi() {
        assert_eq!(
            VideoDecoderInner::hw_accel_to_device_type(HardwareAccel::Vaapi),
            Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VAAPI)
        );
    }

    // --- Round trips: PixelFormat -> AVPixelFormat -> PixelFormat ---
    // Verifies pixel_format_to_av and convert_pixel_format stay inverses
    // for every directly supported format.

    #[test]
    fn pixel_format_to_av_round_trip_yuv420p() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Yuv420p);
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(av),
            PixelFormat::Yuv420p
        );
    }

    #[test]
    fn pixel_format_to_av_round_trip_yuv422p() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Yuv422p);
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(av),
            PixelFormat::Yuv422p
        );
    }

    #[test]
    fn pixel_format_to_av_round_trip_yuv444p() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Yuv444p);
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(av),
            PixelFormat::Yuv444p
        );
    }

    #[test]
    fn pixel_format_to_av_round_trip_rgb24() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Rgb24);
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(av),
            PixelFormat::Rgb24
        );
    }

    #[test]
    fn pixel_format_to_av_round_trip_bgr24() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Bgr24);
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(av),
            PixelFormat::Bgr24
        );
    }

    #[test]
    fn pixel_format_to_av_round_trip_rgba() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Rgba);
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(av),
            PixelFormat::Rgba
        );
    }

    #[test]
    fn pixel_format_to_av_round_trip_bgra() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Bgra);
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(av),
            PixelFormat::Bgra
        );
    }

    #[test]
    fn pixel_format_to_av_round_trip_gray8() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Gray8);
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(av),
            PixelFormat::Gray8
        );
    }

    #[test]
    fn pixel_format_to_av_round_trip_nv12() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Nv12);
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(av),
            PixelFormat::Nv12
        );
    }

    #[test]
    fn pixel_format_to_av_round_trip_nv21() {
        let av = VideoDecoderInner::pixel_format_to_av(PixelFormat::Nv21);
        assert_eq!(
            VideoDecoderInner::convert_pixel_format(av),
            PixelFormat::Nv21
        );
    }

    // Formats without a direct mapping (e.g. 10-bit) collapse to YUV420P.
    #[test]
    fn pixel_format_to_av_unknown_falls_back_to_yuv420p_av() {
        assert_eq!(
            VideoDecoderInner::pixel_format_to_av(PixelFormat::Yuv420p10le),
            ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P
        );
    }
}