1#![allow(unsafe_code)]
8#![allow(clippy::similar_names)]
10#![allow(clippy::too_many_lines)]
11#![allow(clippy::cast_sign_loss)]
12#![allow(clippy::cast_possible_truncation)]
13#![allow(clippy::cast_possible_wrap)]
14#![allow(clippy::module_name_repetitions)]
15#![allow(clippy::match_same_arms)]
16#![allow(clippy::ptr_as_ptr)]
17#![allow(clippy::doc_markdown)]
18#![allow(clippy::unnecessary_cast)]
19#![allow(clippy::if_not_else)]
20#![allow(clippy::unnecessary_wraps)]
21#![allow(clippy::cast_precision_loss)]
22#![allow(clippy::if_same_then_else)]
23#![allow(clippy::cast_lossless)]
24
25use std::path::Path;
26use std::ptr;
27use std::sync::Arc;
28use std::time::Duration;
29
30use ff_format::PooledBuffer;
31use ff_format::codec::VideoCodec;
32use ff_format::color::{ColorPrimaries, ColorRange, ColorSpace};
33use ff_format::time::{Rational, Timestamp};
34use ff_format::{PixelFormat, VideoFrame, VideoStreamInfo};
35use ff_sys::{
36 AVBufferRef, AVCodecContext, AVCodecID, AVColorPrimaries, AVColorRange, AVColorSpace,
37 AVFormatContext, AVFrame, AVHWDeviceType, AVMediaType_AVMEDIA_TYPE_VIDEO, AVPacket,
38 AVPixelFormat, SwsContext,
39};
40
41use crate::HardwareAccel;
42use crate::error::DecodeError;
43use crate::pool::FramePool;
44
/// Tolerance window for keyframe-mode seeks: after seeking to the preceding
/// keyframe, frames within this many seconds below the requested position are
/// accepted as "close enough" (see `VideoDecoderInner::seek`).
const KEYFRAME_SEEK_TOLERANCE_SECS: u64 = 1;
51
/// RAII guard owning a raw `AVFormatContext` pointer.
///
/// Closes the input on drop unless ownership is released via `into_raw`.
struct AvFormatContextGuard(*mut AVFormatContext);

impl AvFormatContextGuard {
    /// Opens `path` as a demuxer input and wraps the resulting context.
    ///
    /// # Safety
    /// FFmpeg must be initialized before calling.
    unsafe fn new(path: &Path) -> Result<Self, DecodeError> {
        // SAFETY: open_input allocates and returns an owned context pointer.
        let format_ctx = unsafe {
            ff_sys::avformat::open_input(path).map_err(|e| {
                DecodeError::Ffmpeg(format!(
                    "Failed to open file: {}",
                    ff_sys::av_error_string(e)
                ))
            })?
        };
        Ok(Self(format_ctx))
    }

    /// Borrows the raw pointer; ownership stays with the guard.
    const fn as_ptr(&self) -> *mut AVFormatContext {
        self.0
    }

    /// Releases ownership to the caller; the destructor is skipped.
    fn into_raw(self) -> *mut AVFormatContext {
        let ptr = self.0;
        std::mem::forget(self);
        ptr
    }
}

impl Drop for AvFormatContextGuard {
    fn drop(&mut self) {
        if !self.0.is_null() {
            // SAFETY: the guard still owns the context; close_input frees it.
            // The null-out write lands on the temporary, which is fine here.
            unsafe {
                ff_sys::avformat::close_input(&mut (self.0 as *mut _));
            }
        }
    }
}
97
/// RAII guard owning a raw `AVCodecContext` pointer.
///
/// Frees the context on drop unless ownership is released via `into_raw`.
struct AvCodecContextGuard(*mut AVCodecContext);

impl AvCodecContextGuard {
    /// Allocates a codec context preconfigured for `codec`.
    ///
    /// # Safety
    /// `codec` must be a valid codec pointer (or null, per FFmpeg's contract
    /// for `avcodec_alloc_context3`).
    unsafe fn new(codec: *const ff_sys::AVCodec) -> Result<Self, DecodeError> {
        // SAFETY: alloc_context3 returns an owned context pointer on success.
        let codec_ctx = unsafe {
            ff_sys::avcodec::alloc_context3(codec).map_err(|e| {
                DecodeError::Ffmpeg(format!("Failed to allocate codec context: {e}"))
            })?
        };
        Ok(Self(codec_ctx))
    }

    /// Borrows the raw pointer; ownership stays with the guard.
    const fn as_ptr(&self) -> *mut AVCodecContext {
        self.0
    }

    /// Releases ownership to the caller; the destructor is skipped.
    fn into_raw(self) -> *mut AVCodecContext {
        let ptr = self.0;
        std::mem::forget(self);
        ptr
    }
}

impl Drop for AvCodecContextGuard {
    fn drop(&mut self) {
        if !self.0.is_null() {
            // SAFETY: the guard still owns the context; free_context releases
            // it and everything it references (e.g. an attached hw_device_ctx).
            unsafe {
                ff_sys::avcodec::free_context(&mut (self.0 as *mut _));
            }
        }
    }
}
140
141struct AvPacketGuard(*mut AVPacket);
143
144impl AvPacketGuard {
145 unsafe fn new() -> Result<Self, DecodeError> {
151 let packet = unsafe { ff_sys::av_packet_alloc() };
153 if packet.is_null() {
154 return Err(DecodeError::Ffmpeg("Failed to allocate packet".to_string()));
155 }
156 Ok(Self(packet))
157 }
158
159 #[allow(dead_code)]
161 const fn as_ptr(&self) -> *mut AVPacket {
162 self.0
163 }
164
165 fn into_raw(self) -> *mut AVPacket {
167 let ptr = self.0;
168 std::mem::forget(self);
169 ptr
170 }
171}
172
173impl Drop for AvPacketGuard {
174 fn drop(&mut self) {
175 if !self.0.is_null() {
176 unsafe {
178 ff_sys::av_packet_free(&mut (self.0 as *mut _));
179 }
180 }
181 }
182}
183
184struct AvFrameGuard(*mut AVFrame);
186
187impl AvFrameGuard {
188 unsafe fn new() -> Result<Self, DecodeError> {
194 let frame = unsafe { ff_sys::av_frame_alloc() };
196 if frame.is_null() {
197 return Err(DecodeError::Ffmpeg("Failed to allocate frame".to_string()));
198 }
199 Ok(Self(frame))
200 }
201
202 const fn as_ptr(&self) -> *mut AVFrame {
204 self.0
205 }
206
207 fn into_raw(self) -> *mut AVFrame {
209 let ptr = self.0;
210 std::mem::forget(self);
211 ptr
212 }
213}
214
215impl Drop for AvFrameGuard {
216 fn drop(&mut self) {
217 if !self.0.is_null() {
218 unsafe {
220 ff_sys::av_frame_free(&mut (self.0 as *mut _));
221 }
222 }
223 }
224}
225
/// Internal decoder state: owns the raw FFmpeg contexts plus reusable
/// packet/frame scratch buffers for pulling frames out of one video stream.
pub(crate) struct VideoDecoderInner {
    // Owned demuxer context for the opened file.
    format_ctx: *mut AVFormatContext,
    // Owned decoder context for the selected video stream.
    codec_ctx: *mut AVCodecContext,
    // Index of the chosen video stream within the container.
    stream_index: i32,
    // Lazily-created scaler, used when `output_format` forces a conversion.
    sws_ctx: Option<*mut SwsContext>,
    // Caller-requested output pixel format; `None` keeps the decoded format.
    output_format: Option<PixelFormat>,
    // Set once the decoder has been fully drained.
    eof: bool,
    // Presentation time of the most recently decoded frame.
    position: Duration,
    // Reusable packet buffer for demuxed data.
    packet: *mut AVPacket,
    // Reusable frame buffer receiving decoder output.
    frame: *mut AVFrame,
    // Cached scaler for thumbnail generation (`scale_frame`).
    thumbnail_sws_ctx: Option<*mut SwsContext>,
    // (src_w, src_h, dst_w, dst_h, av_format) key for the cached scaler above.
    thumbnail_cache_key: Option<(u32, u32, u32, u32, AVPixelFormat)>,
    // Decoder-owned reference to the HW device context, if HW decode is active.
    hw_device_ctx: Option<*mut AVBufferRef>,
    // Backend actually in use (may differ from the requested one under `Auto`).
    active_hw_accel: HardwareAccel,
    // Optional pool for recycling plane buffers across frames.
    frame_pool: Option<Arc<dyn FramePool>>,
}
260
261impl VideoDecoderInner {
262 fn hw_accel_to_device_type(accel: HardwareAccel) -> Option<AVHWDeviceType> {
266 match accel {
267 HardwareAccel::Auto => None,
268 HardwareAccel::None => None,
269 HardwareAccel::Nvdec => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_CUDA),
270 HardwareAccel::Qsv => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_QSV),
271 HardwareAccel::Amf => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_D3D11VA), HardwareAccel::VideoToolbox => {
273 Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VIDEOTOOLBOX)
274 }
275 HardwareAccel::Vaapi => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VAAPI),
276 }
277 }
278
    /// Probe order used when the caller requests [`HardwareAccel::Auto`]:
    /// the first backend whose device context initializes successfully wins.
    const fn hw_accel_auto_priority() -> &'static [HardwareAccel] {
        &[
            HardwareAccel::Nvdec,
            HardwareAccel::Qsv,
            HardwareAccel::VideoToolbox,
            HardwareAccel::Vaapi,
            HardwareAccel::Amf,
        ]
    }
290
291 unsafe fn init_hardware_accel(
307 codec_ctx: *mut AVCodecContext,
308 accel: HardwareAccel,
309 ) -> Result<(Option<*mut AVBufferRef>, HardwareAccel), DecodeError> {
310 match accel {
311 HardwareAccel::Auto => {
312 for &hw_type in Self::hw_accel_auto_priority() {
314 if let Ok((Some(ctx), active)) =
316 unsafe { Self::try_init_hw_device(codec_ctx, hw_type) }
317 {
318 return Ok((Some(ctx), active));
319 }
320 }
322 Ok((None, HardwareAccel::None))
324 }
325 HardwareAccel::None => {
326 Ok((None, HardwareAccel::None))
328 }
329 _ => {
330 unsafe { Self::try_init_hw_device(codec_ctx, accel) }
333 }
334 }
335 }
336
    /// Attempts to create a hardware device context for `accel` and attach it
    /// to `codec_ctx`.
    ///
    /// On success returns a *separate* buffer reference owned by the decoder;
    /// the codec context keeps the reference produced by
    /// `av_hwdevice_ctx_create` and frees it when the context is freed.
    ///
    /// # Safety
    /// `codec_ctx` must point to a valid codec context that has not been
    /// opened yet.
    unsafe fn try_init_hw_device(
        codec_ctx: *mut AVCodecContext,
        accel: HardwareAccel,
    ) -> Result<(Option<*mut AVBufferRef>, HardwareAccel), DecodeError> {
        // Auto/None carry no concrete device type; nothing to initialize.
        let Some(device_type) = Self::hw_accel_to_device_type(accel) else {
            return Ok((None, HardwareAccel::None));
        };

        let mut hw_device_ctx: *mut AVBufferRef = ptr::null_mut();
        // SAFETY: the out-pointer is valid; device name and options may be
        // null per the av_hwdevice_ctx_create contract.
        let ret = unsafe {
            ff_sys::av_hwdevice_ctx_create(
                ptr::addr_of_mut!(hw_device_ctx),
                device_type,
                ptr::null(), ptr::null_mut(), 0, )
        };

        if ret < 0 {
            return Err(DecodeError::HwAccelUnavailable { accel });
        }

        // SAFETY: hand the freshly created reference to the codec context,
        // which releases it together with the context.
        unsafe {
            (*codec_ctx).hw_device_ctx = hw_device_ctx;
        }

        // Take our own reference so the decoder can unref independently of
        // the codec context's lifetime.
        // SAFETY: hw_device_ctx is a valid AVBufferRef at this point.
        let our_ref = unsafe { ff_sys::av_buffer_ref(hw_device_ctx) };
        if our_ref.is_null() {
            // NOTE(review): not a leak — the codec context still holds its
            // reference and releases it when freed.
            return Err(DecodeError::HwAccelUnavailable { accel });
        }

        Ok((Some(our_ref), accel))
    }
387
    /// Returns the hardware backend actually in use for this decoder
    /// (`HardwareAccel::None` when decoding in software).
    pub(crate) fn hardware_accel(&self) -> HardwareAccel {
        self.active_hw_accel
    }
392
    /// True when `format` denotes accelerator-backed surfaces whose pixel
    /// data must be transferred to CPU memory before it can be read.
    const fn is_hardware_format(format: AVPixelFormat) -> bool {
        matches!(
            format,
            ff_sys::AVPixelFormat_AV_PIX_FMT_D3D11
                | ff_sys::AVPixelFormat_AV_PIX_FMT_CUDA
                | ff_sys::AVPixelFormat_AV_PIX_FMT_VAAPI
                | ff_sys::AVPixelFormat_AV_PIX_FMT_VIDEOTOOLBOX
                | ff_sys::AVPixelFormat_AV_PIX_FMT_QSV
                | ff_sys::AVPixelFormat_AV_PIX_FMT_VDPAU
                | ff_sys::AVPixelFormat_AV_PIX_FMT_DXVA2_VLD
                | ff_sys::AVPixelFormat_AV_PIX_FMT_OPENCL
                | ff_sys::AVPixelFormat_AV_PIX_FMT_MEDIACODEC
                | ff_sys::AVPixelFormat_AV_PIX_FMT_VULKAN
        )
    }
411
    /// If the current frame resides in hardware memory, copies its data into
    /// a software frame and swaps that into `self.frame`; otherwise a no-op.
    ///
    /// Timing fields (pts/dts/duration/time_base) are copied over explicitly
    /// rather than relying on the transfer call to carry them.
    ///
    /// # Safety
    /// `self.frame` must hold a valid decoded frame.
    unsafe fn transfer_hardware_frame_if_needed(&mut self) -> Result<(), DecodeError> {
        let frame_format = unsafe { (*self.frame).format };

        // Software frames need no transfer.
        if !Self::is_hardware_format(frame_format) {
            return Ok(());
        }

        let sw_frame = unsafe { ff_sys::av_frame_alloc() };
        if sw_frame.is_null() {
            return Err(DecodeError::Ffmpeg(
                "Failed to allocate software frame for hardware transfer".to_string(),
            ));
        }

        // Copy the pixel data from device to CPU memory.
        let ret = unsafe {
            ff_sys::av_hwframe_transfer_data(
                sw_frame, self.frame, 0, )
        };

        if ret < 0 {
            // Free the scratch frame before surfacing the error.
            unsafe {
                ff_sys::av_frame_free(&mut (sw_frame as *mut _));
            }
            return Err(DecodeError::Ffmpeg(format!(
                "Failed to transfer hardware frame to CPU memory: {}",
                ff_sys::av_error_string(ret)
            )));
        }

        // Preserve timing metadata on the software copy.
        unsafe {
            (*sw_frame).pts = (*self.frame).pts;
            (*sw_frame).pkt_dts = (*self.frame).pkt_dts;
            (*sw_frame).duration = (*self.frame).duration;
            (*sw_frame).time_base = (*self.frame).time_base;
        }

        // Replace the hardware frame with the software copy in place, then
        // free the (now-empty) scratch frame shell.
        unsafe {
            ff_sys::av_frame_unref(self.frame);
            ff_sys::av_frame_move_ref(self.frame, sw_frame);
            ff_sys::av_frame_free(&mut (sw_frame as *mut _));
        }

        Ok(())
    }
476
    /// Opens `path`, locates its first video stream, and builds a fully
    /// initialized decoder plus a [`VideoStreamInfo`] describing the stream.
    ///
    /// * `output_format` — force converted output; `None` keeps the decoded format.
    /// * `hardware_accel` — requested backend; see [`Self::init_hardware_accel`].
    /// * `thread_count` — decoder threads; `0` leaves FFmpeg's default.
    /// * `frame_pool` — optional pool used for plane buffer allocations.
    ///
    /// # Errors
    /// Fails when the file cannot be opened or probed, contains no video
    /// stream, uses a codec without a decoder, or the codec fails to open.
    pub(crate) fn new(
        path: &Path,
        output_format: Option<PixelFormat>,
        hardware_accel: HardwareAccel,
        thread_count: usize,
        frame_pool: Option<Arc<dyn FramePool>>,
    ) -> Result<(Self, VideoStreamInfo), DecodeError> {
        ff_sys::ensure_initialized();

        // Guards free the contexts on early return; on success ownership is
        // transferred into Self via into_raw().
        let format_ctx_guard = unsafe { AvFormatContextGuard::new(path)? };
        let format_ctx = format_ctx_guard.as_ptr();

        // Probe the container so stream parameters are populated.
        unsafe {
            ff_sys::avformat::find_stream_info(format_ctx).map_err(|e| {
                DecodeError::Ffmpeg(format!(
                    "Failed to find stream info: {}",
                    ff_sys::av_error_string(e)
                ))
            })?;
        }

        let (stream_index, codec_id) =
            unsafe { Self::find_video_stream(format_ctx) }.ok_or_else(|| {
                DecodeError::NoVideoStream {
                    path: path.to_path_buf(),
                }
            })?;

        let codec = unsafe {
            ff_sys::avcodec::find_decoder(codec_id).ok_or_else(|| {
                DecodeError::UnsupportedCodec {
                    codec: format!("codec_id={codec_id:?}"),
                }
            })?
        };

        let codec_ctx_guard = unsafe { AvCodecContextGuard::new(codec)? };
        let codec_ctx = codec_ctx_guard.as_ptr();

        // Copy stream parameters into the codec context and configure threads.
        unsafe {
            let stream = (*format_ctx).streams.add(stream_index as usize);
            let codecpar = (*(*stream)).codecpar;
            ff_sys::avcodec::parameters_to_context(codec_ctx, codecpar).map_err(|e| {
                DecodeError::Ffmpeg(format!(
                    "Failed to copy codec parameters: {}",
                    ff_sys::av_error_string(e)
                ))
            })?;

            if thread_count > 0 {
                (*codec_ctx).thread_count = thread_count as i32;
            }
        }

        // Must happen before open2 so the codec can pick up hw_device_ctx.
        let (hw_device_ctx, active_hw_accel) =
            unsafe { Self::init_hardware_accel(codec_ctx, hardware_accel)? };

        unsafe {
            ff_sys::avcodec::open2(codec_ctx, codec, ptr::null_mut()).map_err(|e| {
                // Release our device reference on failure; the guard frees
                // the codec context (and its own reference) separately.
                if let Some(hw_ctx) = hw_device_ctx {
                    ff_sys::av_buffer_unref(&mut (hw_ctx as *mut _));
                }
                DecodeError::Ffmpeg(format!(
                    "Failed to open codec: {}",
                    ff_sys::av_error_string(e)
                ))
            })?;
        }

        let stream_info =
            unsafe { Self::extract_stream_info(format_ctx, stream_index as i32, codec_ctx)? };

        let packet_guard = unsafe { AvPacketGuard::new()? };
        let frame_guard = unsafe { AvFrameGuard::new()? };

        // Everything initialized: release guards into the owning struct.
        Ok((
            Self {
                format_ctx: format_ctx_guard.into_raw(),
                codec_ctx: codec_ctx_guard.into_raw(),
                stream_index: stream_index as i32,
                sws_ctx: None,
                output_format,
                eof: false,
                position: Duration::ZERO,
                packet: packet_guard.into_raw(),
                frame: frame_guard.into_raw(),
                thumbnail_sws_ctx: None,
                thumbnail_cache_key: None,
                hw_device_ctx,
                active_hw_accel,
                frame_pool,
            },
            stream_info,
        ))
    }
615
616 unsafe fn find_video_stream(format_ctx: *mut AVFormatContext) -> Option<(usize, AVCodecID)> {
626 unsafe {
628 let nb_streams = (*format_ctx).nb_streams as usize;
629
630 for i in 0..nb_streams {
631 let stream = (*format_ctx).streams.add(i);
632 let codecpar = (*(*stream)).codecpar;
633
634 if (*codecpar).codec_type == AVMediaType_AVMEDIA_TYPE_VIDEO {
635 return Some((i, (*codecpar).codec_id));
636 }
637 }
638
639 None
640 }
641 }
642
    /// Reads stream/codec parameters for the selected video stream and builds
    /// a [`VideoStreamInfo`] snapshot (dimensions, frame rate, duration,
    /// pixel format, color description, codec).
    ///
    /// # Safety
    /// `format_ctx` and `codec_ctx` must be valid, and `stream_index` must be
    /// a valid index into the format context's stream array.
    unsafe fn extract_stream_info(
        format_ctx: *mut AVFormatContext,
        stream_index: i32,
        codec_ctx: *mut AVCodecContext,
    ) -> Result<VideoStreamInfo, DecodeError> {
        // Pull every raw field in one unsafe block, then convert safely below.
        let (
            width,
            height,
            fps_rational,
            duration_val,
            pix_fmt,
            color_space_val,
            color_range_val,
            color_primaries_val,
            codec_id,
        ) = unsafe {
            let stream = (*format_ctx).streams.add(stream_index as usize);
            let codecpar = (*(*stream)).codecpar;

            (
                (*codecpar).width as u32,
                (*codecpar).height as u32,
                (*(*stream)).avg_frame_rate,
                (*format_ctx).duration,
                (*codec_ctx).pix_fmt,
                (*codecpar).color_space,
                (*codecpar).color_range,
                (*codecpar).color_primaries,
                (*codecpar).codec_id,
            )
        };

        // Guard against a zero denominator before building the rational.
        let frame_rate = if fps_rational.den != 0 {
            Rational::new(fps_rational.num as i32, fps_rational.den as i32)
        } else {
            log::warn!(
                "invalid frame rate, falling back to 30fps num={} den=0 fallback=30/1",
                fps_rational.num
            );
            Rational::new(30, 1)
        };

        // Container duration is interpreted as microseconds; non-positive
        // values mean "unknown" and map to None.
        let duration = if duration_val > 0 {
            let duration_secs = duration_val as f64 / 1_000_000.0;
            Some(Duration::from_secs_f64(duration_secs))
        } else {
            None
        };

        let pixel_format = Self::convert_pixel_format(pix_fmt);

        let color_space = Self::convert_color_space(color_space_val);
        let color_range = Self::convert_color_range(color_range_val);
        let color_primaries = Self::convert_color_primaries(color_primaries_val);

        let codec = Self::convert_codec(codec_id);

        let mut builder = VideoStreamInfo::builder()
            .index(stream_index as u32)
            .codec(codec)
            .width(width)
            .height(height)
            .frame_rate(frame_rate)
            .pixel_format(pixel_format)
            .color_space(color_space)
            .color_range(color_range)
            .color_primaries(color_primaries);

        if let Some(d) = duration {
            builder = builder.duration(d);
        }

        Ok(builder.build())
    }
725
726 fn convert_pixel_format(fmt: AVPixelFormat) -> PixelFormat {
728 if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P {
729 PixelFormat::Yuv420p
730 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P {
731 PixelFormat::Yuv422p
732 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P {
733 PixelFormat::Yuv444p
734 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24 {
735 PixelFormat::Rgb24
736 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24 {
737 PixelFormat::Bgr24
738 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA {
739 PixelFormat::Rgba
740 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA {
741 PixelFormat::Bgra
742 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8 {
743 PixelFormat::Gray8
744 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_NV12 {
745 PixelFormat::Nv12
746 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_NV21 {
747 PixelFormat::Nv21
748 } else {
749 log::warn!(
750 "pixel_format unsupported, falling back to Yuv420p requested={fmt} fallback=Yuv420p"
751 );
752 PixelFormat::Yuv420p
753 }
754 }
755
756 fn convert_color_space(space: AVColorSpace) -> ColorSpace {
758 if space == ff_sys::AVColorSpace_AVCOL_SPC_BT709 {
759 ColorSpace::Bt709
760 } else if space == ff_sys::AVColorSpace_AVCOL_SPC_BT470BG
761 || space == ff_sys::AVColorSpace_AVCOL_SPC_SMPTE170M
762 {
763 ColorSpace::Bt601
764 } else if space == ff_sys::AVColorSpace_AVCOL_SPC_BT2020_NCL {
765 ColorSpace::Bt2020
766 } else {
767 log::warn!(
768 "color_space unsupported, falling back to Bt709 requested={space} fallback=Bt709"
769 );
770 ColorSpace::Bt709
771 }
772 }
773
774 fn convert_color_range(range: AVColorRange) -> ColorRange {
776 if range == ff_sys::AVColorRange_AVCOL_RANGE_JPEG {
777 ColorRange::Full
778 } else if range == ff_sys::AVColorRange_AVCOL_RANGE_MPEG {
779 ColorRange::Limited
780 } else {
781 log::warn!(
782 "color_range unsupported, falling back to Limited requested={range} fallback=Limited"
783 );
784 ColorRange::Limited
785 }
786 }
787
788 fn convert_color_primaries(primaries: AVColorPrimaries) -> ColorPrimaries {
790 if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT709 {
791 ColorPrimaries::Bt709
792 } else if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT470BG
793 || primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_SMPTE170M
794 {
795 ColorPrimaries::Bt601
796 } else if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT2020 {
797 ColorPrimaries::Bt2020
798 } else {
799 log::warn!(
800 "color_primaries unsupported, falling back to Bt709 requested={primaries} fallback=Bt709"
801 );
802 ColorPrimaries::Bt709
803 }
804 }
805
806 fn convert_codec(codec_id: AVCodecID) -> VideoCodec {
808 if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_H264 {
809 VideoCodec::H264
810 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_HEVC {
811 VideoCodec::H265
812 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_VP8 {
813 VideoCodec::Vp8
814 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_VP9 {
815 VideoCodec::Vp9
816 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_AV1 {
817 VideoCodec::Av1
818 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_MPEG4 {
819 VideoCodec::Mpeg4
820 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_PRORES {
821 VideoCodec::ProRes
822 } else {
823 log::warn!(
824 "video codec unsupported, falling back to H264 codec_id={codec_id} fallback=H264"
825 );
826 VideoCodec::H264
827 }
828 }
829
830 pub(crate) fn decode_one(&mut self) -> Result<Option<VideoFrame>, DecodeError> {
838 if self.eof {
839 return Ok(None);
840 }
841
842 unsafe {
843 loop {
844 let ret = ff_sys::avcodec_receive_frame(self.codec_ctx, self.frame);
846
847 if ret == 0 {
848 self.transfer_hardware_frame_if_needed()?;
851
852 let video_frame = self.convert_frame_to_video_frame()?;
853
854 let pts = (*self.frame).pts;
856 if pts != ff_sys::AV_NOPTS_VALUE {
857 let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
858 let time_base = (*(*stream)).time_base;
859 let timestamp_secs =
860 pts as f64 * time_base.num as f64 / time_base.den as f64;
861 self.position = Duration::from_secs_f64(timestamp_secs);
862 }
863
864 return Ok(Some(video_frame));
865 } else if ret == ff_sys::error_codes::EAGAIN {
866 let read_ret = ff_sys::av_read_frame(self.format_ctx, self.packet);
869
870 if read_ret == ff_sys::error_codes::EOF {
871 ff_sys::avcodec_send_packet(self.codec_ctx, ptr::null());
873 self.eof = true;
874 continue;
875 } else if read_ret < 0 {
876 return Err(DecodeError::Ffmpeg(format!(
877 "Failed to read frame: {}",
878 ff_sys::av_error_string(read_ret)
879 )));
880 }
881
882 if (*self.packet).stream_index == self.stream_index {
884 let send_ret = ff_sys::avcodec_send_packet(self.codec_ctx, self.packet);
886 ff_sys::av_packet_unref(self.packet);
887
888 if send_ret < 0 && send_ret != ff_sys::error_codes::EAGAIN {
889 return Err(DecodeError::Ffmpeg(format!(
890 "Failed to send packet: {}",
891 ff_sys::av_error_string(send_ret)
892 )));
893 }
894 } else {
895 ff_sys::av_packet_unref(self.packet);
897 }
898 } else if ret == ff_sys::error_codes::EOF {
899 self.eof = true;
901 return Ok(None);
902 } else {
903 return Err(DecodeError::DecodingFailed {
904 timestamp: Some(self.position),
905 reason: ff_sys::av_error_string(ret),
906 });
907 }
908 }
909 }
910 }
911
912 unsafe fn convert_frame_to_video_frame(&mut self) -> Result<VideoFrame, DecodeError> {
914 unsafe {
916 let width = (*self.frame).width as u32;
917 let height = (*self.frame).height as u32;
918 let src_format = (*self.frame).format;
919
920 let dst_format = if let Some(fmt) = self.output_format {
922 Self::pixel_format_to_av(fmt)
923 } else {
924 src_format
925 };
926
927 let needs_conversion = src_format != dst_format;
929
930 if needs_conversion {
931 self.convert_with_sws(width, height, src_format, dst_format)
932 } else {
933 self.av_frame_to_video_frame(self.frame)
934 }
935 }
936 }
937
    /// Converts the current frame into `dst_format` using swscale and wraps
    /// the result in a [`VideoFrame`].
    ///
    /// The scaler context is created lazily on first use and cached in
    /// `self.sws_ctx`. The temporary destination frame is freed by its guard
    /// once its data has been copied out.
    ///
    /// # Safety
    /// `self.frame` must hold a valid software frame matching `width`,
    /// `height`, and `src_format`.
    unsafe fn convert_with_sws(
        &mut self,
        width: u32,
        height: u32,
        src_format: i32,
        dst_format: i32,
    ) -> Result<VideoFrame, DecodeError> {
        unsafe {
            // Lazily build the (same-size, format-only) conversion context.
            if self.sws_ctx.is_none() {
                let ctx = ff_sys::swscale::get_context(
                    width as i32,
                    height as i32,
                    src_format,
                    width as i32,
                    height as i32,
                    dst_format,
                    ff_sys::swscale::scale_flags::BILINEAR,
                )
                .map_err(|e| DecodeError::Ffmpeg(format!("Failed to create sws context: {e}")))?;

                self.sws_ctx = Some(ctx);
            }

            let Some(sws_ctx) = self.sws_ctx else {
                return Err(DecodeError::Ffmpeg(
                    "SwsContext not initialized".to_string(),
                ));
            };

            // Guard frees the destination frame at scope exit.
            let dst_frame_guard = AvFrameGuard::new()?;
            let dst_frame = dst_frame_guard.as_ptr();

            (*dst_frame).width = width as i32;
            (*dst_frame).height = height as i32;
            (*dst_frame).format = dst_format;

            let buffer_ret = ff_sys::av_frame_get_buffer(dst_frame, 0);
            if buffer_ret < 0 {
                return Err(DecodeError::Ffmpeg(format!(
                    "Failed to allocate frame buffer: {}",
                    ff_sys::av_error_string(buffer_ret)
                )));
            }

            ff_sys::swscale::scale(
                sws_ctx,
                (*self.frame).data.as_ptr() as *const *const u8,
                (*self.frame).linesize.as_ptr(),
                0,
                height as i32,
                (*dst_frame).data.as_ptr() as *const *mut u8,
                (*dst_frame).linesize.as_ptr(),
            )
            .map_err(|e| DecodeError::Ffmpeg(format!("Failed to scale frame: {e}")))?;

            // Keep the source timestamp on the converted frame.
            (*dst_frame).pts = (*self.frame).pts;

            let video_frame = self.av_frame_to_video_frame(dst_frame)?;

            Ok(video_frame)
        }
    }
1010
    /// Wraps a decoded `AVFrame` into the crate's [`VideoFrame`], copying the
    /// plane data out and deriving the timestamp from the stream time base
    /// (frames without a PTS get `Timestamp::default()`).
    ///
    /// # Safety
    /// `frame` must point to a valid software frame; `self.format_ctx` and
    /// `self.stream_index` must identify a valid stream.
    unsafe fn av_frame_to_video_frame(
        &self,
        frame: *const AVFrame,
    ) -> Result<VideoFrame, DecodeError> {
        unsafe {
            let width = (*frame).width as u32;
            let height = (*frame).height as u32;
            let format = Self::convert_pixel_format((*frame).format);

            let pts = (*frame).pts;
            let timestamp = if pts != ff_sys::AV_NOPTS_VALUE {
                let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
                let time_base = (*(*stream)).time_base;
                Timestamp::new(
                    pts as i64,
                    Rational::new(time_base.num as i32, time_base.den as i32),
                )
            } else {
                Timestamp::default()
            };

            // Compact each plane into pooled buffers (strips row padding).
            let (planes, strides) =
                self.extract_planes_and_strides(frame, width, height, format)?;

            VideoFrame::new(planes, strides, width, height, format, timestamp, false)
                .map_err(|e| DecodeError::Ffmpeg(format!("Failed to create VideoFrame: {e}")))
        }
    }
1043
1044 fn allocate_buffer(&self, size: usize) -> PooledBuffer {
1054 if let Some(ref pool) = self.frame_pool
1055 && let Some(pooled_buffer) = pool.acquire(size)
1056 {
1057 return pooled_buffer;
1060 }
1061
1062 PooledBuffer::standalone(vec![0u8; size])
1064 }
1065
1066 unsafe fn extract_planes_and_strides(
1068 &self,
1069 frame: *const AVFrame,
1070 width: u32,
1071 height: u32,
1072 format: PixelFormat,
1073 ) -> Result<(Vec<PooledBuffer>, Vec<usize>), DecodeError> {
1074 const BYTES_PER_PIXEL_RGBA: usize = 4;
1076 const BYTES_PER_PIXEL_RGB24: usize = 3;
1077
1078 unsafe {
1080 let mut planes = Vec::new();
1081 let mut strides = Vec::new();
1082
1083 #[allow(clippy::match_same_arms)]
1084 match format {
1085 PixelFormat::Rgba | PixelFormat::Bgra | PixelFormat::Rgb24 | PixelFormat::Bgr24 => {
1086 let stride = (*frame).linesize[0] as usize;
1088 let bytes_per_pixel = if matches!(format, PixelFormat::Rgba | PixelFormat::Bgra)
1089 {
1090 BYTES_PER_PIXEL_RGBA
1091 } else {
1092 BYTES_PER_PIXEL_RGB24
1093 };
1094 let row_size = (width as usize) * bytes_per_pixel;
1095 let buffer_size = row_size * height as usize;
1096 let mut plane_data = self.allocate_buffer(buffer_size);
1097
1098 for y in 0..height as usize {
1099 let src_offset = y * stride;
1100 let dst_offset = y * row_size;
1101 let src_ptr = (*frame).data[0].add(src_offset);
1102 let plane_slice = plane_data.as_mut();
1103 std::ptr::copy_nonoverlapping(
1107 src_ptr,
1108 plane_slice[dst_offset..].as_mut_ptr(),
1109 row_size,
1110 );
1111 }
1112
1113 planes.push(plane_data);
1114 strides.push(row_size);
1115 }
1116 PixelFormat::Yuv420p | PixelFormat::Yuv422p | PixelFormat::Yuv444p => {
1117 let (chroma_width, chroma_height) = match format {
1119 PixelFormat::Yuv420p => (width / 2, height / 2),
1120 PixelFormat::Yuv422p => (width / 2, height),
1121 PixelFormat::Yuv444p => (width, height),
1122 _ => unreachable!(),
1123 };
1124
1125 let y_stride = width as usize;
1127 let y_size = y_stride * height as usize;
1128 let mut y_data = self.allocate_buffer(y_size);
1129 for y in 0..height as usize {
1130 let src_offset = y * (*frame).linesize[0] as usize;
1131 let dst_offset = y * y_stride;
1132 let src_ptr = (*frame).data[0].add(src_offset);
1133 let y_slice = y_data.as_mut();
1134 std::ptr::copy_nonoverlapping(
1137 src_ptr,
1138 y_slice[dst_offset..].as_mut_ptr(),
1139 width as usize,
1140 );
1141 }
1142 planes.push(y_data);
1143 strides.push(y_stride);
1144
1145 let u_stride = chroma_width as usize;
1147 let u_size = u_stride * chroma_height as usize;
1148 let mut u_data = self.allocate_buffer(u_size);
1149 for y in 0..chroma_height as usize {
1150 let src_offset = y * (*frame).linesize[1] as usize;
1151 let dst_offset = y * u_stride;
1152 let src_ptr = (*frame).data[1].add(src_offset);
1153 let u_slice = u_data.as_mut();
1154 std::ptr::copy_nonoverlapping(
1157 src_ptr,
1158 u_slice[dst_offset..].as_mut_ptr(),
1159 chroma_width as usize,
1160 );
1161 }
1162 planes.push(u_data);
1163 strides.push(u_stride);
1164
1165 let v_stride = chroma_width as usize;
1167 let v_size = v_stride * chroma_height as usize;
1168 let mut v_data = self.allocate_buffer(v_size);
1169 for y in 0..chroma_height as usize {
1170 let src_offset = y * (*frame).linesize[2] as usize;
1171 let dst_offset = y * v_stride;
1172 let src_ptr = (*frame).data[2].add(src_offset);
1173 let v_slice = v_data.as_mut();
1174 std::ptr::copy_nonoverlapping(
1177 src_ptr,
1178 v_slice[dst_offset..].as_mut_ptr(),
1179 chroma_width as usize,
1180 );
1181 }
1182 planes.push(v_data);
1183 strides.push(v_stride);
1184 }
1185 PixelFormat::Gray8 => {
1186 let stride = width as usize;
1188 let mut plane_data = self.allocate_buffer(stride * height as usize);
1189
1190 for y in 0..height as usize {
1191 let src_offset = y * (*frame).linesize[0] as usize;
1192 let dst_offset = y * stride;
1193 let src_ptr = (*frame).data[0].add(src_offset);
1194 let plane_slice = plane_data.as_mut();
1195 std::ptr::copy_nonoverlapping(
1198 src_ptr,
1199 plane_slice[dst_offset..].as_mut_ptr(),
1200 width as usize,
1201 );
1202 }
1203
1204 planes.push(plane_data);
1205 strides.push(stride);
1206 }
1207 PixelFormat::Nv12 | PixelFormat::Nv21 => {
1208 let uv_height = height / 2;
1210
1211 let y_stride = width as usize;
1213 let mut y_data = self.allocate_buffer(y_stride * height as usize);
1214 for y in 0..height as usize {
1215 let src_offset = y * (*frame).linesize[0] as usize;
1216 let dst_offset = y * y_stride;
1217 let src_ptr = (*frame).data[0].add(src_offset);
1218 let y_slice = y_data.as_mut();
1219 std::ptr::copy_nonoverlapping(
1222 src_ptr,
1223 y_slice[dst_offset..].as_mut_ptr(),
1224 width as usize,
1225 );
1226 }
1227 planes.push(y_data);
1228 strides.push(y_stride);
1229
1230 let uv_stride = width as usize;
1232 let mut uv_data = self.allocate_buffer(uv_stride * uv_height as usize);
1233 for y in 0..uv_height as usize {
1234 let src_offset = y * (*frame).linesize[1] as usize;
1235 let dst_offset = y * uv_stride;
1236 let src_ptr = (*frame).data[1].add(src_offset);
1237 let uv_slice = uv_data.as_mut();
1238 std::ptr::copy_nonoverlapping(
1241 src_ptr,
1242 uv_slice[dst_offset..].as_mut_ptr(),
1243 width as usize,
1244 );
1245 }
1246 planes.push(uv_data);
1247 strides.push(uv_stride);
1248 }
1249 _ => {
1250 return Err(DecodeError::Ffmpeg(format!(
1251 "Unsupported pixel format: {format:?}"
1252 )));
1253 }
1254 }
1255
1256 Ok((planes, strides))
1257 }
1258 }
1259
1260 fn pixel_format_to_av(format: PixelFormat) -> AVPixelFormat {
1262 match format {
1263 PixelFormat::Yuv420p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P,
1264 PixelFormat::Yuv422p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P,
1265 PixelFormat::Yuv444p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P,
1266 PixelFormat::Rgb24 => ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24,
1267 PixelFormat::Bgr24 => ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24,
1268 PixelFormat::Rgba => ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA,
1269 PixelFormat::Bgra => ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA,
1270 PixelFormat::Gray8 => ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8,
1271 PixelFormat::Nv12 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV12,
1272 PixelFormat::Nv21 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV21,
1273 _ => {
1274 log::warn!(
1275 "pixel_format has no AV mapping, falling back to Yuv420p format={format:?} fallback=AV_PIX_FMT_YUV420P"
1276 );
1277 ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P
1278 }
1279 }
1280 }
1281
    /// Presentation time of the most recently decoded frame.
    pub(crate) fn position(&self) -> Duration {
        self.position
    }
1286
    /// True once the stream has been fully drained (cleared by `seek`/`flush`).
    pub(crate) fn is_eof(&self) -> bool {
        self.eof
    }
1291
    /// Converts a wall-clock duration into a PTS value expressed in the video
    /// stream's time base.
    ///
    /// NOTE(review): divides by `time_base.num`; a zero numerator would
    /// produce inf/NaN before the cast — confirm upstream guarantees a
    /// well-formed time base.
    fn duration_to_pts(&self, duration: Duration) -> i64 {
        // SAFETY: format_ctx and stream_index stay valid for self's lifetime.
        let time_base = unsafe {
            let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
            (*(*stream)).time_base
        };

        // Ticks per second = den / num; scale seconds into stream ticks.
        let time_base_f64 = time_base.den as f64 / time_base.num as f64;
        (duration.as_secs_f64() * time_base_f64) as i64
    }
1320
    /// Inverse of [`Self::duration_to_pts`]: converts a stream-time-base PTS
    /// back into a wall-clock duration. Currently unused but kept as the
    /// counterpart conversion.
    #[allow(dead_code)]
    fn pts_to_duration(&self, pts: i64) -> Duration {
        // SAFETY: format_ctx and stream_index stay valid for self's lifetime.
        unsafe {
            let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
            let time_base = (*(*stream)).time_base;

            let duration_secs = pts as f64 * time_base.num as f64 / time_base.den as f64;
            Duration::from_secs_f64(duration_secs)
        }
    }
1350
    /// Seeks the stream to `position`.
    ///
    /// Always issues a backward keyframe seek, flushes the codec, and drains
    /// stale frames. Then:
    /// * `SeekMode::Exact` decodes forward until a frame at/after the target.
    /// * Keyframe mode decodes forward only until a frame within
    ///   [`KEYFRAME_SEEK_TOLERANCE_SECS`] below the target, accepting
    ///   nearest-keyframe imprecision.
    pub(crate) fn seek(
        &mut self,
        position: Duration,
        mode: crate::SeekMode,
    ) -> Result<(), DecodeError> {
        use crate::SeekMode;

        let timestamp = self.duration_to_pts(position);

        // BACKWARD: land on the keyframe at or before the target timestamp.
        let flags = ff_sys::avformat::seek_flags::BACKWARD;

        // Drop any in-flight packet/frame state before repositioning.
        unsafe {
            ff_sys::av_packet_unref(self.packet);
            ff_sys::av_frame_unref(self.frame);
        }

        unsafe {
            ff_sys::avformat::seek_frame(
                self.format_ctx,
                self.stream_index as i32,
                timestamp,
                flags,
            )
            .map_err(|e| DecodeError::SeekFailed {
                target: position,
                reason: ff_sys::av_error_string(e),
            })?;
        }

        // Discard decoder-internal state referring to pre-seek data.
        unsafe {
            ff_sys::avcodec::flush_buffers(self.codec_ctx);
        }

        // Drain any frames the decoder still reports so the next decode_one
        // starts clean at the new position.
        unsafe {
            loop {
                let ret = ff_sys::avcodec_receive_frame(self.codec_ctx, self.frame);
                if ret == ff_sys::error_codes::EAGAIN || ret == ff_sys::error_codes::EOF {
                    break;
                } else if ret == 0 {
                    ff_sys::av_frame_unref(self.frame);
                } else {
                    break;
                }
            }
        }

        self.eof = false;
        if mode == SeekMode::Exact {
            // Decode forward to the precise frame.
            self.skip_to_exact(position)?;
        } else {
            // Keyframe mode: accept any frame inside the tolerance window
            // just below the requested position.
            let tolerance = Duration::from_secs(KEYFRAME_SEEK_TOLERANCE_SECS);
            let min_position = position.saturating_sub(tolerance);

            while let Some(frame) = self.decode_one()? {
                let frame_time = frame.timestamp().as_duration();
                if frame_time >= min_position {
                    break;
                }
            }
        }

        Ok(())
    }
1482
1483 fn skip_to_exact(&mut self, target: Duration) -> Result<(), DecodeError> {
1504 loop {
1505 match self.decode_one()? {
1506 Some(frame) => {
1507 let frame_time = frame.timestamp().as_duration();
1508 if frame_time >= target {
1509 break;
1512 }
1513 }
1515 None => {
1516 return Err(DecodeError::SeekFailed {
1518 target,
1519 reason: "Reached end of stream before target position".to_string(),
1520 });
1521 }
1522 }
1523 }
1524 Ok(())
1525 }
1526
    /// Flushes the decoder's internal buffers and clears the EOF flag so
    /// decoding can resume (e.g. after repositioning the input).
    pub(crate) fn flush(&mut self) {
        // SAFETY: codec_ctx stays valid for self's lifetime.
        unsafe {
            ff_sys::avcodec::flush_buffers(self.codec_ctx);
        }
        self.eof = false;
    }
1538
1539 pub(crate) fn scale_frame(
1578 &mut self,
1579 frame: &VideoFrame,
1580 target_width: u32,
1581 target_height: u32,
1582 ) -> Result<VideoFrame, DecodeError> {
1583 let src_width = frame.width();
1584 let src_height = frame.height();
1585 let src_format = frame.format();
1586
1587 let src_aspect = src_width as f64 / src_height as f64;
1589 let target_aspect = target_width as f64 / target_height as f64;
1590
1591 let (scaled_width, scaled_height) = if src_aspect > target_aspect {
1592 let height = (target_width as f64 / src_aspect).round() as u32;
1594 (target_width, height)
1595 } else {
1596 let width = (target_height as f64 * src_aspect).round() as u32;
1598 (width, target_height)
1599 };
1600
1601 let av_format = Self::pixel_format_to_av(src_format);
1603
1604 let cache_key = (
1606 src_width,
1607 src_height,
1608 scaled_width,
1609 scaled_height,
1610 av_format,
1611 );
1612
1613 unsafe {
1615 let (sws_ctx, is_cached) = if let (Some(cached_ctx), Some(cached_key)) =
1617 (self.thumbnail_sws_ctx, self.thumbnail_cache_key)
1618 {
1619 if cached_key == cache_key {
1620 (cached_ctx, true)
1622 } else {
1623 ff_sys::swscale::free_context(cached_ctx);
1625 self.thumbnail_sws_ctx = None;
1627 self.thumbnail_cache_key = None;
1628
1629 let new_ctx = ff_sys::swscale::get_context(
1630 src_width as i32,
1631 src_height as i32,
1632 av_format,
1633 scaled_width as i32,
1634 scaled_height as i32,
1635 av_format,
1636 ff_sys::swscale::scale_flags::BILINEAR,
1637 )
1638 .map_err(|e| {
1639 DecodeError::Ffmpeg(format!("Failed to create scaling context: {e}"))
1640 })?;
1641
1642 (new_ctx, false)
1644 }
1645 } else {
1646 let new_ctx = ff_sys::swscale::get_context(
1648 src_width as i32,
1649 src_height as i32,
1650 av_format,
1651 scaled_width as i32,
1652 scaled_height as i32,
1653 av_format,
1654 ff_sys::swscale::scale_flags::BILINEAR,
1655 )
1656 .map_err(|e| {
1657 DecodeError::Ffmpeg(format!("Failed to create scaling context: {e}"))
1658 })?;
1659
1660 (new_ctx, false)
1662 };
1663
1664 let src_frame_guard = AvFrameGuard::new()?;
1666 let src_frame = src_frame_guard.as_ptr();
1667
1668 (*src_frame).width = src_width as i32;
1669 (*src_frame).height = src_height as i32;
1670 (*src_frame).format = av_format;
1671
1672 let planes = frame.planes();
1674 let strides = frame.strides();
1675
1676 for (i, plane_data) in planes.iter().enumerate() {
1677 if i >= ff_sys::AV_NUM_DATA_POINTERS as usize {
1678 break;
1679 }
1680 (*src_frame).data[i] = plane_data.as_ref().as_ptr().cast_mut();
1681 (*src_frame).linesize[i] = strides[i] as i32;
1682 }
1683
1684 let dst_frame_guard = AvFrameGuard::new()?;
1686 let dst_frame = dst_frame_guard.as_ptr();
1687
1688 (*dst_frame).width = scaled_width as i32;
1689 (*dst_frame).height = scaled_height as i32;
1690 (*dst_frame).format = av_format;
1691
1692 let buffer_ret = ff_sys::av_frame_get_buffer(dst_frame, 0);
1694 if buffer_ret < 0 {
1695 if !is_cached {
1697 ff_sys::swscale::free_context(sws_ctx);
1698 }
1699 return Err(DecodeError::Ffmpeg(format!(
1700 "Failed to allocate destination frame buffer: {}",
1701 ff_sys::av_error_string(buffer_ret)
1702 )));
1703 }
1704
1705 let scale_result = ff_sys::swscale::scale(
1707 sws_ctx,
1708 (*src_frame).data.as_ptr() as *const *const u8,
1709 (*src_frame).linesize.as_ptr(),
1710 0,
1711 src_height as i32,
1712 (*dst_frame).data.as_ptr() as *const *mut u8,
1713 (*dst_frame).linesize.as_ptr(),
1714 );
1715
1716 if let Err(e) = scale_result {
1717 if !is_cached {
1719 ff_sys::swscale::free_context(sws_ctx);
1720 }
1721 return Err(DecodeError::Ffmpeg(format!("Failed to scale frame: {e}")));
1722 }
1723
1724 if !is_cached {
1726 self.thumbnail_sws_ctx = Some(sws_ctx);
1727 self.thumbnail_cache_key = Some(cache_key);
1728 }
1729
1730 (*dst_frame).pts = frame.timestamp().pts();
1732
1733 let video_frame = self.av_frame_to_video_frame(dst_frame)?;
1735
1736 Ok(video_frame)
1737 }
1738 }
1739}
1740
impl Drop for VideoDecoderInner {
    /// Releases every FFmpeg resource this decoder owns. Decoder-level
    /// objects (scalers, frames, packet, codec context) are released before
    /// the demuxer's format context; the statement order below is
    /// intentional and must be preserved.
    fn drop(&mut self) {
        // Pixel-format conversion context for regular decode output.
        if let Some(sws_ctx) = self.sws_ctx {
            // SAFETY: the context is owned exclusively by this struct and is
            // freed exactly once, here.
            unsafe {
                ff_sys::swscale::free_context(sws_ctx);
            }
        }

        // Separate scaling context cached by `scale_frame` for thumbnails.
        if let Some(thumbnail_ctx) = self.thumbnail_sws_ctx {
            // SAFETY: owned exclusively by this struct; freed exactly once.
            unsafe {
                ff_sys::swscale::free_context(thumbnail_ctx);
            }
        }

        // Hardware-device context (refcounted AVBufferRef); unref drops our
        // reference and nulls the local pointer copy.
        if let Some(hw_ctx) = self.hw_device_ctx {
            // SAFETY: `hw_ctx` is a valid AVBufferRef reference held by us.
            unsafe {
                ff_sys::av_buffer_unref(&mut (hw_ctx as *mut _));
            }
        }

        // Reusable decode frame allocated at construction.
        if !self.frame.is_null() {
            // SAFETY: non-null, owned by us, freed exactly once.
            unsafe {
                ff_sys::av_frame_free(&mut (self.frame as *mut _));
            }
        }

        // Reusable demux packet allocated at construction.
        if !self.packet.is_null() {
            // SAFETY: non-null, owned by us, freed exactly once.
            unsafe {
                ff_sys::av_packet_free(&mut (self.packet as *mut _));
            }
        }

        // Codec context must go before the format context it decodes from.
        if !self.codec_ctx.is_null() {
            // SAFETY: non-null, owned by us, freed exactly once.
            unsafe {
                ff_sys::avcodec::free_context(&mut (self.codec_ctx as *mut _));
            }
        }

        // Finally close the demuxer / input file.
        if !self.format_ctx.is_null() {
            // SAFETY: non-null, owned by us, closed exactly once.
            unsafe {
                ff_sys::avformat::close_input(&mut (self.format_ctx as *mut _));
            }
        }
    }
}
1799
// SAFETY: `VideoDecoderInner` holds its FFmpeg raw pointers exclusively (they
// are created at construction and released only in `Drop`, never shared or
// aliased outside the struct), so moving the whole decoder to another thread
// is sound. NOTE(review): this assumes none of the underlying FFmpeg contexts
// are thread-affine — confirm for the hardware-acceleration backends.
unsafe impl Send for VideoDecoderInner {}
1803
#[cfg(test)]
mod tests {
    use ff_format::PixelFormat;
    use ff_format::codec::VideoCodec;
    use ff_format::color::{ColorPrimaries, ColorRange, ColorSpace};

    use crate::HardwareAccel;

    use super::VideoDecoderInner;

    /// Generates one `#[test]` per table row asserting that a conversion
    /// helper maps the given input to the expected value. Replaces ~40
    /// structurally identical hand-written test functions; the generated
    /// test names are unchanged.
    macro_rules! conversion_tests {
        ($($name:ident: $func:ident, $input:expr => $expected:expr;)+) => {
            $(
                #[test]
                fn $name() {
                    assert_eq!(VideoDecoderInner::$func($input), $expected);
                }
            )+
        };
    }

    /// Generates one `#[test]` per row asserting that converting a
    /// `PixelFormat` to its AV equivalent and back yields the original.
    macro_rules! pixel_format_round_trip_tests {
        ($($name:ident: $fmt:expr;)+) => {
            $(
                #[test]
                fn $name() {
                    let av = VideoDecoderInner::pixel_format_to_av($fmt);
                    assert_eq!(VideoDecoderInner::convert_pixel_format(av), $fmt);
                }
            )+
        };
    }

    conversion_tests! {
        // AVPixelFormat -> PixelFormat
        pixel_format_yuv420p: convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P => PixelFormat::Yuv420p;
        pixel_format_yuv422p: convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P => PixelFormat::Yuv422p;
        pixel_format_yuv444p: convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P => PixelFormat::Yuv444p;
        pixel_format_rgb24: convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24 => PixelFormat::Rgb24;
        pixel_format_bgr24: convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24 => PixelFormat::Bgr24;
        pixel_format_rgba: convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA => PixelFormat::Rgba;
        pixel_format_bgra: convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA => PixelFormat::Bgra;
        pixel_format_gray8: convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8 => PixelFormat::Gray8;
        pixel_format_nv12: convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_NV12 => PixelFormat::Nv12;
        pixel_format_nv21: convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_NV21 => PixelFormat::Nv21;
        pixel_format_unknown_falls_back_to_yuv420p: convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_NONE => PixelFormat::Yuv420p;

        // AVColorSpace -> ColorSpace
        color_space_bt709: convert_color_space, ff_sys::AVColorSpace_AVCOL_SPC_BT709 => ColorSpace::Bt709;
        color_space_bt470bg_yields_bt601: convert_color_space, ff_sys::AVColorSpace_AVCOL_SPC_BT470BG => ColorSpace::Bt601;
        color_space_smpte170m_yields_bt601: convert_color_space, ff_sys::AVColorSpace_AVCOL_SPC_SMPTE170M => ColorSpace::Bt601;
        color_space_bt2020_ncl: convert_color_space, ff_sys::AVColorSpace_AVCOL_SPC_BT2020_NCL => ColorSpace::Bt2020;
        color_space_unknown_falls_back_to_bt709: convert_color_space, ff_sys::AVColorSpace_AVCOL_SPC_UNSPECIFIED => ColorSpace::Bt709;

        // AVColorRange -> ColorRange
        color_range_jpeg_yields_full: convert_color_range, ff_sys::AVColorRange_AVCOL_RANGE_JPEG => ColorRange::Full;
        color_range_mpeg_yields_limited: convert_color_range, ff_sys::AVColorRange_AVCOL_RANGE_MPEG => ColorRange::Limited;
        color_range_unknown_falls_back_to_limited: convert_color_range, ff_sys::AVColorRange_AVCOL_RANGE_UNSPECIFIED => ColorRange::Limited;

        // AVColorPrimaries -> ColorPrimaries
        color_primaries_bt709: convert_color_primaries, ff_sys::AVColorPrimaries_AVCOL_PRI_BT709 => ColorPrimaries::Bt709;
        color_primaries_bt470bg_yields_bt601: convert_color_primaries, ff_sys::AVColorPrimaries_AVCOL_PRI_BT470BG => ColorPrimaries::Bt601;
        color_primaries_smpte170m_yields_bt601: convert_color_primaries, ff_sys::AVColorPrimaries_AVCOL_PRI_SMPTE170M => ColorPrimaries::Bt601;
        color_primaries_bt2020: convert_color_primaries, ff_sys::AVColorPrimaries_AVCOL_PRI_BT2020 => ColorPrimaries::Bt2020;
        color_primaries_unknown_falls_back_to_bt709: convert_color_primaries, ff_sys::AVColorPrimaries_AVCOL_PRI_UNSPECIFIED => ColorPrimaries::Bt709;

        // AVCodecID -> VideoCodec
        codec_h264: convert_codec, ff_sys::AVCodecID_AV_CODEC_ID_H264 => VideoCodec::H264;
        codec_hevc_yields_h265: convert_codec, ff_sys::AVCodecID_AV_CODEC_ID_HEVC => VideoCodec::H265;
        codec_vp8: convert_codec, ff_sys::AVCodecID_AV_CODEC_ID_VP8 => VideoCodec::Vp8;
        codec_vp9: convert_codec, ff_sys::AVCodecID_AV_CODEC_ID_VP9 => VideoCodec::Vp9;
        codec_av1: convert_codec, ff_sys::AVCodecID_AV_CODEC_ID_AV1 => VideoCodec::Av1;
        codec_mpeg4: convert_codec, ff_sys::AVCodecID_AV_CODEC_ID_MPEG4 => VideoCodec::Mpeg4;
        codec_prores: convert_codec, ff_sys::AVCodecID_AV_CODEC_ID_PRORES => VideoCodec::ProRes;
        codec_unknown_falls_back_to_h264: convert_codec, ff_sys::AVCodecID_AV_CODEC_ID_NONE => VideoCodec::H264;

        // HardwareAccel -> Option<AVHWDeviceType>
        hw_accel_auto_yields_none: hw_accel_to_device_type, HardwareAccel::Auto => None;
        hw_accel_none_yields_none: hw_accel_to_device_type, HardwareAccel::None => None;
        hw_accel_nvdec_yields_cuda: hw_accel_to_device_type, HardwareAccel::Nvdec => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_CUDA);
        hw_accel_qsv_yields_qsv: hw_accel_to_device_type, HardwareAccel::Qsv => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_QSV);
        hw_accel_amf_yields_d3d11va: hw_accel_to_device_type, HardwareAccel::Amf => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_D3D11VA);
        hw_accel_videotoolbox: hw_accel_to_device_type, HardwareAccel::VideoToolbox => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VIDEOTOOLBOX);
        hw_accel_vaapi: hw_accel_to_device_type, HardwareAccel::Vaapi => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VAAPI);

        // PixelFormat -> AVPixelFormat fallback for unsupported formats
        pixel_format_to_av_unknown_falls_back_to_yuv420p_av: pixel_format_to_av, PixelFormat::Yuv420p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P;
    }

    pixel_format_round_trip_tests! {
        pixel_format_to_av_round_trip_yuv420p: PixelFormat::Yuv420p;
        pixel_format_to_av_round_trip_yuv422p: PixelFormat::Yuv422p;
        pixel_format_to_av_round_trip_yuv444p: PixelFormat::Yuv444p;
        pixel_format_to_av_round_trip_rgb24: PixelFormat::Rgb24;
        pixel_format_to_av_round_trip_bgr24: PixelFormat::Bgr24;
        pixel_format_to_av_round_trip_rgba: PixelFormat::Rgba;
        pixel_format_to_av_round_trip_bgra: PixelFormat::Bgra;
        pixel_format_to_av_round_trip_gray8: PixelFormat::Gray8;
        pixel_format_to_av_round_trip_nv12: PixelFormat::Nv12;
        pixel_format_to_av_round_trip_nv21: PixelFormat::Nv21;
    }
}