1#![allow(unsafe_code)]
8#![allow(clippy::similar_names)]
10#![allow(clippy::too_many_lines)]
11#![allow(clippy::cast_sign_loss)]
12#![allow(clippy::cast_possible_truncation)]
13#![allow(clippy::cast_possible_wrap)]
14#![allow(clippy::module_name_repetitions)]
15#![allow(clippy::match_same_arms)]
16#![allow(clippy::ptr_as_ptr)]
17#![allow(clippy::doc_markdown)]
18#![allow(clippy::unnecessary_cast)]
19#![allow(clippy::if_not_else)]
20#![allow(clippy::unnecessary_wraps)]
21#![allow(clippy::cast_precision_loss)]
22#![allow(clippy::if_same_then_else)]
23#![allow(clippy::cast_lossless)]
24
25use std::ffi::CStr;
26use std::path::Path;
27use std::ptr;
28use std::sync::Arc;
29use std::time::Duration;
30
31use ff_format::PooledBuffer;
32use ff_format::codec::VideoCodec;
33use ff_format::color::{ColorPrimaries, ColorRange, ColorSpace};
34use ff_format::time::{Rational, Timestamp};
35use ff_format::{PixelFormat, VideoFrame, VideoStreamInfo};
36use ff_sys::{
37 AVBufferRef, AVCodecContext, AVCodecID, AVColorPrimaries, AVColorRange, AVColorSpace,
38 AVFormatContext, AVFrame, AVHWDeviceType, AVMediaType_AVMEDIA_TYPE_VIDEO, AVPacket,
39 AVPixelFormat, SwsContext,
40};
41
42use crate::HardwareAccel;
43use crate::error::DecodeError;
44use ff_common::FramePool;
45
/// How far (in seconds) before the requested position a keyframe-mode seek is
/// allowed to land; frames earlier than `target - tolerance` are decoded and
/// discarded (see `seek`'s non-exact path).
const KEYFRAME_SEEK_TOLERANCE_SECS: u64 = 1;
52
/// RAII guard owning an `AVFormatContext` opened via the `ff_sys` wrapper.
///
/// The input is closed on drop unless ownership is transferred out with
/// [`Self::into_raw`].
struct AvFormatContextGuard(*mut AVFormatContext);

impl AvFormatContextGuard {
    /// Opens `path` as a demuxer context, mapping FFmpeg failures to
    /// [`DecodeError::Ffmpeg`].
    ///
    /// # Safety
    /// Relies on the `ff_sys::avformat::open_input` wrapper's contract;
    /// FFmpeg global init is handled elsewhere (see `ensure_initialized`
    /// in `VideoDecoderInner::new`).
    unsafe fn new(path: &Path) -> Result<Self, DecodeError> {
        let format_ctx = unsafe {
            ff_sys::avformat::open_input(path).map_err(|e| DecodeError::Ffmpeg {
                code: e,
                message: format!("Failed to open file: {}", ff_sys::av_error_string(e)),
            })?
        };
        Ok(Self(format_ctx))
    }

    /// Borrows the raw pointer without giving up ownership.
    const fn as_ptr(&self) -> *mut AVFormatContext {
        self.0
    }

    /// Transfers ownership of the raw pointer to the caller.
    /// `mem::forget` suppresses `Drop`, so the caller must close the input.
    fn into_raw(self) -> *mut AVFormatContext {
        let ptr = self.0;
        std::mem::forget(self);
        ptr
    }
}

impl Drop for AvFormatContextGuard {
    fn drop(&mut self) {
        if !self.0.is_null() {
            unsafe {
                // close_input receives a &mut to a temporary copy of the
                // pointer; self.0 itself is never read again, and Drop runs
                // at most once, so leaving it dangling is fine.
                ff_sys::avformat::close_input(&mut (self.0 as *mut _));
            }
        }
    }
}
96
/// RAII guard owning an `AVCodecContext` allocated via the `ff_sys` wrapper.
///
/// The context is freed on drop unless ownership is transferred out with
/// [`Self::into_raw`].
struct AvCodecContextGuard(*mut AVCodecContext);

impl AvCodecContextGuard {
    /// Allocates a codec context for `codec`.
    ///
    /// # Safety
    /// `codec` must be a codec pointer obtained from FFmpeg (null handling
    /// follows `avcodec_alloc_context3`'s contract — TODO confirm wrapper).
    unsafe fn new(codec: *const ff_sys::AVCodec) -> Result<Self, DecodeError> {
        let codec_ctx = unsafe {
            ff_sys::avcodec::alloc_context3(codec).map_err(|e| DecodeError::Ffmpeg {
                code: e,
                message: format!("Failed to allocate codec context: {e}"),
            })?
        };
        Ok(Self(codec_ctx))
    }

    /// Borrows the raw pointer without giving up ownership.
    const fn as_ptr(&self) -> *mut AVCodecContext {
        self.0
    }

    /// Transfers ownership to the caller; `Drop` is suppressed, so the
    /// caller becomes responsible for freeing the context.
    fn into_raw(self) -> *mut AVCodecContext {
        let ptr = self.0;
        std::mem::forget(self);
        ptr
    }
}

impl Drop for AvCodecContextGuard {
    fn drop(&mut self) {
        if !self.0.is_null() {
            unsafe {
                // A temporary copy of the pointer is passed by &mut; the
                // guard is being destroyed, so self.0 is never read again.
                ff_sys::avcodec::free_context(&mut (self.0 as *mut _));
            }
        }
    }
}
140
/// RAII guard owning an `AVPacket` from `av_packet_alloc`.
///
/// The packet is freed on drop unless ownership is transferred out with
/// [`Self::into_raw`].
struct AvPacketGuard(*mut AVPacket);

impl AvPacketGuard {
    /// Allocates an empty packet, reporting allocation failure (null return)
    /// as a `DecodeError` with code 0 since FFmpeg gives no error code here.
    ///
    /// # Safety
    /// Calls directly into the FFmpeg C API.
    unsafe fn new() -> Result<Self, DecodeError> {
        let packet = unsafe { ff_sys::av_packet_alloc() };
        if packet.is_null() {
            return Err(DecodeError::Ffmpeg {
                code: 0,
                message: "Failed to allocate packet".to_string(),
            });
        }
        Ok(Self(packet))
    }

    // Kept for API symmetry with the other guards; currently unused.
    #[allow(dead_code)]
    const fn as_ptr(&self) -> *mut AVPacket {
        self.0
    }

    /// Transfers ownership to the caller; `Drop` is suppressed, so the
    /// caller must free the packet.
    fn into_raw(self) -> *mut AVPacket {
        let ptr = self.0;
        std::mem::forget(self);
        ptr
    }
}

impl Drop for AvPacketGuard {
    fn drop(&mut self) {
        if !self.0.is_null() {
            unsafe {
                // Passes a temporary copy of the pointer; self.0 is never
                // read after this point.
                ff_sys::av_packet_free(&mut (self.0 as *mut _));
            }
        }
    }
}
186
/// RAII guard owning an `AVFrame` from `av_frame_alloc`.
///
/// The frame is freed on drop unless ownership is transferred out with
/// [`Self::into_raw`].
struct AvFrameGuard(*mut AVFrame);

impl AvFrameGuard {
    /// Allocates an empty frame, reporting allocation failure (null return)
    /// as a `DecodeError` with code 0.
    ///
    /// # Safety
    /// Calls directly into the FFmpeg C API.
    unsafe fn new() -> Result<Self, DecodeError> {
        let frame = unsafe { ff_sys::av_frame_alloc() };
        if frame.is_null() {
            return Err(DecodeError::Ffmpeg {
                code: 0,
                message: "Failed to allocate frame".to_string(),
            });
        }
        Ok(Self(frame))
    }

    /// Borrows the raw pointer without giving up ownership.
    const fn as_ptr(&self) -> *mut AVFrame {
        self.0
    }

    /// Transfers ownership to the caller; `Drop` is suppressed, so the
    /// caller must free the frame.
    fn into_raw(self) -> *mut AVFrame {
        let ptr = self.0;
        std::mem::forget(self);
        ptr
    }
}

impl Drop for AvFrameGuard {
    fn drop(&mut self) {
        if !self.0.is_null() {
            unsafe {
                // Passes a temporary copy of the pointer; self.0 is never
                // read after this point.
                ff_sys::av_frame_free(&mut (self.0 as *mut _));
            }
        }
    }
}
231
/// Internal decoder state wrapping the raw FFmpeg objects for one input file.
///
/// The raw pointers are created in [`Self::new`] (ownership is taken out of
/// the RAII guards via `into_raw`); they are presumably released by a `Drop`
/// impl outside this view — TODO confirm.
pub(crate) struct VideoDecoderInner {
    /// Demuxer context for the opened input.
    format_ctx: *mut AVFormatContext,
    /// Decoder context for the selected video stream.
    codec_ctx: *mut AVCodecContext,
    /// Index of the video stream within the container.
    stream_index: i32,
    /// Lazily created scaler for output pixel-format conversion.
    sws_ctx: Option<*mut SwsContext>,
    /// Requested output pixel format; `None` keeps the decoded format.
    output_format: Option<PixelFormat>,
    /// Set once demuxer EOF was reached and the decoder fully drained.
    eof: bool,
    /// Timestamp of the most recently decoded frame (updated in `decode_one`).
    position: Duration,
    /// Reusable packet buffer for demuxed data.
    packet: *mut AVPacket,
    /// Reusable frame buffer for decoded pictures.
    frame: *mut AVFrame,
    /// Lazily created scaler reused across thumbnail requests.
    thumbnail_sws_ctx: Option<*mut SwsContext>,
    /// (src_w, src_h, dst_w, dst_h, av_format) describing the configuration
    /// of the cached thumbnail scaler (see `scale_frame`).
    thumbnail_cache_key: Option<(u32, u32, u32, u32, AVPixelFormat)>,
    /// Our own reference to the hardware device context, when HW decode is on.
    hw_device_ctx: Option<*mut AVBufferRef>,
    /// The acceleration backend actually selected (may differ from requested).
    active_hw_accel: HardwareAccel,
    /// Optional buffer pool used when copying out decoded frame planes.
    frame_pool: Option<Arc<dyn FramePool>>,
}
266
267impl VideoDecoderInner {
268 fn hw_accel_to_device_type(accel: HardwareAccel) -> Option<AVHWDeviceType> {
272 match accel {
273 HardwareAccel::Auto => None,
274 HardwareAccel::None => None,
275 HardwareAccel::Nvdec => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_CUDA),
276 HardwareAccel::Qsv => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_QSV),
277 HardwareAccel::Amf => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_D3D11VA), HardwareAccel::VideoToolbox => {
279 Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VIDEOTOOLBOX)
280 }
281 HardwareAccel::Vaapi => Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VAAPI),
282 }
283 }
284
285 const fn hw_accel_auto_priority() -> &'static [HardwareAccel] {
287 &[
289 HardwareAccel::Nvdec,
290 HardwareAccel::Qsv,
291 HardwareAccel::VideoToolbox,
292 HardwareAccel::Vaapi,
293 HardwareAccel::Amf,
294 ]
295 }
296
    /// Initializes hardware acceleration on `codec_ctx` per the requested
    /// policy, returning our device-context reference (if any) plus the
    /// backend actually activated.
    ///
    /// `Auto` silently falls back to software when every backend fails;
    /// an explicitly requested backend propagates its error instead.
    ///
    /// # Safety
    /// `codec_ctx` must point to a valid, not-yet-opened codec context.
    unsafe fn init_hardware_accel(
        codec_ctx: *mut AVCodecContext,
        accel: HardwareAccel,
    ) -> Result<(Option<*mut AVBufferRef>, HardwareAccel), DecodeError> {
        match accel {
            HardwareAccel::Auto => {
                // Try each backend in priority order; errors are discarded.
                for &hw_type in Self::hw_accel_auto_priority() {
                    if let Ok((Some(ctx), active)) =
                        unsafe { Self::try_init_hw_device(codec_ctx, hw_type) }
                    {
                        return Ok((Some(ctx), active));
                    }
                }
                // No backend worked: run in pure software mode.
                Ok((None, HardwareAccel::None))
            }
            HardwareAccel::None => {
                Ok((None, HardwareAccel::None))
            }
            _ => {
                // Explicit request: failure is surfaced to the caller.
                unsafe { Self::try_init_hw_device(codec_ctx, accel) }
            }
        }
    }
342
    /// Attempts to create a hardware device context for `accel` and attach it
    /// to `codec_ctx`, returning an extra reference we keep for ourselves.
    ///
    /// # Safety
    /// `codec_ctx` must point to a valid, not-yet-opened codec context.
    ///
    /// NOTE(review): on repeated calls (the `Auto` probe loop) a previously
    /// assigned `(*codec_ctx).hw_device_ctx` is overwritten without being
    /// unreferenced — possible buffer-ref leak on the rare partial-failure
    /// path; confirm against the FFmpeg ref-counting rules.
    unsafe fn try_init_hw_device(
        codec_ctx: *mut AVCodecContext,
        accel: HardwareAccel,
    ) -> Result<(Option<*mut AVBufferRef>, HardwareAccel), DecodeError> {
        // Auto/None carry no device type; report "no hardware" rather than error.
        let Some(device_type) = Self::hw_accel_to_device_type(accel) else {
            return Ok((None, HardwareAccel::None));
        };

        let mut hw_device_ctx: *mut AVBufferRef = ptr::null_mut();
        // No explicit device name, options, or flags: let FFmpeg pick defaults.
        let ret = unsafe {
            ff_sys::av_hwdevice_ctx_create(
                ptr::addr_of_mut!(hw_device_ctx),
                device_type,
                ptr::null(), ptr::null_mut(), 0, )
        };

        if ret < 0 {
            return Err(DecodeError::HwAccelUnavailable { accel });
        }

        // Hand the freshly created reference to the codec context.
        unsafe {
            (*codec_ctx).hw_device_ctx = hw_device_ctx;
        }

        // Take a second reference for ourselves so the device outlives the
        // codec context from our side as well.
        let our_ref = unsafe { ff_sys::av_buffer_ref(hw_device_ctx) };
        if our_ref.is_null() {
            return Err(DecodeError::HwAccelUnavailable { accel });
        }

        Ok((Some(our_ref), accel))
    }
393
    /// The hardware acceleration backend actually in use — may differ from
    /// the requested one (`Auto` resolves to a concrete backend or `None`).
    pub(crate) fn hardware_accel(&self) -> HardwareAccel {
        self.active_hw_accel
    }
398
    /// Returns true when `format` is an opaque hardware surface format whose
    /// pixels are not directly CPU-addressable and must be transferred first
    /// (see `transfer_hardware_frame_if_needed`).
    const fn is_hardware_format(format: AVPixelFormat) -> bool {
        matches!(
            format,
            ff_sys::AVPixelFormat_AV_PIX_FMT_D3D11
                | ff_sys::AVPixelFormat_AV_PIX_FMT_CUDA
                | ff_sys::AVPixelFormat_AV_PIX_FMT_VAAPI
                | ff_sys::AVPixelFormat_AV_PIX_FMT_VIDEOTOOLBOX
                | ff_sys::AVPixelFormat_AV_PIX_FMT_QSV
                | ff_sys::AVPixelFormat_AV_PIX_FMT_VDPAU
                | ff_sys::AVPixelFormat_AV_PIX_FMT_DXVA2_VLD
                | ff_sys::AVPixelFormat_AV_PIX_FMT_OPENCL
                | ff_sys::AVPixelFormat_AV_PIX_FMT_MEDIACODEC
                | ff_sys::AVPixelFormat_AV_PIX_FMT_VULKAN
        )
    }
417
    /// If the current frame lives in GPU memory, copies it into a software
    /// frame and swaps that into `self.frame`; no-op for software frames.
    ///
    /// # Safety
    /// `self.frame` must hold a valid decoded frame.
    unsafe fn transfer_hardware_frame_if_needed(&mut self) -> Result<(), DecodeError> {
        let frame_format = unsafe { (*self.frame).format };

        // Software frames need no transfer.
        if !Self::is_hardware_format(frame_format) {
            return Ok(());
        }

        let sw_frame = unsafe { ff_sys::av_frame_alloc() };
        if sw_frame.is_null() {
            return Err(DecodeError::Ffmpeg {
                code: 0,
                message: "Failed to allocate software frame for hardware transfer".to_string(),
            });
        }

        // Download the surface contents into CPU memory.
        let ret = unsafe {
            ff_sys::av_hwframe_transfer_data(
                sw_frame, self.frame, 0, )
        };

        if ret < 0 {
            // Free the scratch frame before reporting the failure.
            unsafe {
                ff_sys::av_frame_free(&mut (sw_frame as *mut _));
            }
            return Err(DecodeError::Ffmpeg {
                code: ret,
                message: format!(
                    "Failed to transfer hardware frame to CPU memory: {}",
                    ff_sys::av_error_string(ret)
                ),
            });
        }

        // The transfer copies pixel data only; carry timing fields over manually.
        unsafe {
            (*sw_frame).pts = (*self.frame).pts;
            (*sw_frame).pkt_dts = (*self.frame).pkt_dts;
            (*sw_frame).duration = (*self.frame).duration;
            (*sw_frame).time_base = (*self.frame).time_base;
        }

        // Replace the hardware frame in place with the software copy, then
        // free the (now-empty) scratch frame shell.
        unsafe {
            ff_sys::av_frame_unref(self.frame);
            ff_sys::av_frame_move_ref(self.frame, sw_frame);
            ff_sys::av_frame_free(&mut (sw_frame as *mut _));
        }

        Ok(())
    }
486
    /// Opens `path`, selects its first video stream, configures the decoder
    /// (threads, optional hardware acceleration), and returns the decoder
    /// state together with the extracted stream metadata.
    ///
    /// On success, raw-pointer ownership is moved out of the RAII guards
    /// into the returned struct via `into_raw`; on any early error the
    /// guards clean up automatically.
    pub(crate) fn new(
        path: &Path,
        output_format: Option<PixelFormat>,
        hardware_accel: HardwareAccel,
        thread_count: usize,
        frame_pool: Option<Arc<dyn FramePool>>,
    ) -> Result<(Self, VideoStreamInfo), DecodeError> {
        ff_sys::ensure_initialized();

        // Open the container; guard closes it if anything below fails.
        let format_ctx_guard = unsafe { AvFormatContextGuard::new(path)? };
        let format_ctx = format_ctx_guard.as_ptr();

        unsafe {
            ff_sys::avformat::find_stream_info(format_ctx).map_err(|e| DecodeError::Ffmpeg {
                code: e,
                message: format!("Failed to find stream info: {}", ff_sys::av_error_string(e)),
            })?;
        }

        let (stream_index, codec_id) =
            unsafe { Self::find_video_stream(format_ctx) }.ok_or_else(|| {
                DecodeError::NoVideoStream {
                    path: path.to_path_buf(),
                }
            })?;

        let codec_name = unsafe { Self::extract_codec_name(codec_id) };
        let codec = unsafe {
            ff_sys::avcodec::find_decoder(codec_id).ok_or_else(|| {
                DecodeError::UnsupportedCodec {
                    codec: format!("{codec_name} (codec_id={codec_id:?})"),
                }
            })?
        };

        // Allocate the decoder context; guard frees it on early error.
        let codec_ctx_guard = unsafe { AvCodecContextGuard::new(codec)? };
        let codec_ctx = codec_ctx_guard.as_ptr();

        unsafe {
            // Copy stream parameters into the decoder context.
            let stream = (*format_ctx).streams.add(stream_index as usize);
            let codecpar = (*(*stream)).codecpar;
            ff_sys::avcodec::parameters_to_context(codec_ctx, codecpar).map_err(|e| {
                DecodeError::Ffmpeg {
                    code: e,
                    message: format!(
                        "Failed to copy codec parameters: {}",
                        ff_sys::av_error_string(e)
                    ),
                }
            })?;

            // 0 keeps FFmpeg's default thread selection.
            if thread_count > 0 {
                (*codec_ctx).thread_count = thread_count as i32;
            }
        }

        // Must happen before open2 so the codec can use the device context.
        let (hw_device_ctx, active_hw_accel) =
            unsafe { Self::init_hardware_accel(codec_ctx, hardware_accel)? };

        unsafe {
            ff_sys::avcodec::open2(codec_ctx, codec, ptr::null_mut()).map_err(|e| {
                // Release our device-context reference on open failure;
                // the codec context's own reference is dropped by its guard.
                if let Some(hw_ctx) = hw_device_ctx {
                    ff_sys::av_buffer_unref(&mut (hw_ctx as *mut _));
                }
                DecodeError::Ffmpeg {
                    code: e,
                    message: format!("Failed to open codec: {}", ff_sys::av_error_string(e)),
                }
            })?;
        }

        let stream_info =
            unsafe { Self::extract_stream_info(format_ctx, stream_index as i32, codec_ctx)? };

        let packet_guard = unsafe { AvPacketGuard::new()? };
        let frame_guard = unsafe { AvFrameGuard::new()? };

        // All fallible work is done: transfer ownership out of every guard.
        Ok((
            Self {
                format_ctx: format_ctx_guard.into_raw(),
                codec_ctx: codec_ctx_guard.into_raw(),
                stream_index: stream_index as i32,
                sws_ctx: None,
                output_format,
                eof: false,
                position: Duration::ZERO,
                packet: packet_guard.into_raw(),
                frame: frame_guard.into_raw(),
                thumbnail_sws_ctx: None,
                thumbnail_cache_key: None,
                hw_device_ctx,
                active_hw_accel,
                frame_pool,
            },
            stream_info,
        ))
    }
627
628 unsafe fn find_video_stream(format_ctx: *mut AVFormatContext) -> Option<(usize, AVCodecID)> {
638 unsafe {
640 let nb_streams = (*format_ctx).nb_streams as usize;
641
642 for i in 0..nb_streams {
643 let stream = (*format_ctx).streams.add(i);
644 let codecpar = (*(*stream)).codecpar;
645
646 if (*codecpar).codec_type == AVMediaType_AVMEDIA_TYPE_VIDEO {
647 return Some((i, (*codecpar).codec_id));
648 }
649 }
650
651 None
652 }
653 }
654
655 unsafe fn extract_codec_name(codec_id: ff_sys::AVCodecID) -> String {
657 let name_ptr = unsafe { ff_sys::avcodec_get_name(codec_id) };
659 if name_ptr.is_null() {
660 return String::from("unknown");
661 }
662 unsafe { CStr::from_ptr(name_ptr).to_string_lossy().into_owned() }
664 }
665
    /// Reads stream/codec parameters out of the FFmpeg structures and builds
    /// a [`VideoStreamInfo`], applying logged fallbacks for invalid frame
    /// rates and unknown duration.
    ///
    /// # Safety
    /// `format_ctx` and `codec_ctx` must be valid, and `stream_index` must
    /// address an existing stream.
    unsafe fn extract_stream_info(
        format_ctx: *mut AVFormatContext,
        stream_index: i32,
        codec_ctx: *mut AVCodecContext,
    ) -> Result<VideoStreamInfo, DecodeError> {
        // Snapshot every raw field we need in one unsafe region.
        let (
            width,
            height,
            fps_rational,
            duration_val,
            pix_fmt,
            color_space_val,
            color_range_val,
            color_primaries_val,
            codec_id,
        ) = unsafe {
            let stream = (*format_ctx).streams.add(stream_index as usize);
            let codecpar = (*(*stream)).codecpar;

            (
                (*codecpar).width as u32,
                (*codecpar).height as u32,
                (*(*stream)).avg_frame_rate,
                (*format_ctx).duration,
                // pix_fmt comes from the codec context (post parameter copy),
                // not from codecpar.
                (*codec_ctx).pix_fmt,
                (*codecpar).color_space,
                (*codecpar).color_range,
                (*codecpar).color_primaries,
                (*codecpar).codec_id,
            )
        };

        // A zero denominator means the container had no usable average rate.
        let frame_rate = if fps_rational.den != 0 {
            Rational::new(fps_rational.num as i32, fps_rational.den as i32)
        } else {
            log::warn!(
                "invalid frame rate, falling back to 30fps num={} den=0 fallback=30/1",
                fps_rational.num
            );
            Rational::new(30, 1)
        };

        // Container duration is expressed in microseconds (AV_TIME_BASE).
        let duration = if duration_val > 0 {
            let duration_secs = duration_val as f64 / 1_000_000.0;
            Some(Duration::from_secs_f64(duration_secs))
        } else {
            None
        };

        let pixel_format = Self::convert_pixel_format(pix_fmt);

        let color_space = Self::convert_color_space(color_space_val);
        let color_range = Self::convert_color_range(color_range_val);
        let color_primaries = Self::convert_color_primaries(color_primaries_val);

        let codec = Self::convert_codec(codec_id);
        let codec_name = unsafe { Self::extract_codec_name(codec_id) };

        let mut builder = VideoStreamInfo::builder()
            .index(stream_index as u32)
            .codec(codec)
            .codec_name(codec_name)
            .width(width)
            .height(height)
            .frame_rate(frame_rate)
            .pixel_format(pixel_format)
            .color_space(color_space)
            .color_range(color_range)
            .color_primaries(color_primaries);

        if let Some(d) = duration {
            builder = builder.duration(d);
        }

        Ok(builder.build())
    }
750
751 fn convert_pixel_format(fmt: AVPixelFormat) -> PixelFormat {
753 if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P {
754 PixelFormat::Yuv420p
755 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P {
756 PixelFormat::Yuv422p
757 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P {
758 PixelFormat::Yuv444p
759 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24 {
760 PixelFormat::Rgb24
761 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24 {
762 PixelFormat::Bgr24
763 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA {
764 PixelFormat::Rgba
765 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA {
766 PixelFormat::Bgra
767 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8 {
768 PixelFormat::Gray8
769 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_NV12 {
770 PixelFormat::Nv12
771 } else if fmt == ff_sys::AVPixelFormat_AV_PIX_FMT_NV21 {
772 PixelFormat::Nv21
773 } else {
774 log::warn!(
775 "pixel_format unsupported, falling back to Yuv420p requested={fmt} fallback=Yuv420p"
776 );
777 PixelFormat::Yuv420p
778 }
779 }
780
781 fn convert_color_space(space: AVColorSpace) -> ColorSpace {
783 if space == ff_sys::AVColorSpace_AVCOL_SPC_BT709 {
784 ColorSpace::Bt709
785 } else if space == ff_sys::AVColorSpace_AVCOL_SPC_BT470BG
786 || space == ff_sys::AVColorSpace_AVCOL_SPC_SMPTE170M
787 {
788 ColorSpace::Bt601
789 } else if space == ff_sys::AVColorSpace_AVCOL_SPC_BT2020_NCL {
790 ColorSpace::Bt2020
791 } else {
792 log::warn!(
793 "color_space unsupported, falling back to Bt709 requested={space} fallback=Bt709"
794 );
795 ColorSpace::Bt709
796 }
797 }
798
799 fn convert_color_range(range: AVColorRange) -> ColorRange {
801 if range == ff_sys::AVColorRange_AVCOL_RANGE_JPEG {
802 ColorRange::Full
803 } else if range == ff_sys::AVColorRange_AVCOL_RANGE_MPEG {
804 ColorRange::Limited
805 } else {
806 log::warn!(
807 "color_range unsupported, falling back to Limited requested={range} fallback=Limited"
808 );
809 ColorRange::Limited
810 }
811 }
812
813 fn convert_color_primaries(primaries: AVColorPrimaries) -> ColorPrimaries {
815 if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT709 {
816 ColorPrimaries::Bt709
817 } else if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT470BG
818 || primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_SMPTE170M
819 {
820 ColorPrimaries::Bt601
821 } else if primaries == ff_sys::AVColorPrimaries_AVCOL_PRI_BT2020 {
822 ColorPrimaries::Bt2020
823 } else {
824 log::warn!(
825 "color_primaries unsupported, falling back to Bt709 requested={primaries} fallback=Bt709"
826 );
827 ColorPrimaries::Bt709
828 }
829 }
830
831 fn convert_codec(codec_id: AVCodecID) -> VideoCodec {
833 if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_H264 {
834 VideoCodec::H264
835 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_HEVC {
836 VideoCodec::H265
837 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_VP8 {
838 VideoCodec::Vp8
839 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_VP9 {
840 VideoCodec::Vp9
841 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_AV1 {
842 VideoCodec::Av1
843 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_MPEG4 {
844 VideoCodec::Mpeg4
845 } else if codec_id == ff_sys::AVCodecID_AV_CODEC_ID_PRORES {
846 VideoCodec::ProRes
847 } else {
848 log::warn!(
849 "video codec unsupported, falling back to H264 codec_id={codec_id} fallback=H264"
850 );
851 VideoCodec::H264
852 }
853 }
854
    /// Decodes and returns the next video frame, or `Ok(None)` once the
    /// stream is fully drained.
    ///
    /// Drives the standard FFmpeg send/receive loop: pull a frame from the
    /// decoder; on EAGAIN read and feed the next packet (skipping packets
    /// from other streams); on demuxer EOF enter drain mode; on decoder EOF
    /// report end of stream.
    pub(crate) fn decode_one(&mut self) -> Result<Option<VideoFrame>, DecodeError> {
        if self.eof {
            return Ok(None);
        }

        unsafe {
            loop {
                let ret = ff_sys::avcodec_receive_frame(self.codec_ctx, self.frame);

                if ret == 0 {
                    // Got a frame: download from GPU if needed, then convert.
                    self.transfer_hardware_frame_if_needed()?;

                    let video_frame = self.convert_frame_to_video_frame()?;

                    // Track the decode position from the frame PTS, in the
                    // stream's time base.
                    let pts = (*self.frame).pts;
                    if pts != ff_sys::AV_NOPTS_VALUE {
                        let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
                        let time_base = (*(*stream)).time_base;
                        let timestamp_secs =
                            pts as f64 * time_base.num as f64 / time_base.den as f64;
                        self.position = Duration::from_secs_f64(timestamp_secs);
                    }

                    return Ok(Some(video_frame));
                } else if ret == ff_sys::error_codes::EAGAIN {
                    // Decoder wants more input: demux the next packet.
                    let read_ret = ff_sys::av_read_frame(self.format_ctx, self.packet);

                    if read_ret == ff_sys::error_codes::EOF {
                        // Demuxer exhausted: send a null packet to start
                        // drain mode (return value intentionally ignored),
                        // then keep receiving buffered frames.
                        ff_sys::avcodec_send_packet(self.codec_ctx, ptr::null());
                        self.eof = true;
                        continue;
                    } else if read_ret < 0 {
                        return Err(DecodeError::Ffmpeg {
                            code: read_ret,
                            message: format!(
                                "Failed to read frame: {}",
                                ff_sys::av_error_string(read_ret)
                            ),
                        });
                    }

                    if (*self.packet).stream_index == self.stream_index {
                        let send_ret = ff_sys::avcodec_send_packet(self.codec_ctx, self.packet);
                        // The packet is unreferenced whether or not the send
                        // succeeded; EAGAIN here is tolerated and retried via
                        // the outer loop.
                        ff_sys::av_packet_unref(self.packet);

                        if send_ret < 0 && send_ret != ff_sys::error_codes::EAGAIN {
                            return Err(DecodeError::Ffmpeg {
                                code: send_ret,
                                message: format!(
                                    "Failed to send packet: {}",
                                    ff_sys::av_error_string(send_ret)
                                ),
                            });
                        }
                    } else {
                        // Packet from another stream (audio, subtitles, ...).
                        ff_sys::av_packet_unref(self.packet);
                    }
                } else if ret == ff_sys::error_codes::EOF {
                    // Decoder fully drained.
                    self.eof = true;
                    return Ok(None);
                } else {
                    return Err(DecodeError::DecodingFailed {
                        timestamp: Some(self.position),
                        reason: ff_sys::av_error_string(ret),
                    });
                }
            }
        }
    }
942
943 unsafe fn convert_frame_to_video_frame(&mut self) -> Result<VideoFrame, DecodeError> {
945 unsafe {
947 let width = (*self.frame).width as u32;
948 let height = (*self.frame).height as u32;
949 let src_format = (*self.frame).format;
950
951 let dst_format = if let Some(fmt) = self.output_format {
953 Self::pixel_format_to_av(fmt)
954 } else {
955 src_format
956 };
957
958 let needs_conversion = src_format != dst_format;
960
961 if needs_conversion {
962 self.convert_with_sws(width, height, src_format, dst_format)
963 } else {
964 self.av_frame_to_video_frame(self.frame)
965 }
966 }
967 }
968
    /// Converts the current frame from `src_format` to `dst_format` at the
    /// same dimensions using swscale, then wraps it as a [`VideoFrame`].
    ///
    /// The sws context is created on first use and cached in `self.sws_ctx`.
    /// NOTE(review): the cache is not keyed on width/height/format, so it
    /// assumes these stay constant for the stream — confirm.
    ///
    /// # Safety
    /// `self.frame` must hold a valid decoded software frame matching
    /// `width`/`height`/`src_format`.
    unsafe fn convert_with_sws(
        &mut self,
        width: u32,
        height: u32,
        src_format: i32,
        dst_format: i32,
    ) -> Result<VideoFrame, DecodeError> {
        unsafe {
            // Lazily create and cache the scaler.
            if self.sws_ctx.is_none() {
                let ctx = ff_sys::swscale::get_context(
                    width as i32,
                    height as i32,
                    src_format,
                    width as i32,
                    height as i32,
                    dst_format,
                    ff_sys::swscale::scale_flags::BILINEAR,
                )
                .map_err(|e| DecodeError::Ffmpeg {
                    code: 0,
                    message: format!("Failed to create sws context: {e}"),
                })?;

                self.sws_ctx = Some(ctx);
            }

            let Some(sws_ctx) = self.sws_ctx else {
                // Unreachable in practice: we just populated it above.
                return Err(DecodeError::Ffmpeg {
                    code: 0,
                    message: "SwsContext not initialized".to_string(),
                });
            };

            // Destination frame is guard-owned, so it is freed even on error.
            let dst_frame_guard = AvFrameGuard::new()?;
            let dst_frame = dst_frame_guard.as_ptr();

            (*dst_frame).width = width as i32;
            (*dst_frame).height = height as i32;
            (*dst_frame).format = dst_format;

            // Allocate the destination plane buffers (alignment 0 = default).
            let buffer_ret = ff_sys::av_frame_get_buffer(dst_frame, 0);
            if buffer_ret < 0 {
                return Err(DecodeError::Ffmpeg {
                    code: buffer_ret,
                    message: format!(
                        "Failed to allocate frame buffer: {}",
                        ff_sys::av_error_string(buffer_ret)
                    ),
                });
            }

            ff_sys::swscale::scale(
                sws_ctx,
                (*self.frame).data.as_ptr() as *const *const u8,
                (*self.frame).linesize.as_ptr(),
                0,
                height as i32,
                (*dst_frame).data.as_ptr() as *const *mut u8,
                (*dst_frame).linesize.as_ptr(),
            )
            .map_err(|e| DecodeError::Ffmpeg {
                code: 0,
                message: format!("Failed to scale frame: {e}"),
            })?;

            // swscale does not carry timing; preserve the source PTS.
            (*dst_frame).pts = (*self.frame).pts;

            let video_frame = self.av_frame_to_video_frame(dst_frame)?;

            Ok(video_frame)
        }
    }
1051
    /// Copies an `AVFrame`'s planes into owned buffers and wraps them in a
    /// [`VideoFrame`], deriving the timestamp from the frame PTS in the
    /// stream's time base (default timestamp when PTS is absent).
    ///
    /// # Safety
    /// `frame` must point to a valid decoded software frame.
    unsafe fn av_frame_to_video_frame(
        &self,
        frame: *const AVFrame,
    ) -> Result<VideoFrame, DecodeError> {
        unsafe {
            let width = (*frame).width as u32;
            let height = (*frame).height as u32;
            let format = Self::convert_pixel_format((*frame).format);

            let pts = (*frame).pts;
            let timestamp = if pts != ff_sys::AV_NOPTS_VALUE {
                let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
                let time_base = (*(*stream)).time_base;
                Timestamp::new(
                    pts as i64,
                    Rational::new(time_base.num as i32, time_base.den as i32),
                )
            } else {
                // No PTS available; fall back to the default timestamp.
                Timestamp::default()
            };

            let (planes, strides) =
                self.extract_planes_and_strides(frame, width, height, format)?;

            // `false` here is a VideoFrame::new flag whose meaning isn't
            // visible from this file — TODO confirm (keyframe? ownership?).
            VideoFrame::new(planes, strides, width, height, format, timestamp, false).map_err(|e| {
                DecodeError::Ffmpeg {
                    code: 0,
                    message: format!("Failed to create VideoFrame: {e}"),
                }
            })
        }
    }
1088
1089 fn allocate_buffer(&self, size: usize) -> PooledBuffer {
1099 if let Some(ref pool) = self.frame_pool {
1100 if let Some(pooled_buffer) = pool.acquire(size) {
1101 return pooled_buffer;
1102 }
1103 return PooledBuffer::new(vec![0u8; size], Arc::downgrade(pool));
1108 }
1109 PooledBuffer::standalone(vec![0u8; size])
1110 }
1111
1112 unsafe fn extract_planes_and_strides(
1114 &self,
1115 frame: *const AVFrame,
1116 width: u32,
1117 height: u32,
1118 format: PixelFormat,
1119 ) -> Result<(Vec<PooledBuffer>, Vec<usize>), DecodeError> {
1120 const BYTES_PER_PIXEL_RGBA: usize = 4;
1122 const BYTES_PER_PIXEL_RGB24: usize = 3;
1123
1124 unsafe {
1126 let mut planes = Vec::new();
1127 let mut strides = Vec::new();
1128
1129 #[allow(clippy::match_same_arms)]
1130 match format {
1131 PixelFormat::Rgba | PixelFormat::Bgra | PixelFormat::Rgb24 | PixelFormat::Bgr24 => {
1132 let stride = (*frame).linesize[0] as usize;
1134 let bytes_per_pixel = if matches!(format, PixelFormat::Rgba | PixelFormat::Bgra)
1135 {
1136 BYTES_PER_PIXEL_RGBA
1137 } else {
1138 BYTES_PER_PIXEL_RGB24
1139 };
1140 let row_size = (width as usize) * bytes_per_pixel;
1141 let buffer_size = row_size * height as usize;
1142 let mut plane_data = self.allocate_buffer(buffer_size);
1143
1144 for y in 0..height as usize {
1145 let src_offset = y * stride;
1146 let dst_offset = y * row_size;
1147 let src_ptr = (*frame).data[0].add(src_offset);
1148 let plane_slice = plane_data.as_mut();
1149 std::ptr::copy_nonoverlapping(
1153 src_ptr,
1154 plane_slice[dst_offset..].as_mut_ptr(),
1155 row_size,
1156 );
1157 }
1158
1159 planes.push(plane_data);
1160 strides.push(row_size);
1161 }
1162 PixelFormat::Yuv420p | PixelFormat::Yuv422p | PixelFormat::Yuv444p => {
1163 let (chroma_width, chroma_height) = match format {
1165 PixelFormat::Yuv420p => (width / 2, height / 2),
1166 PixelFormat::Yuv422p => (width / 2, height),
1167 PixelFormat::Yuv444p => (width, height),
1168 _ => unreachable!(),
1169 };
1170
1171 let y_stride = width as usize;
1173 let y_size = y_stride * height as usize;
1174 let mut y_data = self.allocate_buffer(y_size);
1175 for y in 0..height as usize {
1176 let src_offset = y * (*frame).linesize[0] as usize;
1177 let dst_offset = y * y_stride;
1178 let src_ptr = (*frame).data[0].add(src_offset);
1179 let y_slice = y_data.as_mut();
1180 std::ptr::copy_nonoverlapping(
1183 src_ptr,
1184 y_slice[dst_offset..].as_mut_ptr(),
1185 width as usize,
1186 );
1187 }
1188 planes.push(y_data);
1189 strides.push(y_stride);
1190
1191 let u_stride = chroma_width as usize;
1193 let u_size = u_stride * chroma_height as usize;
1194 let mut u_data = self.allocate_buffer(u_size);
1195 for y in 0..chroma_height as usize {
1196 let src_offset = y * (*frame).linesize[1] as usize;
1197 let dst_offset = y * u_stride;
1198 let src_ptr = (*frame).data[1].add(src_offset);
1199 let u_slice = u_data.as_mut();
1200 std::ptr::copy_nonoverlapping(
1203 src_ptr,
1204 u_slice[dst_offset..].as_mut_ptr(),
1205 chroma_width as usize,
1206 );
1207 }
1208 planes.push(u_data);
1209 strides.push(u_stride);
1210
1211 let v_stride = chroma_width as usize;
1213 let v_size = v_stride * chroma_height as usize;
1214 let mut v_data = self.allocate_buffer(v_size);
1215 for y in 0..chroma_height as usize {
1216 let src_offset = y * (*frame).linesize[2] as usize;
1217 let dst_offset = y * v_stride;
1218 let src_ptr = (*frame).data[2].add(src_offset);
1219 let v_slice = v_data.as_mut();
1220 std::ptr::copy_nonoverlapping(
1223 src_ptr,
1224 v_slice[dst_offset..].as_mut_ptr(),
1225 chroma_width as usize,
1226 );
1227 }
1228 planes.push(v_data);
1229 strides.push(v_stride);
1230 }
1231 PixelFormat::Gray8 => {
1232 let stride = width as usize;
1234 let mut plane_data = self.allocate_buffer(stride * height as usize);
1235
1236 for y in 0..height as usize {
1237 let src_offset = y * (*frame).linesize[0] as usize;
1238 let dst_offset = y * stride;
1239 let src_ptr = (*frame).data[0].add(src_offset);
1240 let plane_slice = plane_data.as_mut();
1241 std::ptr::copy_nonoverlapping(
1244 src_ptr,
1245 plane_slice[dst_offset..].as_mut_ptr(),
1246 width as usize,
1247 );
1248 }
1249
1250 planes.push(plane_data);
1251 strides.push(stride);
1252 }
1253 PixelFormat::Nv12 | PixelFormat::Nv21 => {
1254 let uv_height = height / 2;
1256
1257 let y_stride = width as usize;
1259 let mut y_data = self.allocate_buffer(y_stride * height as usize);
1260 for y in 0..height as usize {
1261 let src_offset = y * (*frame).linesize[0] as usize;
1262 let dst_offset = y * y_stride;
1263 let src_ptr = (*frame).data[0].add(src_offset);
1264 let y_slice = y_data.as_mut();
1265 std::ptr::copy_nonoverlapping(
1268 src_ptr,
1269 y_slice[dst_offset..].as_mut_ptr(),
1270 width as usize,
1271 );
1272 }
1273 planes.push(y_data);
1274 strides.push(y_stride);
1275
1276 let uv_stride = width as usize;
1278 let mut uv_data = self.allocate_buffer(uv_stride * uv_height as usize);
1279 for y in 0..uv_height as usize {
1280 let src_offset = y * (*frame).linesize[1] as usize;
1281 let dst_offset = y * uv_stride;
1282 let src_ptr = (*frame).data[1].add(src_offset);
1283 let uv_slice = uv_data.as_mut();
1284 std::ptr::copy_nonoverlapping(
1287 src_ptr,
1288 uv_slice[dst_offset..].as_mut_ptr(),
1289 width as usize,
1290 );
1291 }
1292 planes.push(uv_data);
1293 strides.push(uv_stride);
1294 }
1295 _ => {
1296 return Err(DecodeError::Ffmpeg {
1297 code: 0,
1298 message: format!("Unsupported pixel format: {format:?}"),
1299 });
1300 }
1301 }
1302
1303 Ok((planes, strides))
1304 }
1305 }
1306
1307 fn pixel_format_to_av(format: PixelFormat) -> AVPixelFormat {
1309 match format {
1310 PixelFormat::Yuv420p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P,
1311 PixelFormat::Yuv422p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P,
1312 PixelFormat::Yuv444p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P,
1313 PixelFormat::Rgb24 => ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24,
1314 PixelFormat::Bgr24 => ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24,
1315 PixelFormat::Rgba => ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA,
1316 PixelFormat::Bgra => ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA,
1317 PixelFormat::Gray8 => ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8,
1318 PixelFormat::Nv12 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV12,
1319 PixelFormat::Nv21 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV21,
1320 _ => {
1321 log::warn!(
1322 "pixel_format has no AV mapping, falling back to Yuv420p format={format:?} fallback=AV_PIX_FMT_YUV420P"
1323 );
1324 ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P
1325 }
1326 }
1327 }
1328
    /// Timestamp of the most recently decoded frame (stream time), as
    /// maintained by `decode_one`.
    pub(crate) fn position(&self) -> Duration {
        self.position
    }
1333
    /// True once the demuxer hit end-of-file and the decoder was drained
    /// (cleared again by `seek`/`flush`).
    pub(crate) fn is_eof(&self) -> bool {
        self.eof
    }
1338
    /// Converts a wall-clock `duration` into a PTS value expressed in the
    /// video stream's time base.
    ///
    /// NOTE(review): the final `as i64` cast truncates toward zero rather
    /// than rounding, so sub-tick precision is dropped.
    fn duration_to_pts(&self, duration: Duration) -> i64 {
        let time_base = unsafe {
            let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
            (*(*stream)).time_base
        };

        // Ticks per second for the stream = den / num.
        let time_base_f64 = time_base.den as f64 / time_base.num as f64;
        (duration.as_secs_f64() * time_base_f64) as i64
    }
1367
    /// Inverse of `duration_to_pts`: converts a PTS in the video stream's
    /// time base back into a wall-clock duration. Currently unused.
    #[allow(dead_code)]
    fn pts_to_duration(&self, pts: i64) -> Duration {
        unsafe {
            let stream = (*self.format_ctx).streams.add(self.stream_index as usize);
            let time_base = (*(*stream)).time_base;

            let duration_secs = pts as f64 * time_base.num as f64 / time_base.den as f64;
            Duration::from_secs_f64(duration_secs)
        }
    }
1397
    /// Seeks the stream to `position`.
    ///
    /// Performs a backward (keyframe) container seek, flushes decoder state,
    /// then decodes forward: in `Exact` mode until a frame at or past
    /// `position`; otherwise until within [`KEYFRAME_SEEK_TOLERANCE_SECS`]
    /// seconds before it.
    ///
    /// # Errors
    /// [`DecodeError::SeekFailed`] when the container seek fails or (exact
    /// mode) the stream ends before the target.
    pub(crate) fn seek(
        &mut self,
        position: Duration,
        mode: crate::SeekMode,
    ) -> Result<(), DecodeError> {
        use crate::SeekMode;

        let timestamp = self.duration_to_pts(position);

        // BACKWARD lands on the nearest keyframe at or before the target.
        let flags = ff_sys::avformat::seek_flags::BACKWARD;

        // Drop any half-consumed packet/frame from the previous position.
        unsafe {
            ff_sys::av_packet_unref(self.packet);
            ff_sys::av_frame_unref(self.frame);
        }

        unsafe {
            ff_sys::avformat::seek_frame(
                self.format_ctx,
                self.stream_index as i32,
                timestamp,
                flags,
            )
            .map_err(|e| DecodeError::SeekFailed {
                target: position,
                reason: ff_sys::av_error_string(e),
            })?;
        }

        // Discard decoder-internal buffers referencing pre-seek data.
        unsafe {
            ff_sys::avcodec::flush_buffers(self.codec_ctx);
        }

        // Defensively drain any frames the decoder still reports after the
        // flush, unreferencing each; stop on EAGAIN/EOF or any error.
        unsafe {
            loop {
                let ret = ff_sys::avcodec_receive_frame(self.codec_ctx, self.frame);
                if ret == ff_sys::error_codes::EAGAIN || ret == ff_sys::error_codes::EOF {
                    break;
                } else if ret == 0 {
                    ff_sys::av_frame_unref(self.frame);
                } else {
                    break;
                }
            }
        }

        self.eof = false;
        if mode == SeekMode::Exact {
            // Decode forward until a frame at or past the exact target.
            self.skip_to_exact(position)?;
        } else {
            // Keyframe mode: accept any frame within the tolerance window
            // before the target.
            let tolerance = Duration::from_secs(KEYFRAME_SEEK_TOLERANCE_SECS);
            let min_position = position.saturating_sub(tolerance);

            while let Some(frame) = self.decode_one()? {
                let frame_time = frame.timestamp().as_duration();
                if frame_time >= min_position {
                    break;
                }
            }
        }

        Ok(())
    }
1529
1530 fn skip_to_exact(&mut self, target: Duration) -> Result<(), DecodeError> {
1551 loop {
1552 match self.decode_one()? {
1553 Some(frame) => {
1554 let frame_time = frame.timestamp().as_duration();
1555 if frame_time >= target {
1556 break;
1559 }
1560 }
1562 None => {
1563 return Err(DecodeError::SeekFailed {
1565 target,
1566 reason: "Reached end of stream before target position".to_string(),
1567 });
1568 }
1569 }
1570 }
1571 Ok(())
1572 }
1573
    /// Resets decoder state without seeking: flushes the codec's internal
    /// buffers and clears the EOF flag so decoding can resume.
    pub(crate) fn flush(&mut self) {
        // SAFETY: `codec_ctx` is a valid, owned codec context for the
        // lifetime of `self`.
        unsafe {
            ff_sys::avcodec::flush_buffers(self.codec_ctx);
        }
        self.eof = false;
    }
1585
1586 pub(crate) fn scale_frame(
1625 &mut self,
1626 frame: &VideoFrame,
1627 target_width: u32,
1628 target_height: u32,
1629 ) -> Result<VideoFrame, DecodeError> {
1630 let src_width = frame.width();
1631 let src_height = frame.height();
1632 let src_format = frame.format();
1633
1634 let src_aspect = src_width as f64 / src_height as f64;
1636 let target_aspect = target_width as f64 / target_height as f64;
1637
1638 let (scaled_width, scaled_height) = if src_aspect > target_aspect {
1639 let height = (target_width as f64 / src_aspect).round() as u32;
1641 (target_width, height)
1642 } else {
1643 let width = (target_height as f64 * src_aspect).round() as u32;
1645 (width, target_height)
1646 };
1647
1648 let av_format = Self::pixel_format_to_av(src_format);
1650
1651 let cache_key = (
1653 src_width,
1654 src_height,
1655 scaled_width,
1656 scaled_height,
1657 av_format,
1658 );
1659
1660 unsafe {
1662 let (sws_ctx, is_cached) = if let (Some(cached_ctx), Some(cached_key)) =
1664 (self.thumbnail_sws_ctx, self.thumbnail_cache_key)
1665 {
1666 if cached_key == cache_key {
1667 (cached_ctx, true)
1669 } else {
1670 ff_sys::swscale::free_context(cached_ctx);
1672 self.thumbnail_sws_ctx = None;
1674 self.thumbnail_cache_key = None;
1675
1676 let new_ctx = ff_sys::swscale::get_context(
1677 src_width as i32,
1678 src_height as i32,
1679 av_format,
1680 scaled_width as i32,
1681 scaled_height as i32,
1682 av_format,
1683 ff_sys::swscale::scale_flags::BILINEAR,
1684 )
1685 .map_err(|e| DecodeError::Ffmpeg {
1686 code: 0,
1687 message: format!("Failed to create scaling context: {e}"),
1688 })?;
1689
1690 (new_ctx, false)
1692 }
1693 } else {
1694 let new_ctx = ff_sys::swscale::get_context(
1696 src_width as i32,
1697 src_height as i32,
1698 av_format,
1699 scaled_width as i32,
1700 scaled_height as i32,
1701 av_format,
1702 ff_sys::swscale::scale_flags::BILINEAR,
1703 )
1704 .map_err(|e| DecodeError::Ffmpeg {
1705 code: 0,
1706 message: format!("Failed to create scaling context: {e}"),
1707 })?;
1708
1709 (new_ctx, false)
1711 };
1712
1713 let src_frame_guard = AvFrameGuard::new()?;
1715 let src_frame = src_frame_guard.as_ptr();
1716
1717 (*src_frame).width = src_width as i32;
1718 (*src_frame).height = src_height as i32;
1719 (*src_frame).format = av_format;
1720
1721 let planes = frame.planes();
1723 let strides = frame.strides();
1724
1725 for (i, plane_data) in planes.iter().enumerate() {
1726 if i >= ff_sys::AV_NUM_DATA_POINTERS as usize {
1727 break;
1728 }
1729 (*src_frame).data[i] = plane_data.as_ref().as_ptr().cast_mut();
1730 (*src_frame).linesize[i] = strides[i] as i32;
1731 }
1732
1733 let dst_frame_guard = AvFrameGuard::new()?;
1735 let dst_frame = dst_frame_guard.as_ptr();
1736
1737 (*dst_frame).width = scaled_width as i32;
1738 (*dst_frame).height = scaled_height as i32;
1739 (*dst_frame).format = av_format;
1740
1741 let buffer_ret = ff_sys::av_frame_get_buffer(dst_frame, 0);
1743 if buffer_ret < 0 {
1744 if !is_cached {
1746 ff_sys::swscale::free_context(sws_ctx);
1747 }
1748 return Err(DecodeError::Ffmpeg {
1749 code: buffer_ret,
1750 message: format!(
1751 "Failed to allocate destination frame buffer: {}",
1752 ff_sys::av_error_string(buffer_ret)
1753 ),
1754 });
1755 }
1756
1757 let scale_result = ff_sys::swscale::scale(
1759 sws_ctx,
1760 (*src_frame).data.as_ptr() as *const *const u8,
1761 (*src_frame).linesize.as_ptr(),
1762 0,
1763 src_height as i32,
1764 (*dst_frame).data.as_ptr() as *const *mut u8,
1765 (*dst_frame).linesize.as_ptr(),
1766 );
1767
1768 if let Err(e) = scale_result {
1769 if !is_cached {
1771 ff_sys::swscale::free_context(sws_ctx);
1772 }
1773 return Err(DecodeError::Ffmpeg {
1774 code: 0,
1775 message: format!("Failed to scale frame: {e}"),
1776 });
1777 }
1778
1779 if !is_cached {
1781 self.thumbnail_sws_ctx = Some(sws_ctx);
1782 self.thumbnail_cache_key = Some(cache_key);
1783 }
1784
1785 (*dst_frame).pts = frame.timestamp().pts();
1787
1788 let video_frame = self.av_frame_to_video_frame(dst_frame)?;
1790
1791 Ok(video_frame)
1792 }
1793 }
1794}
1795
impl Drop for VideoDecoderInner {
    /// Releases all FFmpeg resources. Order matters: scaling and hardware
    /// contexts first, then frame/packet buffers, then the codec context,
    /// and the format context last.
    ///
    /// Note: the `&mut (field as *mut _)` pattern passes a *temporary*
    /// pointer to the FFmpeg free functions, so the struct fields themselves
    /// are not nulled — harmless here because `self` is being destroyed.
    fn drop(&mut self) {
        if let Some(sws_ctx) = self.sws_ctx {
            // SAFETY: `sws_ctx` was created by swscale and is freed only here.
            unsafe {
                ff_sys::swscale::free_context(sws_ctx);
            }
        }

        if let Some(thumbnail_ctx) = self.thumbnail_sws_ctx {
            // SAFETY: the cached thumbnail context is owned exclusively by
            // `self` and freed only here.
            unsafe {
                ff_sys::swscale::free_context(thumbnail_ctx);
            }
        }

        if let Some(hw_ctx) = self.hw_device_ctx {
            // SAFETY: drops our reference on the hardware device context
            // (refcounted; FFmpeg frees it when the count reaches zero).
            unsafe {
                ff_sys::av_buffer_unref(&mut (hw_ctx as *mut _));
            }
        }

        if !self.frame.is_null() {
            // SAFETY: `frame` was allocated with av_frame_alloc and is freed
            // exactly once.
            unsafe {
                ff_sys::av_frame_free(&mut (self.frame as *mut _));
            }
        }

        if !self.packet.is_null() {
            // SAFETY: `packet` was allocated with av_packet_alloc and is
            // freed exactly once.
            unsafe {
                ff_sys::av_packet_free(&mut (self.packet as *mut _));
            }
        }

        if !self.codec_ctx.is_null() {
            // SAFETY: the codec context is freed after the frame/packet that
            // may reference its buffers.
            unsafe {
                ff_sys::avcodec::free_context(&mut (self.codec_ctx as *mut _));
            }
        }

        if !self.format_ctx.is_null() {
            // SAFETY: the format context is closed last; nothing above
            // references it afterwards.
            unsafe {
                ff_sys::avformat::close_input(&mut (self.format_ctx as *mut _));
            }
        }
    }
}
1854
1855unsafe impl Send for VideoDecoderInner {}
1858
#[cfg(test)]
mod tests {
    use ff_format::PixelFormat;
    use ff_format::codec::VideoCodec;
    use ff_format::color::{ColorPrimaries, ColorRange, ColorSpace};

    use crate::HardwareAccel;

    use super::VideoDecoderInner;

    /// Expands to a `#[test]` asserting that a static conversion helper maps
    /// `$input` to `$expected`.
    macro_rules! conv_test {
        ($name:ident, $func:ident, $input:expr, $expected:expr) => {
            #[test]
            fn $name() {
                assert_eq!(VideoDecoderInner::$func($input), $expected);
            }
        };
    }

    /// Expands to a `#[test]` asserting that `pixel_format_to_av` followed by
    /// `convert_pixel_format` reproduces the original [`PixelFormat`].
    macro_rules! pixel_round_trip {
        ($name:ident, $format:expr) => {
            #[test]
            fn $name() {
                let av = VideoDecoderInner::pixel_format_to_av($format);
                assert_eq!(VideoDecoderInner::convert_pixel_format(av), $format);
            }
        };
    }

    // AVPixelFormat -> PixelFormat.
    conv_test!(pixel_format_yuv420p, convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P, PixelFormat::Yuv420p);
    conv_test!(pixel_format_yuv422p, convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P, PixelFormat::Yuv422p);
    conv_test!(pixel_format_yuv444p, convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P, PixelFormat::Yuv444p);
    conv_test!(pixel_format_rgb24, convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24, PixelFormat::Rgb24);
    conv_test!(pixel_format_bgr24, convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24, PixelFormat::Bgr24);
    conv_test!(pixel_format_rgba, convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA, PixelFormat::Rgba);
    conv_test!(pixel_format_bgra, convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA, PixelFormat::Bgra);
    conv_test!(pixel_format_gray8, convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8, PixelFormat::Gray8);
    conv_test!(pixel_format_nv12, convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_NV12, PixelFormat::Nv12);
    conv_test!(pixel_format_nv21, convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_NV21, PixelFormat::Nv21);
    conv_test!(pixel_format_unknown_falls_back_to_yuv420p, convert_pixel_format, ff_sys::AVPixelFormat_AV_PIX_FMT_NONE, PixelFormat::Yuv420p);

    // AVColorSpace -> ColorSpace.
    conv_test!(color_space_bt709, convert_color_space, ff_sys::AVColorSpace_AVCOL_SPC_BT709, ColorSpace::Bt709);
    conv_test!(color_space_bt470bg_yields_bt601, convert_color_space, ff_sys::AVColorSpace_AVCOL_SPC_BT470BG, ColorSpace::Bt601);
    conv_test!(color_space_smpte170m_yields_bt601, convert_color_space, ff_sys::AVColorSpace_AVCOL_SPC_SMPTE170M, ColorSpace::Bt601);
    conv_test!(color_space_bt2020_ncl, convert_color_space, ff_sys::AVColorSpace_AVCOL_SPC_BT2020_NCL, ColorSpace::Bt2020);
    conv_test!(color_space_unknown_falls_back_to_bt709, convert_color_space, ff_sys::AVColorSpace_AVCOL_SPC_UNSPECIFIED, ColorSpace::Bt709);

    // AVColorRange -> ColorRange.
    conv_test!(color_range_jpeg_yields_full, convert_color_range, ff_sys::AVColorRange_AVCOL_RANGE_JPEG, ColorRange::Full);
    conv_test!(color_range_mpeg_yields_limited, convert_color_range, ff_sys::AVColorRange_AVCOL_RANGE_MPEG, ColorRange::Limited);
    conv_test!(color_range_unknown_falls_back_to_limited, convert_color_range, ff_sys::AVColorRange_AVCOL_RANGE_UNSPECIFIED, ColorRange::Limited);

    // AVColorPrimaries -> ColorPrimaries.
    conv_test!(color_primaries_bt709, convert_color_primaries, ff_sys::AVColorPrimaries_AVCOL_PRI_BT709, ColorPrimaries::Bt709);
    conv_test!(color_primaries_bt470bg_yields_bt601, convert_color_primaries, ff_sys::AVColorPrimaries_AVCOL_PRI_BT470BG, ColorPrimaries::Bt601);
    conv_test!(color_primaries_smpte170m_yields_bt601, convert_color_primaries, ff_sys::AVColorPrimaries_AVCOL_PRI_SMPTE170M, ColorPrimaries::Bt601);
    conv_test!(color_primaries_bt2020, convert_color_primaries, ff_sys::AVColorPrimaries_AVCOL_PRI_BT2020, ColorPrimaries::Bt2020);
    conv_test!(color_primaries_unknown_falls_back_to_bt709, convert_color_primaries, ff_sys::AVColorPrimaries_AVCOL_PRI_UNSPECIFIED, ColorPrimaries::Bt709);

    // AVCodecID -> VideoCodec.
    conv_test!(codec_h264, convert_codec, ff_sys::AVCodecID_AV_CODEC_ID_H264, VideoCodec::H264);
    conv_test!(codec_hevc_yields_h265, convert_codec, ff_sys::AVCodecID_AV_CODEC_ID_HEVC, VideoCodec::H265);
    conv_test!(codec_vp8, convert_codec, ff_sys::AVCodecID_AV_CODEC_ID_VP8, VideoCodec::Vp8);
    conv_test!(codec_vp9, convert_codec, ff_sys::AVCodecID_AV_CODEC_ID_VP9, VideoCodec::Vp9);
    conv_test!(codec_av1, convert_codec, ff_sys::AVCodecID_AV_CODEC_ID_AV1, VideoCodec::Av1);
    conv_test!(codec_mpeg4, convert_codec, ff_sys::AVCodecID_AV_CODEC_ID_MPEG4, VideoCodec::Mpeg4);
    conv_test!(codec_prores, convert_codec, ff_sys::AVCodecID_AV_CODEC_ID_PRORES, VideoCodec::ProRes);
    conv_test!(codec_unknown_falls_back_to_h264, convert_codec, ff_sys::AVCodecID_AV_CODEC_ID_NONE, VideoCodec::H264);

    // HardwareAccel -> optional AVHWDeviceType.
    conv_test!(hw_accel_auto_yields_none, hw_accel_to_device_type, HardwareAccel::Auto, None);
    conv_test!(hw_accel_none_yields_none, hw_accel_to_device_type, HardwareAccel::None, None);
    conv_test!(hw_accel_nvdec_yields_cuda, hw_accel_to_device_type, HardwareAccel::Nvdec, Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_CUDA));
    conv_test!(hw_accel_qsv_yields_qsv, hw_accel_to_device_type, HardwareAccel::Qsv, Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_QSV));
    conv_test!(hw_accel_amf_yields_d3d11va, hw_accel_to_device_type, HardwareAccel::Amf, Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_D3D11VA));
    conv_test!(hw_accel_videotoolbox, hw_accel_to_device_type, HardwareAccel::VideoToolbox, Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VIDEOTOOLBOX));
    conv_test!(hw_accel_vaapi, hw_accel_to_device_type, HardwareAccel::Vaapi, Some(ff_sys::AVHWDeviceType_AV_HWDEVICE_TYPE_VAAPI));

    // PixelFormat -> AV -> PixelFormat round trips.
    pixel_round_trip!(pixel_format_to_av_round_trip_yuv420p, PixelFormat::Yuv420p);
    pixel_round_trip!(pixel_format_to_av_round_trip_yuv422p, PixelFormat::Yuv422p);
    pixel_round_trip!(pixel_format_to_av_round_trip_yuv444p, PixelFormat::Yuv444p);
    pixel_round_trip!(pixel_format_to_av_round_trip_rgb24, PixelFormat::Rgb24);
    pixel_round_trip!(pixel_format_to_av_round_trip_bgr24, PixelFormat::Bgr24);
    pixel_round_trip!(pixel_format_to_av_round_trip_rgba, PixelFormat::Rgba);
    pixel_round_trip!(pixel_format_to_av_round_trip_bgra, PixelFormat::Bgra);
    pixel_round_trip!(pixel_format_to_av_round_trip_gray8, PixelFormat::Gray8);
    pixel_round_trip!(pixel_format_to_av_round_trip_nv12, PixelFormat::Nv12);
    pixel_round_trip!(pixel_format_to_av_round_trip_nv21, PixelFormat::Nv21);

    // Formats without a dedicated AV mapping fall back to YUV420P.
    conv_test!(pixel_format_to_av_unknown_falls_back_to_yuv420p_av, pixel_format_to_av, PixelFormat::Yuv420p10le, ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P);

    // Codec-name extraction goes through FFmpeg, hence the unsafe calls.
    #[test]
    fn codec_name_should_return_h264_for_h264_codec_id() {
        let name =
            unsafe { VideoDecoderInner::extract_codec_name(ff_sys::AVCodecID_AV_CODEC_ID_H264) };
        assert_eq!(name, "h264");
    }

    #[test]
    fn codec_name_should_return_none_for_none_codec_id() {
        let name =
            unsafe { VideoDecoderInner::extract_codec_name(ff_sys::AVCodecID_AV_CODEC_ID_NONE) };
        assert_eq!(name, "none");
    }

    // The UnsupportedCodec error message must carry both the human-readable
    // codec name and the raw codec id for diagnostics.
    #[test]
    fn unsupported_codec_error_should_include_codec_name() {
        let codec_id = ff_sys::AVCodecID_AV_CODEC_ID_H264;
        let codec_name = unsafe { VideoDecoderInner::extract_codec_name(codec_id) };
        let msg = crate::error::DecodeError::UnsupportedCodec {
            codec: format!("{codec_name} (codec_id={codec_id:?})"),
        }
        .to_string();
        assert!(msg.contains("h264"), "expected codec name in error: {msg}");
        assert!(
            msg.contains("codec_id="),
            "expected codec_id in error: {msg}"
        );
    }
}