use std::{collections::VecDeque, mem::ManuallyDrop, ptr};
use ffmpeg_next::{
Codec, Packet, Rational,
codec::{
self,
Context,
packet::{Mut as PacketMut, Ref as PacketRef},
},
ffi::{
AVBufferRef, AVCodec, AVFrame, AVHWFramesContext, AVMediaType, av_buffer_ref, av_buffer_unref,
av_frame_move_ref, av_frame_unref, av_hwdevice_ctx_create, av_hwframe_transfer_data,
av_packet_ref, avcodec_alloc_context3, avcodec_free_context, avcodec_parameters_alloc,
avcodec_parameters_copy, avcodec_parameters_free, avcodec_parameters_to_context,
},
frame,
};
// Minimal hand-written FFI shim: look up a decoder by its raw integer codec
// id without going through ffmpeg_next's typed wrapper. Used by `find_decoder`.
mod c_shims {
    use super::AVCodec;
    use libc::c_int;
    unsafe extern "C" {
        pub fn avcodec_find_decoder(id: c_int) -> *const AVCodec;
    }
}
use crate::{
backend::{self, Backend},
error::{AllBackendsFailed, Error, HwDeviceInitFailed, Result},
ffi::{CallbackState, codec_supports_hwaccel, get_hw_format},
frame::Frame,
};
/// Hardware-accelerated video decoder with automatic backend fallback.
///
/// While `probe` is `Some`, the decoder is "on probation": input packets are
/// buffered so they can be replayed on the next backend if the active one
/// fails before producing a usable frame.
pub struct VideoDecoder {
    state: DecoderState,
    // Scratch frame that receives the decoder's HW-surface output before it
    // is transferred to the caller's CPU frame.
    hw_frame: frame::Video,
    // Present while still probing; cleared on the first successfully
    // delivered frame (see `receive_frame` / `try_pop_pending`).
    probe: Option<ProbeState>,
    // CPU frames produced while replaying buffered packets on a rescue
    // backend; served before asking the decoder for new output.
    pending_frames: VecDeque<frame::Video>,
    // Byte cap for `pending_frames` during a probe replay.
    max_probe_pending_bytes: usize,
}
/// The currently active decoder plus the raw FFmpeg resources it owns.
struct DecoderState {
    // ManuallyDrop so `Drop for DecoderState` can enforce destruction order:
    // the codec context must be freed before the device ref / callback state
    // it points at.
    inner: ManuallyDrop<ffmpeg_next::decoder::Video>,
    backend: Backend,
    // Our own reference to the AVHWDeviceContext (the codec context holds a
    // separate ref it unrefs itself).
    hw_device_ref: *mut AVBufferRef,
    // Leaked Box handed to the get_format callback via AVCodecContext.opaque;
    // freed in Drop after the codec context is gone.
    callback_state: *mut CallbackState,
}
// Probe-window limits: bound how much input is buffered for backend-rescue replay.
const MAX_PROBE_PACKETS: usize = 256;
const MAX_PROBE_PACKET_BYTES: usize = 64 * 1024 * 1024;
const MAX_PROBE_PACKET_SIDE_DATA_ENTRIES: usize = 64;
// Flat per-entry charge so zero-size side-data entries still count against the budget.
const SIDE_DATA_ENTRY_OVERHEAD: usize = 80;
// Pessimistic sizing used to estimate a HW->CPU transfer before allocating anything.
const WORST_CASE_BYTES_PER_PIXEL: usize = 8;
// Caps on frames queued while replaying buffered packets on a candidate backend.
const MAX_PROBE_PENDING_FRAMES: usize = 16;
pub const DEFAULT_MAX_PROBE_PENDING_BYTES: usize = 256 * 1024 * 1024;
/// Everything needed to retry decoding on another backend if the active one fails.
struct ProbeState {
    // Clone of the stream parameters, re-cloned per candidate build.
    parameters: codec::Parameters,
    codec: Codec,
    // Backends not yet tried, in probe order; the head is the next candidate.
    remaining_backends: Vec<Backend>,
    // Packets already fed to the active decoder, kept for replay.
    buffered_packets: Vec<Packet>,
    // Running byte charge for `buffered_packets` (payload + side data).
    buffered_bytes: usize,
    // Whether EOF was sent, so a replay reproduces it.
    eof_sent: bool,
    // (backend, error) for every backend that has failed so far.
    attempts: Vec<(Backend, Box<Error>)>,
}
// SAFETY: the raw pointers in DecoderState are exclusively owned by it and only
// dereferenced through &mut access or in Drop. NOTE(review): this assumes the
// underlying FFmpeg contexts are safe to *move* between threads (used by one
// thread at a time) — confirm for the hw backends in use.
unsafe impl Send for DecoderState {}
// SAFETY: the only non-auto-Send content is DecoderState (see above); the
// remaining fields are plain owned data.
unsafe impl Send for VideoDecoder {}
impl Drop for DecoderState {
    fn drop(&mut self) {
        unsafe {
            // Destruction order matters: the codec context (inner) references
            // both the hw device and the callback state, so it goes first.
            ManuallyDrop::drop(&mut self.inner);
            if !self.callback_state.is_null() {
                // Reclaim the Box leaked into AVCodecContext.opaque.
                drop(Box::from_raw(self.callback_state));
                self.callback_state = ptr::null_mut();
            }
            if !self.hw_device_ref.is_null() {
                // av_buffer_unref nulls the pointer it is given.
                av_buffer_unref(&mut self.hw_device_ref);
            }
        }
    }
}
impl VideoDecoder {
/// Opens a decoder, trying backends in `backend::probe_order()` until one
/// builds. The returned decoder starts in probation mode (a `ProbeState`)
/// so later runtime failures can be rescued on the remaining backends.
///
/// # Errors
/// * `Error::AllBackendsFailed` with per-backend errors if nothing opened.
/// * `Error::Ffmpeg` if cloning parameters for the probe state fails — this
///   fails closed rather than returning a decoder that cannot be rescued.
pub fn open(parameters: codec::Parameters) -> Result<Self> {
    let codec = find_decoder(&parameters)?;
    let order = backend::probe_order();
    let mut attempts: Vec<(Backend, Box<Error>)> = Vec::new();
    for (i, &backend) in order.iter().enumerate() {
        // Each attempt needs its own parameter copy: build_state consumes it.
        let cloned_for_build = match try_clone_parameters(&parameters) {
            Ok(p) => p,
            Err(e) => {
                tracing::warn!(?backend, error = %e, "hwdecode: parameters clone failed");
                attempts.push((backend, Box::new(Error::Ffmpeg(e))));
                continue;
            }
        };
        match Self::build_state(cloned_for_build, codec, backend) {
            Ok(state) => {
                tracing::info!(?backend, "hwdecode: opened video decoder (probing)");
                // Backends after this one stay available for rescue.
                let remaining = order[(i + 1)..].to_vec();
                let probe = match try_clone_parameters(&parameters) {
                    Ok(probe_params) => ProbeState {
                        parameters: probe_params,
                        codec,
                        remaining_backends: remaining,
                        buffered_packets: Vec::new(),
                        buffered_bytes: 0,
                        eof_sent: false,
                        // Earlier failures travel with the probe so a final
                        // AllBackendsFailed reports every backend tried.
                        attempts: std::mem::take(&mut attempts),
                    },
                    Err(e) => {
                        tracing::warn!(
                            error = %e,
                            "hwdecode: parameters clone failed for probe state at open; \
                             failing closed instead of returning a decoder without rescue"
                        );
                        return Err(Error::Ffmpeg(e));
                    }
                };
                return Ok(Self {
                    state,
                    hw_frame: alloc_av_frame().map_err(Error::Ffmpeg)?,
                    probe: Some(probe),
                    pending_frames: VecDeque::new(),
                    max_probe_pending_bytes: DEFAULT_MAX_PROBE_PENDING_BYTES,
                });
            }
            Err(e) => {
                tracing::warn!(?backend, error = %e, "hwdecode: backend open failed");
                attempts.push((backend, Box::new(e)));
            }
        }
    }
    Err(Error::AllBackendsFailed(AllBackendsFailed::new(
        attempts,
        Vec::new(),
    )))
}
/// Opens a decoder pinned to a single `backend` — no probing and no rescue,
/// so any later failure on that backend is final.
pub fn open_with(parameters: codec::Parameters, backend: Backend) -> Result<Self> {
    let codec = find_decoder(&parameters)?;
    Ok(Self {
        state: Self::build_state(parameters, codec, backend)?,
        hw_frame: alloc_av_frame().map_err(Error::Ffmpeg)?,
        probe: None,
        pending_frames: VecDeque::new(),
        max_probe_pending_bytes: DEFAULT_MAX_PROBE_PENDING_BYTES,
    })
}
#[must_use]
pub fn with_max_probe_pending_bytes(mut self, bytes: usize) -> Self {
self.max_probe_pending_bytes = bytes;
self
}
/// Backend currently decoding (may change after a probe rescue).
pub fn backend(&self) -> Backend {
    self.state.backend
}
/// Coded width in pixels, as reported by the open decoder.
pub fn width(&self) -> u32 {
    self.state.inner.width()
}
/// Coded height in pixels, as reported by the open decoder.
pub fn height(&self) -> u32 {
    self.state.inner.height()
}
/// Time base of the open decoder.
pub fn time_base(&self) -> Rational {
    self.state.inner.time_base()
}
/// Frame rate reported by the decoder, if known.
pub fn frame_rate(&self) -> Option<Rational> {
    self.state.inner.frame_rate()
}
/// Feeds one packet to the active decoder.
///
/// While probing, the packet is charged against the replay budget and cloned
/// *before* being handed to the decoder, so a hard failure can be rescued by
/// replaying the complete input on the next backend. Exceeding any probe
/// budget ends the rescue with `AllBackendsFailed` without consuming the
/// packet.
///
/// # Errors
/// Transient errors (EAGAIN/EOF) are returned as-is. Hard errors are rescued
/// via `advance_probe` (then the same packet is retried in the loop), or
/// returned if probation is already over.
pub fn send_packet(&mut self, packet: &Packet) -> Result<()> {
    loop {
        // Validate budgets and clone the packet BEFORE sending: once the
        // decoder consumes the packet we can no longer refuse it.
        let staged_clone: Option<(Packet, usize)> = if let Some(probe) = self.probe.as_ref() {
            let side_count = packet_side_data_count(packet);
            if side_count > MAX_PROBE_PACKET_SIDE_DATA_ENTRIES {
                let probe = self.probe.take().expect("probe present");
                tracing::warn!(
                    side_data_entries = side_count,
                    max_side_data_entries = MAX_PROBE_PACKET_SIDE_DATA_ENTRIES,
                    trigger = "side_data_entry_cap",
                    "hwdecode: probe rescue exhausted before consuming packet; \
                     returning AllBackendsFailed without invoking decoder"
                );
                return Err(Error::AllBackendsFailed(AllBackendsFailed::new(
                    probe.attempts,
                    probe.buffered_packets,
                )));
            }
            // Charge payload plus side-data (entries already capped above).
            let pkt_size = packet.size().saturating_add(packet_side_data_bytes(
                packet,
                MAX_PROBE_PACKET_SIDE_DATA_ENTRIES,
            ));
            let new_count = probe.buffered_packets.len() + 1;
            let new_bytes = probe.buffered_bytes.saturating_add(pkt_size);
            if new_count > MAX_PROBE_PACKETS || new_bytes > MAX_PROBE_PACKET_BYTES {
                let probe = self.probe.take().expect("probe present");
                tracing::warn!(
                    packets = new_count,
                    bytes = new_bytes,
                    side_data_entries = side_count,
                    max_packets = MAX_PROBE_PACKETS,
                    max_bytes = MAX_PROBE_PACKET_BYTES,
                    trigger = "byte_or_packet_cap",
                    "hwdecode: probe rescue exhausted before consuming packet; \
                     returning AllBackendsFailed without invoking decoder"
                );
                return Err(Error::AllBackendsFailed(AllBackendsFailed::new(
                    probe.attempts,
                    probe.buffered_packets,
                )));
            }
            match try_clone_packet(packet) {
                Ok(c) => Some((c, new_bytes)),
                Err(e) => {
                    // Can't guarantee replayability: fail closed, keep the
                    // caller's packet unconsumed.
                    let probe = self.probe.take().expect("probe present");
                    tracing::warn!(
                        error = %e,
                        "hwdecode: packet clone failed before consuming; \
                         returning AllBackendsFailed without invoking decoder"
                    );
                    return Err(Error::AllBackendsFailed(AllBackendsFailed::new(
                        probe.attempts,
                        probe.buffered_packets,
                    )));
                }
            }
        } else {
            None
        };
        match self.state.inner.send_packet(packet) {
            Ok(()) => {
                // Only commit the clone/byte charge once the decoder has
                // actually accepted the packet.
                if let Some((cloned, new_bytes)) = staged_clone {
                    if let Some(probe) = self.probe.as_mut() {
                        probe.buffered_packets.push(cloned);
                        probe.buffered_bytes = new_bytes;
                    }
                }
                return Ok(());
            }
            Err(e) if is_transient(&e) => {
                return Err(Error::Ffmpeg(e));
            }
            Err(e) => {
                if self.probe.is_some() {
                    // Swap in the next backend, then retry this same packet.
                    self.advance_probe(Error::Ffmpeg(e))?;
                    continue;
                }
                return Err(Error::Ffmpeg(e));
            }
        }
    }
}
/// Signals end of stream to the active decoder.
///
/// While probing, records that EOF was sent so a candidate replay can
/// reproduce it. Hard failures are rescued via `advance_probe` and retried.
pub fn send_eof(&mut self) -> Result<()> {
    loop {
        match self.state.inner.send_eof() {
            Ok(()) => {
                if let Some(probe) = self.probe.as_mut() {
                    probe.eof_sent = true;
                }
                return Ok(());
            }
            // EAGAIN/EOF are not backend failures; surface them unchanged.
            Err(e) if is_transient(&e) => return Err(Error::Ffmpeg(e)),
            Err(e) => {
                if self.probe.is_some() {
                    self.advance_probe(Error::Ffmpeg(e))?;
                    continue;
                }
                return Err(Error::Ffmpeg(e));
            }
        }
    }
}
/// Delivers the next decoded frame into `frame` (as a CPU frame).
///
/// Frames queued by a probe replay are served first. The first successful
/// frame ends probation. Hard decode or HW->CPU transfer errors during
/// probation trigger a backend advance and the loop retries.
///
/// # Errors
/// EAGAIN is passed through (caller must feed more input); other errors are
/// rescued while probing or returned.
pub fn receive_frame(&mut self, frame: &mut Frame) -> Result<()> {
    if self.try_pop_pending(frame) {
        return Ok(());
    }
    loop {
        let res = self.state.inner.receive_frame(&mut self.hw_frame);
        match res {
            Err(e) => {
                if is_eagain(&e) {
                    return Err(Error::Ffmpeg(e));
                }
                // Note: EOF also lands here and, while probing, advances the
                // probe — a stream that ends with zero decoded frames counts
                // as a backend failure.
                if self.probe.is_some() {
                    self.advance_probe(Error::Ffmpeg(e))?;
                    if self.try_pop_pending(frame) {
                        return Ok(());
                    }
                    continue;
                }
                return Err(Error::Ffmpeg(e));
            }
            Ok(()) => {
                match unsafe { transfer_hw_frame(frame, &mut self.hw_frame) } {
                    Ok(()) => {
                        // A usable frame reached the caller: probation over.
                        self.probe = None;
                        return Ok(());
                    }
                    Err(e) => {
                        if self.probe.is_some() {
                            self.advance_probe(Error::Ffmpeg(e))?;
                            // Drop any partial transfer into the caller's frame.
                            unsafe { av_frame_unref(frame.as_inner_mut().as_mut_ptr()) };
                            if self.try_pop_pending(frame) {
                                return Ok(());
                            }
                            continue;
                        }
                        return Err(Error::Ffmpeg(e));
                    }
                }
            }
        }
    }
}
// Pops the oldest probe-replay frame into `frame`, returning true on success.
// Delivering any frame ends probation (probe = None), mirroring the direct
// decode success path in `receive_frame`.
fn try_pop_pending(&mut self, frame: &mut Frame) -> bool {
    let Some(mut buffered) = self.pending_frames.pop_front() else {
        return false;
    };
    unsafe {
        // Move (not copy) the buffered frame's buffers into the caller's frame.
        av_frame_unref(frame.as_inner_mut().as_mut_ptr());
        av_frame_move_ref(frame.as_inner_mut().as_mut_ptr(), buffered.as_mut_ptr());
    }
    self.probe = None;
    true
}
/// Resets decode state (e.g. for a seek): flushes the codec, drops the
/// scratch HW frame and any replay-pending frames, and empties the probe's
/// replay buffer. The probe itself — including past attempts and remaining
/// backends — is kept.
pub fn flush(&mut self) {
    self.state.inner.flush();
    unsafe { av_frame_unref(self.hw_frame.as_mut_ptr()) };
    self.pending_frames.clear();
    if let Some(probe) = self.probe.as_mut() {
        probe.buffered_packets.clear();
        probe.buffered_bytes = 0;
        probe.eof_sent = false;
    }
}
/// Records `last_error` against the active backend, then walks the remaining
/// backends: build a fresh decoder, replay all buffered packets (plus EOF if
/// it was sent), and queue any frames the candidate produces. On success the
/// candidate becomes the active state; if every candidate fails, returns
/// `AllBackendsFailed` carrying all attempts and the unconsumed packets.
fn advance_probe(&mut self, last_error: Error) -> Result<()> {
    let active_backend = self.state.backend;
    match self.probe.as_mut() {
        Some(probe) => probe.attempts.push((active_backend, Box::new(last_error))),
        // No probe state: nothing to rescue with, propagate as-is.
        None => return Err(last_error),
    }
    // Frames produced by a previous candidate are stale now.
    self.pending_frames.clear();
    loop {
        let (next_backend, parameters, codec) = match self.probe.as_ref() {
            Some(probe) if !probe.remaining_backends.is_empty() => {
                // Fresh parameter copy per candidate — build_state consumes it.
                let parameters = match try_clone_parameters(&probe.parameters) {
                    Ok(p) => p,
                    Err(e) => {
                        tracing::warn!(
                            error = %e,
                            "hwdecode: parameters clone failed during probe advance; popping backend and trying next"
                        );
                        let popped = self
                            .probe
                            .as_mut()
                            .expect("probe state present")
                            .remaining_backends
                            .remove(0);
                        self
                            .probe
                            .as_mut()
                            .expect("probe state present")
                            .attempts
                            .push((popped, Box::new(Error::Ffmpeg(e))));
                        continue;
                    }
                };
                (probe.remaining_backends[0], parameters, probe.codec)
            }
            _ => {
                // Out of candidates: hand back everything we know.
                let (attempts, unconsumed_packets) = self
                    .probe
                    .take()
                    .map(|p| (p.attempts, p.buffered_packets))
                    .unwrap_or_default();
                return Err(Error::AllBackendsFailed(AllBackendsFailed::new(
                    attempts,
                    unconsumed_packets,
                )));
            }
        };
        let prev_backend = self.state.backend;
        tracing::warn!(from = ?prev_backend, to = ?next_backend, "hwdecode: advancing probe");
        let mut candidate_state = match Self::build_state(parameters, codec, next_backend) {
            Ok(s) => s,
            Err(e) => {
                tracing::warn!(?next_backend, error = %e, "hwdecode: candidate build failed");
                self
                    .probe
                    .as_mut()
                    .expect("probe state present")
                    .remaining_backends
                    .remove(0);
                self
                    .probe
                    .as_mut()
                    .expect("probe state present")
                    .attempts
                    .push((next_backend, Box::new(e)));
                continue;
            }
        };
        // Frames decoded during replay accumulate here and only become
        // visible (self.pending_frames) if the whole replay succeeds.
        let mut local_pending: VecDeque<frame::Video> = VecDeque::new();
        let mut local_pending_bytes: usize = 0;
        let max_pending_bytes = self.max_probe_pending_bytes;
        let replay_result: std::result::Result<(), ffmpeg_next::Error> = {
            let probe = self.probe.as_ref().expect("probe state present");
            let mut hw_buf = match alloc_av_frame() {
                Ok(f) => f,
                Err(e) => return Err(Error::Ffmpeg(e)),
            };
            let mut r: std::result::Result<(), ffmpeg_next::Error> = Ok(());
            // Replay every buffered packet; on EAGAIN, drain output frames
            // into local_pending and retry the same packet.
            'replay: for pkt in &probe.buffered_packets {
                loop {
                    match candidate_state.inner.send_packet(pkt) {
                        Ok(()) => break,
                        Err(e) if is_eagain(&e) => {
                            if let Err(de) = drain_into_pending(
                                &mut candidate_state.inner,
                                &mut hw_buf,
                                &mut local_pending,
                                &mut local_pending_bytes,
                                max_pending_bytes,
                            ) {
                                r = Err(de);
                                break 'replay;
                            }
                        }
                        Err(e) => {
                            r = Err(e);
                            break 'replay;
                        }
                    }
                }
            }
            // Reproduce the EOF the caller already sent, if any.
            if r.is_ok() && probe.eof_sent {
                loop {
                    match candidate_state.inner.send_eof() {
                        Ok(()) => break,
                        Err(e) if is_eagain(&e) => {
                            if let Err(de) = drain_into_pending(
                                &mut candidate_state.inner,
                                &mut hw_buf,
                                &mut local_pending,
                                &mut local_pending_bytes,
                                max_pending_bytes,
                            ) {
                                r = Err(de);
                                break;
                            }
                        }
                        Err(e) => {
                            r = Err(e);
                            break;
                        }
                    }
                }
            }
            r
        };
        if let Err(e) = replay_result {
            tracing::warn!(?next_backend, error = %e, "hwdecode: candidate replay failed");
            // Discard the failed candidate and its partial output.
            drop(candidate_state);
            drop(local_pending);
            self
                .probe
                .as_mut()
                .expect("probe state present")
                .remaining_backends
                .remove(0);
            self
                .probe
                .as_mut()
                .expect("probe state present")
                .attempts
                .push((next_backend, Box::new(Error::Ffmpeg(e))));
            continue;
        }
        // Candidate survived: promote it and expose its replay output.
        self.state = candidate_state;
        unsafe { av_frame_unref(self.hw_frame.as_mut_ptr()) };
        self.pending_frames.append(&mut local_pending);
        self
            .probe
            .as_mut()
            .expect("probe state present")
            .remaining_backends
            .remove(0);
        return Ok(());
    }
}
/// Builds a `DecoderState` for `backend`: a codec context from `parameters`,
/// a fresh hw device context, and the get_format callback wiring.
///
/// `PartialBuildState` owns the raw allocations until the fully-opened
/// decoder takes them over, so every early return cleans up.
fn build_state(
    parameters: codec::Parameters,
    codec: Codec,
    backend: Backend,
) -> Result<DecoderState> {
    let mut ctx = build_codec_context(&parameters)?;
    let av_type = backend.av_hwdevice_type();
    let hw_pix_fmt = backend.hw_pixel_format();
    // Reject early if the codec cannot do this hwaccel at all.
    if !codec_supports_hwaccel(unsafe { codec.as_ptr() }, av_type, hw_pix_fmt as i32) {
        return Err(Error::BackendUnsupportedByCodec(backend));
    }
    let mut hw_device_ref: *mut AVBufferRef = ptr::null_mut();
    let ret = unsafe {
        av_hwdevice_ctx_create(&mut hw_device_ref, av_type, ptr::null(), ptr::null_mut(), 0)
    };
    if ret < 0 {
        return Err(Error::HwDeviceInitFailed(HwDeviceInitFailed::new(
            backend,
            ffmpeg_next::Error::from(ret),
        )));
    }
    // Leaked Box; reaches the get_format callback through ctx.opaque.
    let callback_state = Box::into_raw(Box::new(CallbackState {
        wanted: hw_pix_fmt,
        wanted_int: hw_pix_fmt as i32,
    }));
    // From here on, the guard frees both raw allocations on any early return.
    let guard = PartialBuildState {
        hw_device_ref,
        callback_state,
    };
    // The codec context gets its own device reference; avcodec_free_context
    // unrefs it, while `guard`/DecoderState own the original.
    let device_ref_for_ctx = unsafe { av_buffer_ref(hw_device_ref) };
    if device_ref_for_ctx.is_null() {
        return Err(Error::Ffmpeg(ffmpeg_next::Error::Other {
            errno: libc::ENOMEM,
        }));
    }
    unsafe {
        let raw = ctx.as_mut_ptr();
        (*raw).hw_device_ctx = device_ref_for_ctx;
        (*raw).opaque = callback_state.cast();
        (*raw).get_format = Some(get_hw_format);
    }
    let opened = ctx.decoder().open_as(codec).map_err(Error::Ffmpeg)?;
    // Read codec_type as a raw i32 to avoid depending on the enum's Rust
    // layout; compared against AVMEDIA_TYPE_VIDEO below.
    let codec_type_int: i32 =
        unsafe { ptr::read(ptr::addr_of!((*opened.as_ptr()).codec_type) as *const i32) };
    let video_type_int: i32 = AVMediaType::AVMEDIA_TYPE_VIDEO as i32;
    if codec_type_int != video_type_int {
        return Err(Error::Ffmpeg(ffmpeg_next::Error::InvalidData));
    }
    let opened = ffmpeg_next::decoder::Video(opened);
    // Success: disarm the guard and move ownership into DecoderState.
    let (hw_device_ref, callback_state) = guard.into_owned();
    Ok(DecoderState {
        inner: ManuallyDrop::new(opened),
        backend,
        hw_device_ref,
        callback_state,
    })
}
}
/// Drop guard for `build_state`: frees the hw device ref and callback state
/// if construction bails out before `DecoderState` takes ownership.
struct PartialBuildState {
    hw_device_ref: *mut AVBufferRef,
    callback_state: *mut CallbackState,
}
impl PartialBuildState {
    /// Disarms the guard and hands both raw pointers to the caller, which
    /// becomes responsible for freeing them.
    fn into_owned(mut self) -> (*mut AVBufferRef, *mut CallbackState) {
        let device = self.hw_device_ref;
        let callbacks = self.callback_state;
        // Null the fields so the Drop impl becomes a no-op.
        self.hw_device_ref = ptr::null_mut();
        self.callback_state = ptr::null_mut();
        (device, callbacks)
    }
}
impl Drop for PartialBuildState {
    fn drop(&mut self) {
        unsafe {
            if !self.hw_device_ref.is_null() {
                // Unref a local copy; the field itself dies with the guard.
                let mut hw = self.hw_device_ref;
                av_buffer_unref(&mut hw);
            }
            if !self.callback_state.is_null() {
                // Reclaim the Box leaked for the get_format callback.
                drop(Box::from_raw(self.callback_state));
            }
        }
    }
}
/// Moves a decoded HW frame into `dst` as a CPU frame.
///
/// Unrefs `dst`, runs `av_hwframe_transfer_data`, rejects unsupported CPU
/// pixel formats (reported as EINVAL so the probe treats it as a backend
/// failure), then mirrors the whitelisted frame properties / side data.
///
/// # Safety
/// `dst` and `src` must wrap valid AVFrames; `src` must hold a frame the
/// active hw context can transfer.
unsafe fn transfer_hw_frame(
    dst: &mut Frame,
    src: &mut frame::Video,
) -> std::result::Result<(), ffmpeg_next::Error> {
    unsafe {
        av_frame_unref(dst.as_inner_mut().as_mut_ptr());
        let ret = av_hwframe_transfer_data(dst.as_inner_mut().as_mut_ptr(), src.as_ptr(), 0);
        if ret < 0 {
            return Err(ffmpeg_next::Error::from(ret));
        }
        let dst_raw_fmt: i32 = (*dst.as_inner_mut().as_ptr()).format;
        let dst_pix_fmt = crate::boundary::from_av_pixel_format(dst_raw_fmt);
        if !crate::frame::is_supported_cpu_pix_fmt(dst_pix_fmt) {
            tracing::warn!(
                pix_fmt = dst_raw_fmt,
                "hwdecode: hw->cpu transfer produced unsupported pix_fmt; \
                 treating as backend failure"
            );
            av_frame_unref(dst.as_inner_mut().as_mut_ptr());
            return Err(ffmpeg_next::Error::Other {
                errno: libc::EINVAL,
            });
        }
        // Never hand the caller a half-populated frame on failure.
        if let Err(e) = copy_frame_props_minimal(dst.as_inner_mut().as_mut_ptr(), src.as_ptr()) {
            av_frame_unref(dst.as_inner_mut().as_mut_ptr());
            return Err(e);
        }
    }
    Ok(())
}
/// Sums the payload bytes of `frame`'s side data, capped at
/// `HW_COPY_SIDE_DATA_MAX_ENTRIES` entries and saturating at
/// `HW_COPY_SIDE_DATA_MAX_TOTAL_BYTES` — used only for budget accounting.
///
/// # Safety
/// `frame` must point to a valid AVFrame.
unsafe fn sum_side_data_bytes(frame: *const AVFrame) -> usize {
    let raw = unsafe { (*frame).nb_side_data };
    let arr = unsafe { (*frame).side_data };
    if raw <= 0 || arr.is_null() {
        return 0;
    }
    let count = (raw as usize).min(HW_COPY_SIDE_DATA_MAX_ENTRIES);
    let mut total: usize = 0;
    for i in 0..count {
        // AVFrame side_data is an array of pointers; skip null slots.
        let entry = unsafe { *arr.add(i) };
        if entry.is_null() {
            continue;
        }
        let sz = unsafe { (*entry).size };
        total = total.saturating_add(sz);
        if total >= HW_COPY_SIDE_DATA_MAX_TOTAL_BYTES {
            // Clamp: anything past the cap is dropped by the copier anyway.
            total = HW_COPY_SIDE_DATA_MAX_TOTAL_BYTES;
            break;
        }
    }
    total
}
// Caps applied when mirroring frame side data across the HW->CPU boundary.
const HW_COPY_SIDE_DATA_MAX_ENTRIES: usize = 64;
const HW_COPY_SIDE_DATA_MAX_TOTAL_BYTES: usize = 256 * 1024;
fn whitelisted_side_data_kind(kind_raw: i32) -> Option<ffmpeg_next::ffi::AVFrameSideDataType> {
use ffmpeg_next::ffi::AVFrameSideDataType;
let kind = match kind_raw {
x if x == AVFrameSideDataType::AV_FRAME_DATA_PANSCAN as i32 => {
AVFrameSideDataType::AV_FRAME_DATA_PANSCAN
}
x if x == AVFrameSideDataType::AV_FRAME_DATA_A53_CC as i32 => {
AVFrameSideDataType::AV_FRAME_DATA_A53_CC
}
x if x == AVFrameSideDataType::AV_FRAME_DATA_STEREO3D as i32 => {
AVFrameSideDataType::AV_FRAME_DATA_STEREO3D
}
x if x == AVFrameSideDataType::AV_FRAME_DATA_DISPLAYMATRIX as i32 => {
AVFrameSideDataType::AV_FRAME_DATA_DISPLAYMATRIX
}
x if x == AVFrameSideDataType::AV_FRAME_DATA_AFD as i32 => {
AVFrameSideDataType::AV_FRAME_DATA_AFD
}
x if x == AVFrameSideDataType::AV_FRAME_DATA_MASTERING_DISPLAY_METADATA as i32 => {
AVFrameSideDataType::AV_FRAME_DATA_MASTERING_DISPLAY_METADATA
}
x if x == AVFrameSideDataType::AV_FRAME_DATA_GOP_TIMECODE as i32 => {
AVFrameSideDataType::AV_FRAME_DATA_GOP_TIMECODE
}
x if x == AVFrameSideDataType::AV_FRAME_DATA_SPHERICAL as i32 => {
AVFrameSideDataType::AV_FRAME_DATA_SPHERICAL
}
x if x == AVFrameSideDataType::AV_FRAME_DATA_CONTENT_LIGHT_LEVEL as i32 => {
AVFrameSideDataType::AV_FRAME_DATA_CONTENT_LIGHT_LEVEL
}
x if x == AVFrameSideDataType::AV_FRAME_DATA_ICC_PROFILE as i32 => {
AVFrameSideDataType::AV_FRAME_DATA_ICC_PROFILE
}
x if x == AVFrameSideDataType::AV_FRAME_DATA_S12M_TIMECODE as i32 => {
AVFrameSideDataType::AV_FRAME_DATA_S12M_TIMECODE
}
x if x == AVFrameSideDataType::AV_FRAME_DATA_DYNAMIC_HDR_PLUS as i32 => {
AVFrameSideDataType::AV_FRAME_DATA_DYNAMIC_HDR_PLUS
}
x if x == AVFrameSideDataType::AV_FRAME_DATA_REGIONS_OF_INTEREST as i32 => {
AVFrameSideDataType::AV_FRAME_DATA_REGIONS_OF_INTEREST
}
x if x == AVFrameSideDataType::AV_FRAME_DATA_SEI_UNREGISTERED as i32 => {
AVFrameSideDataType::AV_FRAME_DATA_SEI_UNREGISTERED
}
x if x == AVFrameSideDataType::AV_FRAME_DATA_FILM_GRAIN_PARAMS as i32 => {
AVFrameSideDataType::AV_FRAME_DATA_FILM_GRAIN_PARAMS
}
x if x == AVFrameSideDataType::AV_FRAME_DATA_DOVI_RPU_BUFFER as i32 => {
AVFrameSideDataType::AV_FRAME_DATA_DOVI_RPU_BUFFER
}
x if x == AVFrameSideDataType::AV_FRAME_DATA_DOVI_METADATA as i32 => {
AVFrameSideDataType::AV_FRAME_DATA_DOVI_METADATA
}
x if x == AVFrameSideDataType::AV_FRAME_DATA_DYNAMIC_HDR_VIVID as i32 => {
AVFrameSideDataType::AV_FRAME_DATA_DYNAMIC_HDR_VIVID
}
x if x == AVFrameSideDataType::AV_FRAME_DATA_AMBIENT_VIEWING_ENVIRONMENT as i32 => {
AVFrameSideDataType::AV_FRAME_DATA_AMBIENT_VIEWING_ENVIRONMENT
}
_ => return None,
};
Some(kind)
}
/// Copies a minimal, whitelisted set of frame properties and side data from
/// `src` to `dst` after a HW->CPU transfer (timestamps, crop, color info,
/// plus capped side data). Enum-typed fields are moved as raw i32 via
/// unaligned reads/writes to avoid depending on the enums' Rust layout.
///
/// # Safety
/// `dst` and `src` must point to valid AVFrames; `dst` must own its buffers
/// (side data is appended with `av_frame_new_side_data`).
unsafe fn copy_frame_props_minimal(
    dst: *mut AVFrame,
    src: *const AVFrame,
) -> std::result::Result<(), ffmpeg_next::Error> {
    use core::ptr::{addr_of, addr_of_mut, read_unaligned, write_unaligned};
    use ffmpeg_next::ffi::av_frame_new_side_data;
    unsafe {
        // Timing / geometry fields are plain data; copy directly.
        (*dst).pts = (*src).pts;
        (*dst).pkt_dts = (*src).pkt_dts;
        (*dst).duration = (*src).duration;
        (*dst).best_effort_timestamp = (*src).best_effort_timestamp;
        (*dst).quality = (*src).quality;
        (*dst).repeat_pict = (*src).repeat_pict;
        (*dst).flags = (*src).flags;
        (*dst).sample_aspect_ratio = (*src).sample_aspect_ratio;
        (*dst).crop_left = (*src).crop_left;
        (*dst).crop_top = (*src).crop_top;
        (*dst).crop_right = (*src).crop_right;
        (*dst).crop_bottom = (*src).crop_bottom;
        (*dst).time_base = (*src).time_base;
        // Enum-typed fields: move the raw discriminant bytes.
        let pict_type_raw = read_unaligned(addr_of!((*src).pict_type) as *const i32);
        write_unaligned(addr_of_mut!((*dst).pict_type) as *mut i32, pict_type_raw);
        let cp_raw = read_unaligned(addr_of!((*src).color_primaries) as *const i32);
        write_unaligned(addr_of_mut!((*dst).color_primaries) as *mut i32, cp_raw);
        let trc_raw = read_unaligned(addr_of!((*src).color_trc) as *const i32);
        write_unaligned(addr_of_mut!((*dst).color_trc) as *mut i32, trc_raw);
        let cs_raw = read_unaligned(addr_of!((*src).colorspace) as *const i32);
        write_unaligned(addr_of_mut!((*dst).colorspace) as *mut i32, cs_raw);
        let cr_raw = read_unaligned(addr_of!((*src).color_range) as *const i32);
        write_unaligned(addr_of_mut!((*dst).color_range) as *mut i32, cr_raw);
        let cl_raw = read_unaligned(addr_of!((*src).chroma_location) as *const i32);
        write_unaligned(addr_of_mut!((*dst).chroma_location) as *mut i32, cl_raw);
        // Side data: whitelist-filtered, entry- and byte-capped copy.
        let nb_side_data_raw = (*src).nb_side_data;
        let src_arr = (*src).side_data;
        if nb_side_data_raw > 0 && !src_arr.is_null() {
            let count_raw = nb_side_data_raw as usize;
            let count = count_raw.min(HW_COPY_SIDE_DATA_MAX_ENTRIES);
            if count_raw > HW_COPY_SIDE_DATA_MAX_ENTRIES {
                tracing::warn!(
                    cap = HW_COPY_SIDE_DATA_MAX_ENTRIES,
                    requested = count_raw,
                    "mediadecode-ffmpeg: HW->CPU transfer side-data entry cap reached; truncating",
                );
            }
            let mut total_bytes: usize = 0;
            for i in 0..count {
                let entry = *src_arr.add(i);
                if entry.is_null() {
                    continue;
                }
                let kind_raw = read_unaligned(addr_of!((*entry).type_) as *const i32);
                let size = (*entry).size;
                let data_ptr = (*entry).data;
                if size == 0 || data_ptr.is_null() {
                    continue;
                }
                let Some(kind_enum) = whitelisted_side_data_kind(kind_raw) else {
                    tracing::debug!(
                        kind_raw,
                        "mediadecode-ffmpeg: unknown AV_FRAME_DATA type during HW->CPU transfer; dropping",
                    );
                    continue;
                };
                let projected = total_bytes.saturating_add(size);
                if projected > HW_COPY_SIDE_DATA_MAX_TOTAL_BYTES {
                    tracing::warn!(
                        cap = HW_COPY_SIDE_DATA_MAX_TOTAL_BYTES,
                        projected,
                        "mediadecode-ffmpeg: HW->CPU transfer side-data byte cap reached; dropping rest",
                    );
                    break;
                }
                // Allocation failure is logged and tolerated: side data is
                // best-effort, the frame itself is still delivered.
                let new_entry = av_frame_new_side_data(dst, kind_enum, size);
                if new_entry.is_null() {
                    tracing::warn!("mediadecode-ffmpeg: av_frame_new_side_data OOM during HW->CPU transfer",);
                    break;
                }
                core::ptr::copy_nonoverlapping(data_ptr, (*new_entry).data, size);
                total_bytes = projected;
            }
        }
    }
    Ok(())
}
fn is_transient(e: &ffmpeg_next::Error) -> bool {
is_eagain(e) || matches!(e, ffmpeg_next::Error::Eof)
}
fn ensure_parameters_non_null(parameters: &codec::Parameters) -> Result<()> {
if unsafe { parameters.as_ptr() }.is_null() {
return Err(Error::Ffmpeg(ffmpeg_next::Error::Other {
errno: libc::ENOMEM,
}));
}
Ok(())
}
fn alloc_av_frame() -> std::result::Result<frame::Video, ffmpeg_next::Error> {
let inner = frame::Video::empty();
if unsafe { inner.as_ptr() }.is_null() {
return Err(ffmpeg_next::Error::Other {
errno: libc::ENOMEM,
});
}
Ok(inner)
}
/// Allocates a fresh AVCodecContext and fills it from `parameters`.
/// The context is freed on the error path; on success the returned `Context`
/// owns it.
pub(crate) fn build_codec_context(parameters: &codec::Parameters) -> Result<Context> {
    ensure_parameters_non_null(parameters)?;
    // No codec yet: the caller opens the context with a specific codec later.
    let ctx_ptr = unsafe { avcodec_alloc_context3(ptr::null()) };
    if ctx_ptr.is_null() {
        return Err(Error::Ffmpeg(ffmpeg_next::Error::Other {
            errno: libc::ENOMEM,
        }));
    }
    let ret = unsafe { avcodec_parameters_to_context(ctx_ptr, parameters.as_ptr()) };
    if ret < 0 {
        // avcodec_free_context needs a *mut *mut, hence the local copy.
        let mut p = ctx_ptr;
        unsafe { avcodec_free_context(&mut p) };
        return Err(Error::Ffmpeg(ffmpeg_next::Error::from(ret)));
    }
    Ok(unsafe { Context::wrap(ctx_ptr, None) })
}
/// Deep-copies codec parameters via `avcodec_parameters_copy`, with explicit
/// null/allocation-failure handling (mapped to ENOMEM).
pub(crate) fn try_clone_parameters(
    src: &codec::Parameters,
) -> std::result::Result<codec::Parameters, ffmpeg_next::Error> {
    if unsafe { src.as_ptr() }.is_null() {
        return Err(ffmpeg_next::Error::Other {
            errno: libc::ENOMEM,
        });
    }
    let dst_ptr = unsafe { avcodec_parameters_alloc() };
    if dst_ptr.is_null() {
        return Err(ffmpeg_next::Error::Other {
            errno: libc::ENOMEM,
        });
    }
    let ret = unsafe { avcodec_parameters_copy(dst_ptr, src.as_ptr()) };
    if ret < 0 {
        // Free the half-initialized copy before propagating the error.
        let mut p = dst_ptr;
        unsafe { avcodec_parameters_free(&mut p) };
        return Err(ffmpeg_next::Error::from(ret));
    }
    Ok(unsafe { codec::Parameters::wrap(dst_ptr, None) })
}
fn try_clone_packet(src: &Packet) -> std::result::Result<Packet, ffmpeg_next::Error> {
let mut dst = Packet::empty();
let ret = unsafe { av_packet_ref(dst.as_mut_ptr(), src.as_ptr()) };
if ret < 0 {
return Err(ffmpeg_next::Error::from(ret));
}
Ok(dst)
}
/// Byte charge for a packet's side data: a flat `SIDE_DATA_ENTRY_OVERHEAD`
/// per entry plus each entry's payload size, walking at most `max_entries`
/// entries. Returns 0 when there is no side data or `max_entries == 0`.
fn packet_side_data_bytes(packet: &Packet, max_entries: usize) -> usize {
    unsafe {
        let raw = packet.as_ptr();
        let nel = (*raw).side_data_elems;
        let arr = (*raw).side_data;
        if arr.is_null() || nel <= 0 || max_entries == 0 {
            return 0;
        }
        let count = (nel as usize).min(max_entries);
        // Flat overhead per entry so zero-size entries still cost something.
        let mut total = count.saturating_mul(SIDE_DATA_ENTRY_OVERHEAD);
        for i in 0..count {
            // AVPacket side_data is an array of structs (not pointers).
            let entry = arr.add(i);
            total = total.saturating_add((*entry).size);
        }
        total
    }
}
/// Number of side-data entries attached to `packet`; a negative FFmpeg count
/// is reported as 0.
fn packet_side_data_count(packet: &Packet) -> usize {
    let elems = unsafe { (*packet.as_ptr()).side_data_elems };
    usize::try_from(elems).unwrap_or(0)
}
fn is_eagain(e: &ffmpeg_next::Error) -> bool {
matches!(e, ffmpeg_next::Error::Other { errno } if *errno == ffmpeg_next::error::EAGAIN)
}
/// Looks up the decoder for `parameters.codec_id` via the raw C entry point.
///
/// The codec id is read as a raw u32 to avoid depending on the enum's Rust
/// layout. NOTE(review): assumes AVCodecID's C representation fits/starts in
/// 32 bits on this target — confirm against the bindings in use.
fn find_decoder(parameters: &codec::Parameters) -> Result<Codec> {
    ensure_parameters_non_null(parameters)?;
    let raw_id: u32 =
        unsafe { ptr::read(ptr::addr_of!((*parameters.as_ptr()).codec_id) as *const u32) };
    let codec_ptr = unsafe { c_shims::avcodec_find_decoder(raw_id as libc::c_int) };
    if codec_ptr.is_null() {
        return Err(Error::NoCodec(raw_id));
    }
    Ok(unsafe { Codec::wrap(codec_ptr) })
}
/// Drains all currently-available frames out of a candidate decoder during a
/// probe replay, transferring each to a CPU frame and queueing it.
///
/// Enforces the replay caps twice per frame: a pessimistic pre-transfer
/// estimate (from the HW frames context) before any CPU allocation, then the
/// exact post-transfer size. Any cap breach, transfer failure, or unsupported
/// output format fails the candidate. Returns Ok(()) when the decoder reports
/// EAGAIN/EOF (nothing more to drain right now).
fn drain_into_pending(
    decoder: &mut ffmpeg_next::decoder::Video,
    hw_buf: &mut frame::Video,
    pending: &mut VecDeque<frame::Video>,
    pending_bytes: &mut usize,
    max_bytes: usize,
) -> std::result::Result<(), ffmpeg_next::Error> {
    loop {
        match decoder.receive_frame(hw_buf) {
            Ok(()) => {
                // Hard caps first: frame count and already-accumulated bytes.
                if pending.len() >= MAX_PROBE_PENDING_FRAMES || *pending_bytes >= max_bytes {
                    tracing::warn!(
                        frames = pending.len(),
                        bytes = *pending_bytes,
                        max_frames = MAX_PROBE_PENDING_FRAMES,
                        max_bytes = max_bytes,
                        "hwdecode: probe pending cap reached; failing candidate replay"
                    );
                    unsafe { av_frame_unref(hw_buf.as_mut_ptr()) };
                    return Err(ffmpeg_next::Error::Other {
                        errno: libc::ENOMEM,
                    });
                }
                // Pre-transfer estimate so we never allocate a CPU frame that
                // would blow the budget.
                let estimated_bytes = match estimate_transfer_bytes(hw_buf) {
                    Some(b) => b,
                    None => {
                        let (w, h) = unsafe {
                            let raw = hw_buf.as_ptr();
                            ((*raw).width, (*raw).height)
                        };
                        tracing::warn!(
                            width = w,
                            height = h,
                            "hwdecode: HW frame dimensions invalid for sizing; failing candidate replay"
                        );
                        unsafe { av_frame_unref(hw_buf.as_mut_ptr()) };
                        return Err(ffmpeg_next::Error::Other {
                            errno: libc::ENOMEM,
                        });
                    }
                };
                let estimated_total = pending_bytes.saturating_add(estimated_bytes);
                if estimated_total > max_bytes {
                    let (w, h) = unsafe {
                        let raw = hw_buf.as_ptr();
                        ((*raw).width, (*raw).height)
                    };
                    tracing::warn!(
                        pending_bytes = *pending_bytes,
                        estimated_bytes,
                        width = w,
                        height = h,
                        max_bytes = max_bytes,
                        "hwdecode: pre-transfer size estimate exceeds cap; \
                         refusing candidate replay before allocating CPU frame"
                    );
                    unsafe { av_frame_unref(hw_buf.as_mut_ptr()) };
                    return Err(ffmpeg_next::Error::Other {
                        errno: libc::ENOMEM,
                    });
                }
                let mut cpu = alloc_av_frame()?;
                unsafe {
                    let r1 = av_hwframe_transfer_data(cpu.as_mut_ptr(), hw_buf.as_ptr(), 0);
                    if r1 < 0 {
                        return Err(ffmpeg_next::Error::from(r1));
                    }
                }
                // Same format gate as the normal receive path.
                let cpu_raw_fmt: i32 = unsafe { (*cpu.as_ptr()).format };
                let cpu_pix_fmt = crate::boundary::from_av_pixel_format(cpu_raw_fmt);
                if !crate::frame::is_supported_cpu_pix_fmt(cpu_pix_fmt) {
                    tracing::warn!(
                        pix_fmt = cpu_raw_fmt,
                        "hwdecode: candidate produced unsupported CPU pix_fmt during \
                         probe replay; failing candidate"
                    );
                    return Err(ffmpeg_next::Error::Other {
                        errno: libc::EINVAL,
                    });
                }
                // Exact post-transfer accounting: pixel buffers + side data.
                let pixel_bytes = match cpu_frame_bytes(&cpu) {
                    Some(b) => b,
                    None => {
                        let pix_fmt: i32 = unsafe { (*cpu.as_ptr()).format };
                        tracing::warn!(
                            pix_fmt,
                            "hwdecode: cannot size unknown CPU pix_fmt during replay; failing candidate"
                        );
                        return Err(ffmpeg_next::Error::Other {
                            errno: libc::ENOMEM,
                        });
                    }
                };
                let side_data_bytes = unsafe { sum_side_data_bytes(hw_buf.as_ptr()) };
                let new_total = pending_bytes
                    .saturating_add(pixel_bytes)
                    .saturating_add(side_data_bytes);
                if new_total > max_bytes {
                    tracing::warn!(
                        pending_bytes = *pending_bytes,
                        pixel_bytes,
                        side_data_bytes,
                        max_bytes,
                        "hwdecode: queueing this frame would exceed byte cap; \
                         failing candidate replay"
                    );
                    return Err(ffmpeg_next::Error::Other {
                        errno: libc::ENOMEM,
                    });
                }
                unsafe { copy_frame_props_minimal(cpu.as_mut_ptr(), hw_buf.as_ptr()) }?;
                *pending_bytes = new_total;
                pending.push_back(cpu);
            }
            // EAGAIN/EOF: drained everything currently available.
            Err(e) if is_transient(&e) => return Ok(()),
            Err(e) => return Err(e),
        }
    }
}
/// Reads (width, height) from the frame's AVHWFramesContext, if attached.
/// Returns None when the frame has no hw frames context or the dimensions
/// are non-positive.
fn hw_frames_ctx_dimensions(frame: &frame::Video) -> Option<(i32, i32)> {
    unsafe {
        let raw = frame.as_ptr();
        let hw_ctx_ref = (*raw).hw_frames_ctx;
        if hw_ctx_ref.is_null() {
            return None;
        }
        // AVBufferRef.data points at the AVHWFramesContext payload.
        let data = (*hw_ctx_ref).data;
        if data.is_null() {
            return None;
        }
        let frames_ctx = data as *const AVHWFramesContext;
        let w: i32 = ptr::read(ptr::addr_of!((*frames_ctx).width));
        let h: i32 = ptr::read(ptr::addr_of!((*frames_ctx).height));
        if w <= 0 || h <= 0 {
            return None;
        }
        Some((w, h))
    }
}
/// Pessimistic byte estimate for transferring `hw_buf` to a CPU frame,
/// derived from the HW frames-context dimensions at WORST_CASE_BYTES_PER_PIXEL.
/// None when no valid frames context is attached.
fn estimate_transfer_bytes(hw_buf: &frame::Video) -> Option<usize> {
    hw_frames_ctx_dimensions(hw_buf).map(|(width, height)| {
        (width as usize)
            .saturating_mul(height as usize)
            .saturating_mul(WORST_CASE_BYTES_PER_PIXEL)
    })
}
/// Actual bytes held by a CPU frame, summed over its ref-counted buffers.
/// Returns None when the first plane's linesize is negative (bottom-up
/// layouts are rejected for sizing). NOTE(review): only linesize[0] is
/// checked — later planes with negative strides would still be summed.
fn cpu_frame_bytes(frame: &frame::Video) -> Option<usize> {
    unsafe {
        let raw = frame.as_ptr();
        let first_linesize = (*raw).linesize[0];
        if first_linesize < 0 {
            return None;
        }
        let mut total: usize = 0;
        for i in 0..(*raw).buf.len() {
            let buf = (*raw).buf[i];
            if buf.is_null() {
                continue;
            }
            // AVBufferRef.size is the allocation size of that buffer.
            total = total.saturating_add((*buf).size);
        }
        Some(total)
    }
}
// Compile-time proof that VideoDecoder may cross thread boundaries.
#[allow(dead_code)]
fn _assert_send() {
    fn require_send<T: Send>() {}
    require_send::<VideoDecoder>();
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn no_codec_for_unknown_id() {
    // Display text of NoCodec must mention "no decoder" for log readability.
    let err = Error::NoCodec(0);
    assert!(format!("{err}").contains("no decoder"));
}
#[test]
fn videodecoder_is_send() {
    // Exercises the compile-time Send check.
    _assert_send();
}
#[test]
fn is_transient_recognises_eagain_and_eof() {
    // EAGAIN and EOF are "try again"; everything else is a hard failure.
    let eagain = ffmpeg_next::Error::Other {
        errno: ffmpeg_next::error::EAGAIN,
    };
    assert!(is_transient(&eagain));
    assert!(is_transient(&ffmpeg_next::Error::Eof));
    let other = ffmpeg_next::Error::InvalidData;
    assert!(!is_transient(&other));
}
#[test]
fn open_rejects_null_parameters() {
    // A null Parameters wrapper must be rejected up front with ENOMEM,
    // never dereferenced.
    let null_params = unsafe { codec::Parameters::wrap(std::ptr::null_mut(), None) };
    match VideoDecoder::open(null_params) {
        Ok(_) => panic!("open should fail on null parameters"),
        Err(Error::Ffmpeg(ffmpeg_next::Error::Other { errno })) => {
            assert_eq!(errno, libc::ENOMEM, "expected ENOMEM, got {errno}");
        }
        Err(other) => panic!("expected Ffmpeg(Other {{ ENOMEM }}), got {other:?}"),
    }
}
#[test]
fn open_with_rejects_null_parameters() {
    // Same guard on the single-backend entry point.
    let null_params = unsafe { codec::Parameters::wrap(std::ptr::null_mut(), None) };
    match VideoDecoder::open_with(null_params, Backend::VideoToolbox) {
        Ok(_) => panic!("open_with should fail on null parameters"),
        Err(Error::Ffmpeg(ffmpeg_next::Error::Other { errno })) => {
            assert_eq!(errno, libc::ENOMEM, "expected ENOMEM, got {errno}");
        }
        Err(other) => panic!("expected Ffmpeg(Other {{ ENOMEM }}), got {other:?}"),
    }
}
#[test]
fn packet_side_data_counts_against_probe_budget() {
    use ffmpeg_next::ffi::{AVPacketSideDataType, av_packet_new_side_data};
    const PAYLOAD_SIZE: usize = 16;
    const SIDE_DATA_SIZE: usize = 1024 * 1024;
    let mut packet = Packet::new(PAYLOAD_SIZE);
    // Attach a large side-data buffer that dwarfs the payload.
    let p = unsafe {
        av_packet_new_side_data(
            packet.as_mut_ptr(),
            AVPacketSideDataType::AV_PKT_DATA_NEW_EXTRADATA,
            SIDE_DATA_SIZE,
        )
    };
    assert!(!p.is_null(), "av_packet_new_side_data returned NULL");
    assert_eq!(packet.size(), PAYLOAD_SIZE);
    let side = packet_side_data_bytes(&packet, MAX_PROBE_PACKET_SIDE_DATA_ENTRIES);
    assert!(
        side >= SIDE_DATA_SIZE,
        "side-data accounting must include the attached buffer; got {side}"
    );
    // The probe budget must charge payload and side data together.
    let total = packet.size().saturating_add(side);
    assert!(
        total >= PAYLOAD_SIZE + SIDE_DATA_SIZE,
        "probe budget must charge payload + side data; got {total}"
    );
}
#[test]
fn packet_side_data_is_zero_when_no_side_data() {
    // No side data attached => zero charge and zero count.
    let packet = Packet::new(64);
    assert_eq!(
        packet_side_data_bytes(&packet, MAX_PROBE_PACKET_SIDE_DATA_ENTRIES),
        0
    );
    assert_eq!(packet_side_data_count(&packet), 0);
}
#[test]
fn packet_side_data_bytes_charges_descriptor_overhead_for_zero_size_entries() {
    use ffmpeg_next::ffi::{AVPacketSideDataType, av_packet_new_side_data};
    let mut packet = Packet::new(0);
    // Two zero-length entries must still cost SIDE_DATA_ENTRY_OVERHEAD each,
    // so an attacker can't attach unbounded free entries.
    let p1 = unsafe {
        av_packet_new_side_data(
            packet.as_mut_ptr(),
            AVPacketSideDataType::AV_PKT_DATA_NEW_EXTRADATA,
            0,
        )
    };
    let p2 = unsafe {
        av_packet_new_side_data(
            packet.as_mut_ptr(),
            AVPacketSideDataType::AV_PKT_DATA_PALETTE,
            0,
        )
    };
    assert!(
        !p1.is_null() && !p2.is_null(),
        "av_packet_new_side_data NULL"
    );
    assert_eq!(packet_side_data_count(&packet), 2);
    let bytes = packet_side_data_bytes(&packet, MAX_PROBE_PACKET_SIDE_DATA_ENTRIES);
    assert!(
        bytes >= 2 * SIDE_DATA_ENTRY_OVERHEAD,
        "must charge descriptor overhead per entry even at zero payload; got {bytes}"
    );
}
#[test]
fn packet_side_data_bytes_respects_max_entries_cap() {
    use ffmpeg_next::ffi::{AVPacketSideDataType, av_packet_new_side_data};
    let mut packet = Packet::new(0);
    // Five distinct entry types with known payload sizes.
    let types_and_sizes: [(AVPacketSideDataType, usize); 5] = [
        (AVPacketSideDataType::AV_PKT_DATA_NEW_EXTRADATA, 100),
        (AVPacketSideDataType::AV_PKT_DATA_PALETTE, 200),
        (AVPacketSideDataType::AV_PKT_DATA_REPLAYGAIN, 300),
        (AVPacketSideDataType::AV_PKT_DATA_DISPLAYMATRIX, 400),
        (AVPacketSideDataType::AV_PKT_DATA_STEREO3D, 500),
    ];
    for (ty, size) in types_and_sizes {
        let p = unsafe { av_packet_new_side_data(packet.as_mut_ptr(), ty, size) };
        assert!(!p.is_null(), "av_packet_new_side_data returned NULL");
    }
    assert_eq!(packet_side_data_count(&packet), 5);
    let walked_2 = packet_side_data_bytes(&packet, 2);
    let walked_5 = packet_side_data_bytes(&packet, 5);
    assert_eq!(
        walked_2,
        2 * SIDE_DATA_ENTRY_OVERHEAD + 100 + 200,
        "max_entries=2 must walk exactly the first two entries"
    );
    assert_eq!(
        walked_5,
        5 * SIDE_DATA_ENTRY_OVERHEAD + 100 + 200 + 300 + 400 + 500,
        "max_entries=5 must walk all five entries"
    );
    // Zero cap short-circuits; an oversized cap clamps to the real count.
    assert_eq!(packet_side_data_bytes(&packet, 0), 0);
    let walked_huge = packet_side_data_bytes(&packet, 1_000_000);
    assert_eq!(walked_huge, walked_5);
}
#[test]
fn packet_side_data_count_reports_attached_entries() {
    use ffmpeg_next::ffi::{AVPacketSideDataType, av_packet_new_side_data};
    let mut packet = Packet::new(0);
    // Assert non-NULL on each attach (like the sibling tests do) so an
    // allocation failure surfaces directly instead of as a misleading
    // count mismatch below.
    let p1 = unsafe {
        av_packet_new_side_data(
            packet.as_mut_ptr(),
            AVPacketSideDataType::AV_PKT_DATA_NEW_EXTRADATA,
            4,
        )
    };
    assert!(!p1.is_null(), "av_packet_new_side_data returned NULL");
    let p2 = unsafe {
        av_packet_new_side_data(
            packet.as_mut_ptr(),
            AVPacketSideDataType::AV_PKT_DATA_PALETTE,
            4,
        )
    };
    assert!(!p2.is_null(), "av_packet_new_side_data returned NULL");
    assert_eq!(packet_side_data_count(&packet), 2);
}
#[test]
fn cpu_frame_bytes_rejects_negative_first_plane_linesize() {
    // FFmpeg allows negative linesizes (bottom-up image layouts); sizing
    // such a frame from linesize arithmetic would be meaningless, so the
    // helper must decline rather than report a bogus (e.g. zero) size.
    let mut frame = frame::Video::empty();
    unsafe {
        let raw = frame.as_mut_ptr();
        (*raw).format = ffmpeg_next::ffi::AVPixelFormat::AV_PIX_FMT_NV12 as i32;
        (*raw).width = 1920;
        (*raw).height = 1080;
        (*raw).linesize[0] = -1920;
        (*raw).linesize[1] = -1920;
    }
    assert!(
        cpu_frame_bytes(&frame).is_none(),
        "negative linesize must be unsizeable, not Some(0)"
    );
}
// Test shim: builds a ref-counted buffer whose payload mimics an
// AVHWFramesContext with the given allocation dimensions. It is NOT a real
// frames context (no device binding, no internal free callback) — it exists
// only so estimate_transfer_bytes() has width/height fields to read.
//
// NOTE(review): callers attach the returned ref to a frame's hw_frames_ctx;
// presumably the frame then owns the reference and unrefs it on drop — TODO
// confirm; if so, callers must not unref it themselves after attaching.
fn make_hw_frames_ctx_ref(w: i32, h: i32) -> *mut ffmpeg_next::ffi::AVBufferRef {
    use ffmpeg_next::ffi::av_buffer_alloc;
    use std::mem::size_of;
    unsafe {
        let buf = av_buffer_alloc(size_of::<AVHWFramesContext>());
        assert!(!buf.is_null(), "av_buffer_alloc returned NULL");
        let data = (*buf).data as *mut AVHWFramesContext;
        // Zero the whole struct first so every field not set below reads 0.
        std::ptr::write_bytes(data, 0, 1);
        (*data).width = w;
        (*data).height = h;
        buf
    }
}
#[test]
fn cpu_frame_bytes_sums_buf_sizes() {
    use ffmpeg_next::ffi::av_buffer_alloc;
    // Two reference-counted data buffers attached to one frame.
    let first = unsafe { av_buffer_alloc(4096) };
    let second = unsafe { av_buffer_alloc(2048) };
    assert!(!first.is_null() && !second.is_null());
    let mut frame = frame::Video::empty();
    unsafe {
        let raw = frame.as_mut_ptr();
        (*raw).buf[0] = first;
        (*raw).buf[1] = second;
        (*raw).linesize[0] = 256;
    }
    // The frame's CPU footprint is the sum of its attached buffer sizes.
    assert_eq!(cpu_frame_bytes(&frame), Some(4096 + 2048));
}
#[test]
fn cpu_frame_bytes_zero_for_empty_frame() {
    // A default-constructed frame owns no data buffers, so it sizes to zero
    // (Some(0)), not to "unsizeable" (None).
    let empty = frame::Video::empty();
    assert_eq!(cpu_frame_bytes(&empty), Some(0));
}
#[test]
fn cpu_frame_bytes_uses_buf_size_independent_of_display_height() {
    use ffmpeg_next::ffi::av_buffer_alloc;
    // A 256-byte buffer backs a nominally 1x1 frame; sizing must report the
    // buffer's true size, not anything derived from width/height/linesize.
    let backing = unsafe { av_buffer_alloc(256) };
    assert!(!backing.is_null());
    let mut frame = frame::Video::empty();
    unsafe {
        let raw = frame.as_mut_ptr();
        (*raw).format = ffmpeg_next::ffi::AVPixelFormat::AV_PIX_FMT_NV12 as i32;
        (*raw).width = 1;
        (*raw).height = 1;
        (*raw).linesize[0] = 32;
        (*raw).buf[0] = backing;
    }
    assert_eq!(
        cpu_frame_bytes(&frame),
        Some(256),
        "cropped/aligned frames must be sized by buf[i].size, not display dims"
    );
}
#[test]
fn estimate_transfer_bytes_reads_alloc_dims_from_hw_frames_ctx() {
    // Allocation dims (8192x8192) deliberately dwarf the display dims
    // (100x100); the estimate must follow the allocation, not the crop.
    let ctx = make_hw_frames_ctx_ref(8192, 8192);
    let mut frame = frame::Video::empty();
    unsafe {
        let raw = frame.as_mut_ptr();
        (*raw).width = 100;
        (*raw).height = 100;
        (*raw).hw_frames_ctx = ctx;
    }
    let expected = 8192usize * 8192 * WORST_CASE_BYTES_PER_PIXEL;
    assert_eq!(estimate_transfer_bytes(&frame), Some(expected));
}
#[test]
fn estimate_transfer_bytes_returns_none_without_hw_frames_ctx() {
    // With no hardware frames context attached there is nowhere to read
    // allocation dimensions from, so no estimate can be produced.
    let mut frame = frame::Video::empty();
    unsafe {
        let raw = frame.as_mut_ptr();
        (*raw).width = 1920;
        (*raw).height = 1080;
    }
    assert!(estimate_transfer_bytes(&frame).is_none());
}
#[test]
fn estimate_transfer_bytes_rejects_non_positive_alloc_dimensions() {
    // A frames context reporting a zero-width allocation cannot yield a
    // sane estimate, so the helper must decline.
    let ctx = make_hw_frames_ctx_ref(0, 1080);
    let mut frame = frame::Video::empty();
    unsafe {
        (*frame.as_mut_ptr()).hw_frames_ctx = ctx;
    }
    assert!(estimate_transfer_bytes(&frame).is_none());
}
#[test]
fn estimate_transfer_bytes_8k_fits_default_cap() {
    // 8K UHD allocation dimensions (7680x4320).
    let ctx = make_hw_frames_ctx_ref(7680, 4320);
    let mut frame = frame::Video::empty();
    unsafe {
        (*frame.as_mut_ptr()).hw_frames_ctx = ctx;
    }
    let estimate = estimate_transfer_bytes(&frame).expect("8K is sizable");
    // The default cap must admit at least one worst-case 8K frame, or probe
    // would be unusable on 8K content out of the box...
    assert!(
        estimate <= DEFAULT_MAX_PROBE_PENDING_BYTES,
        "8K estimate {estimate} must fit DEFAULT_MAX_PROBE_PENDING_BYTES \
         {DEFAULT_MAX_PROBE_PENDING_BYTES}; otherwise the default cap rejects \
         even a single 8K frame at probe time"
    );
    // ...yet the estimate still has to stay pessimistic enough to bound
    // real-world 8K transfers.
    assert!(
        estimate > 96 * 1024 * 1024,
        "estimate must over-charge real 8K P010 to bound the worst case; got {estimate}"
    );
}
#[test]
fn partial_build_state_drop_is_no_op_on_null_pointers() {
    // Dropping a guard that holds only null pointers must not crash or free
    // anything; this exercises the Drop impl's null checks.
    let guard = PartialBuildState {
        hw_device_ref: ptr::null_mut(),
        callback_state: ptr::null_mut(),
    };
    drop(guard);
}
#[test]
fn partial_build_state_into_owned_disarms_and_returns_originals() {
    use ffmpeg_next::ffi::{AVPixelFormat, av_buffer_alloc, av_buffer_unref};
    // Arm a guard with a real buffer ref and a heap-allocated callback state.
    let hw_ptr = unsafe { av_buffer_alloc(64) };
    assert!(!hw_ptr.is_null(), "av_buffer_alloc(64) returned NULL");
    let cb = CallbackState {
        wanted: AVPixelFormat::AV_PIX_FMT_NONE,
        wanted_int: AVPixelFormat::AV_PIX_FMT_NONE as i32,
    };
    let cb_ptr = Box::into_raw(Box::new(cb));
    let guard = PartialBuildState {
        hw_device_ref: hw_ptr,
        callback_state: cb_ptr,
    };
    // into_owned must hand back exactly what went in, without freeing it.
    let (hw_back, cb_back) = guard.into_owned();
    assert_eq!(
        hw_back, hw_ptr,
        "into_owned must return the original device ref"
    );
    assert_eq!(
        cb_back, cb_ptr,
        "into_owned must return the original callback box"
    );
    // Ownership is back with us; release both resources manually.
    unsafe {
        let mut hw = hw_back;
        av_buffer_unref(&mut hw);
        drop(Box::from_raw(cb_back));
    }
}
#[test]
#[ignore = "requires HWDECODE_SAMPLE_VIDEO and a working hardware backend"]
fn cap_overflow_does_not_consume_packet_and_preserves_pending() {
    use ffmpeg_next::{format, media};
    let path = std::env::var_os("HWDECODE_SAMPLE_VIDEO")
        .expect("HWDECODE_SAMPLE_VIDEO must be set for this test");
    ffmpeg_next::init().expect("ffmpeg init");
    let mut input = format::input(&path).expect("open input");
    // Resolve the best video stream once (previously this ran the same
    // streams().best() lookup twice); the borrow is scoped so `input` can
    // be mutably borrowed by packets() below.
    let (stream_index, stream_params) = {
        let stream = input
            .streams()
            .best(media::Type::Video)
            .expect("video stream");
        (stream.index(), stream.parameters())
    };
    let mut decoder = VideoDecoder::open(stream_params).expect("open decoder");
    assert!(
        decoder.probe.is_some(),
        "probe must be active immediately after open"
    );
    // Seed pending frames so we can verify abandoning the probe leaves them
    // untouched.
    decoder.pending_frames.push_back(frame::Video::empty());
    decoder.pending_frames.push_back(frame::Video::empty());
    let pending_before = decoder.pending_frames.len();
    // Pre-load the probe with one buffered packet and saturate the byte cap
    // so the very next send_packet overflows the probe budget. One scoped
    // borrow replaces the previous pair of probe.as_mut() re-borrows.
    {
        let probe = decoder.probe.as_mut().expect("probe present");
        probe.buffered_packets.push(Packet::new(8));
        probe.buffered_bytes = MAX_PROBE_PACKET_BYTES;
    }
    let mut hit_bailout = false;
    for (s, packet) in input.packets() {
        if s.index() != stream_index {
            continue;
        }
        match decoder.send_packet(&packet) {
            Err(Error::AllBackendsFailed(p)) => {
                let attempts = p.attempts();
                let unconsumed_packets = p.unconsumed_packets();
                assert_eq!(
                    unconsumed_packets.len(),
                    1,
                    "rescue history must contain the pre-existing packet only — \
                     the triggering packet must NOT have been consumed"
                );
                assert_eq!(
                    unconsumed_packets[0].size(),
                    8,
                    "the pre-existing packet must come back unmodified"
                );
                assert!(
                    attempts.is_empty(),
                    "no backend failure occurred; attempts must be empty when \
                     bailout fires from cap overflow alone"
                );
                hit_bailout = true;
                break;
            }
            Ok(()) => panic!("send_packet must bail out when probe is at the byte cap"),
            Err(other) => panic!("expected AllBackendsFailed bailout, got {other:?}"),
        }
    }
    assert!(
        hit_bailout,
        "expected at least one send_packet to trip the cap-overflow bailout"
    );
    assert!(
        decoder.probe.is_none(),
        "probe must be abandoned after cap overflow"
    );
    assert_eq!(
        decoder.pending_frames.len(),
        pending_before,
        "pending_frames belong to the active backend; abandon must not drop them"
    );
}
#[test]
#[ignore = "requires HWDECODE_SAMPLE_VIDEO and a working hardware backend"]
fn all_backends_failed_returns_buffered_packets_to_caller() {
    use ffmpeg_next::{format, media};
    let path = std::env::var_os("HWDECODE_SAMPLE_VIDEO")
        .expect("HWDECODE_SAMPLE_VIDEO must be set for this test");
    ffmpeg_next::init().expect("ffmpeg init");
    let input = format::input(&path).expect("open input");
    let stream_params = input
        .streams()
        .best(media::Type::Video)
        .expect("video stream")
        .parameters();
    let mut decoder = VideoDecoder::open(stream_params).expect("open decoder");
    assert!(
        decoder.probe.is_some(),
        "probe must be active immediately after open"
    );
    // Buffer two probe packets, then exhaust the backend list so the next
    // failure has nowhere left to fall back to.
    {
        let probe = decoder.probe.as_mut().expect("probe");
        probe.buffered_packets.push(Packet::new(16));
        probe.buffered_packets.push(Packet::new(32));
        probe.remaining_backends.clear();
    }
    match decoder.advance_probe(Error::Ffmpeg(ffmpeg_next::Error::InvalidData)) {
        Err(Error::AllBackendsFailed(p)) => {
            let unconsumed = p.unconsumed_packets();
            assert_eq!(
                unconsumed.len(),
                2,
                "buffered probe packets must be returned to the caller for SW fallback"
            );
            // Insertion order and contents must survive the round trip.
            assert_eq!(unconsumed[0].size(), 16);
            assert_eq!(unconsumed[1].size(), 32);
            assert!(
                !p.attempts().is_empty(),
                "the active backend's failure should be in attempts"
            );
        }
        other => panic!("expected AllBackendsFailed, got {other:?}"),
    }
}
#[test]
#[ignore = "requires HWDECODE_SAMPLE_VIDEO and a working hardware backend"]
fn all_backends_failed_preserves_earlier_open_failures() {
    use ffmpeg_next::{format, media};
    let path = std::env::var_os("HWDECODE_SAMPLE_VIDEO")
        .expect("HWDECODE_SAMPLE_VIDEO must be set for this test");
    ffmpeg_next::init().expect("ffmpeg init");
    let input = format::input(&path).expect("open input");
    let stream_params = input
        .streams()
        .best(media::Type::Video)
        .expect("video stream")
        .parameters();
    let mut decoder = VideoDecoder::open(stream_params).expect("open decoder");
    let active_backend = decoder.backend();
    // Pick any backend other than the active one to play the role of an
    // earlier probe candidate whose open already failed.
    let earlier_backend = match active_backend {
        Backend::VideoToolbox => Backend::Vaapi,
        Backend::Vaapi => Backend::Cuda,
        Backend::Cuda => Backend::Vaapi,
        Backend::D3d11va => Backend::Cuda,
    };
    // Seed the probe with a synthetic earlier failure and cut off fallback.
    {
        let probe = decoder.probe.as_mut().expect("probe present");
        probe.attempts.push((
            earlier_backend,
            Box::new(Error::BackendUnsupportedByCodec(earlier_backend)),
        ));
        probe.remaining_backends.clear();
    }
    match decoder.advance_probe(Error::Ffmpeg(ffmpeg_next::Error::InvalidData)) {
        Err(Error::AllBackendsFailed(p)) => {
            let attempts = p.attempts();
            assert_eq!(
                attempts.len(),
                2,
                "AllBackendsFailed must surface BOTH the seeded earlier failure \
                 and the active backend's runtime failure"
            );
            assert_eq!(
                attempts[0].0, earlier_backend,
                "earlier open failure must come first in probe order"
            );
            assert!(
                matches!(*attempts[0].1, Error::BackendUnsupportedByCodec(_)),
                "earlier failure must preserve its original error variant"
            );
            assert_eq!(
                attempts[1].0, active_backend,
                "active backend's runtime failure must come second"
            );
            assert!(
                matches!(
                    *attempts[1].1,
                    Error::Ffmpeg(ffmpeg_next::Error::InvalidData)
                ),
                "active backend's failure must preserve the synthetic InvalidData"
            );
        }
        other => panic!("expected AllBackendsFailed, got {other:?}"),
    }
}
}