use core::ptr::{addr_of, read_unaligned};
use ffmpeg_next::ffi::{
AV_NOPTS_VALUE, AVChromaLocation, AVColorPrimaries, AVColorRange, AVColorSpace,
AVColorTransferCharacteristic, AVFrame, AVPictureType, AVSubtitleType, av_buffer_alloc,
};
use mediadecode::{
PixelFormat, Timebase, Timestamp,
channel::AudioChannelLayout,
color::{ChromaLocation, ColorInfo, ColorMatrix, ColorPrimaries, ColorRange, ColorTransfer},
frame::{AudioFrame, Dimensions, Plane, Rect, SubtitleFrame, VideoFrame},
subtitle::SubtitlePayload,
};
use crate::{
FfmpegBuffer, boundary,
extras::{AudioFrameExtra, PictureType, SideDataEntry, SubtitleFrameExtra, VideoFrameExtra},
frame::{is_supported_cpu_pix_fmt, plane_height_for, plane_row_bytes_for},
sample_format::SampleFormat,
};
/// Errors produced while converting FFmpeg frames and subtitles into
/// `mediadecode` frame types.
#[derive(Debug)]
#[non_exhaustive]
pub enum ConvertError {
    /// The input `AVFrame`/`AVSubtitle` pointer was null.
    NullFrame,
    /// The frame's pixel format is not a supported CPU format.
    UnsupportedPixelFormat(PixelFormat),
    /// A plane had an inconsistent pointer/linesize/size combination.
    InvalidPlaneLayout {
        // Index of the offending plane (a sentinel past the last real
        // plane index is used when no specific plane applies).
        plane: usize,
    },
    /// A backing `AVBufferRef` could not be located or referenced.
    BufferAcquireFailed {
        // Index of the plane whose buffer could not be acquired.
        plane: usize,
    },
}
impl core::fmt::Display for ConvertError {
    /// Renders a short, human-readable description of the conversion error.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // The parameterless variant writes its literal directly; the
        // others interpolate their payload into the same wording as before.
        match self {
            Self::NullFrame => f.write_str("convert: AVFrame pointer was null"),
            Self::UnsupportedPixelFormat(pf) => {
                write!(f, "convert: unsupported pixel format {pf:?}")
            }
            Self::InvalidPlaneLayout { plane } => {
                write!(f, "convert: invalid layout on plane {plane}")
            }
            Self::BufferAcquireFailed { plane } => {
                write!(f, "convert: could not acquire buffer ref for plane {plane}")
            }
        }
    }
}

impl core::error::Error for ConvertError {}
/// Converts a decoded FFmpeg video frame into a [`VideoFrame`].
///
/// `time_base` is attached to the resulting frame's pts/duration.
///
/// # Errors
/// Propagates any [`ConvertError`] from the raw-pointer converter.
pub fn video_frame_from(
    frame: &ffmpeg_next::Frame,
    time_base: Timebase,
) -> Result<VideoFrame<mediadecode::PixelFormat, VideoFrameExtra, FfmpegBuffer>, ConvertError> {
    // SAFETY: `frame.as_ptr()` comes from a live `ffmpeg_next::Frame`
    // borrow, so the pointer is valid for the duration of the call.
    unsafe { av_frame_to_video_frame(frame.as_ptr(), time_base) }
}
/// Converts a decoded FFmpeg audio frame into an [`AudioFrame`].
///
/// `time_base` is attached to the resulting frame's pts/duration.
///
/// # Errors
/// Propagates any [`ConvertError`] from the raw-pointer converter.
pub fn audio_frame_from(
    frame: &ffmpeg_next::frame::Audio,
    time_base: Timebase,
) -> Result<AudioFrame<SampleFormat, AudioChannelLayout, AudioFrameExtra, FfmpegBuffer>, ConvertError>
{
    // SAFETY: `frame.as_ptr()` comes from a live `ffmpeg_next::frame::Audio`
    // borrow, so the pointer is valid for the duration of the call.
    unsafe { av_frame_to_audio_frame(frame.as_ptr(), time_base) }
}
/// Converts a decoded FFmpeg subtitle into a [`SubtitleFrame`].
///
/// `time_base` is attached to the resulting frame's pts.
///
/// # Errors
/// Propagates any [`ConvertError`] from the raw-pointer converter.
pub fn subtitle_frame_from(
    subtitle: &ffmpeg_next::Subtitle,
    time_base: Timebase,
) -> Result<SubtitleFrame<SubtitleFrameExtra, FfmpegBuffer>, ConvertError> {
    // SAFETY: `subtitle.as_ptr()` comes from a live `ffmpeg_next::Subtitle`
    // borrow, so the pointer is valid for the duration of the call.
    unsafe { av_subtitle_to_subtitle_frame(subtitle.as_ptr(), time_base) }
}
/// Converts a raw `AVFrame` carrying CPU video data into a [`VideoFrame`].
///
/// Tightly packed planes (stride equal to the packed row size) are
/// exported zero-copy as views into the frame's reference-counted
/// buffers; padded planes are repacked row-by-row into a fresh buffer.
///
/// # Errors
/// Fails on a null frame, an unsupported pixel format, inconsistent
/// plane geometry, or when a plane's backing buffer cannot be located
/// or acquired.
///
/// # Safety
/// `av_frame` may be null (reported as [`ConvertError::NullFrame`]);
/// otherwise it must point to a valid, fully initialised `AVFrame`
/// whose data and buffer pointers remain valid for the duration of
/// this call.
pub unsafe fn av_frame_to_video_frame(
    av_frame: *const AVFrame,
    time_base: Timebase,
) -> Result<VideoFrame<mediadecode::PixelFormat, VideoFrameExtra, FfmpegBuffer>, ConvertError> {
    if av_frame.is_null() {
        return Err(ConvertError::NullFrame);
    }
    let format_raw = unsafe { (*av_frame).format };
    let width_raw = unsafe { (*av_frame).width };
    let height_raw = unsafe { (*av_frame).height };
    let pts_raw = unsafe { (*av_frame).pts };
    let duration_raw = unsafe { (*av_frame).duration };
    let pix_fmt = boundary::from_av_pixel_format(format_raw);
    // Clamp negative (invalid) dimensions to zero before the cast.
    let width = width_raw.max(0) as u32;
    let height = height_raw.max(0) as u32;
    if !is_supported_cpu_pix_fmt(pix_fmt) {
        return Err(ConvertError::UnsupportedPixelFormat(pix_fmt));
    }
    // Pre-fill all four slots with inert placeholders; only the first
    // `plane_count` entries are overwritten with real plane data.
    let mut planes_out: [Plane<FfmpegBuffer>; 4] = [
        plane_placeholder()?,
        plane_placeholder()?,
        plane_placeholder()?,
        plane_placeholder()?,
    ];
    let mut plane_count: u8 = 0;
    #[allow(clippy::needless_range_loop)]
    for plane_idx in 0..4 {
        let linesize = unsafe { (*av_frame).linesize[plane_idx] };
        if linesize <= 0 {
            if linesize == 0 {
                // A zero linesize marks the end of the used planes.
                break;
            }
            // Negative (bottom-up) linesizes are rejected here.
            return Err(ConvertError::InvalidPlaneLayout { plane: plane_idx });
        }
        let data_ptr = unsafe { (*av_frame).data[plane_idx] };
        if data_ptr.is_null() {
            return Err(ConvertError::InvalidPlaneLayout { plane: plane_idx });
        }
        // Expected geometry for this plane given format and dimensions.
        let plane_h = plane_height_for(pix_fmt, plane_idx, height as usize)
            .ok_or(ConvertError::InvalidPlaneLayout { plane: plane_idx })?;
        let row_bytes = plane_row_bytes_for(pix_fmt, plane_idx, width as usize)
            .ok_or(ConvertError::InvalidPlaneLayout { plane: plane_idx })?;
        if row_bytes > linesize as usize {
            // The stride cannot be smaller than the packed row size.
            return Err(ConvertError::InvalidPlaneLayout { plane: plane_idx });
        }
        let (view, exported_stride) = if (linesize as usize) == row_bytes {
            // Tight layout: export a zero-copy view into the backing buffer.
            let plane_bytes = (plane_h)
                .checked_mul(linesize as usize)
                .ok_or(ConvertError::InvalidPlaneLayout { plane: plane_idx })?;
            let buf = unsafe { find_backing_buffer(av_frame, data_ptr, plane_bytes) }
                .ok_or(ConvertError::BufferAcquireFailed { plane: plane_idx })?;
            // Offset of the plane within the buffer; `find_backing_buffer`
            // already proved containment, so the subtraction cannot wrap.
            let offset = unsafe { (data_ptr as usize).wrapping_sub((*buf).data as usize) };
            let view = unsafe { FfmpegBuffer::from_ref_view(buf, offset, plane_bytes) }
                .ok_or(ConvertError::BufferAcquireFailed { plane: plane_idx })?;
            (view, linesize as u32)
        } else {
            // Padded layout: repack rows into a tight, owned buffer.
            let total_bytes = row_bytes
                .checked_mul(plane_h)
                .ok_or(ConvertError::InvalidPlaneLayout { plane: plane_idx })?;
            let last_row_offset = (plane_h.saturating_sub(1))
                .checked_mul(linesize as usize)
                .ok_or(ConvertError::InvalidPlaneLayout { plane: plane_idx })?;
            // The final row only needs `row_bytes` readable, not a full
            // stride, so the readable extent ends inside the last stride.
            let readable_extent = last_row_offset
                .checked_add(row_bytes)
                .ok_or(ConvertError::InvalidPlaneLayout { plane: plane_idx })?;
            // Validate that the whole readable region is buffer-backed
            // before any raw reads below.
            unsafe { find_backing_buffer(av_frame, data_ptr, readable_extent) }
                .ok_or(ConvertError::BufferAcquireFailed { plane: plane_idx })?;
            let mut packed: std::vec::Vec<u8> = std::vec::Vec::new();
            // Fallible reservation so huge planes fail cleanly, not abort.
            packed
                .try_reserve_exact(total_bytes)
                .map_err(|_| ConvertError::BufferAcquireFailed { plane: plane_idx })?;
            for row_idx in 0..plane_h {
                let row_offset = (row_idx)
                    .checked_mul(linesize as usize)
                    .ok_or(ConvertError::InvalidPlaneLayout { plane: plane_idx })?;
                // SAFETY: `row_offset + row_bytes <= readable_extent`,
                // which was verified to be buffer-backed above.
                let row_slice =
                    unsafe { core::slice::from_raw_parts(data_ptr.add(row_offset) as *const u8, row_bytes) };
                packed.extend_from_slice(row_slice);
            }
            let buf = FfmpegBuffer::copy_from_slice(&packed)
                .ok_or(ConvertError::BufferAcquireFailed { plane: plane_idx })?;
            (buf, row_bytes as u32)
        };
        planes_out[plane_idx] = Plane::new(view, exported_stride);
        plane_count = (plane_idx + 1) as u8;
    }
    let pts = if pts_raw != AV_NOPTS_VALUE {
        Some(Timestamp::new(pts_raw, time_base))
    } else {
        None
    };
    // A non-positive duration is treated as "unknown".
    let duration = if duration_raw > 0 {
        Some(Timestamp::new(duration_raw, time_base))
    } else {
        None
    };
    let visible_rect = unsafe { build_visible_rect(av_frame, width, height) };
    // The colour fields are C enums; read them unaligned as raw i32s to
    // avoid assuming their in-memory representation.
    let color_primaries_raw =
        unsafe { read_unaligned(addr_of!((*av_frame).color_primaries) as *const i32) };
    let color_trc_raw = unsafe { read_unaligned(addr_of!((*av_frame).color_trc) as *const i32) };
    let colorspace_raw = unsafe { read_unaligned(addr_of!((*av_frame).colorspace) as *const i32) };
    let color_range_raw = unsafe { read_unaligned(addr_of!((*av_frame).color_range) as *const i32) };
    let chroma_location_raw =
        unsafe { read_unaligned(addr_of!((*av_frame).chroma_location) as *const i32) };
    let color = ColorInfo::UNSPECIFIED
        .with_primaries(map_primaries(color_primaries_raw))
        .with_transfer(map_transfer(color_trc_raw))
        .with_matrix(map_matrix(colorspace_raw))
        .with_range(map_range(color_range_raw))
        .with_chroma_location(map_chroma_loc(chroma_location_raw));
    let extra = unsafe { build_video_frame_extra(av_frame) };
    let mut out = VideoFrame::new(
        Dimensions::new(width, height),
        pix_fmt,
        planes_out,
        plane_count,
        extra,
    )
    .with_pts(pts)
    .with_duration(duration)
    .with_color(color);
    if let Some(r) = visible_rect {
        out = out.with_visible_rect(Some(r));
    }
    Ok(out)
}
/// Builds an inert placeholder plane used to pre-fill the fixed-size
/// video plane array before real planes are written over it.
///
/// # Errors
/// Returns [`ConvertError::BufferAcquireFailed`] if no buffer can be
/// allocated at all.
fn plane_placeholder() -> Result<Plane<FfmpegBuffer>, ConvertError> {
    // Try a zero-byte allocation first; if the allocator refuses size 0
    // (returns null), retry with a minimal 1-byte buffer.
    let raw = unsafe { av_buffer_alloc(0) };
    let raw = if raw.is_null() {
        unsafe { av_buffer_alloc(1) }
    } else {
        raw
    };
    if raw.is_null() {
        // `plane: 4` is a sentinel past the last real plane index (0..4).
        return Err(ConvertError::BufferAcquireFailed { plane: 4 });
    }
    let buf =
        unsafe { FfmpegBuffer::take(raw) }.ok_or(ConvertError::BufferAcquireFailed { plane: 4 })?;
    // Stride 0 marks the plane as carrying no rows.
    Ok(Plane::new(buf, 0))
}
/// Derives the visible rectangle from the frame's crop fields.
///
/// Returns `None` when no cropping is requested on any side.
///
/// # Safety
/// `av_frame` must point to a valid `AVFrame`.
unsafe fn build_visible_rect(av_frame: *const AVFrame, width: u32, height: u32) -> Option<Rect> {
    // SAFETY: the caller guarantees `av_frame` is valid for reads.
    let (left, top, right, bottom) = unsafe {
        (
            (*av_frame).crop_left as u32,
            (*av_frame).crop_top as u32,
            (*av_frame).crop_right as u32,
            (*av_frame).crop_bottom as u32,
        )
    };
    // All four crops zero means the full frame is visible.
    if (left | top | right | bottom) == 0 {
        return None;
    }
    // Saturating subtraction keeps an over-large crop from underflowing.
    let visible_w = width.saturating_sub(left).saturating_sub(right);
    let visible_h = height.saturating_sub(top).saturating_sub(bottom);
    Some(Rect::new(left, top, visible_w, visible_h))
}
/// Collects per-frame metadata (sample aspect ratio, picture type,
/// frame flags, best-effort timestamp, side data) into a
/// [`VideoFrameExtra`].
///
/// # Safety
/// `av_frame` must point to a valid `AVFrame`.
unsafe fn build_video_frame_extra(av_frame: *const AVFrame) -> VideoFrameExtra {
    let mut out = VideoFrameExtra::default();
    let sar_num = unsafe { (*av_frame).sample_aspect_ratio.num };
    let sar_den = unsafe { (*av_frame).sample_aspect_ratio.den };
    // Only record a SAR that is both valid (positive) and non-square.
    if sar_num > 0 && sar_den > 0 && (sar_num != 1 || sar_den != 1) {
        out.set_sample_aspect_ratio(Some((sar_num as u32, sar_den as u32)));
    }
    // `pict_type` is a C enum; read it unaligned as a raw i32 to avoid
    // assuming its in-memory representation.
    let pict_type_raw = unsafe { read_unaligned(addr_of!((*av_frame).pict_type) as *const i32) };
    out.set_picture_type(map_picture_type_raw(pict_type_raw));
    let flags = unsafe { (*av_frame).flags };
    out.set_key_frame(flags & ffmpeg_next::ffi::AV_FRAME_FLAG_KEY != 0);
    out.set_interlaced(flags & ffmpeg_next::ffi::AV_FRAME_FLAG_INTERLACED != 0);
    out.set_top_field_first(flags & ffmpeg_next::ffi::AV_FRAME_FLAG_TOP_FIELD_FIRST != 0);
    let bet = unsafe { (*av_frame).best_effort_timestamp };
    if bet != AV_NOPTS_VALUE {
        out.set_best_effort_timestamp(Some(bet));
    }
    out.set_side_data(unsafe { collect_side_data(av_frame) });
    out
}
/// Maximum number of side-data entries copied from a single frame.
const SIDE_DATA_MAX_ENTRIES: usize = 64;
/// Cap on the total bytes copied across all of a frame's side data.
const SIDE_DATA_MAX_TOTAL_BYTES: usize = 256 * 1024;
/// Maximum number of subtitle rects processed per `AVSubtitle`.
const SUBTITLE_MAX_RECTS: usize = 64;
/// Per-rect cap on text/ASS payload length (excluding the terminator).
const SUBTITLE_MAX_TEXT_BYTES_PER_RECT: usize = 64 * 1024;
/// Cap on the combined text payload across all rects of one subtitle.
const SUBTITLE_MAX_TEXT_TOTAL_BYTES: usize = 256 * 1024;
/// Per-rect cap on bitmap payload bytes (stride * height).
const SUBTITLE_MAX_BITMAP_BYTES_PER_RECT: usize = 16 * 1024 * 1024;
/// Cap on the combined bitmap payload across all rects of one subtitle.
const SUBTITLE_MAX_BITMAP_TOTAL_BYTES: usize = 32 * 1024 * 1024;
/// Reads a NUL-terminated C string of at most `cap` bytes (excluding the
/// terminator), returning `None` when no terminator appears within the
/// first `cap + 1` bytes.
///
/// # Safety
/// `ptr` must be non-null and readable for up to `cap + 1` bytes, and the
/// referenced memory must stay valid and unmodified for lifetime `'a`.
unsafe fn bounded_cstr_bytes<'a>(ptr: *const core::ffi::c_char, cap: usize) -> Option<&'a [u8]> {
    // Scan one byte past `cap` so a string of exactly `cap` bytes (with
    // its NUL at index `cap`) is still accepted.
    let limit = cap.saturating_add(1);
    let mut len = 0usize;
    while len < limit {
        // SAFETY: the caller guarantees `cap + 1` readable bytes.
        let byte = unsafe { ptr.add(len).cast::<u8>().read() };
        if byte == 0 {
            // SAFETY: the first `len` bytes were just read successfully.
            return Some(unsafe { core::slice::from_raw_parts(ptr.cast::<u8>(), len) });
        }
        len += 1;
    }
    None
}
/// Copies a frame's side-data entries into owned [`SideDataEntry`]
/// values, bounded by [`SIDE_DATA_MAX_ENTRIES`] and
/// [`SIDE_DATA_MAX_TOTAL_BYTES`].
///
/// Allocation failures degrade gracefully: an entry whose payload cannot
/// be reserved is skipped, and a failed outer reservation yields an
/// empty list instead of an error.
///
/// # Safety
/// `av_frame` must point to a valid `AVFrame`; if `side_data` is
/// non-null it must contain `nb_side_data` valid entry pointers.
unsafe fn collect_side_data(av_frame: *const AVFrame) -> std::vec::Vec<SideDataEntry> {
    let nb_side_data_raw = unsafe { (*av_frame).nb_side_data };
    let side_data = unsafe { (*av_frame).side_data };
    if nb_side_data_raw <= 0 || side_data.is_null() {
        return Vec::new();
    }
    let count_raw = nb_side_data_raw as usize;
    // Truncate (with a warning) rather than trust an unbounded count.
    let count = count_raw.min(SIDE_DATA_MAX_ENTRIES);
    if count_raw > SIDE_DATA_MAX_ENTRIES {
        tracing::warn!(
            cap = SIDE_DATA_MAX_ENTRIES,
            requested = count_raw,
            "mediadecode-ffmpeg: AVFrame.nb_side_data exceeds entry cap; truncating",
        );
    }
    let mut out: Vec<SideDataEntry> = Vec::new();
    if out.try_reserve_exact(count).is_err() {
        // Out of memory: side data is best-effort metadata, return none.
        return Vec::new();
    }
    let mut total_bytes: usize = 0;
    for i in 0..count {
        let sd = unsafe { *side_data.add(i) };
        if sd.is_null() {
            continue;
        }
        // `type_` is a C enum; read it unaligned as a raw i32.
        let kind = unsafe { read_unaligned(addr_of!((*sd).type_) as *const i32) };
        let size = unsafe { (*sd).size };
        let data_ptr = unsafe { (*sd).data };
        let data_slice = if size == 0 || data_ptr.is_null() {
            Vec::new()
        } else {
            // Enforce the global byte budget before copying anything.
            let projected = total_bytes.saturating_add(size);
            if projected > SIDE_DATA_MAX_TOTAL_BYTES {
                tracing::warn!(
                    cap = SIDE_DATA_MAX_TOTAL_BYTES,
                    projected,
                    "mediadecode-ffmpeg: AVFrame side-data byte cap reached; dropping remaining entries",
                );
                break;
            }
            total_bytes = projected;
            let mut buf: Vec<u8> = Vec::new();
            if buf.try_reserve_exact(size).is_err() {
                // Skip just this entry if its payload cannot be allocated.
                continue;
            }
            // SAFETY: `data_ptr` is non-null and `size` bytes readable
            // per the AVFrameSideData contract.
            let src = unsafe { core::slice::from_raw_parts(data_ptr, size) };
            buf.extend_from_slice(src);
            buf
        };
        out.push(SideDataEntry::new(kind, data_slice));
    }
    out
}
/// Scans `AVFrame.buf` for the `AVBufferRef` whose allocation fully
/// contains the `bytes`-long region starting at `data_ptr`.
///
/// Returns `None` when no owning buffer covers the region (e.g. the
/// frame points at memory it does not reference-count).
///
/// # Safety
/// `av_frame` must point to a valid `AVFrame`; entries of its `buf`
/// array must be null or valid `AVBufferRef` pointers.
unsafe fn find_backing_buffer(
    av_frame: *const AVFrame,
    data_ptr: *const u8,
    bytes: usize,
) -> Option<*mut ffmpeg_next::ffi::AVBufferRef> {
    let buf_array_len = unsafe { (*av_frame).buf.len() };
    for i in 0..buf_array_len {
        let buf = unsafe { (*av_frame).buf[i] };
        if buf.is_null() {
            continue;
        }
        let buf_data = unsafe { (*buf).data as *const u8 };
        let buf_size = unsafe { (*buf).size };
        if buf_data.is_null() {
            continue;
        }
        // Compare as integer addresses; the checked adds reject
        // address-space wrap-around instead of panicking or wrapping.
        let start = buf_data as usize;
        let Some(end) = start.checked_add(buf_size) else {
            continue;
        };
        let dp = data_ptr as usize;
        let Some(dp_end) = dp.checked_add(bytes) else {
            continue;
        };
        // Containment: [dp, dp_end) must lie entirely within [start, end).
        if dp >= start && dp_end <= end {
            return Some(buf);
        }
    }
    None
}
/// Maps a raw FFmpeg `AVColorPrimaries` value onto [`ColorPrimaries`].
/// Values not listed fall back to `Unspecified`.
fn map_primaries(raw: i32) -> ColorPrimaries {
    // Bind each FFmpeg constant so it can be used as a match pattern.
    const BT709: i32 = AVColorPrimaries::AVCOL_PRI_BT709 as i32;
    const UNSPECIFIED: i32 = AVColorPrimaries::AVCOL_PRI_UNSPECIFIED as i32;
    const BT470M: i32 = AVColorPrimaries::AVCOL_PRI_BT470M as i32;
    const BT470BG: i32 = AVColorPrimaries::AVCOL_PRI_BT470BG as i32;
    const SMPTE170M: i32 = AVColorPrimaries::AVCOL_PRI_SMPTE170M as i32;
    const SMPTE240M: i32 = AVColorPrimaries::AVCOL_PRI_SMPTE240M as i32;
    const FILM: i32 = AVColorPrimaries::AVCOL_PRI_FILM as i32;
    const BT2020: i32 = AVColorPrimaries::AVCOL_PRI_BT2020 as i32;
    const SMPTE428: i32 = AVColorPrimaries::AVCOL_PRI_SMPTE428 as i32;
    const SMPTE431: i32 = AVColorPrimaries::AVCOL_PRI_SMPTE431 as i32;
    const SMPTE432: i32 = AVColorPrimaries::AVCOL_PRI_SMPTE432 as i32;
    const EBU3213: i32 = AVColorPrimaries::AVCOL_PRI_EBU3213 as i32;
    match raw {
        BT709 => ColorPrimaries::Bt709,
        UNSPECIFIED => ColorPrimaries::Unspecified,
        BT470M => ColorPrimaries::Bt470M,
        BT470BG => ColorPrimaries::Bt470Bg,
        SMPTE170M => ColorPrimaries::Smpte170M,
        SMPTE240M => ColorPrimaries::Smpte240M,
        FILM => ColorPrimaries::Film,
        BT2020 => ColorPrimaries::Bt2020,
        SMPTE428 => ColorPrimaries::SmpteSt428,
        SMPTE431 => ColorPrimaries::SmpteRp431,
        SMPTE432 => ColorPrimaries::SmpteEg432,
        EBU3213 => ColorPrimaries::Ebu3213E,
        _ => ColorPrimaries::Unspecified,
    }
}
/// Maps a raw FFmpeg `AVColorTransferCharacteristic` value onto
/// [`ColorTransfer`]. Values not listed fall back to `Unspecified`.
fn map_transfer(raw: i32) -> ColorTransfer {
    match raw {
        x if x == AVColorTransferCharacteristic::AVCOL_TRC_BT709 as i32 => ColorTransfer::Bt709,
        x if x == AVColorTransferCharacteristic::AVCOL_TRC_UNSPECIFIED as i32 => {
            ColorTransfer::Unspecified
        }
        // FFmpeg names the BT.470 curves by their gamma values.
        x if x == AVColorTransferCharacteristic::AVCOL_TRC_GAMMA22 as i32 => ColorTransfer::Bt470M,
        x if x == AVColorTransferCharacteristic::AVCOL_TRC_GAMMA28 as i32 => ColorTransfer::Bt470Bg,
        x if x == AVColorTransferCharacteristic::AVCOL_TRC_SMPTE170M as i32 => ColorTransfer::Smpte170M,
        x if x == AVColorTransferCharacteristic::AVCOL_TRC_SMPTE240M as i32 => ColorTransfer::Smpte240M,
        x if x == AVColorTransferCharacteristic::AVCOL_TRC_LINEAR as i32 => ColorTransfer::Linear,
        x if x == AVColorTransferCharacteristic::AVCOL_TRC_LOG as i32 => ColorTransfer::Log100,
        x if x == AVColorTransferCharacteristic::AVCOL_TRC_LOG_SQRT as i32 => ColorTransfer::Log316,
        x if x == AVColorTransferCharacteristic::AVCOL_TRC_IEC61966_2_4 as i32 => {
            ColorTransfer::Iec6196624
        }
        x if x == AVColorTransferCharacteristic::AVCOL_TRC_BT1361_ECG as i32 => {
            ColorTransfer::Bt1361Ecg
        }
        // IEC 61966-2-1 is the sRGB transfer curve.
        x if x == AVColorTransferCharacteristic::AVCOL_TRC_IEC61966_2_1 as i32 => {
            ColorTransfer::Iec6196621
        }
        x if x == AVColorTransferCharacteristic::AVCOL_TRC_BT2020_10 as i32 => {
            ColorTransfer::Bt2020_10Bit
        }
        x if x == AVColorTransferCharacteristic::AVCOL_TRC_BT2020_12 as i32 => {
            ColorTransfer::Bt2020_12Bit
        }
        x if x == AVColorTransferCharacteristic::AVCOL_TRC_SMPTE2084 as i32 => {
            ColorTransfer::SmpteSt2084Pq
        }
        x if x == AVColorTransferCharacteristic::AVCOL_TRC_SMPTE428 as i32 => ColorTransfer::SmpteSt428,
        x if x == AVColorTransferCharacteristic::AVCOL_TRC_ARIB_STD_B67 as i32 => {
            ColorTransfer::AribStdB67Hlg
        }
        _ => ColorTransfer::Unspecified,
    }
}
/// Maps a raw FFmpeg `AVColorSpace` value onto [`ColorMatrix`].
///
/// NOTE(review): unknown and unspecified values fall back to `Bt709`
/// rather than an "unspecified" matrix — confirm this default is
/// intentional for downstream consumers.
fn map_matrix(raw: i32) -> ColorMatrix {
    if raw == AVColorSpace::AVCOL_SPC_BT709 as i32 {
        ColorMatrix::Bt709
    } else if raw == AVColorSpace::AVCOL_SPC_BT2020_NCL as i32 {
        ColorMatrix::Bt2020Ncl
    } else if raw == AVColorSpace::AVCOL_SPC_SMPTE170M as i32
        || raw == AVColorSpace::AVCOL_SPC_BT470BG as i32
    {
        // Both SMPTE 170M and BT.470BG use the BT.601 matrix.
        ColorMatrix::Bt601
    } else if raw == AVColorSpace::AVCOL_SPC_SMPTE240M as i32 {
        ColorMatrix::Smpte240m
    } else if raw == AVColorSpace::AVCOL_SPC_FCC as i32 {
        ColorMatrix::Fcc
    } else if raw == AVColorSpace::AVCOL_SPC_YCGCO as i32 {
        ColorMatrix::YCgCo
    } else {
        ColorMatrix::Bt709
    }
}
/// Maps a raw FFmpeg `AVColorRange` value onto [`ColorRange`].
/// Values not listed fall back to `Unspecified`.
fn map_range(raw: i32) -> ColorRange {
    if raw == AVColorRange::AVCOL_RANGE_JPEG as i32 {
        ColorRange::Full
    } else if raw == AVColorRange::AVCOL_RANGE_MPEG as i32 {
        ColorRange::Limited
    } else {
        ColorRange::Unspecified
    }
}
/// Maps a raw FFmpeg `AVChromaLocation` value onto [`ChromaLocation`].
/// Values not listed fall back to `Unspecified`.
fn map_chroma_loc(raw: i32) -> ChromaLocation {
    // Bind each FFmpeg constant so it can be used as a match pattern.
    const LEFT: i32 = AVChromaLocation::AVCHROMA_LOC_LEFT as i32;
    const CENTER: i32 = AVChromaLocation::AVCHROMA_LOC_CENTER as i32;
    const TOPLEFT: i32 = AVChromaLocation::AVCHROMA_LOC_TOPLEFT as i32;
    const TOP: i32 = AVChromaLocation::AVCHROMA_LOC_TOP as i32;
    const BOTTOMLEFT: i32 = AVChromaLocation::AVCHROMA_LOC_BOTTOMLEFT as i32;
    const BOTTOM: i32 = AVChromaLocation::AVCHROMA_LOC_BOTTOM as i32;
    match raw {
        LEFT => ChromaLocation::Left,
        CENTER => ChromaLocation::Center,
        TOPLEFT => ChromaLocation::TopLeft,
        TOP => ChromaLocation::Top,
        BOTTOMLEFT => ChromaLocation::BottomLeft,
        BOTTOM => ChromaLocation::Bottom,
        _ => ChromaLocation::Unspecified,
    }
}
/// Converts a raw `AVFrame` carrying decoded audio into an
/// [`AudioFrame`], exporting each plane zero-copy as a view into the
/// frame's reference-counted buffers.
///
/// # Errors
/// Fails on a null frame, inconsistent plane geometry, more than 8
/// planes, or when a plane's backing buffer cannot be acquired.
///
/// # Safety
/// `av_frame` may be null (reported as [`ConvertError::NullFrame`]);
/// otherwise it must point to a valid, fully initialised audio
/// `AVFrame` whose buffers remain valid for the duration of this call.
pub unsafe fn av_frame_to_audio_frame(
    av_frame: *const AVFrame,
    time_base: Timebase,
) -> Result<AudioFrame<SampleFormat, AudioChannelLayout, AudioFrameExtra, FfmpegBuffer>, ConvertError>
{
    if av_frame.is_null() {
        return Err(ConvertError::NullFrame);
    }
    let format_raw = unsafe { (*av_frame).format };
    let sample_rate_raw = unsafe { (*av_frame).sample_rate };
    let nb_samples_raw = unsafe { (*av_frame).nb_samples };
    let pts_raw = unsafe { (*av_frame).pts };
    let duration_raw = unsafe { (*av_frame).duration };
    let bet_raw = unsafe { (*av_frame).best_effort_timestamp };
    let sample_format = SampleFormat::from_raw(format_raw);
    // Clamp negative (invalid) values to zero before the casts.
    let sample_rate = sample_rate_raw.max(0) as u32;
    let nb_samples = nb_samples_raw.max(0) as u32;
    let ch_layout_ptr = unsafe { addr_of!((*av_frame).ch_layout) };
    let channel_layout =
        unsafe { crate::channel_layout::audio_channel_layout_from_raw_ptr(ch_layout_ptr) };
    let channel_count_full = channel_layout.channels();
    // The exported channel count is a u8; clamp to its maximum.
    let channel_count = channel_count_full.min(255) as u8;
    let is_planar = sample_format.is_planar();
    // Planar audio has one plane per channel; packed audio has one plane.
    let plane_count_full = if is_planar { channel_count as usize } else { 1 };
    if plane_count_full > 8 {
        // Only up to 8 planes fit the fixed-size output array.
        return Err(ConvertError::InvalidPlaneLayout { plane: 8 });
    }
    let plane_count = plane_count_full as u8;
    // For audio, linesize[0] gives the byte size of each plane
    // (identical across planes for planar formats).
    let linesize0 = unsafe { (*av_frame).linesize[0] };
    if nb_samples > 0 && linesize0 <= 0 {
        return Err(ConvertError::InvalidPlaneLayout { plane: 0 });
    }
    let plane_bytes = linesize0.max(0) as usize;
    if nb_samples > 0 {
        // Sanity-check the plane size against the sample geometry.
        let bytes_per_sample = sample_format
            .bytes_per_sample()
            .ok_or(ConvertError::InvalidPlaneLayout { plane: 0 })? as usize;
        let expected_per_plane = if is_planar {
            (nb_samples as usize)
                .checked_mul(bytes_per_sample)
                .ok_or(ConvertError::InvalidPlaneLayout { plane: 0 })?
        } else {
            // Packed: all channels are interleaved in the single plane.
            (nb_samples as usize)
                .checked_mul(bytes_per_sample)
                .and_then(|x| x.checked_mul(channel_count.max(1) as usize))
                .ok_or(ConvertError::InvalidPlaneLayout { plane: 0 })?
        };
        if plane_bytes < expected_per_plane {
            return Err(ConvertError::InvalidPlaneLayout { plane: 0 });
        }
    }
    // Pre-fill all eight slots with inert placeholders; only the first
    // `plane_count` entries are overwritten with real plane data.
    let mut planes_out: [Plane<FfmpegBuffer>; 8] = [
        audio_plane_placeholder()?,
        audio_plane_placeholder()?,
        audio_plane_placeholder()?,
        audio_plane_placeholder()?,
        audio_plane_placeholder()?,
        audio_plane_placeholder()?,
        audio_plane_placeholder()?,
        audio_plane_placeholder()?,
    ];
    #[allow(clippy::needless_range_loop)]
    for plane_idx in 0..plane_count as usize {
        let data_ptr = unsafe { (*av_frame).data[plane_idx] };
        if data_ptr.is_null() {
            return Err(ConvertError::InvalidPlaneLayout { plane: plane_idx });
        }
        let buf = unsafe { find_audio_backing_buffer(av_frame, data_ptr, plane_bytes) }
            .ok_or(ConvertError::BufferAcquireFailed { plane: plane_idx })?;
        // Offset of the plane within the buffer; containment was proved
        // by `find_audio_backing_buffer`, so the subtraction cannot wrap.
        let offset = unsafe { (data_ptr as usize).wrapping_sub((*buf).data as usize) };
        let view = unsafe { FfmpegBuffer::from_ref_view(buf, offset, plane_bytes) }
            .ok_or(ConvertError::BufferAcquireFailed { plane: plane_idx })?;
        planes_out[plane_idx] = Plane::new(view, plane_bytes as u32);
    }
    let pts = if pts_raw != AV_NOPTS_VALUE {
        Some(Timestamp::new(pts_raw, time_base))
    } else {
        None
    };
    // A non-positive duration is treated as "unknown".
    let duration = if duration_raw > 0 {
        Some(Timestamp::new(duration_raw, time_base))
    } else {
        None
    };
    let mut extra = AudioFrameExtra::default();
    if bet_raw != AV_NOPTS_VALUE {
        extra.set_best_effort_timestamp(Some(bet_raw));
    }
    extra.set_side_data(unsafe { collect_side_data(av_frame) });
    Ok(
        AudioFrame::new(
            sample_rate,
            nb_samples,
            channel_count,
            sample_format,
            channel_layout,
            planes_out,
            plane_count,
            extra,
        )
        .with_pts(pts)
        .with_duration(duration),
    )
}
/// Builds an inert placeholder plane used to pre-fill the fixed-size
/// audio plane array before real planes are written over it.
///
/// # Errors
/// Returns [`ConvertError::BufferAcquireFailed`] if the 1-byte dummy
/// buffer cannot be allocated or taken.
fn audio_plane_placeholder() -> Result<Plane<FfmpegBuffer>, ConvertError> {
    // `plane: 8` is a sentinel past the last real plane index (0..8).
    let acquire_err = || ConvertError::BufferAcquireFailed { plane: 8 };
    let raw = unsafe { av_buffer_alloc(1) };
    if raw.is_null() {
        return Err(acquire_err());
    }
    unsafe { FfmpegBuffer::take(raw) }
        .ok_or_else(acquire_err)
        // Stride 0 marks the plane as carrying no samples.
        .map(|buf| Plane::new(buf, 0))
}
unsafe fn find_audio_backing_buffer(
av_frame: *const AVFrame,
data_ptr: *const u8,
bytes: usize,
) -> Option<*mut ffmpeg_next::ffi::AVBufferRef> {
let buf_array_len = unsafe { (*av_frame).buf.len() };
for i in 0..buf_array_len {
let buf = unsafe { (*av_frame).buf[i] };
if buf.is_null() {
continue;
}
let buf_data = unsafe { (*buf).data as *const u8 };
let buf_size = unsafe { (*buf).size };
if buf_data.is_null() {
continue;
}
let start = buf_data as usize;
let Some(end) = start.checked_add(buf_size) else {
continue;
};
let dp = data_ptr as usize;
let Some(dp_end) = dp.checked_add(bytes) else {
continue;
};
if dp >= start && dp_end <= end {
return Some(buf);
}
}
None
}
/// Converts a raw `AVSubtitle` into a [`SubtitleFrame`].
///
/// Text and ASS rects are concatenated (newline-separated) into a single
/// text payload; bitmap rects become [`mediadecode::subtitle::BitmapRegion`]s.
/// If any text is present it takes precedence over bitmaps; with neither,
/// an empty text payload is produced. All payloads are bounded by the
/// `SUBTITLE_MAX_*` caps above.
///
/// # Errors
/// Fails on a null subtitle, a null rect array with a non-zero count,
/// over-cap payload sizes, or buffer allocation failure.
///
/// # Safety
/// `av_subtitle` may be null (reported as [`ConvertError::NullFrame`]);
/// otherwise it must point to a valid `AVSubtitle` whose `rects` array
/// contains `num_rects` valid (or null) rect pointers.
pub unsafe fn av_subtitle_to_subtitle_frame(
    av_subtitle: *const ffmpeg_next::ffi::AVSubtitle,
    time_base: Timebase,
) -> Result<SubtitleFrame<SubtitleFrameExtra, FfmpegBuffer>, ConvertError> {
    if av_subtitle.is_null() {
        return Err(ConvertError::NullFrame);
    }
    // Accumulators for the two possible payload kinds.
    let mut text_chunks: std::vec::Vec<u8> = std::vec::Vec::new();
    let mut bitmap_regions: std::vec::Vec<mediadecode::subtitle::BitmapRegion<FfmpegBuffer>> =
        std::vec::Vec::new();
    let count_raw = unsafe { (*av_subtitle).num_rects } as usize;
    let rects_ptr = unsafe { (*av_subtitle).rects };
    if count_raw > 0 && rects_ptr.is_null() {
        return Err(ConvertError::NullFrame);
    }
    // Truncate (with a warning) rather than trust an unbounded count.
    let count = count_raw.min(SUBTITLE_MAX_RECTS);
    if count_raw > SUBTITLE_MAX_RECTS {
        tracing::warn!(
            cap = SUBTITLE_MAX_RECTS,
            requested = count_raw,
            "mediadecode-ffmpeg: AVSubtitle.num_rects exceeds rect cap; truncating",
        );
    }
    let mut text_total_bytes: usize = 0;
    let mut bitmap_total_bytes: usize = 0;
    let text_kind = AVSubtitleType::SUBTITLE_TEXT as i32;
    let ass_kind = AVSubtitleType::SUBTITLE_ASS as i32;
    let bitmap_kind = AVSubtitleType::SUBTITLE_BITMAP as i32;
    for i in 0..count {
        let rect_ptr = unsafe { *rects_ptr.add(i) };
        if rect_ptr.is_null() {
            continue;
        }
        // `type_` is a C enum; read it unaligned as a raw i32.
        let rect_type_raw = unsafe { read_unaligned(addr_of!((*rect_ptr).type_) as *const i32) };
        let rect_text_ptr = unsafe { (*rect_ptr).text };
        let rect_ass_ptr = unsafe { (*rect_ptr).ass };
        let rect_data0_ptr = unsafe { (*rect_ptr).data[0] };
        let rect_data1_ptr = unsafe { (*rect_ptr).data[1] };
        let rect_linesize0 = unsafe { (*rect_ptr).linesize[0] };
        let rect_w = unsafe { (*rect_ptr).w };
        let rect_h = unsafe { (*rect_ptr).h };
        let rect_x = unsafe { (*rect_ptr).x };
        let rect_y = unsafe { (*rect_ptr).y };
        match rect_type_raw {
            // Plain-text rect: append its bytes (newline-separated).
            x if x == text_kind && !rect_text_ptr.is_null() => {
                let bytes = unsafe { bounded_cstr_bytes(rect_text_ptr, SUBTITLE_MAX_TEXT_BYTES_PER_RECT) }
                    .ok_or(ConvertError::InvalidPlaneLayout { plane: 0 })?;
                if bytes.len() > SUBTITLE_MAX_TEXT_BYTES_PER_RECT {
                    return Err(ConvertError::InvalidPlaneLayout { plane: 0 });
                }
                // One `\n` separator between chunks, none before the first.
                let separator = if text_chunks.is_empty() { 0 } else { 1 };
                let projected = text_total_bytes
                    .saturating_add(bytes.len())
                    .saturating_add(separator);
                if projected > SUBTITLE_MAX_TEXT_TOTAL_BYTES {
                    return Err(ConvertError::InvalidPlaneLayout { plane: 0 });
                }
                if separator == 1 {
                    text_chunks.push(b'\n');
                }
                text_chunks.extend_from_slice(bytes);
                text_total_bytes = projected;
            }
            // ASS rect: same accumulation as plain text.
            x if x == ass_kind && !rect_ass_ptr.is_null() => {
                let bytes = unsafe { bounded_cstr_bytes(rect_ass_ptr, SUBTITLE_MAX_TEXT_BYTES_PER_RECT) }
                    .ok_or(ConvertError::InvalidPlaneLayout { plane: 0 })?;
                if bytes.len() > SUBTITLE_MAX_TEXT_BYTES_PER_RECT {
                    return Err(ConvertError::InvalidPlaneLayout { plane: 0 });
                }
                let separator = if text_chunks.is_empty() { 0 } else { 1 };
                let projected = text_total_bytes
                    .saturating_add(bytes.len())
                    .saturating_add(separator);
                if projected > SUBTITLE_MAX_TEXT_TOTAL_BYTES {
                    return Err(ConvertError::InvalidPlaneLayout { plane: 0 });
                }
                if separator == 1 {
                    text_chunks.push(b'\n');
                }
                text_chunks.extend_from_slice(bytes);
                text_total_bytes = projected;
            }
            // Bitmap rect: copy pixel data and (optional) palette.
            x if x == bitmap_kind => {
                let w = rect_w.max(0) as u32;
                let h = rect_h.max(0) as u32;
                let stride = rect_linesize0.max(0) as u32;
                if rect_data0_ptr.is_null() || stride == 0 || h == 0 {
                    // Degenerate bitmap: skip silently.
                    continue;
                }
                let data_len = (stride as usize)
                    .checked_mul(h as usize)
                    .ok_or(ConvertError::InvalidPlaneLayout { plane: 0 })?;
                if data_len > SUBTITLE_MAX_BITMAP_BYTES_PER_RECT {
                    return Err(ConvertError::InvalidPlaneLayout { plane: 0 });
                }
                let projected_total = bitmap_total_bytes.saturating_add(data_len);
                if projected_total > SUBTITLE_MAX_BITMAP_TOTAL_BYTES {
                    return Err(ConvertError::InvalidPlaneLayout { plane: 0 });
                }
                // SAFETY: non-null data pointer with stride*h readable
                // bytes per the AVSubtitleRect contract.
                let data_slice = unsafe { core::slice::from_raw_parts(rect_data0_ptr, data_len) };
                let data_buf = FfmpegBuffer::copy_from_slice(data_slice)
                    .ok_or(ConvertError::BufferAcquireFailed { plane: 0 })?;
                // Palette is 256 RGBA entries when present.
                let palette_len = 256 * 4;
                let palette_buf = if rect_data1_ptr.is_null() {
                    FfmpegBuffer::copy_from_slice(&[])
                        .ok_or(ConvertError::BufferAcquireFailed { plane: 1 })?
                } else {
                    let p = unsafe { core::slice::from_raw_parts(rect_data1_ptr, palette_len) };
                    FfmpegBuffer::copy_from_slice(p).ok_or(ConvertError::BufferAcquireFailed { plane: 1 })?
                };
                bitmap_regions.push(mediadecode::subtitle::BitmapRegion::new(
                    rect_x.max(0) as u32,
                    rect_y.max(0) as u32,
                    w,
                    h,
                    stride,
                    data_buf,
                    palette_buf,
                ));
                bitmap_total_bytes = projected_total;
            }
            // Unknown rect kinds are ignored.
            _ => {}
        }
    }
    // Text wins over bitmaps; with neither, emit an empty text payload.
    let payload = if !text_chunks.is_empty() {
        let buf = FfmpegBuffer::copy_from_slice(&text_chunks)
            .ok_or(ConvertError::BufferAcquireFailed { plane: 0 })?;
        SubtitlePayload::Text {
            text: buf,
            language: None,
        }
    } else if !bitmap_regions.is_empty() {
        SubtitlePayload::Bitmap {
            regions: bitmap_regions,
        }
    } else {
        let buf =
            FfmpegBuffer::copy_from_slice(&[]).ok_or(ConvertError::BufferAcquireFailed { plane: 0 })?;
        SubtitlePayload::Text {
            text: buf,
            language: None,
        }
    };
    let sub_pts = unsafe { (*av_subtitle).pts };
    let pts = if sub_pts != AV_NOPTS_VALUE {
        Some(Timestamp::new(sub_pts, time_base))
    } else {
        None
    };
    let extra = SubtitleFrameExtra::new(unsafe { (*av_subtitle).start_display_time }, unsafe {
        (*av_subtitle).end_display_time
    });
    Ok(SubtitleFrame::new(payload, extra).with_pts(pts))
}
/// Maps a raw FFmpeg `AVPictureType` value onto [`PictureType`].
/// Values not listed fall back to `Unspecified`.
fn map_picture_type_raw(raw: i32) -> PictureType {
    // Bind each FFmpeg constant so it can be used as a match pattern.
    const I: i32 = AVPictureType::AV_PICTURE_TYPE_I as i32;
    const P: i32 = AVPictureType::AV_PICTURE_TYPE_P as i32;
    const B: i32 = AVPictureType::AV_PICTURE_TYPE_B as i32;
    const S: i32 = AVPictureType::AV_PICTURE_TYPE_S as i32;
    const SI: i32 = AVPictureType::AV_PICTURE_TYPE_SI as i32;
    const SP: i32 = AVPictureType::AV_PICTURE_TYPE_SP as i32;
    const BI: i32 = AVPictureType::AV_PICTURE_TYPE_BI as i32;
    match raw {
        I => PictureType::I,
        P => PictureType::P,
        B => PictureType::B,
        S => PictureType::S,
        SI => PictureType::Si,
        SP => PictureType::Sp,
        BI => PictureType::Bi,
        _ => PictureType::Unspecified,
    }
}