#[cfg(with_dav1d)]
mod async_decoder_wrapper;
#[cfg(with_dav1d)]
mod av1;
#[cfg(with_ffmpeg)]
mod ffmpeg_cli;
#[cfg(with_ffmpeg)]
pub use ffmpeg_cli::FFmpegCliDecoder;
#[cfg(with_ffmpeg)]
pub use ffmpeg_cli::{
Error as FFmpegError, FFmpegVersion, FFmpegVersionParseError, ffmpeg_download_url,
};
#[cfg(target_arch = "wasm32")]
mod webcodecs;
use crate::{SampleIndex, Time, VideoDataDescription, player::VideoPlaybackIssueSeverity};
#[derive(thiserror::Error, Debug, Clone)]
pub enum DecodeError {
#[error("Unsupported codec: {0}")]
UnsupportedCodec(String),
#[cfg(with_dav1d)]
#[error("dav1d: {0}")]
Dav1d(#[from] dav1d::Error),
#[error("To enabled native AV1 decoding, compile Rerun with the `nasm` feature enabled.")]
Dav1dWithoutNasm,
#[error(
"Rerun does not yet support native AV1 decoding on Linux ARM64. See https://github.com/rerun-io/rerun/issues/7755"
)]
NoDav1dOnLinuxArm64,
#[cfg(target_arch = "wasm32")]
#[error(transparent)]
WebDecoder(#[from] webcodecs::WebError),
#[cfg(with_ffmpeg)]
#[error(transparent)]
Ffmpeg(std::sync::Arc<FFmpegError>),
#[error("Unsupported bits per component: {0}")]
BadBitsPerComponent(usize),
}
impl re_byte_size::SizeBytes for DecodeError {
    /// Reports zero heap usage for errors.
    ///
    /// NOTE(review): `UnsupportedCodec(String)` (and some backend errors) do
    /// own heap memory, so this slightly under-reports — presumably considered
    /// negligible for error values; confirm.
    fn heap_size_bytes(&self) -> u64 {
        0
    }
}
impl DecodeError {
    /// Whether it still makes sense to feed more chunks to the decoder after
    /// this error occurred.
    ///
    /// Returns `false` for permanent conditions (unsupported codec, missing
    /// build features, bad bit depth); transient backend errors return `true`
    /// or defer to the backend's own judgement (FFmpeg).
    pub fn should_request_more_frames(&self) -> bool {
        match self {
            Self::UnsupportedCodec(_) | Self::Dav1dWithoutNasm | Self::NoDav1dOnLinuxArm64 => false,
            #[cfg(with_dav1d)]
            Self::Dav1d(_) => true,
            #[cfg(target_arch = "wasm32")]
            Self::WebDecoder(_) => true,
            #[cfg(with_ffmpeg)]
            Self::Ffmpeg(err) => err.should_request_more_frames(),
            Self::BadBitsPerComponent(_) => false,
        }
    }

    /// How severely this error should be surfaced to the user.
    ///
    /// `dav1d::Error::Again` only means the decoder needs more input, so it is
    /// reported as `Loading`; everything else is a hard `Error` (the web
    /// backend decides for itself).
    pub fn severity(&self) -> VideoPlaybackIssueSeverity {
        match self {
            #[cfg(with_dav1d)]
            Self::Dav1d(err) => match err {
                dav1d::Error::Again => VideoPlaybackIssueSeverity::Loading,
                _ => VideoPlaybackIssueSeverity::Error,
            },
            #[cfg(target_arch = "wasm32")]
            Self::WebDecoder(err) => err.severity(),
            #[cfg(with_ffmpeg)]
            Self::Ffmpeg(_) => VideoPlaybackIssueSeverity::Error,
            Self::UnsupportedCodec(_)
            | Self::Dav1dWithoutNasm
            | Self::NoDav1dOnLinuxArm64
            | Self::BadBitsPerComponent(_) => VideoPlaybackIssueSeverity::Error,
        }
    }
}
/// Result type used throughout this module; errors default to [`DecodeError`].
pub type Result<T = (), E = DecodeError> = std::result::Result<T, E>;

/// A decoded [`Frame`], or the error that occurred while decoding it.
pub type FrameResult = Result<Frame>;
/// Common interface for the asynchronous video decoder backends.
///
/// Compressed chunks go in via [`Self::submit_chunk`]; decoded frames come
/// out through the channel passed to [`new_decoder`].
pub trait AsyncDecoder: Send + Sync {
    /// Submits one compressed chunk for decoding.
    fn submit_chunk(&mut self, chunk: Chunk) -> Result<()>;

    /// Signals that no more chunks will follow. Defaults to a no-op.
    fn end_of_video(&mut self) -> Result<()> {
        Ok(())
    }

    /// Resets the decoder so it can be fed from the start of `video_descr` again.
    fn reset(&mut self, video_descr: &VideoDataDescription) -> Result<()>;

    /// Minimum number of samples this decoder wants enqueued ahead of the one
    /// currently needed. Defaults to 0 (no look-ahead required).
    fn min_num_samples_to_enqueue_ahead(&self) -> usize {
        0
    }
}
/// Creates a decoder for `video`, sending decoded frames (or errors) through
/// `output_sender`.
///
/// On `wasm32` this always uses the browser's WebCodecs API (which is the only
/// path that reads `decode_settings.hw_acceleration`). On native targets the
/// backend is picked by codec: dav1d for AV1 (when compiled in), the FFmpeg
/// CLI for H.264/H.265 (when compiled in), otherwise
/// [`DecodeError::UnsupportedCodec`].
pub fn new_decoder(
    debug_name: &str,
    video: &crate::VideoDataDescription,
    decode_settings: &DecodeSettings,
    output_sender: crate::Sender<FrameResult>,
) -> Result<Box<dyn AsyncDecoder>> {
    // Depending on the active cfg flags, some arguments and code paths are unused.
    #![allow(clippy::allow_attributes, unused_variables, clippy::needless_return)]
    re_tracing::profile_function!();
    re_log::trace!(
        "Looking for decoder for {}",
        video.human_readable_codec_string()
    );
    #[cfg(target_arch = "wasm32")]
    return Ok(Box::new(webcodecs::WebVideoDecoder::new(
        video,
        decode_settings.hw_acceleration,
        output_sender,
    )?));
    #[cfg(not(target_arch = "wasm32"))]
    match video.codec {
        #[cfg(feature = "av1")]
        crate::VideoCodec::AV1 => {
            #[cfg(linux_arm64)]
            {
                return Err(DecodeError::NoDav1dOnLinuxArm64);
            }
            // NOTE(review): if `av1` is enabled but neither `linux_arm64` nor
            // `with_dav1d` holds, this arm produces no value and would not
            // compile — presumably the cfg aliases guarantee one of the two
            // blocks is present; confirm against the build script.
            #[cfg(with_dav1d)]
            {
                re_log::trace!("Decoding AV1…");
                // Wrap the synchronous dav1d decoder in the async wrapper.
                return Ok(Box::new(async_decoder_wrapper::AsyncDecoderWrapper::new(
                    debug_name.to_owned(),
                    Box::new(av1::SyncDav1dDecoder::new(debug_name.to_owned())?),
                    output_sender,
                )));
            }
        }
        #[cfg(with_ffmpeg)]
        crate::VideoCodec::H264 | crate::VideoCodec::H265 => Ok(Box::new(FFmpegCliDecoder::new(
            debug_name.to_owned(),
            &video.encoding_details,
            output_sender,
            decode_settings.ffmpeg_path.clone(),
            &video.codec,
        )?)),
        _ => Err(DecodeError::UnsupportedCodec(
            video.human_readable_codec_string(),
        )),
    }
}
/// One compressed video sample (frame) as submitted to a decoder.
pub struct Chunk {
    // Whether this chunk is a sync sample (decodable without prior chunks,
    // presumably a keyframe — confirm against the demuxer).
    pub is_sync: bool,
    // The compressed bytes of this sample.
    pub data: Vec<u8>,
    // Index of this sample within the video.
    pub sample_idx: usize,
    // Frame number of this sample.
    pub frame_nr: u32,
    // Decode timestamp (DTS) of this sample.
    pub decode_timestamp: Time,
    // Presentation timestamp (PTS) of this sample.
    pub presentation_timestamp: Time,
    // Display duration, if known.
    pub duration: Option<Time>,
}
impl re_byte_size::SizeBytes for Chunk {
    /// Heap usage of a chunk: only the compressed `data` buffer is counted;
    /// all the remaining fields are stored inline.
    fn heap_size_bytes(&self) -> u64 {
        // Exhaustively destructure so that adding a field to `Chunk` forces
        // this accounting to be revisited.
        let Self {
            data,
            is_sync: _,
            sample_idx: _,
            frame_nr: _,
            decode_timestamp: _,
            presentation_timestamp: _,
            duration: _,
        } = self;
        data.heap_size_bytes()
    }
}
/// Pixel data of a decoded frame (native targets).
#[cfg(not(target_arch = "wasm32"))]
pub struct FrameContent {
    // Raw pixel bytes, laid out as described by `format`.
    pub data: Vec<u8>,
    // Width in pixels.
    pub width: u32,
    // Height in pixels.
    pub height: u32,
    // How `data` is to be interpreted.
    pub format: PixelFormat,
}
#[cfg(not(target_arch = "wasm32"))]
impl re_byte_size::SizeBytes for FrameContent {
    /// Heap usage of decoded frame content: only the pixel buffer is counted.
    fn heap_size_bytes(&self) -> u64 {
        // Exhaustive destructure: adding a field forces a revisit here.
        let Self {
            data,
            format: _,
            width: _,
            height: _,
        } = self;
        data.heap_size_bytes()
    }
}
#[cfg(not(target_arch = "wasm32"))]
impl FrameContent {
    /// Width in pixels.
    ///
    /// Accessor mirroring the wasm `FrameContent` API so callers can be
    /// target-agnostic.
    pub fn width(&self) -> u32 {
        self.width
    }

    /// Height in pixels (accessor mirroring the wasm `FrameContent` API).
    pub fn height(&self) -> u32 {
        self.height
    }
}
/// Pixel data of a decoded frame (web: a WebCodecs video frame handle).
#[cfg(target_arch = "wasm32")]
pub type FrameContent = webcodecs::WebVideoFrame;

#[cfg(target_arch = "wasm32")]
impl FrameContent {
    /// Width in pixels, taken from the frame's display width.
    pub fn width(&self) -> u32 {
        self.display_width()
    }

    /// Height in pixels, taken from the frame's display height.
    pub fn height(&self) -> u32 {
        self.display_height()
    }
}
/// Metadata accompanying a decoded frame.
#[derive(Debug, Clone)]
pub struct FrameInfo {
    // Whether the source sample was a sync sample, if known.
    pub is_sync: Option<bool>,
    // Index of the source sample, if known.
    pub sample_idx: Option<SampleIndex>,
    // Frame number, if known.
    pub frame_nr: Option<u32>,
    // When this frame should be shown.
    pub presentation_timestamp: Time,
    // Display duration of this frame, if known.
    pub duration: Option<Time>,
    // Decode timestamp associated with this frame, if known.
    pub latest_decode_timestamp: Option<Time>,
}
impl FrameInfo {
    /// The time span during which this frame should be displayed.
    ///
    /// If the duration is unknown, the range is open-ended up to
    /// [`Time::MAX`].
    pub fn presentation_time_range(&self) -> std::ops::Range<Time> {
        let start = self.presentation_timestamp;
        let end = match self.duration {
            Some(duration) => start + duration,
            None => Time::MAX,
        };
        start..end
    }
}
/// One decoded video frame: pixel content plus metadata.
pub struct Frame {
    // The decoded pixel data.
    pub content: FrameContent,
    // Metadata for this frame.
    pub info: FrameInfo,
}
impl re_byte_size::SizeBytes for Frame {
    /// Heap usage of a frame: only the pixel content is accounted for.
    fn heap_size_bytes(&self) -> u64 {
        // Exhaustive destructure: adding a field to `Frame` forces a revisit.
        let Self { info: _, content } = self;
        content.heap_size_bytes()
    }
}
/// Pixel layout of decoded (native) frame data.
#[derive(Debug, Clone)]
pub enum PixelFormat {
    /// 8-bit RGB, no alpha.
    Rgb8Unorm,

    /// 8-bit RGBA.
    Rgba8Unorm,

    /// Planar YUV with the given layout, value range, and matrix coefficients.
    Yuv {
        // Chroma subsampling layout.
        layout: YuvPixelLayout,
        // Limited vs. full value range.
        range: YuvRange,
        // Matrix coefficients for color conversion.
        coefficients: YuvMatrixCoefficients,
    },
}
impl PixelFormat {
    /// Average number of bits per pixel, accounting for chroma subsampling.
    pub fn bits_per_pixel(&self) -> u32 {
        match self {
            Self::Rgb8Unorm => 24,  // 3 × 8-bit channels
            Self::Rgba8Unorm => 32, // 4 × 8-bit channels
            Self::Yuv { layout, .. } => match layout {
                // 8-bit luma plane plus (possibly subsampled) 8-bit chroma planes:
                YuvPixelLayout::Y_U_V444 => 24,
                YuvPixelLayout::Y_U_V422 => 16,
                YuvPixelLayout::Y_U_V420 => 12,
                // Luma only:
                YuvPixelLayout::Y400 => 8,
            },
        }
    }
}
/// Supported planar YUV pixel layouts (chroma subsampling schemes).
#[expect(non_camel_case_types)] // Underscored names mirror the usual subsampling notation.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum YuvPixelLayout {
    /// Full-resolution chroma (4:4:4).
    Y_U_V444,
    /// Chroma halved horizontally (4:2:2).
    Y_U_V422,
    /// Chroma halved in both dimensions (4:2:0).
    Y_U_V420,
    /// Luma only — monochrome.
    Y400,
}
/// Numeric value range used by YUV samples.
#[derive(Debug, Clone, Copy)]
pub enum YuvRange {
    /// Limited ("studio") range.
    Limited,
    /// Full range, using the entire numeric range of the sample type.
    Full,
}
/// Matrix coefficients used for YUV ↔ RGB conversion.
#[derive(Debug, Clone, Copy)]
pub enum YuvMatrixCoefficients {
    /// No conversion matrix — values are used as-is.
    Identity,
    /// BT.601 (standard-definition video).
    Bt601,
    /// BT.709 (high-definition video).
    Bt709,
}
/// User preference for hardware-accelerated video decoding.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Hash)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
pub enum DecodeHardwareAcceleration {
    /// Let the decoder backend pick.
    #[default]
    Auto,
    /// Prefer a software decoder.
    PreferSoftware,
    /// Prefer a hardware decoder.
    PreferHardware,
}
/// Settings controlling how a video decoder is created (see [`new_decoder`]).
#[derive(Debug, Clone, PartialEq, Eq, Default, Hash)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
pub struct DecodeSettings {
    // Hardware acceleration preference. In this file it is only read by the
    // WebCodecs (wasm) path of `new_decoder`.
    pub hw_acceleration: DecodeHardwareAcceleration,
    // Explicit path to the FFmpeg executable; `None` presumably falls back to
    // a default lookup — confirm in the FFmpeg decoder.
    #[cfg(not(target_arch = "wasm32"))]
    pub ffmpeg_path: Option<std::path::PathBuf>,
}
impl std::fmt::Display for DecodeHardwareAcceleration {
    /// Human-readable label for each preference, e.g. for a settings UI.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let label = match self {
            Self::Auto => "Auto",
            Self::PreferSoftware => "Prefer software",
            Self::PreferHardware => "Prefer hardware",
        };
        f.write_str(label)
    }
}
impl std::str::FromStr for DecodeHardwareAcceleration {
    type Err = ();

    /// Parses a preference case-insensitively, treating `-` like `_` and
    /// accepting the shorthand spellings `software` / `hardware`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let normalized = s.trim().to_lowercase().replace('-', "_");
        if normalized == "auto" {
            Ok(Self::Auto)
        } else if matches!(normalized.as_str(), "prefer_software" | "software") {
            Ok(Self::PreferSoftware)
        } else if matches!(normalized.as_str(), "prefer_hardware" | "hardware") {
            Ok(Self::PreferHardware)
        } else {
            Err(())
        }
    }
}