use std::{num::NonZeroU32, sync::Arc};
use mediadecode::{
Timebase, Timestamp,
color::{ColorInfo, ColorMatrix, ColorPrimaries, ColorRange, ColorTransfer},
frame::{Dimensions, Plane, Rect, VideoFrame},
future::local::VideoStreamDecoder,
packet::{PacketFlags, VideoPacket},
pixel_format::PixelFormat,
};
use wasm_bindgen::{JsCast, JsValue};
use wasm_bindgen_futures::{JsFuture, spawn_local};
use crate::{
adapter::WebCodecs,
buffer::WebCodecsBuffer,
codec_id::VideoCodecId,
codec_string,
dispatch::{
allocate_value_handler, allocate_void_handler, free_value_handler, free_void_handler,
make_value_trampoline, make_void_trampoline,
},
error::{Error, VideoDecodeError},
extras::{VideoFrameExtra, VideoPacketExtra},
state::{DecodedFrame, PendingOutput, PendingPush, SharedState},
};
/// Upper bound on decode submissions allowed in flight at once (browser
/// decode queue + frames being copied + pushes awaiting delivery).
pub const MAX_PENDING_DECODE: u32 = 32;
/// Hard cap on `VideoFrame.allocationSize` for a single decoded frame;
/// larger frames are rejected rather than copied.
pub const MAX_FRAME_ALLOCATION_BYTES: u32 = 256 * 1024 * 1024;
/// Budget for decoded-frame bytes in flight (queued + pending copies +
/// pending pushes) before output admission is refused.
pub const MAX_INFLIGHT_BYTES: u64 = 256 * 1024 * 1024;
/// Maximum accepted size of a codec `description` (extradata) blob.
pub const MAX_CODEC_DESCRIPTION_BYTES: usize = 64 * 1024;
/// Maximum accepted size of one encoded input packet.
pub const MAX_INPUT_PACKET_BYTES: usize = 16 * 1024 * 1024;
/// Budget for encoded input bytes awaiting decode; exceeding it is treated
/// as a reorder stall and closes the decoder.
pub const MAX_INPUT_INFLIGHT_BYTES: u64 = 64 * 1024 * 1024;
/// Maximum number of decoded frames held in the output queue.
pub const MAX_QUEUED_OUTPUT: u32 = 16;
/// A decoded video frame fully copied out of the browser into Rust-owned
/// plane buffers, plus the metadata needed to rebuild a `VideoFrame`.
pub(crate) struct DecodedVideoFrame {
    // Caller-supplied PTS restored from the side map (not the WebCodecs timestamp).
    pts: Option<Timestamp>,
    // Duration reported by WebCodecs, in the MICROS timebase.
    duration: Option<Timestamp>,
    // Coded (full-buffer) width/height.
    dimensions: Dimensions,
    // Visible sub-rectangle within the coded area, if reported and valid.
    visible_rect: Option<Rect>,
    format: PixelFormat,
    // Up to four (buffer, stride) pairs; unused slots hold empty buffers.
    planes: [(WebCodecsBuffer, u32); 4],
    // Number of valid entries in `planes`.
    plane_count: u8,
    // Keyframe flag carried over from the originating packet.
    key: bool,
    // Total copied bytes, used for in-flight byte accounting.
    byte_size: u32,
    color: ColorInfo,
}
impl DecodedVideoFrame {
    /// Bundles all copied plane data and metadata for one decoded frame.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        pts: Option<Timestamp>,
        duration: Option<Timestamp>,
        dimensions: Dimensions,
        visible_rect: Option<Rect>,
        format: PixelFormat,
        planes: [(WebCodecsBuffer, u32); 4],
        plane_count: u8,
        key: bool,
        byte_size: u32,
        color: ColorInfo,
    ) -> Self {
        Self {
            pts,
            duration,
            dimensions,
            visible_rect,
            format,
            planes,
            plane_count,
            key,
            byte_size,
            color,
        }
    }
    /// Presentation timestamp as originally submitted by the caller, if any.
    pub const fn pts(&self) -> Option<Timestamp> {
        self.pts
    }
    /// Frame duration reported by WebCodecs, if any.
    pub const fn duration(&self) -> Option<Timestamp> {
        self.duration
    }
    /// Coded (full-buffer) dimensions.
    pub const fn dimensions(&self) -> Dimensions {
        self.dimensions
    }
    /// Visible sub-rectangle within the coded area, if one was reported.
    pub const fn visible_rect(&self) -> Option<Rect> {
        self.visible_rect
    }
    /// Pixel format of the copied planes.
    pub const fn format(&self) -> PixelFormat {
        self.format
    }
    /// Consumes the frame, yielding all four (buffer, stride) slots; slots
    /// past `plane_count` contain empty buffers.
    pub fn into_planes(self) -> [(WebCodecsBuffer, u32); 4] {
        self.planes
    }
    /// Number of valid plane entries.
    pub const fn plane_count(&self) -> u8 {
        self.plane_count
    }
    /// Whether the originating packet was flagged as a keyframe.
    pub const fn key(&self) -> bool {
        self.key
    }
    /// Total copied byte size, used for in-flight byte accounting.
    pub const fn byte_size(&self) -> u32 {
        self.byte_size
    }
    /// Color metadata read from the frame's `VideoColorSpace`.
    pub const fn color(&self) -> ColorInfo {
        self.color
    }
}
/// Microsecond timebase (1 / 1_000_000) used for WebCodecs timestamps.
/// The `match` proves the denominator non-zero in a `const` context
/// (const `unwrap` is avoided to keep MSRV low).
const MICROS: Timebase = match NonZeroU32::new(1_000_000) {
    Some(d) => Timebase::new(1, d),
    None => unreachable!(),
};
/// Async video stream decoder backed by the browser's WebCodecs
/// `VideoDecoder`, with byte/queue budgets and PTS side-mapping.
pub struct WebCodecsVideoStreamDecoder {
    decoder: web_sys::VideoDecoder,
    // Retained so flush() can reconfigure after reset().
    config: web_sys::VideoDecoderConfig,
    // State shared with the JS output/error/dequeue callbacks.
    state: SharedState<DecodedVideoFrame>,
    time_base: Timebase,
    // Dispatch-table slot ids backing the JS trampolines; freed on Drop.
    output_slot_id: u64,
    error_slot_id: u64,
    dequeue_slot_id: u64,
    // Set by send_eof; cleared by flush.
    eof: bool,
}
impl WebCodecsVideoStreamDecoder {
pub fn open(
codec: VideoCodecId,
extradata: Option<&[u8]>,
coded_dimensions: Dimensions,
time_base: Timebase,
) -> Result<Self, VideoDecodeError> {
let codec_string = codec_string::for_video(codec, extradata)?;
Self::open_with_codec_string(&codec_string, extradata, coded_dimensions, time_base)
}
pub fn open_with_codec_string(
codec_string: &str,
description: Option<&[u8]>,
coded_dimensions: Dimensions,
time_base: Timebase,
) -> Result<Self, VideoDecodeError> {
let state = SharedState::<DecodedVideoFrame>::try_new(
MAX_QUEUED_OUTPUT as usize + MAX_PENDING_DECODE as usize,
)
.map_err(VideoDecodeError::Js)?;
let output_handler: Box<dyn FnMut(JsValue)> = {
let state = state.clone();
Box::new(move |value: JsValue| {
let Ok(frame) = value.dyn_into::<web_sys::VideoFrame>() else {
return;
};
let submission_id = frame.timestamp() as i64;
let (resolved, just_closed) = 'budget: {
let mut inner = state.borrow_mut();
if inner.is_closed() {
break 'budget (None, false);
}
let current_epoch = inner.epoch();
let floor = inner.epoch_id_floor();
let lookup = inner.remove_pending_output(submission_id);
let mut missing_close: bool = false;
let record_opt = match lookup {
Some(record) if record.epoch() == current_epoch => Some(record),
Some(_) => None,
None if submission_id < floor => None,
None => {
missing_close = inner.record_close(Error::from_js(JsValue::from_str(
"WebCodecs video output: current-generation submission_id \
has no side-map entry; refusing to fabricate a record \
(would corrupt PTS for reordered codecs)",
)));
None
}
};
let Some(record) = record_opt else {
break 'budget (None, missing_close);
};
let new_frame_bytes: u64 = {
let copy_rect = web_sys::DomRectInit::new();
copy_rect.set_x(0.0);
copy_rect.set_y(0.0);
copy_rect.set_width(frame.coded_width() as f64);
copy_rect.set_height(frame.coded_height() as f64);
let copy_opts = web_sys::VideoFrameCopyToOptions::new();
copy_opts.set_rect(©_rect);
match frame.allocation_size_with_options(©_opts) {
Ok(n) => u64::from(n).saturating_mul(2),
Err(_) => {
let just_closed = inner.record_close(Error::from_js(JsValue::from_str(
"WebCodecs VideoFrame.allocation_size_with_options failed; \
cannot enforce per-frame byte cap, refusing admission",
)));
break 'budget (None, just_closed);
}
}
};
let projected_bytes = inner
.queue_bytes()
.saturating_add(inner.pending_copy_bytes())
.saturating_add(inner.pending_push_bytes())
.saturating_add(new_frame_bytes);
if inner.pending_copies() >= MAX_PENDING_DECODE {
let just_closed = inner.record_close(Error::from_js(JsValue::from_str(
"WebCodecs output burst exceeded MAX_PENDING_DECODE; \
video frames would be lost",
)));
break 'budget (None, just_closed);
}
if new_frame_bytes > MAX_FRAME_ALLOCATION_BYTES as u64 {
let just_closed = inner.record_close(Error::from_js(JsValue::from_str(
"WebCodecs decoded frame allocation_size exceeded \
MAX_FRAME_ALLOCATION_BYTES",
)));
break 'budget (None, just_closed);
}
if projected_bytes > MAX_INFLIGHT_BYTES {
let just_closed = inner.record_close(Error::from_js(JsValue::from_str(
"WebCodecs output burst would exceed MAX_INFLIGHT_BYTES; \
video frames would be lost",
)));
break 'budget (None, just_closed);
}
inner.add_pending_copy(new_frame_bytes);
(Some((record, current_epoch, new_frame_bytes)), false)
};
let Some((record, captured_epoch, new_frame_bytes)) = resolved else {
frame.close();
if just_closed {
state.invoke_close_hook();
}
state.wake_all();
return;
};
let sequence = state.allocate_output_sequence();
let admission = VideoCopyAdmission {
state: state.clone(),
frame: Some(frame),
byte_estimate: new_frame_bytes,
sequence,
armed: true,
};
spawn_local(handle_video_frame(
admission,
state.clone(),
captured_epoch,
record,
new_frame_bytes,
sequence,
));
})
};
let error_handler: Box<dyn FnMut(JsValue)> = {
let state = state.clone();
Box::new(move |value: JsValue| {
state.borrow_mut().record_close(Error::from_js(value));
state.wake_all();
})
};
let dequeue_handler: Box<dyn FnMut()> = {
let state = state.clone();
Box::new(move || {
state.wake_all();
})
};
let output_slot_id = allocate_value_handler(output_handler).map_err(VideoDecodeError::Js)?;
let error_slot_id = match allocate_value_handler(error_handler) {
Ok(id) => id,
Err(err) => {
free_value_handler(output_slot_id);
return Err(VideoDecodeError::Js(err));
}
};
let dequeue_slot_id = match allocate_void_handler(dequeue_handler) {
Ok(id) => id,
Err(err) => {
free_value_handler(output_slot_id);
free_value_handler(error_slot_id);
return Err(VideoDecodeError::Js(err));
}
};
let output_trampoline = make_value_trampoline(output_slot_id);
let error_trampoline = make_value_trampoline(error_slot_id);
let dequeue_trampoline = make_void_trampoline(dequeue_slot_id);
let init = web_sys::VideoDecoderInit::new(&error_trampoline, &output_trampoline);
let decoder = match web_sys::VideoDecoder::new(&init) {
Ok(d) => d,
Err(err) => {
free_value_handler(output_slot_id);
free_value_handler(error_slot_id);
free_void_handler(dequeue_slot_id);
return Err(VideoDecodeError::Js(Error::from_js(err)));
}
};
decoder.set_ondequeue(Some(&dequeue_trampoline));
let config = web_sys::VideoDecoderConfig::new(codec_string);
config.set_coded_width(coded_dimensions.width());
config.set_coded_height(coded_dimensions.height());
if let Some(bytes) = description {
if bytes.len() > MAX_CODEC_DESCRIPTION_BYTES {
free_value_handler(output_slot_id);
free_value_handler(error_slot_id);
free_void_handler(dequeue_slot_id);
return Err(VideoDecodeError::Js(Error::from_static(
"codec description exceeds MAX_CODEC_DESCRIPTION_BYTES",
)));
}
let arr = match try_new_uint8_array(bytes.len() as u32) {
Ok(a) => a,
Err(err) => {
free_value_handler(output_slot_id);
free_value_handler(error_slot_id);
free_void_handler(dequeue_slot_id);
return Err(VideoDecodeError::Js(Error::from_js(err)));
}
};
arr.copy_from(bytes);
config.set_description_u8_array(&arr);
}
if let Err(err) = decoder.configure(&config) {
let _ = decoder.close();
free_value_handler(output_slot_id);
free_value_handler(error_slot_id);
free_void_handler(dequeue_slot_id);
return Err(VideoDecodeError::Js(Error::from_js(err)));
}
state.set_close_hook_video(decoder.clone());
Ok(Self {
decoder,
config,
state,
time_base,
output_slot_id,
error_slot_id,
dequeue_slot_id,
eof: false,
})
}
pub const fn time_base(&self) -> Timebase {
self.time_base
}
fn check_closed(&self) -> Result<(), VideoDecodeError> {
if let Some(err) = self.state.borrow().last_error_clone() {
return Err(VideoDecodeError::Closed(err));
}
Ok(())
}
async fn await_decode_room(&self) -> Result<(), VideoDecodeError> {
loop {
{
let inner = self.state.borrow();
if let Some(err) = inner.last_error_clone() {
return Err(VideoDecodeError::Closed(err));
}
if inner.queue_len() >= MAX_QUEUED_OUTPUT as usize {
return Err(VideoDecodeError::OutputFull);
}
let in_flight_bytes = inner
.queue_bytes()
.saturating_add(inner.pending_copy_bytes())
.saturating_add(inner.pending_push_bytes())
.saturating_add(inner.last_measured_frame_bytes());
if in_flight_bytes > MAX_INFLIGHT_BYTES {
return Err(VideoDecodeError::OutputFull);
}
if pending_decode(&self.decoder, &inner) < MAX_PENDING_DECODE as usize {
return Ok(());
}
}
let _guard = self.state.dequeue_waker_guard();
let state = self.state.clone();
let decoder = self.decoder.clone();
core::future::poll_fn(move |cx| {
let (closed, busy, queue_full) = {
let inner = state.borrow();
(
inner.is_closed(),
pending_decode(&decoder, &inner) >= MAX_PENDING_DECODE as usize,
inner.queue_len() >= MAX_QUEUED_OUTPUT as usize,
)
};
if closed || !busy || queue_full {
return core::task::Poll::Ready(());
}
state.borrow_mut().set_dequeue_waker(cx.waker().clone());
let (closed_2, busy_2, queue_full_2) = {
let inner = state.borrow();
(
inner.is_closed(),
pending_decode(&decoder, &inner) >= MAX_PENDING_DECODE as usize,
inner.queue_len() >= MAX_QUEUED_OUTPUT as usize,
)
};
if closed_2 || !busy_2 || queue_full_2 {
state.borrow_mut().clear_dequeue_waker();
core::task::Poll::Ready(())
} else {
core::task::Poll::Pending
}
})
.await;
}
}
}
/// Total decode work currently in flight: chunks queued inside the browser
/// decoder, plus frames being copied, plus pushes awaiting delivery.
fn pending_decode(
    decoder: &web_sys::VideoDecoder,
    inner: &crate::state::Inner<DecodedVideoFrame>,
) -> usize {
    let browser_queue = decoder.decode_queue_size() as usize;
    let copies_in_flight = inner.pending_copies() as usize;
    let pushes_waiting = inner.pending_pushes_len();
    browser_queue
        .saturating_add(copies_in_flight)
        .saturating_add(pushes_waiting)
}
impl Drop for WebCodecsVideoStreamDecoder {
    fn drop(&mut self) {
        // Bump the epoch first so any in-flight async copy tasks observe a
        // stale epoch and discard their results.
        self.state.bump_epoch();
        self.decoder.set_ondequeue(None);
        // Ignore close() errors — the decoder may already be closed.
        let _ = self.decoder.close();
        self.state.clear_close_hook();
        // Release the dispatch-table slots backing the JS trampolines.
        free_value_handler(self.output_slot_id);
        free_value_handler(self.error_slot_id);
        free_void_handler(self.dequeue_slot_id);
    }
}
impl VideoStreamDecoder for WebCodecsVideoStreamDecoder {
    type Adapter = WebCodecs;
    type Buffer = WebCodecsBuffer;
    type Error = VideoDecodeError;
    /// Submits one encoded packet to the browser decoder.
    ///
    /// The chunk's WebCodecs timestamp is *not* the PTS: it carries a
    /// synthetic `submission_id`, and the real PTS/key/size are stored in a
    /// side map (`PendingOutput`) keyed by that id — this keeps PTS mapping
    /// correct for codecs that reorder frames.
    async fn send_packet(
        &mut self,
        packet: &VideoPacket<VideoPacketExtra, Self::Buffer>,
    ) -> Result<(), Self::Error> {
        self.check_closed()?;
        if self.eof {
            return Err(VideoDecodeError::AtEof);
        }
        // Backpressure: wait until queue/byte/decode budgets have room.
        self.await_decode_room().await?;
        let key = packet.flags().contains(PacketFlags::KEY);
        let chunk_type = if key {
            web_sys::EncodedVideoChunkType::Key
        } else {
            web_sys::EncodedVideoChunkType::Delta
        };
        let bytes = packet.data().as_ref();
        // Oversized packets are treated as fatal: close rather than feed.
        if bytes.len() > MAX_INPUT_PACKET_BYTES {
            let err = Error::from_js(JsValue::from_str(
                "encoded video packet exceeds MAX_INPUT_PACKET_BYTES",
            ));
            self.state.borrow_mut().record_close(err.clone());
            self.state.invoke_close_hook();
            self.state.wake_all();
            return Err(VideoDecodeError::Closed(err));
        }
        {
            let inner = self.state.borrow();
            if inner
                .pending_input_bytes()
                .saturating_add(bytes.len() as u64)
                > MAX_INPUT_INFLIGHT_BYTES
            {
                // Must drop the shared borrow before taking a mutable one.
                drop(inner);
                let err = Error::from_js(JsValue::from_str(
                    "encoded video pending_input_bytes would exceed MAX_INPUT_INFLIGHT_BYTES; \
                     decoder is reorder-stalled — flush() to recover",
                ));
                self.state.borrow_mut().record_close(err.clone());
                self.state.invoke_close_hook();
                self.state.wake_all();
                return Err(VideoDecodeError::Closed(err));
            }
        }
        // Copy the packet into a JS-owned buffer via the fallible allocator.
        let buf = match try_new_uint8_array(bytes.len() as u32) {
            Ok(b) => b,
            Err(err) => {
                let err = Error::from_js(err);
                self.state.borrow_mut().record_close(err.clone());
                self.state.invoke_close_hook();
                self.state.wake_all();
                return Err(VideoDecodeError::Closed(err));
            }
        };
        buf.copy_from(bytes);
        let submission_id = self.state.borrow_mut().next_submission_id();
        let init = web_sys::EncodedVideoChunkInit::new(&buf.into(), 0, chunk_type);
        // Smuggle the submission id through the chunk's timestamp field.
        init.set_timestamp_f64(submission_id as f64);
        if let Some(d) = packet.duration() {
            let duration_us = d.rescale_to(MICROS).pts();
            init.set_duration_f64(duration_us as f64);
        }
        let chunk = web_sys::EncodedVideoChunk::new(&init).map_err(Error::from_js)?;
        // Record the side-map entry BEFORE decode() so the output callback
        // can always find it.
        let insert_res = {
            let mut inner = self.state.borrow_mut();
            let epoch = inner.epoch();
            inner.insert_pending_output(
                submission_id,
                PendingOutput::new(epoch, packet.pts(), key, bytes.len() as u32, 0),
            )
        };
        if let Err(err) = insert_res {
            self.state.invoke_close_hook();
            self.state.wake_all();
            return Err(VideoDecodeError::Closed(err));
        }
        if let Err(e) = self.decoder.decode(&chunk) {
            // decode() never accepted the chunk; roll back the side map.
            self.state.borrow_mut().remove_pending_output(submission_id);
            return Err(VideoDecodeError::Js(Error::from_js(e)));
        }
        Ok(())
    }
    /// Moves the next decoded frame into `dst`.
    ///
    /// Waits while decode work is in flight; otherwise fails fast with
    /// `NoFrameReady` (or `Eof` once `send_eof` has completed).
    async fn receive_frame(
        &mut self,
        dst: &mut VideoFrame<PixelFormat, VideoFrameExtra, Self::Buffer>,
    ) -> Result<(), Self::Error> {
        let frame = wait_for_frame(&self.state, &self.decoder, self.eof).await?;
        // Pull all metadata out before consuming the frame for its planes.
        let dimensions = frame.dimensions();
        let format = frame.format();
        let plane_count = frame.plane_count();
        let pts = frame.pts();
        let duration = frame.duration();
        let visible_rect = frame.visible_rect();
        let color = frame.color();
        let key = frame.key();
        let [p0, p1, p2, p3] = frame.into_planes();
        let planes = [
            Plane::new(p0.0, p0.1),
            Plane::new(p1.0, p1.1),
            Plane::new(p2.0, p2.1),
            Plane::new(p3.0, p3.1),
        ];
        *dst = VideoFrame::new(
            dimensions,
            format,
            planes,
            plane_count,
            VideoFrameExtra::new(key),
        )
        .with_pts(pts)
        .with_duration(duration)
        .with_visible_rect(visible_rect)
        .with_color(color);
        Ok(())
    }
    /// Drains the browser decoder via `VideoDecoder.flush()` and marks EOF.
    ///
    /// If this future is cancelled mid-flush the guard poisons the decoder
    /// (epoch bump + recorded close), because a half-flushed decoder's
    /// output mapping can no longer be trusted.
    async fn send_eof(&mut self) -> Result<(), Self::Error> {
        self.check_closed()?;
        struct EofCancelGuard<F> {
            state: SharedState<F>,
            completed: bool,
        }
        impl<F> Drop for EofCancelGuard<F> {
            fn drop(&mut self) {
                if !self.completed {
                    self.state.bump_epoch();
                    // NOTE(review): unlike FlushGuard in flush(), the bool
                    // returned by record_close is ignored here and the close
                    // hook is invoked unconditionally — confirm the asymmetry
                    // is intended.
                    self
                        .state
                        .borrow_mut()
                        .record_close(Error::from_js(wasm_bindgen::JsValue::from_str(
                            "send_eof was cancelled — call flush() to recover",
                        )));
                    self.state.invoke_close_hook();
                }
                self.state.wake_all();
            }
        }
        let mut guard = EofCancelGuard {
            state: self.state.clone(),
            completed: false,
        };
        let promise = self.decoder.flush();
        JsFuture::from(promise).await.map_err(Error::from_js)?;
        {
            // Flush delivered everything it will deliver; drop leftovers.
            let mut inner = self.state.borrow_mut();
            inner.clear_pending_outputs();
        }
        self.eof = true;
        guard.completed = true;
        Ok(())
    }
    /// Resets the decoder for reuse (e.g. after a seek): bumps the epoch so
    /// stale outputs are discarded, then reset() + re-configure().
    async fn flush(&mut self) -> Result<(), Self::Error> {
        self.state.bump_epoch();
        self.eof = false;
        struct FlushGuard<'a> {
            state: &'a SharedState<DecodedVideoFrame>,
            completed: bool,
        }
        impl Drop for FlushGuard<'_> {
            fn drop(&mut self) {
                if !self.completed {
                    let just_closed =
                        self
                            .state
                            .borrow_mut()
                            .record_close(Error::from_js(JsValue::from_str(
                                "video flush did not complete (reset/configure failed); \
                                 decoder is closed",
                            )));
                    if just_closed {
                        self.state.invoke_close_hook();
                    }
                    self.state.wake_all();
                }
            }
        }
        let mut guard = FlushGuard {
            state: &self.state,
            completed: false,
        };
        self.decoder.reset().map_err(Error::from_js)?;
        self
            .decoder
            .configure(&self.config)
            .map_err(Error::from_js)?;
        guard.completed = true;
        // A successful flush clears any prior terminal error.
        self.state.borrow_mut().clear_last_error();
        self.state.wake_dequeue();
        Ok(())
    }
}
/// Waits for the next decoded frame from the shared queue.
///
/// Fails fast (`NoFrameReady` / `Eof`) when the queue is empty AND no decode
/// work is active, so callers are never parked on a decoder that will not
/// produce anything.
async fn wait_for_frame(
    state: &SharedState<DecodedVideoFrame>,
    decoder: &web_sys::VideoDecoder,
    eof: bool,
) -> Result<DecodedVideoFrame, VideoDecodeError> {
    loop {
        let popped = {
            let mut inner = state.borrow_mut();
            if let Some(err) = inner.last_error_clone() {
                return Err(VideoDecodeError::Closed(err));
            }
            // pop_queue takes the head's byte size so queue accounting can
            // be decremented atomically with the pop.
            let head_bytes = inner
                .peek_queue_head()
                .map(|f| f.byte_size() as u64)
                .unwrap_or(0);
            let frame = inner.pop_queue(head_bytes);
            if frame.is_none() {
                let active_decode_work = inner.pending_copies() > 0 || decoder.decode_queue_size() > 0;
                if !active_decode_work {
                    if eof {
                        return Err(VideoDecodeError::Eof);
                    }
                    return Err(VideoDecodeError::NoFrameReady);
                }
            }
            frame
        };
        if let Some(frame) = popped.map(DecodedFrame::into_frame) {
            // Queue shrank: senders blocked on room can make progress.
            state.wake_dequeue();
            return Ok(frame);
        }
        let _guard = state.receiver_waker_guard();
        // The waker is registered under the same borrow as the emptiness
        // check, so a push between check and registration cannot be missed.
        core::future::poll_fn(|cx| {
            let mut inner = state.borrow_mut();
            let active_decode_work = inner.pending_copies() > 0 || decoder.decode_queue_size() > 0;
            if !inner.queue_is_empty() || inner.is_closed() || !active_decode_work {
                core::task::Poll::Ready(())
            } else {
                inner.set_receiver_waker(cx.waker().clone());
                core::task::Poll::Pending
            }
        })
        .await;
    }
}
/// Budget reservation for an admitted-but-not-yet-copied frame.
///
/// If dropped while still `armed` (e.g. the spawned copy task is torn down
/// before running), Drop closes the frame, releases the reserved bytes, and
/// delivers a `Skipped` push so the output sequence stays gap-free.
struct VideoCopyAdmission {
    // Shared decoder state holding the byte accounting.
    state: SharedState<DecodedVideoFrame>,
    // The admitted JS frame; taken by `disarm` or closed on drop.
    frame: Option<web_sys::VideoFrame>,
    // Bytes reserved via add_pending_copy for this frame.
    byte_estimate: u64,
    // Output-ordering sequence number allocated at admission time.
    sequence: u32,
    // True until `disarm` transfers cleanup responsibility to the task.
    armed: bool,
}
impl Drop for VideoCopyAdmission {
    fn drop(&mut self) {
        // After disarm() the copy task owns cleanup; nothing to do here.
        if !self.armed {
            return;
        }
        // Release the browser-side frame resource.
        if let Some(f) = self.frame.take() {
            f.close();
        }
        // Undo the admission-time byte reservation.
        self.state.borrow_mut().sub_pending_copy(self.byte_estimate);
        // Fill this sequence slot with Skipped so ordered delivery of later
        // frames is not blocked by the missing entry.
        self
            .state
            .deliver_pending_push(self.sequence, crate::state::PendingPush::Skipped);
        self.state.wake_all();
    }
}
impl VideoCopyAdmission {
    /// Takes ownership of the frame and disables the Drop cleanup path;
    /// panics if the frame was already taken.
    fn disarm(&mut self) -> web_sys::VideoFrame {
        self.armed = false;
        let taken = self.frame.take();
        taken.expect("VideoCopyAdmission::disarm called twice")
    }
}
/// Async task that copies one admitted frame out of the browser, then either
/// delivers it (in submission-sequence order) or records the failure.
///
/// Invariants: the pending-copy byte reservation is released exactly once,
/// the JS frame is closed exactly once (via guards), and every sequence slot
/// receives either a Ready or Skipped push.
async fn handle_video_frame(
    mut admission: VideoCopyAdmission,
    state: SharedState<DecodedVideoFrame>,
    epoch: u64,
    record: PendingOutput,
    byte_estimate: u64,
    sequence: u32,
) {
    // Take over cleanup responsibility from the admission guard.
    let frame = admission.disarm();
    drop(admission);
    let frame_guard = JsFrameGuard(Some(frame));
    // Fast path: a flush/drop happened between admission and task start.
    if state.epoch() != epoch {
        state.borrow_mut().sub_pending_copy(byte_estimate);
        state.wake_all();
        return;
    }
    let result = copy_video_frame(frame_guard.frame(), record.user_pts(), record.key()).await;
    // Close the JS frame as soon as the copy is done.
    drop(frame_guard);
    enum Outcome<F> {
        Push(PendingPush<F>),
        Errored {
            just_closed: bool,
            push: PendingPush<F>,
        },
        StaleEpoch,
        AlreadyClosed,
    }
    // Decide the outcome under a single borrow; side effects that may
    // re-enter state (hooks, wakes) happen after the borrow is released.
    let outcome: Outcome<DecodedVideoFrame> = {
        let mut inner = state.borrow_mut();
        inner.sub_pending_copy(byte_estimate);
        if inner.epoch() != epoch {
            Outcome::StaleEpoch
        } else if inner.is_closed() {
            Outcome::AlreadyClosed
        } else {
            match result {
                Ok(decoded) => {
                    // Re-check the byte budget against the actual copied size.
                    let bytes = decoded.byte_size() as u64;
                    let projected = inner
                        .queue_bytes()
                        .saturating_add(inner.pending_push_bytes())
                        .saturating_add(bytes);
                    if projected > MAX_INFLIGHT_BYTES {
                        let just_closed = inner.record_close(Error::from_js(JsValue::from_str(
                            "video output queue exceeded MAX_INFLIGHT_BYTES at copy completion",
                        )));
                        Outcome::Errored {
                            just_closed,
                            push: PendingPush::Skipped,
                        }
                    } else {
                        Outcome::Push(PendingPush::Ready(DecodedFrame::new(decoded), bytes))
                    }
                }
                Err(err) => {
                    let just_closed = inner.record_close(err);
                    Outcome::Errored {
                        just_closed,
                        push: PendingPush::Skipped,
                    }
                }
            }
        }
    };
    match outcome {
        Outcome::Push(push) => {
            state.deliver_pending_push(sequence, push);
            state.wake_dequeue();
        }
        Outcome::Errored { just_closed, push } => {
            // Still deliver (Skipped) so the sequence has no gap.
            state.deliver_pending_push(sequence, push);
            if just_closed {
                state.invoke_close_hook();
            }
            state.wake_all();
        }
        Outcome::StaleEpoch | Outcome::AlreadyClosed => {
            state.wake_all();
        }
    }
}
/// RAII wrapper guaranteeing `VideoFrame.close()` runs exactly once.
struct JsFrameGuard(Option<web_sys::VideoFrame>);
impl JsFrameGuard {
    /// Borrows the wrapped frame; panics if it was already taken.
    fn frame(&self) -> &web_sys::VideoFrame {
        match self.0.as_ref() {
            Some(frame) => frame,
            None => panic!("frame already taken"),
        }
    }
}
impl Drop for JsFrameGuard {
    fn drop(&mut self) {
        let Some(frame) = self.0.take() else { return };
        frame.close();
    }
}
/// Fallibly constructs a `Uint8Array` of `size` bytes.
///
/// Goes through `Reflect::construct` so a constructor failure surfaces as
/// `Err(JsValue)` — presumably to catch allocation failures as catchable
/// errors rather than thrown exceptions (NOTE: verify against the direct
/// `Uint8Array::new` binding's behavior).
pub(crate) fn try_new_uint8_array(size: u32) -> Result<js_sys::Uint8Array, JsValue> {
    let uint8_ctor: js_sys::Function =
        js_sys::Reflect::get(&js_sys::global(), &JsValue::from_str("Uint8Array"))?
            .dyn_into()
            .map_err(|_| JsValue::from_str("globalThis.Uint8Array is not a function"))?;
    let ctor_args = js_sys::Array::new();
    ctor_args.push(&JsValue::from_f64(f64::from(size)));
    js_sys::Reflect::construct(&uint8_ctor, &ctor_args)?
        .dyn_into::<js_sys::Uint8Array>()
        .map_err(|_| JsValue::from_str("constructed value is not a Uint8Array"))
}
async fn copy_video_frame(
frame: &web_sys::VideoFrame,
user_pts: Option<Timestamp>,
key: bool,
) -> Result<DecodedVideoFrame, Error> {
let dimensions = Dimensions::new(frame.coded_width(), frame.coded_height());
let width = dimensions.width();
let height = dimensions.height();
let format = match frame.format() {
Some(f) => map_pixel_format(f)?,
None => {
return Err(Error::from_js(JsValue::from_str(
"VideoFrame.format is null",
)));
}
};
let pts = user_pts;
let duration = frame.duration().map(|d| Timestamp::new(d as i64, MICROS));
let visible_rect = read_visible_rect(frame, dimensions);
let color = read_color_info(frame);
let copy_rect = web_sys::DomRectInit::new();
copy_rect.set_x(0.0);
copy_rect.set_y(0.0);
copy_rect.set_width(width as f64);
copy_rect.set_height(height as f64);
let copy_opts = web_sys::VideoFrameCopyToOptions::new();
copy_opts.set_rect(©_rect);
let size_u32 = frame
.allocation_size_with_options(©_opts)
.map_err(Error::from_js)?;
if size_u32 > MAX_FRAME_ALLOCATION_BYTES {
return Err(Error::from_js(JsValue::from_str(&format!(
"VideoFrame.allocationSize = {size_u32} exceeds MAX_FRAME_ALLOCATION_BYTES = {MAX_FRAME_ALLOCATION_BYTES}"
))));
}
let size = size_u32 as usize;
let dst = try_new_uint8_array(size as u32).map_err(Error::from_js)?;
let promise = frame.copy_to_with_u8_array_and_options(&dst, ©_opts);
let layouts_js = JsFuture::from(promise).await.map_err(Error::from_js)?;
let mut bytes: Vec<u8> = Vec::new();
bytes
.try_reserve_exact(size)
.map_err(|_| Error::from_static("Rust allocation for VideoFrame copy failed"))?;
bytes.resize(size, 0);
dst.copy_to(&mut bytes);
drop(dst);
let layouts: js_sys::Array = layouts_js.dyn_into().map_err(|_| {
Error::from_js(JsValue::from_str(
"VideoFrame.copyTo did not resolve to PlaneLayout array",
))
})?;
let raw_layout_count = layouts.length() as usize;
let expected = expected_plane_layout(format, width, height);
if let Some(layout) = expected
&& raw_layout_count != layout.count
{
return Err(Error::from_js(JsValue::from_str(&format!(
"VideoFrame.copyTo PlaneLayout count = {raw_layout_count} does not match the \
expected {} planes for format {format:?}",
layout.count,
))));
}
let plane_count = if let Some(layout) = expected {
layout.count
} else {
raw_layout_count.min(4)
};
if plane_count == 0 {
return Err(Error::from_js(JsValue::from_str(
"VideoFrame.copyTo returned empty PlaneLayout array",
)));
}
let mut layout_pairs: [(u32, u32); 4] = [(0, 0); 4];
for (i, slot) in layout_pairs.iter_mut().enumerate().take(plane_count) {
let layout = layouts
.get(i as u32)
.dyn_into::<web_sys::PlaneLayout>()
.map_err(|_| {
Error::from_js(JsValue::from_str(
"VideoFrame.copyTo PlaneLayout entry has wrong type",
))
})?;
*slot = (layout.get_offset(), layout.get_stride());
}
let total_size = size;
for i in 0..plane_count {
let offset = layout_pairs[i].0 as usize;
let next_offset = if i + 1 < plane_count {
layout_pairs[i + 1].0 as usize
} else {
total_size
};
if offset > total_size {
return Err(Error::from_js(JsValue::from_str(&format!(
"PlaneLayout[{i}].offset = {offset} exceeds allocation_size = {total_size}"
))));
}
if next_offset > total_size {
return Err(Error::from_js(JsValue::from_str(&format!(
"PlaneLayout[{i}] runs past allocation_size = {total_size}"
))));
}
if next_offset < offset {
return Err(Error::from_js(JsValue::from_str(&format!(
"PlaneLayout offsets are non-monotonic at plane {i}: {offset} → {next_offset}"
))));
}
if let Some(layout) = expected {
let PlaneDim { rows, row_bytes } = layout.planes[i];
let stride = layout_pairs[i].1;
let plane_len_u32 = u32::try_from(next_offset - offset).unwrap_or(u32::MAX);
if stride < row_bytes {
return Err(Error::from_js(JsValue::from_str(&format!(
"PlaneLayout[{i}] stride = {stride} is below the expected row \
bytes = {row_bytes} for format {format:?}"
))));
}
if let Some(rows_minus_one) = rows.checked_sub(1) {
let last_row_offset = rows_minus_one
.checked_mul(stride)
.and_then(|n| n.checked_add(row_bytes));
match last_row_offset {
Some(end) if end <= plane_len_u32 => {}
_ => {
return Err(Error::from_js(JsValue::from_str(&format!(
"PlaneLayout[{i}] stride = {stride} × rows = {rows} \
+ row_bytes = {row_bytes} overruns plane_len = {plane_len_u32} \
for format {format:?}"
))));
}
}
}
}
}
let arc: Arc<Vec<u8>> = Arc::new(bytes);
let mut planes: [(WebCodecsBuffer, u32); 4] = [
(WebCodecsBuffer::empty(), 0),
(WebCodecsBuffer::empty(), 0),
(WebCodecsBuffer::empty(), 0),
(WebCodecsBuffer::empty(), 0),
];
for i in 0..plane_count {
let (offset, stride) = layout_pairs[i];
let next_offset = if i + 1 < plane_count {
layout_pairs[i + 1].0 as usize
} else {
total_size
};
let plane_len = next_offset - offset as usize;
planes[i] = (
WebCodecsBuffer::from_arc_range(Arc::clone(&arc), offset as usize, plane_len),
stride,
);
}
Ok(DecodedVideoFrame::new(
pts,
duration,
dimensions,
visible_rect,
format,
planes,
plane_count as u8,
key,
total_size as u32,
color,
))
}
fn read_color_info(frame: &web_sys::VideoFrame) -> ColorInfo {
let cs = frame.color_space();
let primaries = match cs.primaries() {
Some(web_sys::VideoColorPrimaries::Bt709) => ColorPrimaries::Bt709,
Some(web_sys::VideoColorPrimaries::Bt470bg) => ColorPrimaries::Bt470Bg,
Some(web_sys::VideoColorPrimaries::Smpte170m) => ColorPrimaries::Smpte170M,
Some(web_sys::VideoColorPrimaries::Bt2020) => ColorPrimaries::Bt2020,
Some(web_sys::VideoColorPrimaries::Smpte432) => ColorPrimaries::SmpteEg432,
_ => ColorPrimaries::Unspecified,
};
let transfer = match cs.transfer() {
Some(web_sys::VideoTransferCharacteristics::Bt709) => ColorTransfer::Bt709,
Some(web_sys::VideoTransferCharacteristics::Smpte170m) => ColorTransfer::Smpte170M,
Some(web_sys::VideoTransferCharacteristics::Iec6196621) => ColorTransfer::Iec6196621,
Some(web_sys::VideoTransferCharacteristics::Linear) => ColorTransfer::Linear,
Some(web_sys::VideoTransferCharacteristics::Pq) => ColorTransfer::SmpteSt2084Pq,
Some(web_sys::VideoTransferCharacteristics::Hlg) => ColorTransfer::AribStdB67Hlg,
_ => ColorTransfer::Unspecified,
};
let matrix = match cs.matrix() {
Some(web_sys::VideoMatrixCoefficients::Bt709) => ColorMatrix::Bt709,
Some(web_sys::VideoMatrixCoefficients::Bt470bg) => ColorMatrix::Bt601,
Some(web_sys::VideoMatrixCoefficients::Smpte170m) => ColorMatrix::Bt601,
Some(web_sys::VideoMatrixCoefficients::Bt2020Ncl) => ColorMatrix::Bt2020Ncl,
_ => ColorMatrix::default(),
};
let range = match cs.full_range() {
Some(true) => ColorRange::Full,
Some(false) => ColorRange::Limited,
None => ColorRange::Unspecified,
};
ColorInfo::UNSPECIFIED
.with_primaries(primaries)
.with_transfer(transfer)
.with_matrix(matrix)
.with_range(range)
}
/// Reads the frame's visible rectangle, returning `None` unless every
/// coordinate is finite, non-negative, fits in `u32`, and the rectangle
/// lies entirely within the coded area.
fn read_visible_rect(frame: &web_sys::VideoFrame, coded: Dimensions) -> Option<Rect> {
    let dom = frame.visible_rect()?;
    // A coordinate is usable only if it is a finite value in [0, u32::MAX].
    let to_u32 = |v: f64| -> Option<u32> {
        (v.is_finite() && v >= 0.0 && v <= u32::MAX as f64).then_some(v as u32)
    };
    let x = to_u32(dom.x())?;
    let y = to_u32(dom.y())?;
    let w = to_u32(dom.width())?;
    let h = to_u32(dom.height())?;
    // Right/bottom edges must not overflow u32 nor exceed the coded frame.
    let x_end = x.checked_add(w)?;
    let y_end = y.checked_add(h)?;
    if x_end > coded.width() || y_end > coded.height() {
        return None;
    }
    Some(Rect::new(x, y, w, h))
}
/// Expected geometry of one plane: row count and payload bytes per row
/// (excluding any stride padding).
#[derive(Clone, Copy, Default)]
struct PlaneDim {
    rows: u32,
    row_bytes: u32,
}
/// Shorthand constructor for `PlaneDim`.
const fn dim(rows: u32, row_bytes: u32) -> PlaneDim {
    PlaneDim { rows, row_bytes }
}
/// Expected layout for a pixel format: up to four plane dimensions, of
/// which only the first `count` entries are meaningful.
#[derive(Clone, Copy)]
struct PlaneLayout {
    planes: [PlaneDim; 4],
    count: usize,
}
/// Returns the plane geometry expected for `format` at the given coded
/// dimensions, or `None` for formats without a known layout (those skip
/// strict validation in `copy_video_frame`).
fn expected_plane_layout(format: PixelFormat, width: u32, height: u32) -> Option<PlaneLayout> {
    use PixelFormat as P;
    // Chroma dimensions round UP for odd-sized 4:2:0 / 4:2:2 frames.
    let halfw = width.div_ceil(2);
    let halfh = height.div_ceil(2);
    // ×2 row bytes for 10/12-bit formats (two bytes per sample).
    let w2 = width.saturating_mul(2);
    let halfw2 = halfw.saturating_mul(2);
    // ×4 row bytes for packed 8-bit RGBA-family formats.
    let w4 = width.saturating_mul(4);
    let none = PlaneDim::default();
    let (planes, count): ([PlaneDim; 4], usize) = match format {
        P::Yuv420p => (
            [
                dim(height, width),
                dim(halfh, halfw),
                dim(halfh, halfw),
                none,
            ],
            3,
        ),
        P::Yuv420p10Le | P::Yuv420p12Le => (
            [
                dim(height, w2),
                dim(halfh, halfw2),
                dim(halfh, halfw2),
                none,
            ],
            3,
        ),
        P::Yuva420p => (
            [
                dim(height, width),
                dim(halfh, halfw),
                dim(halfh, halfw),
                dim(height, width),
            ],
            4,
        ),
        P::Yuva420p10Le | P::Yuva420p12Le => (
            [
                dim(height, w2),
                dim(halfh, halfw2),
                dim(halfh, halfw2),
                dim(height, w2),
            ],
            4,
        ),
        P::Yuv422p => (
            [
                dim(height, width),
                dim(height, halfw),
                dim(height, halfw),
                none,
            ],
            3,
        ),
        P::Yuv422p10Le | P::Yuv422p12Le => (
            [
                dim(height, w2),
                dim(height, halfw2),
                dim(height, halfw2),
                none,
            ],
            3,
        ),
        P::Yuva422p => (
            [
                dim(height, width),
                dim(height, halfw),
                dim(height, halfw),
                dim(height, width),
            ],
            4,
        ),
        P::Yuva422p10Le | P::Yuva422p12Le => (
            [
                dim(height, w2),
                dim(height, halfw2),
                dim(height, halfw2),
                dim(height, w2),
            ],
            4,
        ),
        // 4:4:4: every plane is full-size, so the repeat form suffices; the
        // fourth entry is unused when count is 3.
        P::Yuv444p => ([dim(height, width); 4], 3),
        P::Yuv444p10Le | P::Yuv444p12Le => ([dim(height, w2); 4], 3),
        P::Yuva444p => ([dim(height, width); 4], 4),
        P::Yuva444p10Le | P::Yuva444p12Le => ([dim(height, w2); 4], 4),
        // NV12: full-size luma plane plus half-height interleaved UV plane.
        P::Nv12 => ([dim(height, width), dim(halfh, width), none, none], 2),
        P::Rgba | P::Rgbx | P::Bgra | P::Bgrx => ([dim(height, w4), none, none, none], 1),
        _ => return None,
    };
    Some(PlaneLayout { planes, count })
}
/// Maps a WebCodecs `VideoPixelFormat` to our `PixelFormat`.
///
/// # Errors
/// Returns an error for any variant without a defined mapping, so unknown
/// formats fail loudly instead of being copied with a wrong layout.
fn map_pixel_format(fmt: web_sys::VideoPixelFormat) -> Result<PixelFormat, Error> {
    use web_sys::VideoPixelFormat as W;
    Ok(match fmt {
        W::I420 => PixelFormat::Yuv420p,
        W::I420p10 => PixelFormat::Yuv420p10Le,
        W::I420p12 => PixelFormat::Yuv420p12Le,
        W::I420a => PixelFormat::Yuva420p,
        W::I420ap10 => PixelFormat::Yuva420p10Le,
        W::I420ap12 => PixelFormat::Yuva420p12Le,
        W::I422 => PixelFormat::Yuv422p,
        W::I422p10 => PixelFormat::Yuv422p10Le,
        W::I422p12 => PixelFormat::Yuv422p12Le,
        W::I422a => PixelFormat::Yuva422p,
        W::I422ap10 => PixelFormat::Yuva422p10Le,
        W::I422ap12 => PixelFormat::Yuva422p12Le,
        W::I444 => PixelFormat::Yuv444p,
        W::I444p10 => PixelFormat::Yuv444p10Le,
        W::I444p12 => PixelFormat::Yuv444p12Le,
        W::I444a => PixelFormat::Yuva444p,
        W::I444ap10 => PixelFormat::Yuva444p10Le,
        W::I444ap12 => PixelFormat::Yuva444p12Le,
        W::Nv12 => PixelFormat::Nv12,
        W::Rgba => PixelFormat::Rgba,
        W::Rgbx => PixelFormat::Rgbx,
        W::Bgra => PixelFormat::Bgra,
        W::Bgrx => PixelFormat::Bgrx,
        other => {
            return Err(Error::from_js(JsValue::from_str(&format!(
                "unsupported VideoPixelFormat: {other:?}"
            ))));
        }
    })
}