use crate::frame::Frame;
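/// Playback lifecycle of a video source: `Idle` until a source is loaded
/// (and again after `stop()`), `Playing`/`Paused` during playback, and
/// `Ended` once the last frame has been shown.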
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Hash)]
pub enum VideoState {
    #[default]
    Idle,
    Playing,
    Paused,
    Ended,
}
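/// Decoded video frames reuse the shared [`Frame`] type.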
pub type VideoFrame = Frame;
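/// Software H.264 decoder backed by openh264 on desktop targets.
/// On Android/iOS/wasm no decoder is constructed; those targets decode
/// through the platform instead (native bridge or `<video>` element).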
pub struct VideoDecoder {
    #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
    decoder: Option<openh264::decoder::Decoder>,
    state: VideoState,
}
impl VideoDecoder {
    pub fn new() -> Self {
        #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
        {
            use openh264::decoder::{DecoderConfig, Flush};
            // NoFlush keeps pictures buffered inside openh264 until asked
            // for, so `flush_remaining` must be called at end of stream.
            let config = DecoderConfig::new().flush_after_decode(Flush::NoFlush);
            let api = openh264::OpenH264API::from_source();
            // A failed initialization leaves `decoder` as None; decode_nal
            // then becomes a no-op rather than panicking.
            let decoder = openh264::decoder::Decoder::with_api_config(api, config).ok();
            Self {
                decoder,
                state: VideoState::Idle,
            }
        }
        #[cfg(any(target_os = "android", target_os = "ios", target_arch = "wasm32"))]
        {
            Self {
                state: VideoState::Idle,
            }
        }
    }
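    /// Decodes Annex B data (start-code-prefixed NAL units, typically one
    /// access unit per call) and returns an RGBA frame when the decoder has
    /// one ready. Returns `None` while the decoder is still buffering, if
    /// decoding failed, or if openh264 never initialized.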
    #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
    pub fn decode_nal(&mut self, nal_data: &[u8]) -> Option<Frame> {
        let decoder = self.decoder.as_mut()?;
        match decoder.decode(nal_data) {
            Ok(Some(yuv)) => {
                // Recover luma dimensions from the chroma plane; this assumes
                // 4:2:0 subsampling, where each chroma plane is half the luma
                // size in both dimensions.
                let (uv_w, uv_h) = yuv.dimensions_uv();
                let w = uv_w * 2;
                let h = uv_h * 2;
                let mut rgba = vec![0u8; w * h * 4];
                yuv.write_rgba8(&mut rgba);
                self.state = VideoState::Playing;
                Some(Frame::from_rgba(rgba, w as u32, h as u32))
            }
            Ok(None) => None,
            Err(e) => {
                tracing::warn!("decode error: {e:?}");
                None
            }
        }
    }
    #[cfg(any(target_os = "android", target_os = "ios", target_arch = "wasm32"))]
    pub fn decode_nal(&mut self, _nal_data: &[u8]) -> Option<Frame> {
        tracing::warn!("Use native_stream for mobile video decoding");
        None
    }
    pub fn state(&self) -> VideoState {
        self.state
    }
}
impl Default for VideoDecoder {
    fn default() -> Self {
        Self::new()
    }
}
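/// Cross-platform video player with reactive playback signals.
///
/// Desktop targets decode H.264-in-MP4 on a background thread via openh264;
/// wasm delegates to an HTML `<video>` element through `crate::web_video`;
/// Android/iOS forward commands over the native bridge.
///
/// A minimal usage sketch (hedged: assumes a blinc app is running so the
/// animation scheduler and keyed-state store exist, and a desktop target;
/// the asset path is illustrative):
///
/// ```ignore
/// let player = VideoPlayer::new();
/// player.load_file("assets/clip.mp4"); // reads + starts decoding (desktop)
/// player.set_volume(0.5);
/// player.play();
/// // Each frame: redraw `player.current_frame()` whenever
/// // `player.frame_generation()` has advanced since the last draw.
/// ```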
#[derive(Clone)]
pub struct VideoPlayer {
    state: std::sync::Arc<std::sync::Mutex<VideoPlayerInner>>,
    #[allow(dead_code)]
    player_id: u64,
    /// Mirrors whether playback is active, for reactive UI.
    pub playing_signal: blinc_core::State<bool>,
    /// Current position in milliseconds.
    pub position_signal: blinc_core::State<u64>,
    /// Total duration in milliseconds (0 until known).
    pub duration_signal: blinc_core::State<u64>,
    /// Volume in `0.0..=1.0`.
    pub volume_signal: blinc_core::State<f32>,
}
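/// State shared between the UI thread, the tick callback, and (on desktop)
/// the decode thread.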
struct VideoPlayerInner {
    playback_state: VideoState,
    volume: f32,
    current_frame: Option<std::sync::Arc<Frame>>,
    source: Option<String>,
    position_ms: u64,
    duration_ms: u64,
    /// Bumped on seek; a decode thread exits once its captured generation
    /// no longer matches.
    #[cfg_attr(target_arch = "wasm32", allow(dead_code))]
    generation: u64,
    /// Bumped whenever `current_frame` changes, so renderers re-upload only
    /// on new frames.
    frame_generation: u64,
    #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
    source_bytes: Option<std::sync::Arc<Vec<u8>>>,
}
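// Monotonically increasing id: gives each player unique signal keys.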
static PLAYER_ID_COUNTER: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0);
impl VideoPlayer {
    pub fn new() -> Self {
        let id = PLAYER_ID_COUNTER.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        let playing_signal = blinc_core::use_state_keyed(&format!("vp:{id}:playing"), || false);
        let position_signal = blinc_core::use_state_keyed(&format!("vp:{id}:position"), || 0u64);
        let duration_signal = blinc_core::use_state_keyed(&format!("vp:{id}:duration"), || 0u64);
        let volume_signal = blinc_core::use_state_keyed(&format!("vp:{id}:volume"), || 1.0f32);
        let state = std::sync::Arc::new(std::sync::Mutex::new(VideoPlayerInner {
            playback_state: VideoState::Idle,
            volume: 1.0,
            current_frame: None,
            source: None,
            position_ms: 0,
            duration_ms: 0,
            generation: 0,
            frame_generation: 0,
            #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
            source_bytes: None,
        }));
        let state_for_tick = state.clone();
        let pos_for_tick = position_signal.clone();
        #[cfg(target_arch = "wasm32")]
        let dur_for_tick = duration_signal.clone();
        #[cfg(target_arch = "wasm32")]
        let playing_for_tick = playing_signal.clone();
        let _tick_id = id;
        // Per-frame tick: on wasm, poll the <video> element for position,
        // duration, new frames, and end of stream; on native targets the
        // decode thread owns position, so the tick only republishes it and
        // keeps redraws flowing while playing.
        if let Some(handle) = blinc_animation::try_get_scheduler() {
            let redraw_handle = handle.clone();
            handle.register_tick_callback(move |_dt| {
                #[cfg(target_arch = "wasm32")]
                {
                    let mut inner = state_for_tick.lock().unwrap();
                    if inner.playback_state == VideoState::Playing {
                        let new_pos = crate::web_video::position_ms(_tick_id);
                        let new_dur = crate::web_video::duration_ms(_tick_id);
                        let pos_changed = inner.position_ms != new_pos;
                        let dur_changed = inner.duration_ms != new_dur;
                        inner.position_ms = new_pos;
                        inner.duration_ms = new_dur;
                        drop(inner);
                        if pos_changed {
                            // Capture the current <video> frame as pixels so
                            // the renderer can composite it like any image.
                            if let Some(frame) = crate::web_video::capture_frame(_tick_id) {
                                let mut inner = state_for_tick.lock().unwrap();
                                inner.current_frame = Some(std::sync::Arc::new(frame));
                                inner.frame_generation += 1;
                            }
                            pos_for_tick.set(new_pos);
                            redraw_handle.request_redraw();
                        }
                        if dur_changed {
                            dur_for_tick.set(new_dur);
                        }
                        if crate::web_video::is_ended(_tick_id) {
                            state_for_tick.lock().unwrap().playback_state = VideoState::Ended;
                            playing_for_tick.set(false);
                        }
                    }
                }
                #[cfg(not(target_arch = "wasm32"))]
                {
                    let inner = state_for_tick.lock().unwrap();
                    if inner.playback_state == VideoState::Playing {
                        let pos = inner.position_ms;
                        drop(inner);
                        pos_for_tick.set(pos);
                        redraw_handle.request_redraw();
                    }
                }
            });
        }
        #[cfg(target_arch = "wasm32")]
        crate::web_video::create(id);
        Self {
            state,
            player_id: id,
            playing_signal,
            position_signal,
            duration_signal,
            volume_signal,
        }
    }
    /// Republishes inner state to the reactive signals: copy out under the
    /// lock, set the signals after releasing it.
    fn sync_signals(&self) {
        let inner = self.state.lock().unwrap();
        let pos = inner.position_ms;
        let dur = inner.duration_ms;
        let vol = inner.volume;
        drop(inner);
        self.position_signal.set(pos);
        self.duration_signal.set(dur);
        self.volume_signal.set(vol);
    }
    /// Records the source path and resets playback. On mobile this also
    /// hands the path to the platform player; on desktop, decoding starts
    /// via `load_file` or `load_bytes`.
    pub fn load(&self, path: &str) {
        {
            let mut inner = self.state.lock().unwrap();
            inner.source = Some(path.to_string());
            inner.playback_state = VideoState::Idle;
            inner.position_ms = 0;
        }
        self.sync_signals();
        #[cfg(any(target_os = "android", target_os = "ios"))]
        {
            let _ = blinc_core::native_bridge::native_call::<(), _>(
                "video",
                "load",
                vec![blinc_core::native_bridge::NativeValue::String(
                    path.to_string(),
                )],
            );
        }
    }
    pub fn play(&self) {
        {
            let mut inner = self.state.lock().unwrap();
            // Playing a finished video restarts it from the beginning.
            if inner.playback_state == VideoState::Ended {
                drop(inner);
                self.replay();
                return;
            }
            inner.playback_state = VideoState::Playing;
        }
        self.playing_signal.set(true);
        if let Some(handle) = blinc_animation::try_get_scheduler() {
            handle.request_redraw();
        }
        #[cfg(any(target_os = "android", target_os = "ios"))]
        {
            let _ = blinc_core::native_bridge::native_call::<(), _>("video", "play", ());
        }
        #[cfg(target_arch = "wasm32")]
        crate::web_video::play(self.player_id);
    }
    pub fn pause(&self) {
        self.state.lock().unwrap().playback_state = VideoState::Paused;
        self.playing_signal.set(false);
        if let Some(handle) = blinc_animation::try_get_scheduler() {
            handle.request_redraw();
        }
        #[cfg(any(target_os = "android", target_os = "ios"))]
        {
            let _ = blinc_core::native_bridge::native_call::<(), _>("video", "pause", ());
        }
        #[cfg(target_arch = "wasm32")]
        crate::web_video::pause(self.player_id);
    }
    pub fn stop(&self) {
        {
            let mut inner = self.state.lock().unwrap();
            inner.playback_state = VideoState::Idle;
            inner.position_ms = 0;
            inner.current_frame = None;
        }
        self.sync_signals();
        #[cfg(any(target_os = "android", target_os = "ios"))]
        {
            let _ = blinc_core::native_bridge::native_call::<(), _>("video", "stop", ());
        }
        #[cfg(target_arch = "wasm32")]
        crate::web_video::pause(self.player_id);
    }
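    /// Desktop seek: bumps the generation (which cancels the running decode
    /// thread) and restarts decoding from the nearest keyframe at or before
    /// the target position.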
    #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
    pub fn seek(&self, position_ms: u64) {
        let bytes = {
            let mut inner = self.state.lock().unwrap();
            inner.playback_state = VideoState::Idle;
            inner.position_ms = position_ms;
            inner.generation += 1;
            inner.source_bytes.clone()
        };
        self.position_signal.set(position_ms);
        // Give the old decode thread a moment to observe the generation bump
        // and exit before spawning its replacement.
        std::thread::sleep(std::time::Duration::from_millis(15));
        if let Some(bytes) = bytes {
            self.start_decode_thread_from(bytes, position_ms);
        }
    }
    #[cfg(target_arch = "wasm32")]
    pub fn seek(&self, position_ms: u64) {
        self.state.lock().unwrap().position_ms = position_ms;
        self.position_signal.set(position_ms);
        crate::web_video::seek(self.player_id, position_ms);
    }
    #[cfg(any(target_os = "android", target_os = "ios"))]
    pub fn seek(&self, position_ms: u64) {
        self.state.lock().unwrap().position_ms = position_ms;
        self.sync_signals();
        let _ = blinc_core::native_bridge::native_call::<(), _>(
            "video",
            "seek",
            vec![blinc_core::native_bridge::NativeValue::Int64(
                position_ms as i64,
            )],
        );
    }
    pub fn set_volume(&self, volume: f32) {
        self.state.lock().unwrap().volume = volume.clamp(0.0, 1.0);
        self.sync_signals();
        #[cfg(any(target_os = "android", target_os = "ios"))]
        {
            let _ = blinc_core::native_bridge::native_call::<(), _>(
                "video",
                "set_volume",
                vec![blinc_core::native_bridge::NativeValue::Float32(volume)],
            );
        }
        #[cfg(target_arch = "wasm32")]
        crate::web_video::set_volume(self.player_id, volume);
    }
    pub fn current_frame(&self) -> Option<std::sync::Arc<Frame>> {
        self.state.lock().unwrap().current_frame.clone()
    }
    pub fn frame_generation(&self) -> u64 {
        self.state.lock().unwrap().frame_generation
    }
    pub fn push_frame(&self, frame: Frame) {
        let mut inner = self.state.lock().unwrap();
        inner.current_frame = Some(std::sync::Arc::new(frame));
        inner.frame_generation += 1;
        drop(inner);
        self.sync_signals();
    }
    pub fn playback_state(&self) -> VideoState {
        self.state.lock().unwrap().playback_state
    }
    pub fn position_ms(&self) -> u64 {
        self.state.lock().unwrap().position_ms
    }
    pub fn duration_ms(&self) -> u64 {
        self.state.lock().unwrap().duration_ms
    }
    pub fn volume(&self) -> f32 {
        self.state.lock().unwrap().volume
    }
    pub fn is_playing(&self) -> bool {
        self.playback_state() == VideoState::Playing
    }
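    /// Reads an MP4 from disk and starts decoding immediately.
    /// Panics if the file cannot be read.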
    #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
    pub fn load_file(&self, path: &str) {
        let bytes =
            std::fs::read(path).unwrap_or_else(|e| panic!("failed to read video: {path}: {e}"));
        self.load_bytes(bytes);
    }
    #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
    pub fn replay(&self) {
        let bytes = {
            let mut inner = self.state.lock().unwrap();
            inner.playback_state = VideoState::Idle;
            inner.position_ms = 0;
            inner.current_frame = None;
            inner.source_bytes.clone()
        };
        self.sync_signals();
        // Let any running decode thread observe the Idle state and exit
        // before the restart.
        std::thread::sleep(std::time::Duration::from_millis(10));
        if let Some(bytes) = bytes {
            self.start_decode_thread(bytes);
        }
    }
    #[cfg(target_arch = "wasm32")]
    pub fn replay(&self) {
        {
            let mut inner = self.state.lock().unwrap();
            inner.playback_state = VideoState::Idle;
            inner.position_ms = 0;
            inner.current_frame = None;
        }
        crate::web_video::seek(self.player_id, 0);
        self.play();
    }
    #[cfg(any(target_os = "android", target_os = "ios"))]
    pub fn replay(&self) {
        let _ = blinc_core::native_bridge::native_call::<(), _>("video", "replay", ());
    }
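    /// Takes ownership of raw MP4 bytes and starts the decode thread. The
    /// wasm variant below instead hands the bytes to the `<video>` element.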
    #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
    pub fn load_bytes(&self, bytes: Vec<u8>) {
        let bytes = std::sync::Arc::new(bytes);
        self.state.lock().unwrap().source_bytes = Some(std::sync::Arc::clone(&bytes));
        self.start_decode_thread(bytes);
    }
    #[cfg(target_arch = "wasm32")]
    pub fn load_bytes(&self, bytes: Vec<u8>) {
        crate::web_video::load_bytes(self.player_id, &bytes);
    }
    #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
    fn start_decode_thread(&self, bytes: std::sync::Arc<Vec<u8>>) {
        self.start_decode_thread_from(bytes, 0);
    }
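    // Decoding pipeline (desktop): parse the MP4 header, find the first
    // H.264 track, capture its SPS/PPS from the avcC box, then spawn a
    // thread that walks samples from the seek keyframe onward, rewrites
    // each sample from AVCC length-prefixed NALs to Annex B start codes,
    // feeds them to openh264, and paces published frames against a wall
    // clock. A generation mismatch or a transition to Idle/Ended makes
    // the thread exit.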
    #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
    fn start_decode_thread_from(&self, bytes: std::sync::Arc<Vec<u8>>, start_ms: u64) {
        let file_size = bytes.len() as u64;
        let mp4 = mp4::Mp4Reader::read_header(std::io::Cursor::new(bytes.as_ref()), file_size)
            .unwrap_or_else(|e| panic!("failed to parse MP4: {e}"));
        let video_track_id = mp4
            .tracks()
            .values()
            .find(|t| t.media_type().ok() == Some(mp4::MediaType::H264))
            .map(|t| t.track_id())
            .unwrap_or_else(|| panic!("no H.264 video track found"));
        let track = &mp4.tracks()[&video_track_id];
        let sample_count = track.sample_count();
        let duration_ms = track.duration().as_millis() as u64;
        let my_generation;
        {
            let mut inner = self.state.lock().unwrap();
            inner.duration_ms = duration_ms;
            inner.playback_state = VideoState::Playing;
            my_generation = inner.generation;
        }
        self.playing_signal.set(true);
        self.sync_signals();
        // The avcC box carries the decoder configuration: the NAL length
        // prefix size and the out-of-band SPS/PPS parameter sets.
        let avc1 = track
            .trak
            .mdia
            .minf
            .stbl
            .stsd
            .avc1
            .as_ref()
            .expect("no AVC1 config in video track");
        let avcc = &avc1.avcc;
        let length_size = avcc.length_size_minus_one + 1;
        let sps_list: Vec<Vec<u8>> = avcc
            .sequence_parameter_sets
            .iter()
            .map(|n| n.bytes.clone())
            .collect();
        let pps_list: Vec<Vec<u8>> = avcc
            .picture_parameter_sets
            .iter()
            .map(|n| n.bytes.clone())
            .collect();
        let state = self.state.clone();
        let playing_sig = self.playing_signal.clone();
        let position_sig = self.position_signal.clone();
        std::thread::spawn(move || {
            let mut decoder = VideoDecoder::new();
            tracing::info!(
                "video: decode start — {} SPS, {} PPS, {} samples, {}ms, length_size={}",
                sps_list.len(),
                pps_list.len(),
                sample_count,
                duration_ms,
                length_size,
            );
            // Re-parse inside the thread: the reader built above borrows the
            // byte buffer and cannot move into this 'static closure.
            let mut mp4 = match mp4::Mp4Reader::read_header(
                std::io::Cursor::new(bytes.as_ref()),
                file_size,
            ) {
                Ok(r) => r,
                Err(e) => {
                    tracing::error!("failed to re-parse MP4: {e}");
                    return;
                }
            };
            // Assume evenly spaced samples; fall back to ~30 fps if the
            // track reports no samples.
            let frame_duration = if sample_count > 0 {
                std::time::Duration::from_millis(duration_ms / sample_count as u64)
            } else {
                std::time::Duration::from_millis(33)
            };
            let mut decoded_count: u32 = 0;
            let mut playback_clock: Option<std::time::Instant> = None;
            // Map the millisecond target onto a 1-based sample index.
            let target_sample = if start_ms > 0 && duration_ms > 0 {
                let s = ((start_ms as f64 / duration_ms as f64) * sample_count as f64) as u32;
                s.min(sample_count).max(1)
            } else {
                1
            };
            // Decoding must start at a keyframe: scan the sync-sample table
            // (stss) for the last keyframe at or before the target sample.
            let track_ref = &mp4.tracks()[&video_track_id];
            let start_sample = if let Some(ref stss) = track_ref.trak.mdia.minf.stbl.stss {
                let mut best = 1u32;
                for &sync_id in &stss.entries {
                    if sync_id <= target_sample {
                        best = sync_id;
                    } else {
                        break;
                    }
                }
                best
            } else {
                // No stss box means every sample is a sync sample.
                target_sample
            };
            tracing::info!(
                "video: seek to {}ms → target sample {}, keyframe {}",
                start_ms,
                target_sample,
                start_sample
            );
            let mut annex_b = Vec::new();
            // Prime the decoder with the out-of-band parameter sets, each
            // prefixed with a 4-byte Annex B start code.
            for sps in &sps_list {
                let mut buf = vec![0u8, 0, 0, 1];
                buf.extend_from_slice(sps);
                decoder.decode_nal(&buf);
            }
            for pps in &pps_list {
                let mut buf = vec![0u8, 0, 0, 1];
                buf.extend_from_slice(pps);
                decoder.decode_nal(&buf);
            }
            // Track whether the next IDR begins a new picture and whether
            // in-band SPS/PPS preceded it in the current access unit.
            let mut new_idr = true;
            let mut sps_seen = false;
            let mut pps_seen = false;
            for sample_idx in start_sample..=sample_count {
                // Honor pause/cancel before touching the next sample.
                loop {
                    let inner = state.lock().unwrap();
                    if inner.generation != my_generation {
                        // A newer seek superseded this thread.
                        return;
                    }
                    let ps = inner.playback_state;
                    drop(inner);
                    match ps {
                        VideoState::Playing => break,
                        VideoState::Paused => {
                            std::thread::sleep(std::time::Duration::from_millis(50));
                            continue;
                        }
                        VideoState::Idle | VideoState::Ended => return,
                    }
                }
                let sample = match mp4.read_sample(video_track_id, sample_idx) {
                    Ok(Some(s)) => s,
                    Ok(None) => break,
                    Err(e) => {
                        tracing::warn!("failed to read sample {sample_idx}: {e}");
                        continue;
                    }
                };
                // Rewrite the sample from AVCC framing (big-endian length
                // prefix per NAL) into Annex B (0x000001 start codes), which
                // is what openh264 consumes.
                annex_b.clear();
                let mut stream = sample.bytes.as_ref();
                while !stream.is_empty() {
                    if stream.len() < length_size as usize {
                        break;
                    }
                    let mut nal_size: u32 = 0;
                    for &byte in stream.iter().take(length_size as usize) {
                        nal_size = (nal_size << 8) | byte as u32;
                    }
                    stream = &stream[length_size as usize..];
                    if nal_size == 0 || nal_size as usize > stream.len() {
                        break;
                    }
                    let nal_data = &stream[..nal_size as usize];
                    let nal_type = nal_data[0] & 0x1F;
                    stream = &stream[nal_size as usize..];
                    match nal_type {
                        7 => sps_seen = true,
                        8 => pps_seen = true,
                        5 => {
                            // Heuristic: the top bit of the first slice-header
                            // byte set means first_mb_in_slice == 0, i.e. this
                            // IDR NAL starts a new picture.
                            if !new_idr && nal_data.len() > 1 && nal_data[1] & 0x80 != 0 {
                                new_idr = true;
                            }
                            // If no parameter sets arrived in-band for this
                            // IDR, inject the out-of-band ones from avcC.
                            if new_idr && !sps_seen && !pps_seen {
                                new_idr = false;
                                for sps in &sps_list {
                                    annex_b.extend_from_slice(&[0, 0, 1]);
                                    annex_b.extend_from_slice(sps);
                                }
                                for pps in &pps_list {
                                    annex_b.extend_from_slice(&[0, 0, 1]);
                                    annex_b.extend_from_slice(pps);
                                }
                            }
                            if new_idr && sps_seen && !pps_seen {
                                for pps in &pps_list {
                                    annex_b.extend_from_slice(&[0, 0, 1]);
                                    annex_b.extend_from_slice(pps);
                                }
                            }
                        }
                        _ => {}
                    }
                    annex_b.extend_from_slice(&[0, 0, 1]);
                    annex_b.extend_from_slice(nal_data);
                    if !new_idr && nal_type == 1 {
                        // First non-IDR slice after an IDR picture: reset the
                        // flags so the next IDR is treated as a fresh picture.
                        new_idr = true;
                        sps_seen = false;
                        pps_seen = false;
                    }
                }
                if let Some(frame) = decoder.decode_nal(&annex_b) {
                    decoded_count += 1;
                    // Frames before the seek target are decoded only to
                    // rebuild decoder state; publish from the target onward.
                    if sample_idx >= target_sample {
                        let mut inner = state.lock().unwrap();
                        inner.current_frame = Some(std::sync::Arc::new(frame));
                        inner.frame_generation += 1;
                        inner.position_ms = (sample_idx as u64 * duration_ms) / sample_count as u64;
                    }
                }
                // Pace against a wall clock started at the first published
                // frame, so decode speed never outruns playback speed.
                if sample_idx >= target_sample {
                    let clock = playback_clock.get_or_insert_with(std::time::Instant::now);
                    let frames_since_start = (sample_idx - target_sample) + 1;
                    let target_time = *clock + frame_duration * frames_since_start;
                    let now = std::time::Instant::now();
                    if target_time > now {
                        std::thread::sleep(target_time - now);
                    }
                }
            }
            // With Flush::NoFlush the decoder may still hold buffered
            // pictures; drain them so the final frames are shown.
            if let Some(ref mut raw_decoder) = decoder.decoder {
                if let Ok(remaining) = raw_decoder.flush_remaining() {
                    for yuv in remaining {
                        // Same 4:2:0 dimension recovery as decode_nal.
                        let (uv_w, uv_h) = yuv.dimensions_uv();
                        let w = uv_w * 2;
                        let h = uv_h * 2;
                        let mut rgba = vec![0u8; w * h * 4];
                        yuv.write_rgba8(&mut rgba);
                        decoded_count += 1;
                        let mut inner = state.lock().unwrap();
                        inner.current_frame = Some(std::sync::Arc::new(Frame::from_rgba(
                            rgba, w as u32, h as u32,
                        )));
                        inner.frame_generation += 1;
                        inner.position_ms = duration_ms;
                    }
                }
            }
            tracing::info!("video: decode complete — {decoded_count}/{sample_count} frames");
            state.lock().unwrap().playback_state = VideoState::Ended;
            playing_sig.set(false);
            position_sig.set(duration_ms);
        });
    }
}
impl Default for VideoPlayer {
    fn default() -> Self {
        Self::new()
    }
}
impl crate::player::Player for VideoPlayer {
    fn play(&self) {
        VideoPlayer::play(self);
    }
    fn pause(&self) {
        VideoPlayer::pause(self);
    }
    fn stop(&self) {
        VideoPlayer::stop(self);
    }
    fn seek(&self, position_ms: u64) {
        VideoPlayer::seek(self, position_ms);
    }
    fn position_ms(&self) -> u64 {
        VideoPlayer::position_ms(self)
    }
    fn duration_ms(&self) -> u64 {
        VideoPlayer::duration_ms(self)
    }
    fn volume(&self) -> f32 {
        VideoPlayer::volume(self)
    }
    fn set_volume(&self, volume: f32) {
        VideoPlayer::set_volume(self, volume);
    }
    fn is_playing(&self) -> bool {
        VideoPlayer::is_playing(self)
    }
}
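
// A hedged sketch of the AVCC length-prefix walk used above, factored into
// a standalone helper purely for the test; `split_avcc` is not part of the
// player's API, and the player's own loop remains inlined.
#[cfg(test)]
mod tests {
    /// Splits an AVCC-framed sample into raw NAL units. Each NAL is preceded
    /// by a big-endian length of `length_size` bytes (1..=4), mirroring the
    /// loop in `start_decode_thread_from`.
    fn split_avcc(mut stream: &[u8], length_size: usize) -> Vec<&[u8]> {
        let mut nals = Vec::new();
        while stream.len() >= length_size {
            // Accumulate the big-endian length prefix byte by byte.
            let mut nal_size = 0usize;
            for &byte in &stream[..length_size] {
                nal_size = (nal_size << 8) | byte as usize;
            }
            stream = &stream[length_size..];
            // A zero or overlong length means the sample is malformed; stop.
            if nal_size == 0 || nal_size > stream.len() {
                break;
            }
            nals.push(&stream[..nal_size]);
            stream = &stream[nal_size..];
        }
        nals
    }

    #[test]
    fn avcc_length_prefix_walk() {
        // Two NALs behind 4-byte big-endian length prefixes: a 2-byte IDR
        // slice (type 5) and a 1-byte non-IDR slice (type 1).
        let data = [0, 0, 0, 2, 0x65, 0xAA, 0, 0, 0, 1, 0x41];
        let nals = split_avcc(&data, 4);
        assert_eq!(nals, vec![&[0x65, 0xAA][..], &[0x41][..]]);
        // The NAL type lives in the low five bits of the first byte.
        assert_eq!(nals[0][0] & 0x1F, 5);
        assert_eq!(nals[1][0] & 0x1F, 1);
    }
}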