use crate::frame::Frame;
/// Playback lifecycle state shared by [`VideoDecoder`] and [`VideoPlayer`].
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum VideoState {
    /// No media loaded, or playback has been stopped/reset.
    Idle,
    /// Actively playing (the decoder also enters this state once it has
    /// produced at least one frame).
    Playing,
    /// Playback suspended; resumable via `play`.
    Paused,
    /// Playback reached the end of the media.
    /// NOTE(review): nothing in this chunk ever sets `Ended` — presumably
    /// written by code outside this view; confirm.
    Ended,
}
/// Decoded video frames reuse the crate-wide [`Frame`] type.
pub type VideoFrame = Frame;
/// Software H.264 decoder.
///
/// On desktop targets this wraps an `openh264` decoder instance; on
/// Android/iOS the struct is a stub and decoding is expected to go through
/// the platform's native pipeline instead (see `decode_nal`'s mobile variant).
pub struct VideoDecoder {
    // `None` when openh264 decoder construction failed at `new()` time.
    #[cfg(not(any(target_os = "android", target_os = "ios")))]
    decoder: Option<openh264::decoder::Decoder>,
    state: VideoState,
}
impl VideoDecoder {
pub fn new() -> Self {
#[cfg(not(any(target_os = "android", target_os = "ios")))]
{
let decoder = openh264::decoder::Decoder::new().ok();
Self {
decoder,
state: VideoState::Idle,
}
}
#[cfg(any(target_os = "android", target_os = "ios"))]
{
Self {
state: VideoState::Idle,
}
}
}
#[cfg(not(any(target_os = "android", target_os = "ios")))]
pub fn decode_nal(&mut self, nal_data: &[u8]) -> Option<Frame> {
let decoder = self.decoder.as_mut()?;
let yuv = decoder.decode(nal_data).ok()??;
let (uv_w, uv_h) = yuv.dimensions_uv();
let w = uv_w * 2;
let h = uv_h * 2;
let mut rgba = vec![0u8; w * h * 4];
yuv.write_rgba8(&mut rgba);
self.state = VideoState::Playing;
Some(Frame::from_rgba(rgba, w as u32, h as u32))
}
#[cfg(any(target_os = "android", target_os = "ios"))]
pub fn decode_nal(&mut self, _nal_data: &[u8]) -> Option<Frame> {
tracing::warn!("Use native_stream for mobile video decoding");
None
}
pub fn state(&self) -> VideoState {
self.state
}
}
impl Default for VideoDecoder {
    /// Equivalent to [`VideoDecoder::new`].
    fn default() -> Self {
        Self::new()
    }
}
/// Thread-safe video player facade.
///
/// All mutable state lives behind an `Arc<Mutex<_>>`, so every control
/// method takes `&self`. On Android/iOS each control method additionally
/// forwards the command over the `blinc_core` native bridge.
pub struct VideoPlayer {
    state: std::sync::Arc<std::sync::Mutex<VideoPlayerInner>>,
}
/// Mutable player state guarded by the mutex in [`VideoPlayer`].
struct VideoPlayerInner {
    playback_state: VideoState,
    // Volume in [0.0, 1.0]; clamped on write by `set_volume`.
    volume: f32,
    // Most recently pushed decoded frame, if any.
    current_frame: Option<Frame>,
    // Path/URL of the loaded media, if any.
    source: Option<String>,
    position_ms: u64,
    // Total duration in ms. NOTE(review): nothing in this chunk writes a
    // non-zero value — presumably updated by platform callbacks elsewhere;
    // confirm.
    duration_ms: u64,
}
impl VideoPlayer {
    /// Creates an idle player: full volume, no media, position/duration 0.
    pub fn new() -> Self {
        Self {
            state: std::sync::Arc::new(std::sync::Mutex::new(VideoPlayerInner {
                playback_state: VideoState::Idle,
                volume: 1.0,
                current_frame: None,
                source: None,
                position_ms: 0,
                duration_ms: 0,
            })),
        }
    }

    /// Loads a media source by path/URL, resetting state to `Idle` and the
    /// position to 0. On mobile, forwards the load to the native layer.
    pub fn load(&self, path: &str) {
        // Scope the guard so the mutex is released before the native-bridge
        // FFI call; holding a lock across FFI can stall every other thread
        // that touches player state.
        {
            let mut inner = self.state.lock().unwrap();
            inner.source = Some(path.to_string());
            inner.playback_state = VideoState::Idle;
            inner.position_ms = 0;
        }
        #[cfg(any(target_os = "android", target_os = "ios"))]
        {
            let _ = blinc_core::native_bridge::native_call::<(), _>(
                "video",
                "load",
                vec![blinc_core::native_bridge::NativeValue::String(
                    path.to_string(),
                )],
            );
        }
    }

    /// Starts or resumes playback.
    pub fn play(&self) {
        // Lock released at the end of this statement, before any FFI below.
        self.state.lock().unwrap().playback_state = VideoState::Playing;
        #[cfg(any(target_os = "android", target_os = "ios"))]
        {
            let _ = blinc_core::native_bridge::native_call::<(), _>("video", "play", ());
        }
    }

    /// Pauses playback, keeping the current position and frame.
    pub fn pause(&self) {
        self.state.lock().unwrap().playback_state = VideoState::Paused;
        #[cfg(any(target_os = "android", target_os = "ios"))]
        {
            let _ = blinc_core::native_bridge::native_call::<(), _>("video", "pause", ());
        }
    }

    /// Stops playback: state back to `Idle`, position reset, current frame
    /// dropped. The loaded source is kept.
    pub fn stop(&self) {
        // Scoped guard: release the mutex before the native call.
        {
            let mut inner = self.state.lock().unwrap();
            inner.playback_state = VideoState::Idle;
            inner.position_ms = 0;
            inner.current_frame = None;
        }
        #[cfg(any(target_os = "android", target_os = "ios"))]
        {
            let _ = blinc_core::native_bridge::native_call::<(), _>("video", "stop", ());
        }
    }

    /// Seeks to an absolute position in milliseconds.
    pub fn seek(&self, position_ms: u64) {
        self.state.lock().unwrap().position_ms = position_ms;
        #[cfg(any(target_os = "android", target_os = "ios"))]
        {
            let _ = blinc_core::native_bridge::native_call::<(), _>(
                "video",
                "seek",
                vec![blinc_core::native_bridge::NativeValue::Int64(
                    position_ms as i64,
                )],
            );
        }
    }

    /// Sets the volume, clamped to `[0.0, 1.0]`.
    pub fn set_volume(&self, volume: f32) {
        // Clamp once up front so the stored value and the value forwarded to
        // the native layer agree (previously the raw, unclamped value was
        // sent over the bridge while the clamped one was stored).
        let volume = volume.clamp(0.0, 1.0);
        self.state.lock().unwrap().volume = volume;
        #[cfg(any(target_os = "android", target_os = "ios"))]
        {
            let _ = blinc_core::native_bridge::native_call::<(), _>(
                "video",
                "set_volume",
                vec![blinc_core::native_bridge::NativeValue::Float32(volume)],
            );
        }
    }

    /// Returns a clone of the most recently pushed frame, if any.
    pub fn current_frame(&self) -> Option<Frame> {
        self.state.lock().unwrap().current_frame.clone()
    }

    /// Replaces the current frame (called by whatever feeds decoded frames).
    pub fn push_frame(&self, frame: Frame) {
        self.state.lock().unwrap().current_frame = Some(frame);
    }

    /// Current playback state.
    pub fn playback_state(&self) -> VideoState {
        self.state.lock().unwrap().playback_state
    }

    /// Current position in milliseconds.
    pub fn position_ms(&self) -> u64 {
        self.state.lock().unwrap().position_ms
    }

    /// Total duration in milliseconds (0 when unknown).
    pub fn duration_ms(&self) -> u64 {
        self.state.lock().unwrap().duration_ms
    }

    /// Current volume in `[0.0, 1.0]`.
    pub fn volume(&self) -> f32 {
        self.state.lock().unwrap().volume
    }

    /// True while the player is in the `Playing` state.
    pub fn is_playing(&self) -> bool {
        self.playback_state() == VideoState::Playing
    }
}
impl Default for VideoPlayer {
    /// Equivalent to [`VideoPlayer::new`].
    fn default() -> Self {
        Self::new()
    }
}
/// [`crate::player::Player`] implementation that delegates every call to the
/// inherent `VideoPlayer` method of the same name. Fully-qualified
/// `VideoPlayer::…` paths make the inherent target explicit.
impl crate::player::Player for VideoPlayer {
    fn play(&self) {
        VideoPlayer::play(self);
    }
    fn pause(&self) {
        VideoPlayer::pause(self);
    }
    fn stop(&self) {
        VideoPlayer::stop(self);
    }
    fn seek(&self, position_ms: u64) {
        VideoPlayer::seek(self, position_ms);
    }
    fn position_ms(&self) -> u64 {
        VideoPlayer::position_ms(self)
    }
    fn duration_ms(&self) -> u64 {
        VideoPlayer::duration_ms(self)
    }
    fn volume(&self) -> f32 {
        VideoPlayer::volume(self)
    }
    fn set_volume(&self, volume: f32) {
        VideoPlayer::set_volume(self, volume);
    }
    fn is_playing(&self) -> bool {
        VideoPlayer::is_playing(self)
    }
}