use std::sync::{Arc, Mutex};
use crate::frame::{AudioSamples, Frame};
/// Which physical camera to select on devices that have more than one.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CameraFacing {
    /// User-facing ("selfie") camera.
    Front,
    /// World-facing camera.
    Back,
}
/// Requested camera capture parameters.
///
/// These are a request, not a guarantee — the underlying platform capture
/// layer may pick the closest supported mode.
#[derive(Clone, Debug)]
pub struct CameraConfig {
    /// Frame width in pixels.
    pub width: u32,
    /// Frame height in pixels.
    pub height: u32,
    /// Requested capture rate, frames per second.
    pub fps: u32,
    /// Which camera to open (front or back).
    pub facing: CameraFacing,
}
impl Default for CameraConfig {
fn default() -> Self {
Self {
width: 640,
height: 480,
fps: 30,
facing: CameraFacing::Front,
}
}
}
/// Requested audio capture parameters.
#[derive(Clone, Debug)]
pub struct AudioRecorderConfig {
    /// Samples per second (Hz), e.g. 44100.
    pub sample_rate: u32,
    /// Number of interleaved channels (1 = mono, 2 = stereo).
    pub channels: u16,
}
impl Default for AudioRecorderConfig {
fn default() -> Self {
Self {
sample_rate: 44100,
channels: 1,
}
}
}
// Latest-value caches shared between the platform capture callback and readers.
// Only the most recent item is kept; older ones are overwritten, never queued.
type SharedFrame = Arc<Mutex<Option<Frame>>>;
type SharedSamples = Arc<Mutex<Option<AudioSamples>>>;
/// A live camera capture stream holding only the most recent frame.
///
/// Construction is cfg-gated: mobile/web targets capture through the native
/// bridge; desktop targets currently act as a passive cache fed via
/// `push_frame`.
pub struct CameraStream {
    // Most recent decoded frame, written by the capture callback.
    latest: SharedFrame,
    // Desktop: slot for a capture-thread handle (always None in this version).
    #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
    _handle: Option<std::thread::JoinHandle<()>>,
    // Mobile/web: keeps the native bridge stream alive for the lifetime of self.
    #[cfg(any(target_os = "android", target_os = "ios", target_arch = "wasm32"))]
    _bridge_stream: Option<blinc_core::native_bridge::NativeStream>,
    // Set to false on drop; exposed via `is_active()`.
    active: Arc<std::sync::atomic::AtomicBool>,
}
impl CameraStream {
    /// Opens a camera stream with the given configuration.
    ///
    /// On mobile/web targets (`android`, `ios`, `wasm32`) this starts a native
    /// bridge stream whose callback decodes RGBA bytes into the shared frame
    /// cache. On desktop targets no real capture happens yet: the request is
    /// logged and frames must be supplied via [`Self::push_frame`].
    pub fn open(config: CameraConfig) -> Self {
        let latest: SharedFrame = Arc::new(Mutex::new(None));
        let active = Arc::new(std::sync::atomic::AtomicBool::new(true));
        #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
        {
            tracing::info!(
                "Camera requested: {}x{} @ {}fps (desktop — use native bridge for real capture)",
                config.width,
                config.height,
                config.fps,
            );
            Self {
                latest,
                _handle: None,
                active,
            }
        }
        #[cfg(any(target_os = "android", target_os = "ios", target_arch = "wasm32"))]
        {
            let latest_for_stream = Arc::clone(&latest);
            let w = config.width;
            let h = config.height;
            let stream = blinc_core::native_bridge::native_stream(
                "camera",
                "preview",
                vec![
                    blinc_core::native_bridge::NativeValue::Int32(w as i32),
                    blinc_core::native_bridge::NativeValue::Int32(h as i32),
                    blinc_core::native_bridge::NativeValue::Int32(config.fps as i32),
                    // Facing is encoded for the bridge as 0 = front, 1 = back.
                    blinc_core::native_bridge::NativeValue::Int32(
                        if config.facing == CameraFacing::Front {
                            0
                        } else {
                            1
                        },
                    ),
                ],
                move |data| {
                    if let Some(bytes) = data.as_bytes() {
                        let frame = Frame::from_rgba(bytes.to_vec(), w, h);
                        // Recover from a poisoned lock instead of panicking: one
                        // panicked writer must not permanently wedge the cache.
                        *latest_for_stream
                            .lock()
                            .unwrap_or_else(std::sync::PoisonError::into_inner) = Some(frame);
                    }
                },
            );
            Self {
                latest,
                _bridge_stream: Some(stream),
                active,
            }
        }
    }

    /// Returns a clone of the most recently captured frame, if any.
    ///
    /// A poisoned lock is recovered rather than propagated — the cache holds a
    /// single `Option<Frame>`, which stays valid even if a writer panicked.
    pub fn latest_frame(&self) -> Option<Frame> {
        self.latest
            .lock()
            .unwrap_or_else(std::sync::PoisonError::into_inner)
            .clone()
    }

    /// Injects a frame into the cache (used on desktop, where no native
    /// capture path exists, and by tests).
    pub fn push_frame(&self, frame: Frame) {
        *self
            .latest
            .lock()
            .unwrap_or_else(std::sync::PoisonError::into_inner) = Some(frame);
    }

    /// True until the stream is dropped.
    pub fn is_active(&self) -> bool {
        self.active.load(std::sync::atomic::Ordering::Relaxed)
    }
}
impl Drop for CameraStream {
    fn drop(&mut self) {
        use std::sync::atomic::Ordering;
        // Flag the stream as stopped so `is_active()` observers see false.
        self.active.store(false, Ordering::Relaxed);
    }
}
/// A live audio input stream holding only the most recent sample buffer.
///
/// Construction is cfg-gated: desktop targets capture through cpal;
/// mobile/web targets capture through the native bridge.
pub struct AudioRecorder {
    // Most recent sample buffer, written by the capture callback.
    latest: SharedSamples,
    // Desktop: the cpal input stream; dropping it stops capture.
    #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
    _stream: Option<cpal::Stream>,
    // Mobile/web: keeps the native bridge stream alive for the lifetime of self.
    #[cfg(any(target_os = "android", target_os = "ios", target_arch = "wasm32"))]
    _bridge_stream: Option<blinc_core::native_bridge::NativeStream>,
    // Set to false on drop; exposed via `is_active()`.
    active: Arc<std::sync::atomic::AtomicBool>,
}
impl AudioRecorder {
    /// Opens an audio input stream with the given configuration.
    ///
    /// Desktop: captures from the default cpal input device; if no device or
    /// stream is available this degrades to a passive cache fed via
    /// [`Self::push_samples`] (a warning is logged). Mobile/web: captures
    /// f32 little-endian samples through the native bridge.
    pub fn open(config: AudioRecorderConfig) -> Self {
        let latest: SharedSamples = Arc::new(Mutex::new(None));
        let active = Arc::new(std::sync::atomic::AtomicBool::new(true));
        #[cfg(not(any(target_os = "android", target_os = "ios", target_arch = "wasm32")))]
        {
            use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
            let host = cpal::default_host();
            let device = host.default_input_device();
            if device.is_none() {
                // Best-effort: keep the recorder usable as a passive cache,
                // but make the missing device visible in logs.
                tracing::warn!("No default audio input device; capture disabled");
            }
            let stream = device.and_then(|dev| {
                let stream_config = cpal::StreamConfig {
                    channels: config.channels,
                    // Fix: cpal's field is the `SampleRate` newtype, not a raw u32.
                    sample_rate: cpal::SampleRate(config.sample_rate),
                    buffer_size: cpal::BufferSize::Default,
                };
                let latest_for_cb = Arc::clone(&latest);
                let channels = config.channels;
                let sample_rate = config.sample_rate;
                dev.build_input_stream(
                    &stream_config,
                    move |data: &[f32], _: &cpal::InputCallbackInfo| {
                        let samples = AudioSamples::from_f32(data, channels, sample_rate);
                        // Recover from a poisoned lock: one panicked writer must
                        // not permanently wedge the sample cache.
                        *latest_for_cb
                            .lock()
                            .unwrap_or_else(std::sync::PoisonError::into_inner) = Some(samples);
                    },
                    |err| {
                        tracing::error!("Audio capture error: {}", err);
                    },
                    None,
                )
                .ok()
            });
            if let Some(ref s) = stream {
                let _ = s.play();
            }
            Self {
                latest,
                _stream: stream,
                active,
            }
        }
        #[cfg(any(target_os = "android", target_os = "ios", target_arch = "wasm32"))]
        {
            let latest_for_stream = Arc::clone(&latest);
            let channels = config.channels;
            let sample_rate = config.sample_rate;
            let stream = blinc_core::native_bridge::native_stream(
                "audio",
                "record",
                vec![
                    blinc_core::native_bridge::NativeValue::Int32(sample_rate as i32),
                    blinc_core::native_bridge::NativeValue::Int32(channels as i32),
                ],
                move |data| {
                    if let Some(bytes) = data.as_bytes() {
                        // Bridge delivers raw f32 samples as little-endian bytes;
                        // any trailing partial sample (< 4 bytes) is dropped.
                        let float_samples: Vec<f32> = bytes
                            .chunks_exact(4)
                            .map(|b| f32::from_le_bytes([b[0], b[1], b[2], b[3]]))
                            .collect();
                        let samples = AudioSamples::from_f32(&float_samples, channels, sample_rate);
                        *latest_for_stream
                            .lock()
                            .unwrap_or_else(std::sync::PoisonError::into_inner) = Some(samples);
                    }
                },
            );
            Self {
                latest,
                _bridge_stream: Some(stream),
                active,
            }
        }
    }

    /// Returns a clone of the most recently captured sample buffer, if any.
    pub fn latest_samples(&self) -> Option<AudioSamples> {
        self.latest
            .lock()
            .unwrap_or_else(std::sync::PoisonError::into_inner)
            .clone()
    }

    /// Injects a sample buffer into the cache (used by tests and by code
    /// that sources audio from elsewhere).
    pub fn push_samples(&self, samples: AudioSamples) {
        *self
            .latest
            .lock()
            .unwrap_or_else(std::sync::PoisonError::into_inner) = Some(samples);
    }

    /// True until the recorder is dropped.
    pub fn is_active(&self) -> bool {
        self.active.load(std::sync::atomic::Ordering::Relaxed)
    }
}
impl Drop for AudioRecorder {
    fn drop(&mut self) {
        use std::sync::atomic::Ordering;
        // Flag the recorder as stopped so `is_active()` observers see false.
        self.active.store(false, Ordering::Relaxed);
    }
}