use std::time::Instant;
use async_trait::async_trait;
use nokhwa::{Camera, pixel_format::RgbFormat};
use tracing::debug;
use super::types::{CameraError, CameraFormat, CameraFrame, PixelFormat};
/// Abstraction over a single camera frame source, so capture backends can be
/// swapped out (e.g. mocked in tests).
///
/// Implementors must be `Send + Sync`; frame capture is async so a slow
/// device does not stall the executor.
#[async_trait]
pub trait CameraCapture: Send + Sync {
    /// The capture format this source produces frames in.
    fn format(&self) -> CameraFormat;
    /// Captures one frame, or a [`CameraError`] if the source failed or has
    /// been stopped.
    async fn capture_frame(&mut self) -> Result<CameraFrame, CameraError>;
    /// Releases the underlying device; later captures are expected to fail.
    fn stop(&mut self);
}
/// [`CameraCapture`] implementation backed by the `nokhwa` crate.
pub struct NokhwaCapture {
    // `None` once `stop()` has run (directly or via `Drop`); `capture_frame`
    // then returns a `CaptureFailed` error.
    camera: Option<Camera>,
    // Format negotiated at open time; returned verbatim by `format()`.
    format: CameraFormat,
    // Epoch for the `timestamp_ms` stamped onto captured frames.
    start: Instant,
}
// SAFETY: all mutation of `NokhwaCapture` goes through `&mut self`
// (`capture_frame`/`stop`), so `Sync` only exposes the immutable `format()`
// accessor concurrently.
// NOTE(review): `nokhwa::Camera` is deliberately NOT `Send`/`Sync` upstream —
// some platform backends are thread-affine. These impls assert we may move
// the camera across threads anyway; confirm every backend we ship actually
// tolerates capture from a different thread than the one that opened it.
unsafe impl Send for NokhwaCapture {}
unsafe impl Sync for NokhwaCapture {}
impl NokhwaCapture {
pub(crate) fn new(camera: Camera, format: CameraFormat) -> Self {
Self {
camera: Some(camera),
format,
start: Instant::now(),
}
}
}
#[async_trait]
impl CameraCapture for NokhwaCapture {
    /// The format negotiated when the camera was opened.
    fn format(&self) -> CameraFormat {
        self.format
    }

    /// Grabs and decodes a single frame into tightly packed RGB data.
    ///
    /// nokhwa's capture + decode are blocking, so they run inside
    /// `tokio::task::block_in_place` to keep the async worker responsive.
    /// NOTE(review): `block_in_place` panics on a current-thread runtime —
    /// confirm callers always run this on the multi-thread flavor.
    async fn capture_frame(&mut self) -> Result<CameraFrame, CameraError> {
        let Some(camera) = self.camera.as_mut() else {
            return Err(CameraError::CaptureFailed(
                "camera stream has been stopped".into(),
            ));
        };
        // Timestamp is taken before the (potentially slow) blocking capture.
        let elapsed_ms = self.start.elapsed().as_millis() as u64;
        let fmt = self.format;

        tokio::task::block_in_place(move || {
            let raw = camera
                .frame()
                .map_err(|e| CameraError::CaptureFailed(e.to_string()))?;
            let image = raw
                .decode_image::<RgbFormat>()
                .map_err(|e| CameraError::CaptureFailed(format!("decode failed: {e}")))?;
            let (width, height) = image.dimensions();
            let data = image.into_raw();
            debug!(
                "Captured frame {width}x{height} ({} bytes) fmt={fmt:?}",
                data.len()
            );
            Ok(CameraFrame {
                width,
                height,
                format: PixelFormat::Rgb,
                data,
                timestamp_ms: elapsed_ms,
            })
        })
    }

    /// Stops the underlying stream; later `capture_frame` calls will fail.
    fn stop(&mut self) {
        let Some(mut cam) = self.camera.take() else {
            return; // already stopped
        };
        // Best-effort: a failure while tearing down the stream is not
        // actionable here.
        let _ = cam.stop_stream();
    }
}
impl Drop for NokhwaCapture {
    // Ensure the camera stream is released even when the caller never
    // invokes `stop()` explicitly; `stop()` is idempotent (takes the Option).
    fn drop(&mut self) {
        self.stop();
    }
}