use std::time::Duration;
use crate::image::temporal::{TemporalCoherence, TemporalConfig};
use crate::image::{ColorMode, DitheringMethod};
use crate::{BrailleGrid, DotmaxError, Result};
use super::MediaPlayer;
extern crate ffmpeg_next as ffmpeg;
use ffmpeg::format::Pixel;
use ffmpeg::media::Type;
use ffmpeg::software::scaling::{context::Context as ScalingContext, flag::Flags};
use ffmpeg::util::frame::video::Video as VideoFrame;
/// Metadata describing a webcam discovered on the host system.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct WebcamDevice {
    /// Platform-specific identifier used to open the device
    /// (e.g. `/dev/video0` on Linux, a numeric index on macOS, `video=<name>` on Windows).
    pub id: String,
    /// Human-readable device name as reported by the platform.
    pub name: String,
    /// Free-form description of how/where the device was discovered.
    pub description: String,
}
impl WebcamDevice {
    /// Creates a `WebcamDevice` from any values convertible into `String`.
    #[must_use]
    pub fn new(
        id: impl Into<String>,
        name: impl Into<String>,
        description: impl Into<String>,
    ) -> Self {
        let (id, name, description) = (id.into(), name.into(), description.into());
        Self { id, name, description }
    }
}
/// Ways a caller can address a webcam when opening it.
#[derive(Debug, Clone, Default)]
pub enum WebcamDeviceId {
    /// The platform's first/default camera.
    #[default]
    Default,
    /// A camera selected by enumeration index.
    Index(usize),
    /// A camera selected by platform-specific path or name.
    Path(String),
}
/// A bare integer selects a device by enumeration index.
impl From<usize> for WebcamDeviceId {
    fn from(index: usize) -> Self {
        Self::Index(index)
    }
}
/// A string selects a device by path/name (owned copy is made).
impl From<&str> for WebcamDeviceId {
    fn from(path: &str) -> Self {
        Self::Path(path.to_string())
    }
}
/// An owned string selects a device by path/name without reallocation.
impl From<String> for WebcamDeviceId {
    fn from(path: String) -> Self {
        Self::Path(path)
    }
}
impl std::fmt::Display for WebcamDeviceId {
    /// Renders the id as `default`, `index:<n>`, or the raw path string.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Default => f.write_str("default"),
            Self::Index(i) => write!(f, "index:{i}"),
            Self::Path(p) => f.write_str(p),
        }
    }
}
/// Enumerates webcams available on the current platform.
///
/// Dispatches to a platform-specific backend at compile time; on unsupported
/// platforms this returns an empty list rather than failing.
#[must_use]
pub fn list_webcams() -> Vec<WebcamDevice> {
    #[cfg(target_os = "linux")]
    {
        list_webcams_linux()
    }
    #[cfg(target_os = "macos")]
    {
        list_webcams_macos()
    }
    #[cfg(target_os = "windows")]
    {
        list_webcams_windows()
    }
    #[cfg(not(any(target_os = "linux", target_os = "macos", target_os = "windows")))]
    {
        vec![]
    }
}
/// Enumerates V4L2 capture nodes by scanning `/dev` for `video*` entries.
#[cfg(target_os = "linux")]
fn list_webcams_linux() -> Vec<WebcamDevice> {
    use std::fs;
    let mut devices = Vec::new();
    if let Ok(entries) = fs::read_dir("/dev") {
        for entry in entries.flatten() {
            let path = entry.path();
            if let Some(name) = path.file_name().and_then(|n| n.to_str()) {
                if name.starts_with("video") {
                    let device_path = path.to_string_lossy().to_string();
                    // Prefer the driver-reported name from sysfs; fall back
                    // to the node name itself.
                    let device_name =
                        get_v4l2_device_name(&device_path).unwrap_or_else(|| name.to_string());
                    devices.push(WebcamDevice {
                        id: device_path.clone(),
                        name: device_name,
                        description: format!("V4L2 device at {device_path}"),
                    });
                }
            }
        }
    }
    // Sort numerically by node index so `/dev/video2` precedes `/dev/video10`;
    // a plain lexicographic comparison would order them the other way around.
    devices.sort_by(|a, b| {
        let index = |d: &WebcamDevice| {
            d.id.strip_prefix("/dev/video")
                .and_then(|n| n.parse::<u64>().ok())
        };
        match (index(a), index(b)) {
            (Some(x), Some(y)) => x.cmp(&y),
            _ => a.id.cmp(&b.id),
        }
    });
    devices
}
/// Looks up the human-readable device name for a `/dev/videoN` node via the
/// `/sys/class/video4linux/<node>/name` sysfs attribute.
///
/// Returns `None` when the path is not under `/dev/` or sysfs has no entry.
#[cfg(target_os = "linux")]
fn get_v4l2_device_name(device_path: &str) -> Option<String> {
    use std::fs;
    let device_name = device_path.strip_prefix("/dev/")?;
    let contents = fs::read_to_string(format!("/sys/class/video4linux/{device_name}/name")).ok()?;
    Some(contents.trim().to_string())
}
/// Lists AVFoundation video devices by invoking the `ffmpeg` CLI and parsing
/// its stderr (FFmpeg prints the device table to stderr, not stdout).
#[cfg(target_os = "macos")]
fn list_webcams_macos() -> Vec<WebcamDevice> {
    use std::process::Command;
    // `-list_devices true` makes ffmpeg dump the device table and exit with a
    // nonzero status; the status is intentionally ignored here.
    let output = Command::new("ffmpeg")
        .args(["-f", "avfoundation", "-list_devices", "true", "-i", ""])
        .output();
    let stderr = match output {
        Ok(out) => String::from_utf8_lossy(&out.stderr).to_string(),
        // ffmpeg binary missing: report no devices rather than erroring.
        Err(_) => return vec![],
    };
    parse_avfoundation_device_list(&stderr)
}
/// Parses `ffmpeg -f avfoundation -list_devices true` stderr into devices.
///
/// Device lines look like `[AVFoundation indev @ 0x...] [0] FaceTime HD Camera`:
/// the device index is the LAST bracketed token, so the search must start from
/// the right. The previous `find('[')`/`find(']')` pair matched the log-prefix
/// bracket instead (so valid devices were skipped), and because the first `]`
/// can precede the first `[` on a line it could even build a reversed slice
/// range and panic.
#[cfg(target_os = "macos")]
fn parse_avfoundation_device_list(output: &str) -> Vec<WebcamDevice> {
    let mut devices = Vec::new();
    let mut in_video_section = false;
    for line in output.lines() {
        if line.contains("AVFoundation video devices") {
            in_video_section = true;
            continue;
        }
        // Audio section follows the video section; stop there.
        if line.contains("AVFoundation audio devices") {
            break;
        }
        if !in_video_section {
            continue;
        }
        // Locate the last `[...]` pair; its contents must be a numeric index.
        if let Some(bracket_start) = line.rfind('[') {
            if let Some(rel_end) = line[bracket_start..].find(']') {
                let bracket_end = bracket_start + rel_end;
                let inside_bracket = &line[bracket_start + 1..bracket_end];
                if let Ok(index) = inside_bracket.parse::<usize>() {
                    let name = line[bracket_end + 1..].trim().to_string();
                    if !name.is_empty() {
                        devices.push(WebcamDevice {
                            id: index.to_string(),
                            name,
                            description: format!("AVFoundation device {index}"),
                        });
                    }
                }
            }
        }
    }
    devices
}
/// Lists DirectShow video devices by invoking the `ffmpeg` CLI and parsing
/// its stderr output.
///
/// Fix: the debug preview previously sliced `&stderr[..200]`, which panics if
/// byte 200 is not a UTF-8 character boundary (device names can be non-ASCII).
/// The preview is now truncated on character boundaries.
#[cfg(target_os = "windows")]
fn list_webcams_windows() -> Vec<WebcamDevice> {
    use std::os::windows::process::CommandExt;
    use std::process::{Command, Stdio};
    // Prevents a console window from flashing up when spawning ffmpeg.
    const CREATE_NO_WINDOW: u32 = 0x08000000;
    let output = Command::new("ffmpeg")
        .args(["-list_devices", "true", "-f", "dshow", "-i", "dummy"])
        .stdin(Stdio::null())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .creation_flags(CREATE_NO_WINDOW)
        .output();
    match output {
        Ok(out) => {
            let stderr = String::from_utf8_lossy(&out.stderr).to_string();
            // Char-boundary-safe preview: byte slicing could split a
            // multi-byte character and panic.
            let preview: String = stderr.chars().take(200).collect();
            tracing::debug!("FFmpeg dshow output ({} bytes): {}", stderr.len(), preview);
            parse_dshow_device_list(&stderr)
        }
        Err(e) => {
            tracing::debug!("Failed to run ffmpeg for device enumeration: {}", e);
            vec![]
        }
    }
}
/// Extracts video device names from `ffmpeg -list_devices -f dshow` stderr.
///
/// Keeps lines tagged `(video)` or `(none)` (older ffmpeg builds omit the
/// media-type tag) and takes the quoted device name from each.
#[cfg(target_os = "windows")]
fn parse_dshow_device_list(output: &str) -> Vec<WebcamDevice> {
    let mut devices = Vec::new();
    for line in output.lines() {
        // "Alternative name" lines repeat the device under a moniker path.
        if line.contains("Alternative name") {
            continue;
        }
        if !(line.contains("(video)") || line.contains("(none)")) {
            continue;
        }
        // The device name sits between the first and the last double quote.
        let (Some(start), Some(end)) = (line.find('"'), line.rfind('"')) else {
            continue;
        };
        if end > start {
            let name = line[start + 1..end].to_string();
            devices.push(WebcamDevice {
                id: format!("video={name}"),
                name,
                description: "DirectShow video device".to_string(),
            });
        }
    }
    devices
}
// Newtype wrapper so the FFmpeg scaling context can live in a `Send` type.
struct SendableScaler(ScalingContext);
// SAFETY: NOTE(review): this assumes the underlying `SwsContext` is not
// thread-affine, i.e. it is sound to move it between threads as long as it is
// only used from one thread at a time (it is only ever reached through
// `&mut WebcamPlayer`). Confirm against FFmpeg's libswscale threading
// guarantees before relying on this.
#[allow(clippy::non_send_fields_in_send_ty)]
unsafe impl Send for SendableScaler {}
/// Live webcam capture source that decodes frames via FFmpeg and renders them
/// as braille grids sized to the terminal.
pub struct WebcamPlayer {
    /// Display form of the id used to open the device (for error messages).
    device_id: String,
    /// Demuxer for the capture device.
    input_context: ffmpeg::format::context::Input,
    /// Index of the video stream within `input_context`.
    video_stream_index: usize,
    /// Video decoder attached to that stream.
    decoder: ffmpeg::decoder::Video,
    /// Converts/scales decoded frames to RGB24 at terminal pixel resolution.
    scaler: SendableScaler,
    /// Native capture width in pixels.
    width: u32,
    /// Native capture height in pixels.
    height: u32,
    /// Reported average frame rate (fallback 30.0 when unknown).
    fps: f64,
    /// Terminal size in character cells (each cell = 2x4 braille dots).
    terminal_width: usize,
    terminal_height: usize,
    /// Reusable frame holding the raw decoder output.
    decoded_frame: VideoFrame,
    /// Reusable frame holding the scaled RGB24 output.
    rgb_frame: VideoFrame,
    /// Recycled scratch buffer (RGB in color mode, luma in monochrome).
    rgb_buffer: Vec<u8>,
    // Rendering options (see `RenderSettings` for defaults).
    dithering: DitheringMethod,
    threshold: Option<u8>,
    brightness: f32,
    contrast: f32,
    gamma: f32,
    color_mode: ColorMode,
    /// Temporal-coherence state/config for frame-to-frame stability.
    temporal_coherence: TemporalCoherence,
}
/// Manual `Debug`: FFmpeg handles and frame buffers are not `Debug`, so only
/// the plain configuration fields are printed.
impl std::fmt::Debug for WebcamPlayer {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("WebcamPlayer")
            .field("device_id", &self.device_id)
            .field("width", &self.width)
            .field("height", &self.height)
            .field("fps", &self.fps)
            .field("dithering", &self.dithering)
            .field("threshold", &self.threshold)
            .field("brightness", &self.brightness)
            .field("contrast", &self.contrast)
            .field("gamma", &self.gamma)
            .field("color_mode", &self.color_mode)
            .finish_non_exhaustive()
    }
}
impl WebcamPlayer {
    /// Opens the platform default webcam with default render settings.
    ///
    /// # Errors
    /// Propagates device-open failures from [`Self::from_device`].
    pub fn new() -> Result<Self> {
        Self::from_device(WebcamDeviceId::Default)
    }
    /// Opens a specific webcam with default capture and render settings.
    ///
    /// # Errors
    /// Returns a webcam/camera error if the device cannot be opened.
    pub fn from_device(device: impl Into<WebcamDeviceId>) -> Result<Self> {
        let device_id = device.into();
        Self::open_device(device_id, None, None, None)
    }
    /// Starts a builder for configuring resolution, fps, and render options.
    #[must_use]
    pub fn builder() -> WebcamPlayerBuilder {
        WebcamPlayerBuilder::new()
    }
/// Opens the given device and builds a fully initialised player.
///
/// `requested_resolution` / `requested_fps` override the defaults passed to
/// the capture backend; `render_settings` seeds the dithering/color options.
///
/// # Errors
/// Returns `DotmaxError::WebcamError` (or the richer variants produced by
/// `map_ffmpeg_error`) if FFmpeg init, device open, or decoder/scaler setup
/// fails.
fn open_device(
    device_id: WebcamDeviceId,
    requested_resolution: Option<(u32, u32)>,
    requested_fps: Option<u32>,
    render_settings: Option<RenderSettings>,
) -> Result<Self> {
    let device_str = device_id.to_string();
    let (device_url, input_format) = build_device_url(&device_id)?;
    tracing::debug!("Opening device URL: {} with format: {}", device_url, input_format);
    ffmpeg::init().map_err(|e| DotmaxError::WebcamError {
        device: device_str.clone(),
        message: format!("FFmpeg initialization failed: {e}"),
    })?;
    let mut options = ffmpeg::Dictionary::new();
    // Braille cells pack 2x4 dots, so a full-screen render needs
    // width*2 x height*4 pixels (used for the log line below).
    let (term_width, term_height) = crossterm::terminal::size()
        .map(|(w, h)| (w as u32, h as u32))
        .unwrap_or((80, 24));
    let needed_width = term_width * 2;
    let needed_height = term_height * 4;
    // NOTE(review): with no explicit resolution we always request 320x240,
    // even when the terminal would need more pixels — confirm the low default
    // is intentional (presumably for capture latency/CPU).
    let optimal_resolution = if let Some((w, h)) = requested_resolution {
        (w, h)
    } else {
        (320, 240)
    };
    options.set("video_size", &format!("{}x{}", optimal_resolution.0, optimal_resolution.1));
    tracing::info!(
        "Terminal {}x{} needs {}x{} pixels, requesting {}x{} capture",
        term_width, term_height, needed_width, needed_height,
        optimal_resolution.0, optimal_resolution.1
    );
    let target_fps = requested_fps.unwrap_or(30);
    options.set("framerate", &target_fps.to_string());
    #[cfg(target_os = "linux")]
    {
        // NOTE(review): forces MJPEG capture on V4L2 — assumes the device
        // supports MJPEG; confirm, otherwise `open_with` may fail.
        options.set("input_format", "mjpeg");
    }
    #[cfg(target_os = "windows")]
    {
        // Low-latency DirectShow settings: MJPEG codec, bounded realtime
        // buffer, and no internal buffering.
        options.set("vcodec", "mjpeg");
        options.set("rtbufsize", "10M");
        options.set("fflags", "nobuffer");
        options.set("flags", "low_delay");
    }
    // Find the platform capture input format (v4l2/avfoundation/dshow).
    let format = ffmpeg::device::input::video()
        .find(|f| f.name() == input_format)
        .ok_or_else(|| {
            DotmaxError::WebcamError {
                device: device_str.clone(),
                message: format!("Input format '{}' not found - FFmpeg may not support webcam capture on this platform", input_format),
            }
        })?;
    let context = ffmpeg::format::open_with(&device_url, &format, options)
        .map_err(|e| map_ffmpeg_error(&device_str, e))?;
    // `open_with` can return either context kind; a capture device must be
    // an input context.
    let input_context = match context {
        ffmpeg::format::context::Context::Input(input) => input,
        _ => {
            return Err(DotmaxError::WebcamError {
                device: device_str,
                message: "Unexpected output context when opening webcam".to_string(),
            });
        }
    };
    let video_stream = input_context
        .streams()
        .best(Type::Video)
        .ok_or_else(|| DotmaxError::WebcamError {
            device: device_str.clone(),
            message: "No video stream found from webcam".to_string(),
        })?;
    let video_stream_index = video_stream.index();
    let codec_params = video_stream.parameters();
    let context = ffmpeg::codec::context::Context::from_parameters(codec_params)
        .map_err(|e| DotmaxError::WebcamError {
            device: device_str.clone(),
            message: format!("Failed to create codec context: {e}"),
        })?;
    let decoder = context.decoder().video().map_err(|e| DotmaxError::WebcamError {
        device: device_str.clone(),
        message: format!("Failed to create video decoder: {e}"),
    })?;
    let width = decoder.width();
    let height = decoder.height();
    // Derive fps from the stream's average frame rate, guarding against a
    // zero denominator (some drivers report none); default to 30 fps.
    let fps = video_stream.avg_frame_rate();
    let fps = if fps.denominator() != 0 {
        f64::from(fps.numerator()) / f64::from(fps.denominator())
    } else {
        30.0
    };
    let (terminal_width, terminal_height) = crossterm::terminal::size()
        .map(|(w, h)| (w as usize, h as usize))
        .unwrap_or((80, 24));
    // Scaler output is sized so each terminal cell gets its full 2x4 dots.
    let target_pixel_width = (terminal_width * 2) as u32;
    let target_pixel_height = (terminal_height * 4) as u32;
    let scaler = SendableScaler(
        ScalingContext::get(
            decoder.format(),
            width,
            height,
            Pixel::RGB24,
            target_pixel_width,
            target_pixel_height,
            Flags::BILINEAR,
        )
        .map_err(|e| DotmaxError::WebcamError {
            device: device_str.clone(),
            message: format!("Failed to create scaler: {e}"),
        })?,
    );
    tracing::info!(
        "Opened webcam: {}, {}x{} @ {:.2} fps",
        device_str,
        width,
        height,
        fps
    );
    // 3 bytes per pixel: scratch buffer sized for the RGB24 scaler output.
    let rgb_buffer_size = (target_pixel_width * target_pixel_height * 3) as usize;
    let settings = render_settings.unwrap_or_default();
    Ok(Self {
        device_id: device_str,
        input_context,
        video_stream_index,
        decoder,
        scaler,
        width,
        height,
        fps,
        terminal_width,
        terminal_height,
        decoded_frame: VideoFrame::empty(),
        rgb_frame: VideoFrame::empty(),
        rgb_buffer: vec![0u8; rgb_buffer_size],
        dithering: settings.dithering,
        threshold: settings.threshold,
        brightness: settings.brightness,
        contrast: settings.contrast,
        gamma: settings.gamma,
        color_mode: settings.color_mode,
        temporal_coherence: TemporalCoherence::new(TemporalConfig::webcam()),
    })
}
/// Native capture width in pixels.
#[must_use]
pub const fn width(&self) -> u32 {
    self.width
}
/// Native capture height in pixels.
#[must_use]
pub const fn height(&self) -> u32 {
    self.height
}
/// Reported capture frame rate (frames per second).
#[must_use]
pub const fn fps(&self) -> f64 {
    self.fps
}
/// The identifier string the device was opened with.
#[must_use]
pub fn device_id(&self) -> &str {
    &self.device_id
}
// Consuming builder-style setters: each takes the player by value, updates
// one render option, and returns the modified player for chaining.
/// Sets the dithering method used for monochrome rendering.
#[must_use]
pub const fn dithering(mut self, method: DitheringMethod) -> Self {
    self.dithering = method;
    self
}
/// Sets a fixed binarization threshold (`None` = auto via Otsu).
#[must_use]
pub const fn threshold(mut self, threshold: Option<u8>) -> Self {
    self.threshold = threshold;
    self
}
/// Sets the brightness multiplier (1.0 = unchanged).
#[must_use]
pub const fn brightness(mut self, brightness: f32) -> Self {
    self.brightness = brightness;
    self
}
/// Sets the contrast factor (1.0 = unchanged).
#[must_use]
pub const fn contrast(mut self, contrast: f32) -> Self {
    self.contrast = contrast;
    self
}
/// Sets the gamma correction exponent (1.0 = unchanged).
#[must_use]
pub const fn gamma(mut self, gamma: f32) -> Self {
    self.gamma = gamma;
    self
}
/// Sets the output color mode.
#[must_use]
pub const fn color_mode(mut self, mode: ColorMode) -> Self {
    self.color_mode = mode;
    self
}
/// Current dithering method.
#[must_use]
pub const fn get_dithering(&self) -> DitheringMethod {
    self.dithering
}
/// Current fixed threshold, if any.
#[must_use]
pub const fn get_threshold(&self) -> Option<u8> {
    self.threshold
}
/// Current brightness multiplier.
#[must_use]
pub const fn get_brightness(&self) -> f32 {
    self.brightness
}
/// Current contrast factor.
#[must_use]
pub const fn get_contrast(&self) -> f32 {
    self.contrast
}
/// Current gamma exponent.
#[must_use]
pub const fn get_gamma(&self) -> f32 {
    self.gamma
}
/// Current color mode.
#[must_use]
pub const fn get_color_mode(&self) -> ColorMode {
    self.color_mode
}
// In-place mutators, for adjusting render options while streaming.
/// Changes the dithering method for subsequent frames.
pub fn set_dithering(&mut self, method: DitheringMethod) {
    self.dithering = method;
}
/// Changes the fixed threshold (`None` = auto) for subsequent frames.
pub fn set_threshold(&mut self, threshold: Option<u8>) {
    self.threshold = threshold;
}
/// Changes the brightness multiplier for subsequent frames.
pub fn set_brightness(&mut self, brightness: f32) {
    self.brightness = brightness;
}
/// Changes the contrast factor for subsequent frames.
pub fn set_contrast(&mut self, contrast: f32) {
    self.contrast = contrast;
}
/// Changes the gamma exponent for subsequent frames.
pub fn set_gamma(&mut self, gamma: f32) {
    self.gamma = gamma;
}
/// Changes the color mode for subsequent frames.
pub fn set_color_mode(&mut self, mode: ColorMode) {
    self.color_mode = mode;
}
// NOTE(review): `temporal_coherence` is configured here but not referenced in
// `frame_to_grid` within this file — confirm it is applied elsewhere or is
// intentionally pending integration.
/// Returns the active temporal-coherence configuration.
#[must_use]
pub fn temporal_config(&self) -> &TemporalConfig {
    self.temporal_coherence.config()
}
/// Replaces the temporal-coherence configuration.
pub fn set_temporal_config(&mut self, config: TemporalConfig) {
    self.temporal_coherence.set_config(config);
}
/// Clears accumulated temporal state (e.g. after a scene change or resize).
pub fn reset_temporal_state(&mut self) {
    self.temporal_coherence.reset();
}
/// Pulls the next decoded frame from the webcam into `self.decoded_frame`.
///
/// Returns `Some(Ok(()))` when a frame is ready and `Some(Err(_))` on decode
/// or stream failure. (It never returns `None`: a live capture has no EOF.)
fn decode_next_frame(&mut self) -> Option<Result<()>> {
    let mut got_frame = false;
    // First drain every frame the decoder already has buffered, keeping only
    // the newest one — for a live source we want the freshest image rather
    // than replaying a backlog.
    loop {
        match self.decoder.receive_frame(&mut self.decoded_frame) {
            Ok(()) => {
                got_frame = true;
                continue;
            }
            // EAGAIN: the decoder needs more input before emitting a frame.
            Err(ffmpeg::Error::Other { errno }) if errno == ffmpeg::error::EAGAIN => {
                break;
            }
            Err(e) => {
                return Some(Err(DotmaxError::WebcamError {
                    device: self.device_id.clone(),
                    message: format!("Frame decode error: {e}"),
                }));
            }
        }
    }
    if got_frame {
        return Some(Ok(()));
    }
    // No buffered frame: pump packets one at a time until one decodes.
    loop {
        let mut found_video_packet = false;
        for (stream, packet) in self.input_context.packets() {
            if stream.index() == self.video_stream_index {
                // Send errors are logged but not fatal; a persistent failure
                // will surface through receive_frame on a later cycle.
                if let Err(e) = self.decoder.send_packet(&packet) {
                    tracing::warn!("Error sending packet to decoder: {}", e);
                }
                found_video_packet = true;
                break;
            }
        }
        if !found_video_packet {
            // The demuxer produced no more packets — the live stream died.
            return Some(Err(DotmaxError::WebcamError {
                device: self.device_id.clone(),
                message: "Webcam stream ended unexpectedly".to_string(),
            }));
        }
        match self.decoder.receive_frame(&mut self.decoded_frame) {
            Ok(()) => {
                return Some(Ok(()));
            }
            // Still not enough data; feed another packet.
            Err(ffmpeg::Error::Other { errno }) if errno == ffmpeg::error::EAGAIN => {
                continue;
            }
            Err(e) => {
                return Some(Err(DotmaxError::WebcamError {
                    device: self.device_id.clone(),
                    message: format!("Frame decode error: {e}"),
                }));
            }
        }
    }
}
/// Converts the current decoded frame into a `BrailleGrid` sized to the
/// terminal (each character cell is 2x4 braille dots).
///
/// # Errors
/// Returns `DotmaxError::WebcamError` when scaling or image construction
/// fails, and propagates errors from the dithering/braille pipeline.
fn frame_to_grid(&mut self) -> Result<BrailleGrid> {
    use crate::image::{
        apply_dithering, apply_dithering_with_custom_threshold, apply_threshold,
        pixels_to_braille, render_image_with_color, DitheringMethod,
    };
    // Scale/convert the decoded frame to RGB24 at the target pixel size.
    self.scaler
        .0
        .run(&self.decoded_frame, &mut self.rgb_frame)
        .map_err(|e| DotmaxError::WebcamError {
            device: self.device_id.clone(),
            message: format!("Frame scaling error: {e}"),
        })?;
    let data = self.rgb_frame.data(0);
    // FFmpeg rows may carry padding; `stride` is the actual bytes-per-row.
    let stride = self.rgb_frame.stride(0);
    let target_width = (self.terminal_width * 2) as u32;
    let target_height = (self.terminal_height * 4) as u32;
    let pixel_count = (target_width * target_height) as usize;
    if self.color_mode != crate::image::ColorMode::Monochrome {
        // --- Color path: hand an RGB image to the shared color renderer. ---
        let expected_size = pixel_count * 3;
        if self.rgb_buffer.len() != expected_size {
            self.rgb_buffer.resize(expected_size, 0);
        }
        // Copy row by row, stripping stride padding into a packed buffer.
        let mut offset = 0;
        for y in 0..target_height {
            let row_start = (y as usize) * stride;
            let row_len = (target_width as usize) * 3;
            self.rgb_buffer[offset..offset + row_len]
                .copy_from_slice(&data[row_start..row_start + row_len]);
            offset += row_len;
        }
        // Move the buffer into the image without copying; it is recovered
        // below so the allocation is reused on the next frame.
        let buffer = std::mem::take(&mut self.rgb_buffer);
        let img = image::RgbImage::from_raw(target_width, target_height, buffer)
            .ok_or_else(|| DotmaxError::WebcamError {
                device: self.device_id.clone(),
                message: "Failed to create image from frame data".to_string(),
            })?;
        let dynamic_img = image::DynamicImage::ImageRgb8(img);
        let grid = render_image_with_color(
            &dynamic_img,
            self.color_mode,
            self.terminal_width,
            self.terminal_height,
            self.dithering,
            self.threshold,
            self.brightness,
            self.contrast,
            self.gamma,
        )?;
        // Reclaim the pixel buffer so subsequent frames do not reallocate.
        if let image::DynamicImage::ImageRgb8(rgb) = dynamic_img {
            self.rgb_buffer = rgb.into_raw();
        }
        return Ok(grid);
    }
    // --- Monochrome path: reuse `rgb_buffer` as a 1-byte-per-pixel luma
    // buffer, then threshold or dither to a binary image. ---
    if self.rgb_buffer.len() != pixel_count {
        self.rgb_buffer.resize(pixel_count, 0);
    }
    // Precompute which adjustments are active so the hot loop skips no-ops.
    let apply_brightness = (self.brightness - 1.0).abs() > f32::EPSILON;
    let apply_contrast = (self.contrast - 1.0).abs() > f32::EPSILON;
    let apply_gamma = (self.gamma - 1.0).abs() > f32::EPSILON;
    let inv_gamma = if apply_gamma { 1.0 / self.gamma } else { 1.0 };
    for y in 0..target_height as usize {
        let row_start = y * stride;
        let out_row_start = y * (target_width as usize);
        for x in 0..target_width as usize {
            let px_offset = row_start + x * 3;
            let r = data[px_offset] as f32;
            let g = data[px_offset + 1] as f32;
            let b = data[px_offset + 2] as f32;
            // BT.601 luma weights: 0.299 R + 0.587 G + 0.114 B.
            let mut luma = 0.114f32.mul_add(b, 0.299f32.mul_add(r, 0.587 * g));
            if apply_brightness {
                luma *= self.brightness;
            }
            if apply_contrast {
                // Contrast pivots around mid-gray (128).
                luma = self.contrast.mul_add(luma - 128.0, 128.0);
            }
            if apply_gamma {
                luma = 255.0 * (luma / 255.0).powf(inv_gamma);
            }
            self.rgb_buffer[out_row_start + x] = luma.clamp(0.0, 255.0) as u8;
        }
    }
    // Same take/recover dance as the color path to keep the allocation.
    let gray_buffer = std::mem::take(&mut self.rgb_buffer);
    let gray = image::GrayImage::from_raw(target_width, target_height, gray_buffer)
        .ok_or_else(|| DotmaxError::WebcamError {
            device: self.device_id.clone(),
            message: "Failed to create grayscale image".to_string(),
        })?;
    let binary = if self.dithering == DitheringMethod::None {
        // No dithering: plain threshold, auto-computed via Otsu's method when
        // the user did not pin a value.
        let threshold_val = self.threshold.unwrap_or_else(|| {
            crate::image::otsu_threshold(&gray)
        });
        apply_threshold(&gray, threshold_val)
    } else if let Some(t) = self.threshold {
        apply_dithering_with_custom_threshold(&gray, self.dithering, Some(t))?
    } else {
        apply_dithering(&gray, self.dithering)?
    };
    self.rgb_buffer = gray.into_raw();
    let grid = pixels_to_braille(&binary, self.terminal_width, self.terminal_height)?;
    Ok(grid)
}
/// Inter-frame delay derived from the capture rate; falls back to ~30 fps
/// (33 ms) when the reported rate is zero or negative.
fn frame_delay(&self) -> Duration {
    match self.fps {
        fps if fps > 0.0 => Duration::from_secs_f64(fps.recip()),
        _ => Duration::from_millis(33),
    }
}
}
impl MediaPlayer for WebcamPlayer {
/// Decodes the freshest webcam frame, renders it to a braille grid, and pairs
/// it with the delay to hold before the next frame.
fn next_frame(&mut self) -> Option<Result<(BrailleGrid, Duration)>> {
    // `?` propagates stream end (None); decode failures are surfaced as-is.
    if let Err(e) = self.decode_next_frame()? {
        return Some(Err(e));
    }
    let delay = self.frame_delay();
    Some(self.frame_to_grid().map(|grid| (grid, delay)))
}
/// Live capture cannot seek, so reset is a deliberate no-op.
fn reset(&mut self) {
    tracing::debug!("WebcamPlayer::reset() called - no-op for live streams");
}
/// A live stream has no fixed frame count.
fn frame_count(&self) -> Option<usize> {
    None
}
/// `Some(0)` signals "loop forever" for a live stream.
fn loop_count(&self) -> Option<u16> {
    Some(0)
}
/// Rebuilds the scaler and scratch buffer when the terminal size changes.
///
/// On scaler-creation failure the old scaler is kept and the failure is only
/// logged, so rendering continues at the previous size.
fn handle_resize(&mut self, width: usize, height: usize) {
    // Nothing to do if the size is unchanged.
    if self.terminal_width == width && self.terminal_height == height {
        return;
    }
    self.terminal_width = width;
    self.terminal_height = height;
    // 2x4 braille dots per character cell.
    let target_pixel_width = (width * 2) as u32;
    let target_pixel_height = (height * 4) as u32;
    match ScalingContext::get(
        self.decoder.format(),
        self.width,
        self.height,
        Pixel::RGB24,
        target_pixel_width,
        target_pixel_height,
        Flags::BILINEAR,
    ) {
        Ok(new_scaler) => {
            self.scaler = SendableScaler(new_scaler);
            // Size for RGB24; the monochrome path shrinks it on demand.
            let rgb_buffer_size = (target_pixel_width * target_pixel_height * 3) as usize;
            self.rgb_buffer.resize(rgb_buffer_size, 0);
            tracing::debug!("WebcamPlayer resized to {}x{}", width, height);
        }
        Err(e) => {
            tracing::warn!("Failed to resize webcam scaler: {}", e);
        }
    }
}
}
/// Bundle of render options shared between the builder and the player.
#[derive(Debug, Clone)]
struct RenderSettings {
    /// Dithering method for monochrome output.
    dithering: DitheringMethod,
    /// Fixed binarization threshold; `None` = auto (Otsu).
    threshold: Option<u8>,
    /// Brightness multiplier (1.0 = unchanged).
    brightness: f32,
    /// Contrast factor (1.0 = unchanged).
    contrast: f32,
    /// Gamma exponent (1.0 = unchanged).
    gamma: f32,
    /// Output color mode.
    color_mode: ColorMode,
}
impl Default for RenderSettings {
    /// Neutral defaults: Bayer dithering, auto threshold, and identity
    /// brightness/contrast/gamma in monochrome mode.
    fn default() -> Self {
        Self {
            dithering: DitheringMethod::Bayer,
            threshold: None,
            brightness: 1.0,
            contrast: 1.0,
            gamma: 1.0,
            color_mode: ColorMode::Monochrome,
        }
    }
}
/// Fluent builder for [`WebcamPlayer`], configuring device selection, capture
/// parameters, and render settings before opening the device.
#[derive(Debug, Default)]
pub struct WebcamPlayerBuilder {
    /// Which camera to open (defaults to the platform default device).
    device: WebcamDeviceId,
    /// Requested capture resolution; `None` = backend default (320x240).
    resolution: Option<(u32, u32)>,
    /// Requested capture frame rate; `None` = 30 fps.
    fps: Option<u32>,
    /// Rendering options forwarded to the player.
    render_settings: RenderSettings,
}
impl WebcamPlayerBuilder {
    /// Creates a builder with default device and render settings.
    #[must_use]
    pub fn new() -> Self {
        Self::default()
    }
    /// Selects the camera to open.
    #[must_use]
    pub fn device(mut self, device: impl Into<WebcamDeviceId>) -> Self {
        self.device = device.into();
        self
    }
    /// Requests a specific capture resolution (pixels).
    #[must_use]
    pub const fn resolution(mut self, width: u32, height: u32) -> Self {
        self.resolution = Some((width, height));
        self
    }
    /// Requests a specific capture frame rate.
    #[must_use]
    pub const fn fps(mut self, fps: u32) -> Self {
        self.fps = Some(fps);
        self
    }
    /// Sets the dithering method for monochrome rendering.
    #[must_use]
    pub const fn dithering(mut self, method: DitheringMethod) -> Self {
        self.render_settings.dithering = method;
        self
    }
    /// Sets a fixed binarization threshold (`None` = auto via Otsu).
    #[must_use]
    pub const fn threshold(mut self, threshold: Option<u8>) -> Self {
        self.render_settings.threshold = threshold;
        self
    }
    /// Sets the brightness multiplier (1.0 = unchanged).
    #[must_use]
    pub const fn brightness(mut self, brightness: f32) -> Self {
        self.render_settings.brightness = brightness;
        self
    }
    /// Sets the contrast factor (1.0 = unchanged).
    #[must_use]
    pub const fn contrast(mut self, contrast: f32) -> Self {
        self.render_settings.contrast = contrast;
        self
    }
    /// Sets the gamma exponent (1.0 = unchanged).
    #[must_use]
    pub const fn gamma(mut self, gamma: f32) -> Self {
        self.render_settings.gamma = gamma;
        self
    }
    /// Sets the output color mode.
    #[must_use]
    pub const fn color_mode(mut self, mode: ColorMode) -> Self {
        self.render_settings.color_mode = mode;
        self
    }
    /// Opens the configured device and returns the ready-to-use player.
    ///
    /// # Errors
    /// Propagates device-open/decoder-setup failures from `open_device`.
    pub fn build(self) -> Result<WebcamPlayer> {
        WebcamPlayer::open_device(
            self.device,
            self.resolution,
            self.fps,
            Some(self.render_settings),
        )
    }
}
/// Resolves a `WebcamDeviceId` to the (URL, FFmpeg input-format name) pair
/// used to open the capture device on the current platform.
///
/// # Errors
/// On Windows, returns `CameraNotFound` if enumeration yields no matching
/// device; on unsupported platforms returns `WebcamError`.
#[allow(clippy::unnecessary_wraps)]
fn build_device_url(device_id: &WebcamDeviceId) -> Result<(String, &'static str)> {
    #[cfg(target_os = "linux")]
    {
        // V4L2 opens device nodes directly by path.
        let device_path = match device_id {
            WebcamDeviceId::Default => "/dev/video0".to_string(),
            WebcamDeviceId::Index(i) => format!("/dev/video{i}"),
            WebcamDeviceId::Path(p) => p.clone(),
        };
        Ok((device_path, "v4l2"))
    }
    #[cfg(target_os = "macos")]
    {
        // AVFoundation addresses devices by numeric index string.
        let device_index = match device_id {
            WebcamDeviceId::Default => "0".to_string(),
            WebcamDeviceId::Index(i) => i.to_string(),
            WebcamDeviceId::Path(p) => p.clone(),
        };
        Ok((device_index, "avfoundation"))
    }
    #[cfg(target_os = "windows")]
    {
        // DirectShow addresses devices by name ("video=<name>"), so default
        // and index selections must be resolved via enumeration first.
        let device_name = match device_id {
            WebcamDeviceId::Default => {
                let devices = list_webcams();
                if devices.is_empty() {
                    return Err(DotmaxError::CameraNotFound {
                        device: "default".to_string(),
                        available: vec![],
                    });
                }
                devices[0].id.clone()
            }
            WebcamDeviceId::Index(i) => {
                let devices = list_webcams();
                tracing::debug!("Windows device lookup: index={}, found {} devices", i, devices.len());
                if *i >= devices.len() {
                    // NOTE(review): this re-enumerates only to rebuild the
                    // "available" list for the error message; confirm the
                    // second `list_webcams()` call is intentional (the first
                    // result could be reused).
                    let devices_retry = list_webcams();
                    let available: Vec<String> = devices_retry.iter().map(|d| d.name.clone()).collect();
                    return Err(DotmaxError::CameraNotFound {
                        device: format!("index:{i}"),
                        available,
                    });
                }
                devices[*i].id.clone()
            }
            WebcamDeviceId::Path(p) => {
                // Accept both raw names and already-prefixed "video=" ids.
                if p.starts_with("video=") {
                    p.clone()
                } else {
                    format!("video={p}")
                }
            }
        };
        Ok((device_name, "dshow"))
    }
    #[cfg(not(any(target_os = "linux", target_os = "macos", target_os = "windows")))]
    {
        Err(DotmaxError::WebcamError {
            device: device_id.to_string(),
            message: "Webcam capture not supported on this platform".to_string(),
        })
    }
}
/// Maps a low-level FFmpeg open error onto the crate's richer webcam error
/// variants by sniffing the error text for well-known failure signatures.
fn map_ffmpeg_error(device: &str, error: ffmpeg::Error) -> DotmaxError {
    let text = error.to_string().to_lowercase();
    let has = |needle: &str| text.contains(needle);
    if has("no such file") || has("not found") {
        // Device missing entirely: list what IS available to help the user.
        DotmaxError::CameraNotFound {
            device: device.to_string(),
            available: list_webcams().iter().map(|d| d.name.clone()).collect(),
        }
    } else if has("permission") || has("access denied") {
        DotmaxError::CameraPermissionDenied {
            device: device.to_string(),
            hint: get_permission_hint(),
        }
    } else if has("busy") || has("in use") || has("device or resource busy") {
        DotmaxError::CameraInUse {
            device: device.to_string(),
        }
    } else {
        // Anything unrecognized falls through as a generic webcam error.
        DotmaxError::WebcamError {
            device: device.to_string(),
            message: format!("Failed to open webcam: {error}"),
        }
    }
}
/// Returns a platform-appropriate hint for resolving camera permission errors.
fn get_permission_hint() -> String {
    #[cfg(target_os = "linux")]
    {
        "Add your user to the 'video' group: sudo usermod -aG video $USER (then log out and back in)".to_string()
    }
    #[cfg(target_os = "macos")]
    {
        "Grant camera access in System Preferences > Security & Privacy > Privacy > Camera".to_string()
    }
    #[cfg(target_os = "windows")]
    {
        "Grant camera access in Settings > Privacy > Camera".to_string()
    }
    #[cfg(not(any(target_os = "linux", target_os = "macos", target_os = "windows")))]
    {
        "Check your system's camera permission settings".to_string()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Compile-time check that `WebcamPlayer` is `Send` (relies on the
    // `SendableScaler` wrapper); never called at runtime.
    fn _assert_webcam_player_send() {
        fn assert_send<T: Send>() {}
        assert_send::<WebcamPlayer>();
    }
    #[test]
    fn test_webcam_device_new() {
        let device = WebcamDevice::new("/dev/video0", "USB Camera", "Generic USB webcam");
        assert_eq!(device.id, "/dev/video0");
        assert_eq!(device.name, "USB Camera");
        assert_eq!(device.description, "Generic USB webcam");
    }
    #[test]
    fn test_webcam_device_id_from_index() {
        let id: WebcamDeviceId = 0.into();
        assert!(matches!(id, WebcamDeviceId::Index(0)));
    }
    #[test]
    fn test_webcam_device_id_from_str() {
        let id: WebcamDeviceId = "/dev/video0".into();
        assert!(matches!(id, WebcamDeviceId::Path(_)));
    }
    #[test]
    fn test_webcam_device_id_from_string() {
        let id: WebcamDeviceId = String::from("/dev/video1").into();
        assert!(matches!(id, WebcamDeviceId::Path(_)));
    }
    #[test]
    fn test_webcam_device_id_display() {
        assert_eq!(WebcamDeviceId::Default.to_string(), "default");
        assert_eq!(WebcamDeviceId::Index(0).to_string(), "index:0");
        assert_eq!(WebcamDeviceId::Path("/dev/video0".into()).to_string(), "/dev/video0");
    }
    // Smoke test only: device availability depends on the host, so just check
    // enumeration does not panic.
    #[test]
    fn test_list_webcams_returns_vec() {
        let devices = list_webcams();
        let _ = devices.len();
    }
    #[test]
    fn test_webcam_player_builder_chain() {
        let builder = WebcamPlayerBuilder::new()
            .device(0)
            .resolution(1280, 720)
            .fps(30)
            .dithering(DitheringMethod::FloydSteinberg)
            .threshold(Some(128))
            .brightness(1.2)
            .contrast(1.1)
            .gamma(0.9)
            .color_mode(ColorMode::Monochrome);
        assert!(matches!(builder.device, WebcamDeviceId::Index(0)));
        assert_eq!(builder.resolution, Some((1280, 720)));
        assert_eq!(builder.fps, Some(30));
        assert_eq!(builder.render_settings.dithering, DitheringMethod::FloydSteinberg);
        assert_eq!(builder.render_settings.threshold, Some(128));
        assert!((builder.render_settings.brightness - 1.2).abs() < f32::EPSILON);
    }
    #[test]
    fn test_render_settings_default() {
        let settings = RenderSettings::default();
        assert_eq!(settings.dithering, DitheringMethod::Bayer);
        assert_eq!(settings.threshold, None);
        assert!((settings.brightness - 1.0).abs() < f32::EPSILON);
        assert!((settings.contrast - 1.0).abs() < f32::EPSILON);
        assert!((settings.gamma - 1.0).abs() < f32::EPSILON);
        assert_eq!(settings.color_mode, ColorMode::Monochrome);
    }
    // Hardware-dependent: only asserts when a camera actually opened.
    #[test]
    #[ignore = "Requires webcam hardware"]
    fn test_webcam_player_new_with_camera() {
        let result = WebcamPlayer::new();
        if let Ok(player) = result {
            assert!(player.width() > 0);
            assert!(player.height() > 0);
            assert!(player.fps() > 0.0);
        }
    }
}