use crate::texture::TextureManager;
use anyhow::{anyhow, Result};
use gst::prelude::*;
use gstreamer as gst;
use gstreamer_app as gst_app;
use gstreamer_video as gst_video;
use log::{debug, info};
use std::sync::{Arc, Mutex};
use std::time::Instant;
use wgpu;
/// Captures frames from a system webcam through a GStreamer pipeline and
/// exposes the most recent frame as a wgpu texture.
///
/// Frames are pulled on GStreamer's streaming thread (appsink callback) into
/// `current_frame`; `update_texture` drains that slot on the render thread.
pub struct WebcamTextureManager {
/// GPU texture + bind group wrapper the latest frame is uploaded into.
texture_manager: TextureManager,
/// The capture pipeline: source ! capsfilter ! videorate ! videoconvert ! appsink.
pipeline: gst::Pipeline,
/// Sink element the streaming thread delivers RGBA samples to.
appsink: gst_app::AppSink,
/// Current texture dimensions (starts at the requested 1280x720, updated from caps).
dimensions: (u32, u32),
/// Whether the pipeline has been started; shared with the sample callback path.
is_active: Arc<Mutex<bool>>,
/// Time of the last successful texture upload.
last_update: Instant,
/// Single-slot mailbox holding the newest frame not yet uploaded to the GPU.
current_frame: Arc<Mutex<Option<image::RgbaImage>>>,
/// False until the 1x1 placeholder texture has been replaced by a real frame.
texture_initialized: bool,
/// Number of frames consumed by `update_texture` (drives periodic debug logs).
frame_count: usize,
/// Human-readable device identifier (a /dev path on Linux, a label elsewhere).
device_name: String,
}
impl WebcamTextureManager {
pub fn new(
device: &wgpu::Device,
queue: &wgpu::Queue,
bind_group_layout: &wgpu::BindGroupLayout,
device_index: Option<u32>,
) -> Result<Self> {
let default_image = image::RgbaImage::new(1, 1);
let texture_manager = TextureManager::new(device, queue, &default_image, bind_group_layout);
let device_name = device_index
.map(|i| format!("/dev/video{i}"))
.unwrap_or_else(|| "0".to_string());
info!("Creating webcam capture from device: {device_name}");
let pipeline = gst::Pipeline::new();
#[cfg(target_os = "linux")]
let source = gst::ElementFactory::make("v4l2src")
.name("webcam_source")
.property("device", &device_name)
.build()
.map_err(|_| anyhow!("Failed to create v4l2src element"))?;
#[cfg(target_os = "macos")]
let source = gst::ElementFactory::make("avfvideosrc")
.name("webcam_source")
.property("device-index", device_index.unwrap_or(0) as i32)
.build()
.map_err(|_| anyhow!("Failed to create avfvideosrc element"))?;
#[cfg(target_os = "windows")]
let source = gst::ElementFactory::make("ksvideosrc")
.name("webcam_source")
.property("device-index", device_index.unwrap_or(0) as i32)
.build()
.map_err(|_| anyhow!("Failed to create ksvideosrc element"))?;
let caps_filter = gst::ElementFactory::make("capsfilter")
.name("caps")
.build()
.map_err(|_| anyhow!("Failed to create capsfilter element"))?;
let caps = gst::Caps::builder("video/x-raw")
.field("width", 1280i32)
.field("height", 720i32)
.field("framerate", gst::Fraction::new(30, 1))
.build();
caps_filter.set_property("caps", &caps);
let videorate = gst::ElementFactory::make("videorate")
.name("rate")
.build()
.map_err(|_| anyhow!("Failed to create videorate element"))?;
let videoconvert = gst::ElementFactory::make("videoconvert")
.name("convert")
.build()
.map_err(|_| anyhow!("Failed to create videoconvert element"))?;
let appsink = gst::ElementFactory::make("appsink")
.name("sink")
.build()
.map_err(|_| anyhow!("Failed to create appsink element"))?;
let appsink = appsink
.dynamic_cast::<gst_app::AppSink>()
.map_err(|_| anyhow!("Failed to cast to AppSink"))?;
appsink.set_caps(Some(
&gst::Caps::builder("video/x-raw")
.field("format", gst_video::VideoFormat::Rgba.to_str())
.build(),
));
appsink.set_max_buffers(2);
appsink.set_drop(true);
appsink.set_sync(false);
pipeline
.add_many([
&source,
&caps_filter,
&videorate,
&videoconvert,
appsink.upcast_ref(),
])
.map_err(|_| anyhow!("Failed to add webcam elements to pipeline"))?;
gst::Element::link_many([
&source,
&caps_filter,
&videorate,
&videoconvert,
appsink.upcast_ref(),
])
.map_err(|_| anyhow!("Failed to link webcam elements"))?;
let current_frame = Arc::new(Mutex::new(None));
let current_frame_clone = current_frame.clone();
let is_active = Arc::new(Mutex::new(false));
appsink.set_callbacks(
gst_app::AppSinkCallbacks::builder()
.new_sample(move |sink| {
let sample = match sink.pull_sample() {
Ok(sample) => sample,
Err(_) => return Err(gst::FlowError::Eos),
};
let buffer = match sample.buffer() {
Some(buffer) => buffer,
_ => return Err(gst::FlowError::Error),
};
let caps = match sample.caps() {
Some(caps) => caps,
_ => return Err(gst::FlowError::Error),
};
let video_info = match gst_video::VideoInfo::from_caps(caps) {
Ok(info) => info,
Err(_) => return Err(gst::FlowError::Error),
};
let map = match buffer.map_readable() {
Ok(map) => map,
Err(_) => return Err(gst::FlowError::Error),
};
let frame_data = map.as_slice();
let width = video_info.width() as usize;
let height = video_info.height() as usize;
let mut rgba_image = image::RgbaImage::new(width as u32, height as u32);
let stride = video_info.stride()[0] as usize;
for y in 0..height {
let src_start = y * stride;
let src_end = src_start + width * 4;
let dst_start = y * width * 4;
let dst_end = dst_start + width * 4;
let dst_buffer = rgba_image.as_mut();
if src_end <= frame_data.len() && dst_end <= dst_buffer.len() {
dst_buffer[dst_start..dst_end]
.copy_from_slice(&frame_data[src_start..src_end]);
}
}
if let Ok(mut frame_lock) = current_frame_clone.lock() {
*frame_lock = Some(rgba_image);
}
Ok(gst::FlowSuccess::Ok)
})
.build(),
);
let webcam_texture = Self {
texture_manager,
pipeline,
appsink,
dimensions: (1280, 720),
is_active,
last_update: Instant::now(),
current_frame,
texture_initialized: false,
frame_count: 0,
device_name,
};
info!("Webcam texture manager created successfully");
Ok(webcam_texture)
}
/// Set the pipeline to `Playing` and record the negotiated frame dimensions.
///
/// Waits (bounded) for the asynchronous state change to complete instead of
/// sleeping an arbitrary 100 ms, so caps negotiation has a chance to finish
/// before we read the sink pad's current caps. If caps are not yet available,
/// the default dimensions are kept and corrected later by `update_texture`.
///
/// # Errors
/// Returns an error if the pipeline refuses the `Playing` state change.
pub fn start(&mut self) -> Result<()> {
    info!("Starting webcam capture");
    self.pipeline
        .set_state(gst::State::Playing)
        .map_err(|e| anyhow!("Failed to start webcam: {:?}", e))?;
    *self.is_active.lock().unwrap() = true;

    // Block (up to 500 ms) until the async Ready->Playing transition settles;
    // the result is informational only — failure here just means caps may not
    // be negotiated yet.
    let _ = self.pipeline.state(gst::ClockTime::from_mseconds(500));

    if let Some(caps) = self
        .appsink
        .static_pad("sink")
        .and_then(|pad| pad.current_caps())
    {
        if let Some(s) = caps.structure(0) {
            if let (Ok(width), Ok(height)) = (s.get::<i32>("width"), s.get::<i32>("height")) {
                self.dimensions = (width as u32, height as u32);
                info!("Webcam dimensions: {width}x{height}");
            }
        }
    }
    Ok(())
}
/// Stop the capture by driving the pipeline to `Null` and clearing the
/// active flag.
///
/// # Errors
/// Returns an error if the pipeline rejects the `Null` state change.
pub fn stop(&mut self) -> Result<()> {
    info!("Stopping webcam capture");
    self.pipeline
        .set_state(gst::State::Null)
        .map_err(|e| anyhow!("Failed to stop webcam: {:?}", e))?;
    *self.is_active.lock().unwrap() = false;
    Ok(())
}
/// Borrow the GPU texture manager holding the latest uploaded frame.
pub fn texture_manager(&self) -> &TextureManager {
&self.texture_manager
}
/// Upload the most recent captured frame, if any, into the GPU texture.
///
/// Returns `Ok(true)` when a frame was consumed and uploaded, `Ok(false)`
/// when capture is inactive or no new frame has arrived since the last call.
pub fn update_texture(
    &mut self,
    device: &wgpu::Device,
    queue: &wgpu::Queue,
    bind_group_layout: &wgpu::BindGroupLayout,
) -> Result<bool> {
    if !*self.is_active.lock().unwrap() {
        return Ok(false);
    }

    // Take the pending frame out of the shared slot so the streaming thread
    // can deposit the next one without waiting on us.
    let pending = self.current_frame.lock().unwrap().take();
    let Some(frame) = pending else {
        return Ok(false);
    };

    self.frame_count += 1;
    let width = frame.width();
    let height = frame.height();
    if self.frame_count % 60 == 0 {
        debug!(
            "Processing webcam frame #{} (dimensions: {}x{})",
            self.frame_count, width, height
        );
    }

    // Rebuild the texture whenever its size is stale or suspicious, and for
    // the first few frames; otherwise upload in place.
    let needs_new_texture = !self.texture_initialized
        || self.dimensions != (width, height)
        || self.dimensions.0 <= 1
        || self.dimensions.1 <= 1
        || self.frame_count <= 3;
    if needs_new_texture {
        info!("Creating new webcam texture with dimensions: {width}x{height}");
        self.texture_manager = TextureManager::new(device, queue, &frame, bind_group_layout);
        self.dimensions = (width, height);
        self.texture_initialized = true;
    } else {
        self.texture_manager.update(queue, &frame);
    }

    self.last_update = Instant::now();
    Ok(true)
}
/// Current frame dimensions as `(width, height)` in pixels.
pub fn dimensions(&self) -> (u32, u32) {
self.dimensions
}
/// Whether the capture pipeline has been started (and not yet stopped).
pub fn is_active(&self) -> bool {
*self.is_active.lock().unwrap()
}
/// Identifier of the capture device this manager was created for.
pub fn device_name(&self) -> &str {
&self.device_name
}
/// Enumerate likely webcam devices for the current platform.
///
/// On Linux this probes `/dev/video0`..`/dev/video9` for existing nodes; on
/// macOS and Windows it returns generic `Camera N` labels (those backends are
/// index-based and offer no cheap path probe here). Always returns at least
/// one entry ("Default Camera") so callers can present a non-empty list.
pub fn list_devices() -> Vec<String> {
    let mut devices = Vec::new();
    #[cfg(target_os = "linux")]
    {
        for i in 0..10 {
            // Consistent inline-format-args style with the rest of the file.
            let device_path = format!("/dev/video{i}");
            if std::path::Path::new(&device_path).exists() {
                devices.push(device_path);
            }
        }
    }
    #[cfg(target_os = "macos")]
    {
        for i in 0..5 {
            devices.push(format!("Camera {i}"));
        }
    }
    #[cfg(target_os = "windows")]
    {
        for i in 0..5 {
            devices.push(format!("Camera {i}"));
        }
    }
    if devices.is_empty() {
        devices.push("Default Camera".to_string());
    }
    devices
}
}
impl Drop for WebcamTextureManager {
/// Best-effort teardown: drive the pipeline to `Null` so the capture device
/// is released. The result is deliberately ignored — drop cannot fail.
fn drop(&mut self) {
info!("Shutting down webcam pipeline");
let _ = self.pipeline.set_state(gst::State::Null);
}
}