pub mod engine;
use std::{
sync::{Arc, mpsc},
time::SystemTime,
};
use engine::{ChannelItem, Engine, EngineError, ProcessingError};
use crate::{
capturer::Options,
frame::{AudioFrame, FrameType},
has_permission, is_supported,
};
pub use crate::capturer::{Area, Point, Resolution, Size};
/// A single captured frame delivered by [`GPUCapturer`].
///
/// Video frames stay on the GPU as a [`GpuVideoFrame`] texture; audio frames
/// are delivered as CPU-side [`AudioFrame`] data.
pub enum GpuFrame {
    /// A video frame backed by a `wgpu::Texture`.
    Video(GpuVideoFrame),
    /// An audio frame from the capture session.
    Audio(AudioFrame),
}
/// A captured video frame that lives in GPU memory as a `wgpu::Texture`.
///
/// Accessors for each field live on the inherent `impl`; the texture can be
/// borrowed, turned into a view, or moved out entirely.
pub struct GpuVideoFrame {
    // The captured pixels; never leaves the GPU through this type.
    texture: wgpu::Texture,
    // Pixel format of `texture`.
    format: wgpu::TextureFormat,
    // Width and height in texels: `[width, height]`.
    size: [u32; 2],
    // Wall-clock time the frame was displayed/captured, as reported by the engine.
    display_time: SystemTime,
}
impl GpuVideoFrame {
    /// Borrows the underlying GPU texture.
    pub fn texture(&self) -> &wgpu::Texture {
        &self.texture
    }

    /// Consumes the frame and yields ownership of its texture.
    pub fn into_texture(self) -> wgpu::Texture {
        self.texture
    }

    /// The pixel format of the frame's texture.
    pub fn format(&self) -> wgpu::TextureFormat {
        self.format
    }

    /// Frame dimensions in texels as `[width, height]`.
    pub fn size(&self) -> [u32; 2] {
        self.size
    }

    /// Wall-clock timestamp associated with this frame.
    pub fn display_time(&self) -> SystemTime {
        self.display_time
    }

    /// Creates a texture view using the supplied descriptor.
    pub fn create_view(&self, desc: &wgpu::TextureViewDescriptor) -> wgpu::TextureView {
        self.texture.create_view(desc)
    }

    /// Creates a texture view with an all-default descriptor.
    pub fn create_default_view(&self) -> wgpu::TextureView {
        // Delegate to `create_view` so descriptor handling lives in one place.
        self.create_view(&wgpu::TextureViewDescriptor::default())
    }
}
impl std::fmt::Debug for GpuVideoFrame {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // The `texture` field is deliberately omitted; `finish_non_exhaustive`
        // prints a trailing `..` to make that omission visible.
        let mut dbg = f.debug_struct("GpuVideoFrame");
        dbg.field("size", &self.size);
        dbg.field("format", &self.format);
        dbg.field("display_time", &self.display_time);
        dbg.finish_non_exhaustive()
    }
}
/// Errors that can occur while constructing a [`GPUCapturer`] via
/// [`GPUCapturer::build`].
#[derive(thiserror::Error, Debug)]
pub enum GPUCapturerBuildError {
    /// Screen capture is not available on this platform at all.
    #[error("screen capturing is not supported on this platform")]
    NotSupported,
    /// The user/system has not granted screen-capture permission.
    #[error("permission to capture the screen is not granted")]
    PermissionNotGranted,
    /// `build` was called with an `Options::output_type` other than BGRA.
    #[error("GPU capture currently requires BGRA output frames")]
    UnsupportedOutputType,
    /// The engine reported it cannot run here; carries a static description.
    #[error("GPU capture engine is unavailable: {0}")]
    Engine(&'static str),
    /// Any other engine-level failure, forwarded verbatim.
    #[error(transparent)]
    Internal(#[from] EngineError),
}
/// Errors returned by the blocking [`GPUCapturer::get_next_frame`].
#[derive(thiserror::Error, Debug)]
pub enum GPUFrameError {
    /// The frame channel disconnected (the sending side was dropped).
    #[error(transparent)]
    Recv(#[from] mpsc::RecvError),
    /// The engine failed while turning a channel item into a frame.
    #[error(transparent)]
    Processing(#[from] ProcessingError),
}
/// Errors returned by the non-blocking [`GPUCapturer::try_get_next_frame`].
///
/// An empty channel is NOT an error there (it yields `Ok(None)`); only a
/// disconnected channel maps to [`GPUFrameTryError::Channel`].
#[derive(thiserror::Error, Debug)]
pub enum GPUFrameTryError {
    /// The frame channel disconnected (the sending side was dropped).
    #[error(transparent)]
    Channel(#[from] mpsc::RecvError),
    /// The engine failed while turning a channel item into a frame.
    #[error(transparent)]
    Processing(#[from] ProcessingError),
}
/// A screen capturer that delivers frames as GPU textures.
///
/// Construct with [`GPUCapturer::build`]; drive with `start_capture` /
/// `stop_capture`; consume frames with `get_next_frame` (blocking) or
/// `try_get_next_frame` (non-blocking).
pub struct GPUCapturer {
    // Platform capture engine; owns the sending half of the channel.
    engine: Engine,
    // Receiving half: raw engine items that still need `process_channel_item`.
    rx: mpsc::Receiver<ChannelItem>,
}
impl GPUCapturer {
    /// Builds a capturer for `options`, uploading frames through the given
    /// wgpu `device`/`queue`.
    ///
    /// # Errors
    /// Fails when capture is unsupported on this platform, permission is
    /// missing, `options.output_type` is not BGRA, or the engine cannot be
    /// created.
    pub fn build(
        options: Options,
        device: Arc<wgpu::Device>,
        queue: Arc<wgpu::Queue>,
    ) -> Result<GPUCapturer, GPUCapturerBuildError> {
        // Preconditions, cheapest first.
        if !is_supported() {
            return Err(GPUCapturerBuildError::NotSupported);
        }
        if !has_permission() {
            return Err(GPUCapturerBuildError::PermissionNotGranted);
        }
        // GPU upload path only handles BGRA output today.
        if !matches!(options.output_type, FrameType::BGRAFrame) {
            return Err(GPUCapturerBuildError::UnsupportedOutputType);
        }

        let (tx, rx) = mpsc::channel();
        let engine = Engine::new(&options, device, queue, tx).map_err(|err| match err {
            // Give `Unsupported` a friendlier, platform-level message.
            EngineError::Unsupported => GPUCapturerBuildError::Engine(
                "GPU capture is not available for this platform",
            ),
            other => GPUCapturerBuildError::from(other),
        })?;

        Ok(GPUCapturer { engine, rx })
    }

    /// Starts the capture session.
    pub fn start_capture(&mut self) {
        self.engine.start();
    }

    /// Stops the capture session.
    pub fn stop_capture(&mut self) {
        self.engine.stop();
    }

    /// Blocks until the next frame is available.
    ///
    /// Channel items that the engine resolves to no frame are skipped.
    ///
    /// # Errors
    /// Returns an error if the channel disconnects or the engine fails to
    /// process an item.
    pub fn get_next_frame(&self) -> Result<GpuFrame, GPUFrameError> {
        loop {
            let item = self.rx.recv()?;
            if let Some(frame) = self.engine.process_channel_item(item)? {
                return Ok(frame);
            }
        }
    }

    /// Non-blocking variant of [`Self::get_next_frame`].
    ///
    /// Returns `Ok(None)` when no frame is currently queued.
    ///
    /// # Errors
    /// Returns an error if the channel has disconnected or the engine fails
    /// to process an item.
    pub fn try_get_next_frame(&self) -> Result<Option<GpuFrame>, GPUFrameTryError> {
        loop {
            let item = match self.rx.try_recv() {
                Ok(item) => item,
                // Nothing queued right now — not an error.
                Err(mpsc::TryRecvError::Empty) => return Ok(None),
                // Sender dropped: surface as a channel error, mirroring the
                // blocking API's disconnect error.
                Err(mpsc::TryRecvError::Disconnected) => {
                    return Err(GPUFrameTryError::Channel(mpsc::RecvError));
                }
            };
            if let Some(frame) = self.engine.process_channel_item(item)? {
                return Ok(Some(frame));
            }
        }
    }

    /// Size of the frames this capturer outputs, as `[width, height]`.
    pub fn get_output_frame_size(&mut self) -> [u32; 2] {
        self.engine.get_output_frame_size()
    }
}
/// Allow a frame to be converted directly into its backing texture.
impl From<GpuVideoFrame> for wgpu::Texture {
    fn from(value: GpuVideoFrame) -> Self {
        // Reuse the inherent consuming accessor rather than touching the
        // field directly.
        value.into_texture()
    }
}