use super::analyzer::{create_shared_analyzer, SharedAnalyzer, SpectrumData};
use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
#[cfg(not(target_os = "windows"))]
use cpal::BufferSize;
use cpal::{Device, Stream, StreamConfig};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
/// Owns the system-audio capture pipeline: keeps the cpal stream alive,
/// shares a spectrum analyzer with the audio callback, and tracks whether
/// the stream is still healthy.
pub struct AudioCaptureManager {
    // Held only to keep the capture stream alive; stops when the manager drops.
    _stream: Stream,
    // Shared with the stream's data callback, which pushes samples into it.
    analyzer: SharedAnalyzer,
    // Cleared by the stream's error callback (and by Drop); false => stream dead.
    active: Arc<AtomicBool>,
}
impl AudioCaptureManager {
    /// Builds the full capture pipeline: locate a loopback-capable device,
    /// derive a compatible stream config, wire the shared analyzer into the
    /// data callback, and start the stream.
    ///
    /// Returns `None` if any step fails (no suitable device, no usable
    /// config, stream creation or playback failure).
    pub fn new() -> Option<Self> {
        let host = cpal::default_host();
        let device = Self::find_loopback_device(&host)?;
        let config = Self::get_compatible_config(&device)?;
        let analyzer = create_shared_analyzer();
        let active = Arc::new(AtomicBool::new(true));
        let stream = Self::build_stream(&device, &config, analyzer.clone(), active.clone())?;
        // cpal streams may require an explicit play() before delivering data.
        stream.play().ok()?;
        Some(Self {
            _stream: stream,
            analyzer,
            active,
        })
    }

    /// Returns the current spectrum, or `None` if the stream has errored out
    /// or the analyzer mutex is poisoned.
    pub fn get_spectrum(&self) -> Option<SpectrumData> {
        if !self.active.load(Ordering::Relaxed) {
            return None;
        }
        self.analyzer
            .lock()
            .ok()
            .map(|mut analyzer| analyzer.process())
    }

    /// True while the stream is running; cleared by the stream's error
    /// callback or by `Drop`.
    pub fn is_active(&self) -> bool {
        self.active.load(Ordering::Relaxed)
    }

    /// Picks the best device for capturing system audio on this platform.
    ///
    /// - Windows: WASAPI exposes loopback through the default *output* device.
    /// - Linux: prefer PulseAudio/PipeWire "monitor" inputs (ranked below),
    ///   falling back to the default input device.
    /// - macOS and others: no native loopback; fall back to the default input
    ///   device (typically the microphone, or a virtual device such as
    ///   BlackHole if installed).
    fn find_loopback_device(host: &cpal::Host) -> Option<Device> {
        #[cfg(target_os = "windows")]
        {
            host.default_output_device()
        }
        #[cfg(target_os = "linux")]
        {
            if let Ok(devices) = host.input_devices() {
                // Keep only monitor sources (loopbacks of playback devices).
                let mut monitors: Vec<Device> = devices
                    .filter(|device| {
                        device
                            .description()
                            .map(|description| {
                                description.name().to_lowercase().contains("monitor")
                            })
                            .unwrap_or(false)
                    })
                    .collect();
                // Rank monitors: bluetooth first, then speakers/analog, then
                // anything else, with HDMI last. `sort_by_cached_key` fetches
                // each device description once instead of once per comparison.
                monitors.sort_by_cached_key(|device| {
                    let name_lower = device
                        .description()
                        .map(|description| description.name().to_lowercase())
                        .unwrap_or_default();
                    if name_lower.contains("bluez") || name_lower.contains("bluetooth") {
                        0
                    } else if name_lower.contains("speaker") || name_lower.contains("analog") {
                        1
                    } else if name_lower.contains("hdmi") {
                        3
                    } else {
                        2
                    }
                });
                if let Some(device) = monitors.into_iter().next() {
                    return Some(device);
                }
            }
            host.default_input_device()
        }
        #[cfg(target_os = "macos")]
        {
            host.default_input_device()
        }
        #[cfg(not(any(target_os = "windows", target_os = "linux", target_os = "macos")))]
        {
            host.default_input_device()
        }
    }

    /// Derives a `StreamConfig` matching the device's default format.
    ///
    /// Windows captures through the output device, so its *output* config is
    /// the relevant one; everywhere else the device is an input.
    fn get_compatible_config(device: &Device) -> Option<StreamConfig> {
        #[cfg(target_os = "windows")]
        {
            device.default_output_config().ok().map(Into::into)
        }
        #[cfg(not(target_os = "windows"))]
        {
            let config = device.default_input_config().ok()?;
            Some(StreamConfig {
                channels: config.channels(),
                sample_rate: config.sample_rate(),
                buffer_size: BufferSize::Default,
            })
        }
    }

    /// Creates (but does not start) an input stream that downmixes each
    /// interleaved frame to mono and feeds it to the shared analyzer.
    ///
    /// A stream error clears `active` so callers can detect the dead stream.
    fn build_stream(
        device: &Device,
        config: &StreamConfig,
        analyzer: SharedAnalyzer,
        active: Arc<AtomicBool>,
    ) -> Option<Stream> {
        let channels = config.channels as usize;
        // A zero-channel config would make the chunking below panic.
        if channels == 0 {
            return None;
        }
        let active_clone = active.clone();
        let data_callback = move |data: &[f32], _: &cpal::InputCallbackInfo| {
            // Average each frame down to one mono sample. `chunks_exact`
            // drops a trailing partial frame instead of averaging it with
            // the wrong divisor (plain `chunks` divided a short frame's sum
            // by the full channel count).
            let mono_samples: Vec<f32> = data
                .chunks_exact(channels)
                .map(|frame| frame.iter().sum::<f32>() / channels as f32)
                .collect();
            if let Ok(mut analyzer) = analyzer.lock() {
                analyzer.push_samples(&mono_samples);
            }
        };
        let error_callback = move |_err: cpal::StreamError| {
            // Device unplugged / audio server died: flag the manager inactive.
            active_clone.store(false, Ordering::Relaxed);
        };
        device
            .build_input_stream(config, data_callback, error_callback, None)
            .ok()
    }
}
impl Drop for AudioCaptureManager {
    fn drop(&mut self) {
        // Mark the manager inactive first, so any callback still holding a
        // clone of the flag observes shutdown; the stream itself is torn down
        // when `_stream` is dropped immediately after this body runs.
        self.active.store(false, Ordering::Relaxed);
    }
}