// wavecraft_dev_server/audio/server.rs

//! Audio server for full-duplex audio I/O in dev mode.
//!
//! This module provides an audio server that captures microphone input,
//! processes it through a `DevAudioProcessor` (typically an `FfiProcessor`
//! loaded from the user's cdylib), and sends the processed audio to the
//! output device (speakers/headphones). Meter data is communicated back
//! via a callback channel.
//!
//! # Architecture
//!
//! ```text
//! OS Mic → cpal input callback → deinterleave → FfiProcessor::process()
//!                                                        │
//!                                              ┌─────────┴──────────┐
//!                                              │                    │
//!                                         meter compute      interleave
//!                                              │               → SPSC ring
//!                                              ▼                    │
//!                                        WebSocket broadcast        │
//!                                                                   ▼
//!                                              cpal output callback → Speakers
//! ```

mod device_setup;
mod input_pipeline;
mod metering;
mod output_routing;
mod startup_wiring;

use std::sync::Arc;

use anyhow::Result;
use cpal::{Device, Stream, StreamConfig};
use wavecraft_processors::OscilloscopeFrameConsumer;
use wavecraft_protocol::MeterUpdateNotification;

use super::atomic_params::AtomicParameterBridge;
use super::ffi_processor::DevAudioProcessor;
/// Configuration for audio server.
///
/// A small plain-data struct; derives `Copy` and `PartialEq` in addition
/// to `Debug`/`Clone` so callers can pass it by value and compare
/// configurations without ceremony (backward-compatible additions).
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct AudioConfig {
    /// Desired sample rate (e.g., 44100.0). Falls back to system default.
    pub sample_rate: f32,
    /// Buffer size in samples.
    pub buffer_size: u32,
}
48
/// Handle returned by `AudioServer::start()` that keeps both audio
/// streams alive. Drop this handle to stop audio capture and playback.
pub struct AudioHandle {
    // Underscore-prefixed: held purely so the streams stay alive for the
    // lifetime of the handle; never read after construction.
    _input_stream: Stream,
    // NOTE(review): `Option` suggests output may legitimately be absent
    // (input-only operation) — confirm against `startup_wiring`.
    _output_stream: Option<Stream>,
}
55
/// Audio server that processes OS input through a `DevAudioProcessor`
/// and routes the processed audio to the output device.
pub struct AudioServer {
    /// User audio processor (typically an `FfiProcessor` loaded from the
    /// user's cdylib); consumed by `start()`.
    processor: Box<dyn DevAudioProcessor>,
    /// Requested configuration. Only `buffer_size` is used directly by
    /// `start()`; the effective sample rate comes from `input_config`.
    config: AudioConfig,
    /// Default input device negotiated at construction time.
    input_device: Device,
    /// Default output device negotiated at construction time.
    output_device: Device,
    /// Stream config negotiated for the input device.
    input_config: StreamConfig,
    /// Stream config negotiated for the output device.
    output_config: StreamConfig,
    /// Lock-free bridge for reading parameters on the audio thread.
    param_bridge: Arc<AtomicParameterBridge>,
}
67
68impl AudioServer {
69    /// Create a new audio server with the given processor, config, and
70    /// parameter bridge for lock-free audio-thread parameter reads.
71    pub fn new(
72        processor: Box<dyn DevAudioProcessor>,
73        config: AudioConfig,
74        param_bridge: Arc<AtomicParameterBridge>,
75    ) -> Result<Self> {
76        let negotiated = device_setup::negotiate_default_devices_and_configs()?;
77
78        Ok(Self {
79            processor,
80            config,
81            input_device: negotiated.input_device,
82            output_device: negotiated.output_device,
83            input_config: negotiated.input_config,
84            output_config: negotiated.output_config,
85            param_bridge,
86        })
87    }
88
89    /// Start audio capture, processing, and playback.
90    ///
91    /// Returns an `AudioHandle` that keeps both streams alive, plus a
92    /// `MeterConsumer` for draining meter frames from a lock-free ring
93    /// buffer (RT-safe: no allocations on the audio thread).
94    ///
95    /// Drop the handle to stop audio.
96    pub fn start(
97        mut self,
98    ) -> Result<(
99        AudioHandle,
100        rtrb::Consumer<MeterUpdateNotification>,
101        OscilloscopeFrameConsumer,
102    )> {
103        // Set sample rate from the actual input device config
104        let actual_sample_rate = self.input_config.sample_rate.0 as f32;
105        self.processor.set_sample_rate(actual_sample_rate);
106
107        let processor = self.processor;
108        let buffer_size = self.config.buffer_size as usize;
109        let input_channels = self.input_config.channels as usize;
110        let output_channels = self.output_config.channels as usize;
111        let param_bridge = Arc::clone(&self.param_bridge);
112
113        startup_wiring::start_audio_io(startup_wiring::StartAudioIoContext {
114            input_device: &self.input_device,
115            input_config: &self.input_config,
116            output_device: &self.output_device,
117            output_config: &self.output_config,
118            processor,
119            buffer_size,
120            input_channels,
121            output_channels,
122            param_bridge,
123            actual_sample_rate,
124        })
125    }
126
    /// Returns true if an output device is available for audio playback.
    ///
    /// Unconditionally `true` in the current implementation: `new()`
    /// only succeeds when device negotiation provides an output device,
    /// so any constructed `AudioServer` has one.
    pub fn has_output(&self) -> bool {
        true
    }
131}