//! No-output audio backend (`web_audio_api/io/none.rs`)
//!
//! Drives the render thread at real-time pace without opening an audio device.

1use std::thread;
2use std::time::{Duration, Instant};
3
4use super::{AudioBackendManager, RenderThreadInit};
5
6use crate::buffer::AudioBuffer;
7use crate::context::AudioContextOptions;
8use crate::media_devices::MediaDeviceInfo;
9use crate::render::RenderThread;
10use crate::{MAX_CHANNELS, RENDER_QUANTUM_SIZE};
11
12use crossbeam_channel::{Receiver, Sender};
13
/// Control messages sent from the `NoneBackend` handle to its render loop
enum NoneBackendMessage {
    /// Start (or restart) rendering audio quanta
    Resume,
    /// Keep the loop ticking but skip rendering
    Suspend,
    /// Terminate the render loop and its thread
    Close,
}
19
/// Audio backend that renders in real time but discards the produced audio
/// (no actual output device is opened)
#[derive(Clone)]
pub(crate) struct NoneBackend {
    // Control channel to the render loop thread (resume/suspend/close)
    sender: Sender<NoneBackendMessage>,
    // Sample rate of the virtual output stream, in Hz
    sample_rate: f32,
}
25
26impl NoneBackend {
27    /// Creates a mock backend to be used as tombstones
28    pub(crate) fn void() -> Self {
29        Self {
30            sample_rate: 0.,
31            sender: crossbeam_channel::bounded(0).0,
32        }
33    }
34}
35
/// State owned by the render loop thread driving a `NoneBackend`
struct Callback {
    // Receives control messages from the `NoneBackend` handle
    receiver: Receiver<NoneBackendMessage>,
    // Produces the audio quanta (their output is discarded by this backend)
    render_thread: RenderThread,
    // Sample rate in Hz, used to derive the per-quantum render interval
    sample_rate: f32,
    // When false, the loop keeps ticking but does not render
    running: bool,
}
42
43impl Callback {
44    fn run(mut self) {
45        let buffer_size = RENDER_QUANTUM_SIZE; // TODO Latency Category
46        let mut buffer = vec![0.; buffer_size * MAX_CHANNELS];
47        let interval = Duration::from_secs_f32(buffer_size as f32 / self.sample_rate);
48
49        // For an isochronous callback we must calculate the deadline every render quantum
50        let mut deadline = Instant::now().checked_add(interval).unwrap();
51
52        loop {
53            // poll the receiver as long as the deadline is in the future
54            while let Ok(msg) = self.receiver.recv_deadline(deadline) {
55                match msg {
56                    NoneBackendMessage::Close => return,
57                    NoneBackendMessage::Resume => {
58                        self.running = true;
59                        deadline = Instant::now().checked_add(interval).unwrap();
60                        break; // start processing right away
61                    }
62                    NoneBackendMessage::Suspend => self.running = false,
63                }
64            }
65
66            if self.running {
67                self.render_thread.render(&mut buffer[..]);
68            }
69
70            deadline = deadline.checked_add(interval).unwrap();
71        }
72    }
73}
74
75impl AudioBackendManager for NoneBackend {
76    /// Setup a new output stream (speakers)
77    fn build_output(options: AudioContextOptions, render_thread_init: RenderThreadInit) -> Self
78    where
79        Self: Sized,
80    {
81        let sample_rate = options.sample_rate.unwrap_or(48000.);
82
83        let RenderThreadInit {
84            state,
85            frames_played,
86            ctrl_msg_recv,
87            load_value_send,
88            event_send,
89        } = render_thread_init;
90
91        let mut render_thread = RenderThread::new(
92            sample_rate,
93            MAX_CHANNELS,
94            ctrl_msg_recv,
95            state,
96            frames_played,
97            event_send,
98        );
99        render_thread.set_load_value_sender(load_value_send);
100        render_thread.spawn_garbage_collector_thread();
101
102        // Use a bounded channel for real-time safety. A maximum of 32 control messages (resume,
103        // suspend, ..) will be handled per render quantum. The control thread will block when the
104        // capacity is reached.
105        let (sender, receiver) = crossbeam_channel::bounded(32);
106
107        // todo: pass buffer size and sample rate
108        let callback = Callback {
109            render_thread,
110            receiver,
111            sample_rate,
112            running: true,
113        };
114
115        thread::spawn(move || callback.run());
116
117        Self {
118            sender,
119            sample_rate,
120        }
121    }
122
123    /// Setup a new input stream (microphone capture)
124    fn build_input(
125        _options: AudioContextOptions,
126        _number_of_channels: Option<u32>,
127    ) -> (Self, Receiver<AudioBuffer>)
128    where
129        Self: Sized,
130    {
131        unimplemented!()
132    }
133
134    /// Resume or start the stream
135    fn resume(&self) -> bool {
136        self.sender.send(NoneBackendMessage::Resume).unwrap();
137        true
138    }
139
140    /// Suspend the stream
141    fn suspend(&self) -> bool {
142        self.sender.send(NoneBackendMessage::Suspend).unwrap();
143        true
144    }
145
146    /// Close the stream, freeing all resources. It cannot be started again after closing.
147    fn close(&self) {
148        self.sender.send(NoneBackendMessage::Close).unwrap()
149    }
150
151    /// Sample rate of the stream
152    fn sample_rate(&self) -> f32 {
153        self.sample_rate
154    }
155
156    /// Number of channels of the stream
157    fn number_of_channels(&self) -> usize {
158        MAX_CHANNELS
159    }
160
161    /// Output latency of the stream in seconds
162    ///
163    /// This is the difference between the time the backend acquires the data in the callback and
164    /// the listener can hear the sound.
165    fn output_latency(&self) -> f64 {
166        0.
167    }
168
169    /// The audio output device
170    fn sink_id(&self) -> &str {
171        "none"
172    }
173
174    fn enumerate_devices_sync() -> Vec<MediaDeviceInfo>
175    where
176        Self: Sized,
177    {
178        unimplemented!()
179    }
180}