web_audio_api/render/processor.rs
//! Audio processing code that runs on the audio rendering thread
use crate::context::{AudioNodeId, AudioParamId};
use crate::events::{AudioProcessingEvent, ErrorEvent, EventDispatch};
use crate::{AudioBuffer, Event, RENDER_QUANTUM_SIZE};

use super::{graph::Node, AudioRenderQuantum, NodeCollection};

use crossbeam_channel::Sender;
use std::cell::Cell;

use std::any::Any;
use std::ops::Deref;

#[non_exhaustive] // we may want to add user-provided blobs to this later
/// The execution context of all AudioProcessors in a given AudioContext
///
/// This struct currently only contains information about the progress of time. In a future
/// version, it should be possible to add arbitrary data. For example, multiple processors might
/// share a buffer defining a wavetable or an impulse response.
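///
/// Inside [`AudioProcessor::process`] the time fields can be combined to derive per-sample
/// timestamps. A minimal sketch (an illustrative fragment, not crate API), where `i` is a sample
/// index within the current render quantum:
///
/// ```ignore
/// let sample_time = scope.current_time + i as f64 / scope.sample_rate as f64;
/// ```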
pub struct AudioWorkletGlobalScope {
    pub current_frame: u64,
    pub current_time: f64,
    pub sample_rate: f32,

    pub(crate) node_id: Cell<AudioNodeId>,
    pub(crate) event_sender: Sender<EventDispatch>,
}

impl std::fmt::Debug for AudioWorkletGlobalScope {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut format = f.debug_struct("AudioWorkletGlobalScope");
        format
            .field("current_frame", &self.current_frame)
            .field("current_time", &self.current_time)
            .field("sample_rate", &self.sample_rate)
            .finish_non_exhaustive()
    }
}

impl AudioWorkletGlobalScope {
    /// Send a message to the corresponding AudioWorkletNode of this processor
    ///
    /// This method is just a shim of the full
    /// [`MessagePort`](https://webaudio.github.io/web-audio-api/#dom-audioworkletprocessor-port)
    /// `postMessage` functionality of the AudioWorkletProcessor.
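    ///
    /// # Example
    ///
    /// A minimal sketch of reporting a value back to the control thread from audio processing
    /// code. The helper function and the `f32` peak payload are assumptions of this example; it
    /// also assumes `AudioRenderQuantum::channel_data` and the re-exports from the crate's
    /// `render` module:
    ///
    /// ```no_run
    /// use web_audio_api::render::{AudioRenderQuantum, AudioWorkletGlobalScope};
    ///
    /// // hypothetical helper, called from within `AudioProcessor::process`
    /// fn report_peak(scope: &AudioWorkletGlobalScope, inputs: &[AudioRenderQuantum]) {
    ///     let peak = inputs[0]
    ///         .channel_data(0)
    ///         .iter()
    ///         .fold(0.0_f32, |acc, s| acc.max(s.abs()));
    ///     // the corresponding AudioWorkletNode receives this on the control thread
    ///     scope.post_message(Box::new(peak));
    /// }
    /// ```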
    pub fn post_message(&self, msg: Box<dyn Any + Send + 'static>) {
        // sending could fail if the channel is saturated or the main thread is shutting down
        let _ = self
            .event_sender
            .try_send(EventDispatch::message(self.node_id.get(), msg));
    }

    pub(crate) fn send_ended_event(&self) {
        // sending could fail if the channel is saturated or the main thread is shutting down
        let _ = self
            .event_sender
            .try_send(EventDispatch::ended(self.node_id.get()));
    }

    pub(crate) fn send_audio_processing_event(
        &self,
        input_buffer: AudioBuffer,
        output_buffer: AudioBuffer,
        playback_time: f64,
    ) {
        // sending could fail if the channel is saturated or the main thread is shutting down
        let event = AudioProcessingEvent {
            input_buffer,
            output_buffer,
            playback_time,
            registration: None,
        };
        let dispatch = EventDispatch::audio_processing(self.node_id.get(), event);
        let _ = self.event_sender.try_send(dispatch);
    }

    pub(crate) fn report_error(&self, error: Box<dyn Any + Send>) {
        pub fn type_name_of_val<T: ?Sized>(_val: &T) -> &'static str {
            std::any::type_name::<T>()
        }
        let message = if let Some(v) = error.downcast_ref::<String>() {
            v.to_string()
        } else if let Some(v) = error.downcast_ref::<&str>() {
            v.to_string()
        } else {
            type_name_of_val(&error).to_string()
        };
        eprintln!(
            "Panic occurred in Audio Processor: '{}'. Removing node from graph.",
            &message
        );

        let event = ErrorEvent {
            message,
            error,
            event: Event {
                type_: "ErrorEvent",
            },
        };
        let _ = self
            .event_sender
            .try_send(EventDispatch::processor_error(self.node_id.get(), event));
    }
}

/// Interface for audio processing code that runs on the audio rendering thread.
///
/// Note that the AudioProcessor is typically constructed together with an
/// [`AudioNode`](crate::node::AudioNode) (the user facing object that lives in the control
/// thread). See [`ConcreteBaseAudioContext::register`](crate::context::ConcreteBaseAudioContext::register).
///
/// Check the `examples/worklet.rs` file for example usage of this trait.
pub trait AudioProcessor: Send {
    /// Audio processing function
    ///
    /// # Arguments
    ///
    /// - inputs: readonly array of input buffers
    /// - outputs: array of output buffers
    /// - params: available `AudioParam`s for this processor
    /// - scope: the render-thread scope, providing the current frame, the time at the start of
    ///   this render quantum, and the sample rate
    ///
    /// # Return value
    ///
    /// The return value (bool) of this callback controls the lifetime of the processor.
    ///
    /// - return `false` when the node only transforms its inputs, and as such can be removed when
    ///   the inputs are disconnected (e.g. GainNode)
    /// - return `true` for some time when the node still outputs after the inputs are
    ///   disconnected (e.g. DelayNode)
    /// - return `true` as long as this node is a source of output (e.g. OscillatorNode), as in
    ///   the example below
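    ///
    /// # Example
    ///
    /// A minimal sketch of a source processor that fills its first output channel with white
    /// noise and therefore returns `true`. The `NoiseProcessor` type and its tiny pseudo-random
    /// generator are illustrations only; the import path assumes these types are re-exported
    /// from the crate's `render` module:
    ///
    /// ```no_run
    /// use web_audio_api::render::{
    ///     AudioParamValues, AudioProcessor, AudioRenderQuantum, AudioWorkletGlobalScope,
    /// };
    ///
    /// struct NoiseProcessor {
    ///     seed: u32, // state of a tiny pseudo-random generator
    /// }
    ///
    /// impl AudioProcessor for NoiseProcessor {
    ///     fn process(
    ///         &mut self,
    ///         _inputs: &[AudioRenderQuantum],
    ///         outputs: &mut [AudioRenderQuantum],
    ///         _params: AudioParamValues<'_>,
    ///         _scope: &AudioWorkletGlobalScope,
    ///     ) -> bool {
    ///         for sample in outputs[0].channel_data_mut(0).iter_mut() {
    ///             // linear congruential generator, good enough for illustration
    ///             self.seed = self.seed.wrapping_mul(1_664_525).wrapping_add(1_013_904_223);
    ///             *sample = self.seed as f32 / u32::MAX as f32 * 2.0 - 1.0;
    ///         }
    ///         true // source node: keep the processor alive even without connected inputs
    ///     }
    /// }
    /// ```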
    fn process(
        &mut self,
        inputs: &[AudioRenderQuantum],
        outputs: &mut [AudioRenderQuantum],
        params: AudioParamValues<'_>,
        scope: &AudioWorkletGlobalScope,
    ) -> bool;

    /// Handle incoming messages from the linked AudioNode
    ///
    /// By overriding this method you can add a handler for messages sent from the control thread
    /// via
    /// [`AudioContextRegistration::post_message`](crate::context::AudioContextRegistration::post_message).
    /// This will not be necessary for most processors.
    ///
    /// Receivers are supposed to consume the content of `msg`. The content of `msg` might
    /// also be replaced by cruft that needs to be deallocated outside of the render thread
    /// afterwards, e.g. when replacing an internal buffer.
    ///
    /// This method is just a shim of the full
    /// [`MessagePort`](https://webaudio.github.io/web-audio-api/#dom-audioworkletprocessor-port)
    /// `onmessage` functionality of the AudioWorkletProcessor.
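    ///
    /// # Example
    ///
    /// A sketch of the buffer-swap idiom described above, written as a free function for
    /// brevity; in a real processor `current` would be a field of `self`, and the `Vec<f32>`
    /// message type is an assumption of this example:
    ///
    /// ```
    /// use std::any::Any;
    ///
    /// fn swap_in_new_table(current: &mut Vec<f32>, msg: &mut dyn Any) {
    ///     if let Some(table) = msg.downcast_mut::<Vec<f32>>() {
    ///         // the new table moves in, the old allocation travels back inside `msg` and is
    ///         // deallocated outside of the render thread
    ///         std::mem::swap(current, table);
    ///     }
    /// }
    /// ```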
    #[allow(unused_variables)]
    fn onmessage(&mut self, msg: &mut dyn Any) {
        log::warn!("Ignoring incoming message");
    }

    /// Return the name of the actual AudioProcessor type
    fn name(&self) -> &'static str {
        std::any::type_name::<Self>()
    }

    /// Indicates if this processor has 'side effects' other than producing output
    ///
    /// Processors without side effects can be dropped when there are no outputs connected and
    /// the control side handle no longer exists.
    ///
    /// Side effects could include
    /// - IO (e.g. speaker output of the destination node)
    /// - Message passing (e.g. worklet nodes)
    fn has_side_effects(&self) -> bool {
        false
    }

    /// Hook that is called just before this processor is dropped from the audio graph
    fn before_drop(&mut self, _scope: &AudioWorkletGlobalScope) {}
}

impl std::fmt::Debug for dyn AudioProcessor {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct(self.name()).finish_non_exhaustive()
    }
}

struct DerefAudioRenderQuantumChannel<'a>(std::cell::Ref<'a, Node>);

impl Deref for DerefAudioRenderQuantumChannel<'_> {
    type Target = [f32];

    fn deref(&self) -> &Self::Target {
        let buffer = self.0.get_buffer();
        let len = if buffer.single_valued() {
            1
        } else {
            RENDER_QUANTUM_SIZE
        };

        &buffer.channel_data(0)[..len]
    }
}

/// Accessor for current [`crate::param::AudioParam`] values
///
/// Provided to implementations of [`AudioProcessor`] in the render thread
pub struct AudioParamValues<'a> {
    nodes: &'a NodeCollection,
}

impl std::fmt::Debug for AudioParamValues<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("AudioParamValues").finish_non_exhaustive()
    }
}

impl<'a> AudioParamValues<'a> {
    pub(crate) fn from(nodes: &'a NodeCollection) -> Self {
        Self { nodes }
    }

    /// Get the computed values for the given [`crate::param::AudioParam`]
    ///
    /// For k-rate params, or when the (a-rate) parameter is constant for this block, it will
    /// provide a slice of length 1. In other cases, i.e. an a-rate param with scheduled
    /// automations, it will provide a slice of length equal to the render quantum size
    /// (default: 128).
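    ///
    /// A sketch of how `process` implementations typically handle both slice lengths; the
    /// `self.gain` `AudioParamId` and the `channel` sample slice are assumptions of this
    /// fragment:
    ///
    /// ```ignore
    /// let gain = params.get(&self.gain); // length 1 or RENDER_QUANTUM_SIZE
    /// for (i, sample) in channel.iter_mut().enumerate() {
    ///     // index 0 when the value is constant over this block, per-sample otherwise
    ///     *sample *= gain[if gain.len() == 1 { 0 } else { i }];
    /// }
    /// ```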
    #[allow(clippy::missing_panics_doc)]
    pub fn get(&self, index: &AudioParamId) -> impl Deref<Target = [f32]> + '_ {
        DerefAudioRenderQuantumChannel(self.nodes.get_unchecked(index.into()).borrow())
    }

    pub(crate) fn listener_params(&self) -> [impl Deref<Target = [f32]> + '_; 9] {
        crate::context::LISTENER_AUDIO_PARAM_IDS.map(|p| self.get(&p))
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    struct TestNode;

    impl AudioProcessor for TestNode {
        fn process(
            &mut self,
            _inputs: &[AudioRenderQuantum],
            _outputs: &mut [AudioRenderQuantum],
            _params: AudioParamValues<'_>,
            _scope: &AudioWorkletGlobalScope,
        ) -> bool {
            todo!()
        }
    }

    #[test]
    fn test_debug_fmt() {
        let proc = &TestNode as &dyn AudioProcessor;
        assert_eq!(
            &format!("{:?}", proc),
            "web_audio_api::render::processor::tests::TestNode { .. }"
        );
    }
}