// web_audio_api/worklet.rs

//! User-defined audio nodes and processors
//!
//! See the following files for an example implementation of user defined nodes:
//! - `examples/worklet.rs` (basics with an audio param)
//! - `examples/worklet_message_port.rs` (basics with message port)
//! - `examples/worklet_bitcrusher.rs` (real world example)

8pub use crate::render::AudioWorkletGlobalScope;
9
10use crate::context::{AudioContextRegistration, AudioParamId, BaseAudioContext};
11use crate::node::{AudioNode, AudioNodeOptions, ChannelConfig};
12use crate::param::{AudioParam, AudioParamDescriptor};
13use crate::render::{AudioProcessor, AudioRenderQuantum};
14use crate::{MessagePort, MAX_CHANNELS};
15
16use std::any::Any;
17use std::collections::HashMap;
18use std::ops::{Deref, DerefMut};
19
/// Accessor for current [`AudioParam`] values
///
/// Handed to [`AudioWorkletProcessor::process`] so user code can look up the computed
/// parameter values for the current render quantum by parameter name.
pub struct AudioParamValues<'a> {
    /// Computed per-quantum parameter buffers, owned by the render thread
    values: crate::render::AudioParamValues<'a>,
    /// Maps each user-facing parameter name to its internal [`AudioParamId`]
    map: &'a HashMap<String, AudioParamId>,
}
25
26impl std::fmt::Debug for AudioParamValues<'_> {
27    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
28        f.debug_struct("AudioParamValues").finish_non_exhaustive()
29    }
30}
31
32impl<'a> AudioParamValues<'a> {
33    /// Get the computed values for the given [`AudioParam`]
34    ///
35    /// For k-rate params or if the (a-rate) parameter is constant for this block, it will provide
36    /// a slice of length 1. In other cases, i.e. a-rate param with scheduled automations it will
37    /// provide a slice of length equal to the render quantum size (default: 128)
38    #[allow(clippy::missing_panics_doc)]
39    pub fn get(&'a self, name: &str) -> impl Deref<Target = [f32]> + 'a {
40        let id = self.map.get(name).unwrap();
41        self.values.get(id)
42    }
43
44    pub fn keys(&self) -> impl Iterator<Item = &str> {
45        self.map.keys().map(|s| s.as_ref())
46    }
47}
48
/// Audio processing code that runs on the audio rendering thread.
pub trait AudioWorkletProcessor {
    /// Constructor options for the audio processor
    ///
    /// This holds any user-defined data that may be used to initialize custom
    /// properties in an AudioWorkletProcessor instance that is associated with the
    /// AudioWorkletNode.
    ///
    /// The value is supplied via `AudioWorkletNodeOptions::processor_options` and moved
    /// into [`AudioWorkletProcessor::constructor`]. It must be `Send` because it crosses
    /// from the control thread to the render thread.
    type ProcessorOptions: Send;

    /// Constructor of the [`AudioWorkletProcessor`] instance (to be executed in the render thread)
    fn constructor(opts: Self::ProcessorOptions) -> Self
    where
        Self: Sized;

    /// List of [`AudioParam`]s for this audio processor
    ///
    /// A default implementation is provided that supplies no parameters.
    ///
    /// The descriptor names become the keys of the node's `parameters()` map and of the
    /// [`AudioParamValues`] accessor passed to `process`.
    fn parameter_descriptors() -> Vec<AudioParamDescriptor>
    where
        Self: Sized,
    {
        vec![] // empty by default
    }

    /// Audio processing function
    ///
    /// # Arguments
    ///
    /// - inputs: readonly array of input buffers
    /// - outputs: array of output buffers
    /// - params: available [`AudioParam`] values for this processor
    /// - scope: AudioWorkletGlobalScope object with current frame, timestamp, sample rate
    ///
    /// # Return value
    ///
    /// The return value (bool) of this callback controls the lifetime of the processor.
    ///
    /// - return `false` when the node only transforms their inputs, and as such can be removed when
    ///   the inputs are disconnected (e.g. GainNode)
    /// - return `true` for some time when the node still outputs after the inputs are disconnected
    ///   (e.g. DelayNode)
    /// - return `true` as long as this node is a source of output (e.g. OscillatorNode)
    fn process<'a, 'b>(
        &mut self,
        inputs: &'b [&'a [&'a [f32]]],
        outputs: &'b mut [&'a mut [&'a mut [f32]]],
        params: AudioParamValues<'b>,
        scope: &'b AudioWorkletGlobalScope,
    ) -> bool;

    /// Handle incoming messages from the linked AudioNode
    ///
    /// By overriding this method you can add a handler for messages sent from the control thread
    /// via the AudioWorkletNode MessagePort.
    ///
    /// Receivers are supposed to consume the content of `msg`. The content of `msg` might
    /// also be replaced by cruft that needs to be deallocated outside of the render thread
    /// afterwards, e.g. when replacing an internal buffer.
    ///
    /// This method is just a shim of the full
    /// [`MessagePort`](https://webaudio.github.io/web-audio-api/#dom-audioworkletprocessor-port)
    /// `onmessage` functionality of the AudioWorkletProcessor.
    ///
    /// The default implementation logs a warning and drops the message.
    fn onmessage(&mut self, _msg: &mut dyn Any) {
        log::warn!("AudioWorkletProcessor: Ignoring incoming message");
    }
}
115
/// Options for constructing an [`AudioWorkletNode`]
///
/// The type parameter `C` is the processor's
/// [`ProcessorOptions`](AudioWorkletProcessor::ProcessorOptions) type.
// dictionary AudioWorkletNodeOptions : AudioNodeOptions {
//     unsigned long numberOfInputs = 1;
//     unsigned long numberOfOutputs = 1;
//     sequence<unsigned long> outputChannelCount;
//     record<DOMString, double> parameterData;
//     object processorOptions;
// };
#[derive(Clone, Debug)]
pub struct AudioWorkletNodeOptions<C> {
    /// This is used to initialize the value of the AudioNode numberOfInputs attribute.
    pub number_of_inputs: usize,
    /// This is used to initialize the value of the AudioNode numberOfOutputs attribute.
    pub number_of_outputs: usize,
    /// This array is used to configure the number of channels in each output.
    ///
    /// Leave empty (the default) for the automatic behavior: a single-input/single-output
    /// node inherits its output channel count from the input, any other layout gets one
    /// channel per output.
    pub output_channel_count: Vec<usize>,
    /// This is a list of user-defined key-value pairs that are used to set the initial value of an
    /// AudioParam with the matched name in the AudioWorkletNode.
    pub parameter_data: HashMap<String, f64>,
    /// This holds any user-defined data that may be used to initialize custom properties in an
    /// AudioWorkletProcessor instance that is associated with the AudioWorkletNode.
    pub processor_options: C,
    /// Channel config options
    pub audio_node_options: AudioNodeOptions,
}
141
142impl<C: Default> Default for AudioWorkletNodeOptions<C> {
143    fn default() -> Self {
144        Self {
145            number_of_inputs: 1,
146            number_of_outputs: 1,
147            output_channel_count: Vec::new(),
148            parameter_data: HashMap::new(),
149            processor_options: C::default(),
150            audio_node_options: AudioNodeOptions::default(),
151        }
152    }
153}
154
/// A user-defined AudioNode which lives in the control thread
///
/// - MDN documentation: <https://developer.mozilla.org/en-US/docs/Web/API/AudioWorkletNode>
/// - specification: <https://webaudio.github.io/web-audio-api/#AudioWorkletNode>
///
/// # Examples
///
/// - `cargo run --release --example worklet`
/// - `cargo run --release --example worklet_message_port`
/// - `cargo run --release --example worklet_bitcrusher`
///
#[derive(Debug)]
pub struct AudioWorkletNode {
    /// Handle tying this node to its audio context
    registration: AudioContextRegistration,
    /// Channel count/mode/interpretation configuration
    channel_config: ChannelConfig,
    /// Number of inputs, fixed at construction
    number_of_inputs: usize,
    /// Number of outputs, fixed at construction
    number_of_outputs: usize,
    /// Name -> param handle map, exposed via [`AudioWorkletNode::parameters`]
    audio_param_map: HashMap<String, AudioParam>,
}
174
impl AudioNode for AudioWorkletNode {
    fn registration(&self) -> &AudioContextRegistration {
        &self.registration
    }

    fn channel_config(&self) -> &ChannelConfig {
        &self.channel_config
    }

    /// Number of inputs, as configured via [`AudioWorkletNodeOptions`]
    fn number_of_inputs(&self) -> usize {
        self.number_of_inputs
    }

    /// Number of outputs, as configured via [`AudioWorkletNodeOptions`]
    fn number_of_outputs(&self) -> usize {
        self.number_of_outputs
    }
}
192
193impl AudioWorkletNode {
194    /// Construct a new AudioWorkletNode
195    ///
196    /// # Panics
197    ///
198    /// This function panics when
199    /// - the number of inputs and the number of outputs of the supplied options are both equal to
200    ///   zero.
201    /// - any of the output channel counts is equal to zero or larger than 32 ([`MAX_CHANNELS`])
202    pub fn new<P: AudioWorkletProcessor + 'static>(
203        context: &impl BaseAudioContext,
204        options: AudioWorkletNodeOptions<P::ProcessorOptions>,
205    ) -> Self {
206        let AudioWorkletNodeOptions {
207            number_of_inputs,
208            number_of_outputs,
209            output_channel_count,
210            parameter_data,
211            processor_options,
212            audio_node_options: channel_config,
213        } = options;
214
215        assert!(
216            number_of_inputs != 0 || number_of_outputs != 0,
217            "NotSupportedError: number of inputs and outputs cannot both be zero"
218        );
219
220        let output_channel_count = if output_channel_count.is_empty() {
221            if number_of_inputs == 1 && number_of_outputs == 1 {
222                vec![] // special case
223            } else {
224                vec![1; number_of_outputs]
225            }
226        } else {
227            output_channel_count
228                .iter()
229                .copied()
230                .for_each(crate::assert_valid_number_of_channels);
231            assert_eq!(
232                output_channel_count.len(),
233                number_of_outputs,
234                "IndexSizeError: outputChannelCount.length should equal numberOfOutputs"
235            );
236            output_channel_count
237        };
238
239        let number_of_output_channels = if output_channel_count.is_empty() {
240            MAX_CHANNELS
241        } else {
242            output_channel_count.iter().sum::<usize>()
243        };
244
245        let node = context.base().register(move |registration| {
246            // Setup audio params, set initial values when supplied via parameter_data
247            let mut node_param_map = HashMap::new();
248            let mut processor_param_map = HashMap::new();
249            for mut param_descriptor in P::parameter_descriptors() {
250                let name = std::mem::take(&mut param_descriptor.name);
251                let (param, proc) = context.create_audio_param(param_descriptor, &registration);
252                if let Some(value) = parameter_data.get(&name) {
253                    param.set_value(*value as f32); // mismatch in spec f32 vs f64
254                }
255                node_param_map.insert(name.clone(), param);
256                processor_param_map.insert(name, proc);
257            }
258
259            let node = AudioWorkletNode {
260                registration,
261                channel_config: channel_config.into(),
262                number_of_inputs,
263                number_of_outputs,
264                audio_param_map: node_param_map,
265            };
266
267            let render: AudioWorkletRenderer<P> = AudioWorkletRenderer {
268                processor: Processor::new(processor_options),
269                audio_param_map: processor_param_map,
270                output_channel_count,
271                inputs_flat: Vec::with_capacity(number_of_inputs * MAX_CHANNELS),
272                inputs_grouped: Vec::with_capacity(number_of_inputs),
273                outputs_flat: Vec::with_capacity(number_of_output_channels),
274                outputs_grouped: Vec::with_capacity(number_of_outputs),
275            };
276
277            (node, Box::new(render))
278        });
279
280        node
281    }
282
283    /// Collection of AudioParam objects with associated names of this node
284    ///
285    /// This map is populated from a list of [`AudioParamDescriptor`]s in the
286    /// [`AudioWorkletProcessor`] class constructor at the instantiation.
287    pub fn parameters(&self) -> &HashMap<String, AudioParam> {
288        &self.audio_param_map
289    }
290
291    /// Message port to the processor in the render thread
292    ///
293    /// Every AudioWorkletNode has an associated port which is the [`MessagePort`]. It is connected
294    /// to the port on the corresponding [`AudioWorkletProcessor`] object allowing bidirectional
295    /// communication between the AudioWorkletNode and its AudioWorkletProcessor.
296    pub fn port(&self) -> MessagePort<'_> {
297        MessagePort::from_node(self)
298    }
299}
300
/// Deferred-construction wrapper for the user-supplied processor
///
/// The constructor options are produced on the control thread, but
/// [`AudioWorkletProcessor::constructor`] is to be executed on the render thread, so
/// construction is delayed until [`Processor::load`] is first called there.
enum Processor<P: AudioWorkletProcessor> {
    /// Not constructed yet; the `Option` lets `load` move the options out
    Uninit(Option<P::ProcessorOptions>),
    /// Fully constructed processor
    Init(P),
}
305
306impl<P: AudioWorkletProcessor> Processor<P> {
307    fn new(opts: P::ProcessorOptions) -> Self {
308        Self::Uninit(Some(opts))
309    }
310
311    fn load(&mut self) -> &mut dyn AudioWorkletProcessor<ProcessorOptions = P::ProcessorOptions> {
312        if let Processor::Uninit(opts) = self {
313            *self = Self::Init(P::constructor(opts.take().unwrap()));
314        }
315
316        match self {
317            Self::Init(p) => p,
318            Self::Uninit(_) => unreachable!(),
319        }
320    }
321}
322
/// Render-thread counterpart of an [`AudioWorkletNode`]
///
/// Adapts the internal [`AudioProcessor`] interface to the user-facing
/// [`AudioWorkletProcessor`] trait.
struct AudioWorkletRenderer<P: AudioWorkletProcessor> {
    /// User processor, constructed lazily on first use (render thread)
    processor: Processor<P>,
    /// Maps parameter names to internal ids, mirroring the node's param map
    audio_param_map: HashMap<String, AudioParamId>,
    /// Requested channel count per output; empty means "inherit from the input"
    output_channel_count: Vec<usize>,

    // Preallocated, reusable containers for channel data
    //
    // The `'static` lifetimes are a convenient fiction: these vectors hold borrows of the
    // current render quantum only for the duration of a single `process` call and are
    // cleared before it returns (see the SAFETY comments in `process`).
    inputs_flat: Vec<&'static [f32]>,
    inputs_grouped: Vec<&'static [&'static [f32]]>,
    outputs_flat: Vec<&'static mut [f32]>,
    outputs_grouped: Vec<&'static mut [&'static mut [f32]]>,
}
334
// SAFETY:
// The concrete AudioWorkletProcessor is instantiated inside the render thread and won't be
// sent elsewhere, so `P` itself never actually crosses a thread boundary. Before that,
// the renderer only carries `P::ProcessorOptions`, which is `Send` by the trait bound.
// The `'static` slice containers are emptied at the end of every `process` call, so no
// borrowed channel data is ever held across a potential thread handoff.
unsafe impl<P: AudioWorkletProcessor> Send for AudioWorkletRenderer<P> {}
339
impl<P: AudioWorkletProcessor> AudioProcessor for AudioWorkletRenderer<P> {
    fn process(
        &mut self,
        inputs: &[AudioRenderQuantum],
        outputs: &mut [AudioRenderQuantum],
        params: crate::render::AudioParamValues<'_>,
        scope: &AudioWorkletGlobalScope,
    ) -> bool {
        // Constructs the user processor on first use (we are on the render thread here).
        let processor = self.processor.load();

        // Bear with me, to construct a &[&[&[f32]]] we first build a backing vector of all the
        // individual sample slices. Then we chop it up to get to the right sub-slice structure.
        inputs
            .iter()
            .flat_map(|input| input.channels())
            .map(|input_channel| input_channel.as_ref())
            // SAFETY
            // We're upgrading the lifetime of the channel data to `static`. This is okay because
            // `self.processor` is a HRTB (for <'a> Fn (&'a) -> ..) so the references cannot
            // escape. The channel containers are cleared at the end of the `process` method.
            .map(|input_channel| unsafe { std::mem::transmute(input_channel) })
            .for_each(|c| self.inputs_flat.push(c));

        // Regroup the flat channel list so `inputs_grouped[i]` holds the channels of input `i`.
        let mut inputs_flat = &self.inputs_flat[..];
        for input in inputs {
            let c = input.number_of_channels();
            let (left, right) = inputs_flat.split_at(c);
            // SAFETY - see comments above
            let left_static = unsafe { std::mem::transmute::<&[&[f32]], &[&[f32]]>(left) };
            self.inputs_grouped.push(left_static);
            inputs_flat = right;
        }

        // Set the proper channel count for the outputs
        if !outputs.is_empty() && self.output_channel_count.is_empty() {
            // special case - single input/output - inherit channel count from input
            // (with outputs present, an empty output_channel_count implies the
            // 1-input/1-output case set up in AudioWorkletNode::new, so inputs[0] exists)
            outputs[0].set_number_of_channels(inputs[0].number_of_channels());
        } else {
            outputs
                .iter_mut()
                .zip(self.output_channel_count.iter())
                .for_each(|(output, &channel_count)| output.set_number_of_channels(channel_count));
        }

        // Create an iterator for the output channel counts without allocating, handling also the
        // case where self.output_channel_count is empty.
        let single_case = [inputs
            .first()
            .map(|i| i.number_of_channels())
            .unwrap_or_default()];
        let output_channel_count = if self.output_channel_count.is_empty() {
            &single_case[..]
        } else {
            &self.output_channel_count[..]
        };

        // Flatten the output channels just like the inputs above, but with mutable borrows.
        outputs
            .iter_mut()
            .flat_map(|output| output.channels_mut())
            .map(|output_channel| output_channel.deref_mut())
            // SAFETY
            // We're upgrading the lifetime of the channel data to `static`. This is okay because
            // `self.processor` is a HRTB (for <'a> Fn (&'a) -> ..) so the references cannot
            // escape. The channel containers are cleared at the end of the `process` method.
            .map(|output_channel| unsafe { std::mem::transmute(output_channel) })
            .for_each(|c| self.outputs_flat.push(c));

        // Regroup the flat output channels per output.
        if !outputs.is_empty() {
            let mut outputs_flat = &mut self.outputs_flat[..];
            for c in output_channel_count {
                let (left, right) = outputs_flat.split_at_mut(*c);
                // SAFETY - see comments above
                let left_static =
                    unsafe { std::mem::transmute::<&mut [&mut [f32]], &mut [&mut [f32]]>(left) };
                self.outputs_grouped.push(left_static);
                outputs_flat = right;
            }
        }

        // Wrap the raw param values in the name-indexed accessor handed to user code.
        let param_getter = AudioParamValues {
            values: params,
            map: &self.audio_param_map,
        };

        let tail_time = processor.process(
            &self.inputs_grouped[..],
            &mut self.outputs_grouped[..],
            param_getter,
            scope,
        );

        // Drop all lifetime-transmuted borrows of the current render quantum — this
        // upholds the SAFETY contracts above.
        self.inputs_grouped.clear();
        self.inputs_flat.clear();
        self.outputs_grouped.clear();
        self.outputs_flat.clear();

        tail_time
    }

    // Forward control-thread messages to the user processor (constructing it if needed).
    fn onmessage(&mut self, msg: &mut dyn Any) {
        self.processor.load().onmessage(msg)
    }

    fn has_side_effects(&self) -> bool {
        true // could be IO, message passing, ..
    }
}
447
#[cfg(test)]
mod tests {
    use super::*;
    use crate::context::OfflineAudioContext;
    use float_eq::assert_float_eq;
    use std::sync::atomic::{AtomicBool, Ordering};
    use std::sync::Arc;

    // Minimal processor: writes nothing to its outputs and always stays alive.
    struct TestProcessor;

    impl AudioWorkletProcessor for TestProcessor {
        type ProcessorOptions = ();

        fn constructor(_opts: Self::ProcessorOptions) -> Self {
            TestProcessor {}
        }

        fn process<'a, 'b>(
            &mut self,
            _inputs: &'b [&'a [&'a [f32]]],
            _outputs: &'b mut [&'a mut [&'a mut [f32]]],
            _params: AudioParamValues<'b>,
            _scope: &'b AudioWorkletGlobalScope,
        ) -> bool {
            true
        }
    }

    #[test]
    fn test_worklet_render() {
        let mut ctx = OfflineAudioContext::new(1, 128, 48000.);
        let node = AudioWorkletNode::new::<TestProcessor>(&ctx, AudioWorkletNodeOptions::default());
        node.connect(&ctx.destination());

        // The processor leaves its output untouched, so the rendering is silence.
        let buffer = ctx.start_rendering_sync();
        assert_float_eq!(
            buffer.get_channel_data(0)[..],
            &[0.; 128][..],
            abs_all <= 0.
        );
    }

    #[test]
    fn test_worklet_inputs_outputs() {
        let io_counts = [0, 1, 2];
        let mut ctx = OfflineAudioContext::new(1, 128, 48000.);

        // Every input/output count combination must construct and render fine,
        // except zero inputs with zero outputs.
        for n_in in io_counts {
            for n_out in io_counts {
                if (n_in, n_out) == (0, 0) {
                    continue; // this case is not allowed
                }
                let opts = AudioWorkletNodeOptions {
                    number_of_inputs: n_in,
                    number_of_outputs: n_out,
                    ..AudioWorkletNodeOptions::default()
                };
                let node = AudioWorkletNode::new::<TestProcessor>(&ctx, opts);

                if n_out > 0 {
                    node.connect(&ctx.destination());
                }
            }
        }

        let buffer = ctx.start_rendering_sync();
        assert_float_eq!(
            buffer.get_channel_data(0)[..],
            &[0.; 128][..],
            abs_all <= 0.
        );
    }

    #[test]
    fn test_worklet_only_input() {
        // Processor that records (via a shared flag) that it has been rendered.
        struct SetBoolWhenRunProcessor(Arc<AtomicBool>);

        impl AudioWorkletProcessor for SetBoolWhenRunProcessor {
            type ProcessorOptions = Arc<AtomicBool>;

            fn constructor(opts: Self::ProcessorOptions) -> Self {
                Self(opts)
            }

            fn process<'a, 'b>(
                &mut self,
                _inputs: &'b [&'a [&'a [f32]]],
                _outputs: &'b mut [&'a mut [&'a mut [f32]]],
                _params: AudioParamValues<'b>,
                _scope: &'b AudioWorkletGlobalScope,
            ) -> bool {
                self.0.store(true, Ordering::Relaxed);
                false
            }
        }

        let has_run = Arc::new(AtomicBool::new(false));

        let mut ctx = OfflineAudioContext::new(1, 128, 48000.);
        let opts = AudioWorkletNodeOptions {
            number_of_inputs: 1,
            number_of_outputs: 0,
            processor_options: Arc::clone(&has_run),
            ..AudioWorkletNodeOptions::default()
        };
        // An output-less worklet node must still be rendered.
        let _ = AudioWorkletNode::new::<SetBoolWhenRunProcessor>(&ctx, opts);

        let _ = ctx.start_rendering_sync();
        assert!(has_run.load(Ordering::Relaxed));
    }

    #[test]
    fn test_worklet_output_channel_count() {
        let mut ctx = OfflineAudioContext::new(1, 128, 48000.);

        // Empty list = default behavior (inherit channel count from the input).
        let worklet1 = AudioWorkletNode::new::<TestProcessor>(
            &ctx,
            AudioWorkletNodeOptions {
                output_channel_count: vec![],
                ..AudioWorkletNodeOptions::default()
            },
        );
        worklet1.connect(&ctx.destination());

        // Explicit mono output.
        let worklet2 = AudioWorkletNode::new::<TestProcessor>(
            &ctx,
            AudioWorkletNodeOptions {
                output_channel_count: vec![1],
                ..AudioWorkletNodeOptions::default()
            },
        );
        worklet2.connect(&ctx.destination());

        // Two outputs with distinct channel counts.
        let worklet3 = AudioWorkletNode::new::<TestProcessor>(
            &ctx,
            AudioWorkletNodeOptions {
                number_of_outputs: 2,
                output_channel_count: vec![1, 2],
                ..AudioWorkletNodeOptions::default()
            },
        );
        worklet3.connect(&ctx.destination());

        let buffer = ctx.start_rendering_sync();
        assert_float_eq!(
            buffer.get_channel_data(0)[..],
            &[0.; 128][..],
            abs_all <= 0.
        );
    }

    #[test]
    fn send_bound() {
        // A non-Send processor must be accepted: only its *options* cross threads,
        // the processor itself is constructed on the render thread.
        #[derive(Default)]
        struct RcProcessor {
            _rc: std::rc::Rc<()>, // not send
        }

        impl AudioWorkletProcessor for RcProcessor {
            type ProcessorOptions = ();

            fn constructor(_opts: Self::ProcessorOptions) -> Self {
                Self::default()
            }

            fn process<'a, 'b>(
                &mut self,
                _inputs: &'b [&'a [&'a [f32]]],
                _outputs: &'b mut [&'a mut [&'a mut [f32]]],
                _params: AudioParamValues<'b>,
                _scope: &'b AudioWorkletGlobalScope,
            ) -> bool {
                true
            }
        }

        let ctx = OfflineAudioContext::new(1, 128, 48000.);
        let _node = AudioWorkletNode::new::<RcProcessor>(&ctx, AudioWorkletNodeOptions::default());
    }
}