pub use crate::render::AudioWorkletGlobalScope;

use crate::context::{AudioContextRegistration, AudioParamId, BaseAudioContext};
use crate::node::{AudioNode, AudioNodeOptions, ChannelConfig};
use crate::param::{AudioParam, AudioParamDescriptor};
use crate::render::{AudioProcessor, AudioRenderQuantum};
use crate::{MessagePort, MAX_CHANNELS};

use std::any::Any;
use std::collections::HashMap;
use std::ops::{Deref, DerefMut};

/// Accessor for the [`AudioParam`] values of this processor during the current
/// render quantum.
pub struct AudioParamValues<'a> {
    values: crate::render::AudioParamValues<'a>,
    map: &'a HashMap<String, AudioParamId>,
}

impl std::fmt::Debug for AudioParamValues<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("AudioParamValues").finish_non_exhaustive()
    }
}

impl<'a> AudioParamValues<'a> {
    /// Get the computed values for the given parameter name (panics when the
    /// name is not known to this processor).
    #[allow(clippy::missing_panics_doc)]
    pub fn get(&'a self, name: &str) -> impl Deref<Target = [f32]> + 'a {
        let id = self.map.get(name).unwrap();
        self.values.get(id)
    }

    /// Iterator over the names of the available parameters.
    pub fn keys(&self) -> impl Iterator<Item = &str> {
        self.map.keys().map(|s| s.as_ref())
    }
}

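/// Audio processing code that runs on the audio rendering thread.
///
/// A minimal pass-through sketch of an implementation; the trait and helper
/// types are the ones defined in this module, `MyProcessor` itself is
/// illustrative:
///
/// ```ignore
/// struct MyProcessor;
///
/// impl AudioWorkletProcessor for MyProcessor {
///     type ProcessorOptions = ();
///
///     fn constructor(_opts: Self::ProcessorOptions) -> Self {
///         MyProcessor
///     }
///
///     fn process<'a, 'b>(
///         &mut self,
///         inputs: &'b [&'a [&'a [f32]]],
///         outputs: &'b mut [&'a mut [&'a mut [f32]]],
///         _params: AudioParamValues<'b>,
///         _scope: &'b AudioWorkletGlobalScope,
///     ) -> bool {
///         // Copy the first input to the first output, channel by channel.
///         if let (Some(input), Some(output)) = (inputs.first(), outputs.first_mut()) {
///             for (in_channel, out_channel) in input.iter().zip(output.iter_mut()) {
///                 out_channel.copy_from_slice(in_channel);
///             }
///         }
///         false // no tail time
///     }
/// }
/// ```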
pub trait AudioWorkletProcessor {
    /// Options passed to [`Self::constructor`], supplied via
    /// [`AudioWorkletNodeOptions::processor_options`].
    type ProcessorOptions: Send;

    /// Instantiate the processor from the supplied options.
    fn constructor(opts: Self::ProcessorOptions) -> Self
    where
        Self: Sized;

    /// List of [`AudioParam`]s this processor exposes, empty by default.
    fn parameter_descriptors() -> Vec<AudioParamDescriptor>
    where
        Self: Sized,
    {
        vec![]
    }

    /// Render a quantum of audio.
    ///
    /// Return `true` when the processor should be kept alive even without
    /// connected inputs (e.g. it still has a tail to emit), `false` otherwise.
    fn process<'a, 'b>(
        &mut self,
        inputs: &'b [&'a [&'a [f32]]],
        outputs: &'b mut [&'a mut [&'a mut [f32]]],
        params: AudioParamValues<'b>,
        scope: &'b AudioWorkletGlobalScope,
    ) -> bool;

    /// Handle a message sent to the processor via [`AudioWorkletNode::port`].
    ///
    /// The default implementation logs and ignores the message.
    fn onmessage(&mut self, _msg: &mut dyn Any) {
        log::warn!("AudioWorkletProcessor: Ignoring incoming message");
    }
}

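/// Options for constructing an [`AudioWorkletNode`].
///
/// Unspecified fields can be taken from the defaults, as in this sketch
/// (`processor_options` falls back to `C::default()`):
///
/// ```ignore
/// let options = AudioWorkletNodeOptions {
///     number_of_inputs: 1,
///     number_of_outputs: 2,
///     ..AudioWorkletNodeOptions::default()
/// };
/// ```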
#[derive(Clone, Debug)]
pub struct AudioWorkletNodeOptions<C> {
    /// Number of inputs of the node (defaults to 1).
    pub number_of_inputs: usize,
    /// Number of outputs of the node (defaults to 1).
    pub number_of_outputs: usize,
    /// Requested number of channels for each output; when left empty, the
    /// defaults apply (see [`AudioWorkletNode::new`]).
    pub output_channel_count: Vec<usize>,
    /// Initial values for the processor's [`AudioParam`]s, keyed by name.
    pub parameter_data: HashMap<String, f64>,
    /// User-defined options, handed to [`AudioWorkletProcessor::constructor`].
    pub processor_options: C,
    /// Channel configuration options of the node.
    pub audio_node_options: AudioNodeOptions,
}

impl<C: Default> Default for AudioWorkletNodeOptions<C> {
    fn default() -> Self {
        Self {
            number_of_inputs: 1,
            number_of_outputs: 1,
            output_channel_count: Vec::new(),
            parameter_data: HashMap::new(),
            processor_options: C::default(),
            audio_node_options: AudioNodeOptions::default(),
        }
    }
}

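/// A user-defined audio processing node, driven by an
/// [`AudioWorkletProcessor`] on the render thread.
///
/// A minimal construction sketch, where `MyProcessor` is an illustrative
/// [`AudioWorkletProcessor`] implementation:
///
/// ```ignore
/// let context = OfflineAudioContext::new(1, 128, 48_000.);
/// let options = AudioWorkletNodeOptions::default();
/// let node = AudioWorkletNode::new::<MyProcessor>(&context, options);
/// node.connect(&context.destination());
/// ```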
#[derive(Debug)]
pub struct AudioWorkletNode {
    registration: AudioContextRegistration,
    channel_config: ChannelConfig,
    number_of_inputs: usize,
    number_of_outputs: usize,
    audio_param_map: HashMap<String, AudioParam>,
}

impl AudioNode for AudioWorkletNode {
    fn registration(&self) -> &AudioContextRegistration {
        &self.registration
    }

    fn channel_config(&self) -> &ChannelConfig {
        &self.channel_config
    }

    fn number_of_inputs(&self) -> usize {
        self.number_of_inputs
    }

    fn number_of_outputs(&self) -> usize {
        self.number_of_outputs
    }
}

impl AudioWorkletNode {
    /// Construct a new node running the given [`AudioWorkletProcessor`].
    ///
    /// # Panics
    ///
    /// Panics when both the number of inputs and the number of outputs are
    /// zero, or when `output_channel_count` is invalid.
    pub fn new<P: AudioWorkletProcessor + 'static>(
        context: &impl BaseAudioContext,
        options: AudioWorkletNodeOptions<P::ProcessorOptions>,
    ) -> Self {
        let AudioWorkletNodeOptions {
            number_of_inputs,
            number_of_outputs,
            output_channel_count,
            parameter_data,
            processor_options,
            audio_node_options: channel_config,
        } = options;

        assert!(
            number_of_inputs != 0 || number_of_outputs != 0,
            "NotSupportedError: number of inputs and outputs cannot both be zero"
        );

        // When no explicit output channel count is given, a single-input /
        // single-output node follows the channel count of its input (marked by
        // an empty vec); all other configurations default to mono outputs.
        let output_channel_count = if output_channel_count.is_empty() {
            if number_of_inputs == 1 && number_of_outputs == 1 {
                vec![]
            } else {
                vec![1; number_of_outputs]
            }
        } else {
            output_channel_count
                .iter()
                .copied()
                .for_each(crate::assert_valid_number_of_channels);
            assert_eq!(
                output_channel_count.len(),
                number_of_outputs,
                "IndexSizeError: outputChannelCount.length should equal numberOfOutputs"
            );
            output_channel_count
        };

        let number_of_output_channels = if output_channel_count.is_empty() {
            MAX_CHANNELS
        } else {
            output_channel_count.iter().sum::<usize>()
        };

        let node = context.base().register(move |registration| {
            // Set up the AudioParams: the node side stores the `AudioParam`
            // handles, the render side stores the matching `AudioParamId`s.
            let mut node_param_map = HashMap::new();
            let mut processor_param_map = HashMap::new();
            for mut param_descriptor in P::parameter_descriptors() {
                let name = std::mem::take(&mut param_descriptor.name);
                let (param, proc) = context.create_audio_param(param_descriptor, &registration);
                if let Some(value) = parameter_data.get(&name) {
                    param.set_value(*value as f32);
                }
                node_param_map.insert(name.clone(), param);
                processor_param_map.insert(name, proc);
            }

            let node = AudioWorkletNode {
                registration,
                channel_config: channel_config.into(),
                number_of_inputs,
                number_of_outputs,
                audio_param_map: node_param_map,
            };

            let render: AudioWorkletRenderer<P> = AudioWorkletRenderer {
                processor: Processor::new(processor_options),
                audio_param_map: processor_param_map,
                output_channel_count,
                inputs_flat: Vec::with_capacity(number_of_inputs * MAX_CHANNELS),
                inputs_grouped: Vec::with_capacity(number_of_inputs),
                outputs_flat: Vec::with_capacity(number_of_output_channels),
                outputs_grouped: Vec::with_capacity(number_of_outputs),
            };

            (node, Box::new(render))
        });

        node
    }

    /// The node's [`AudioParam`]s, keyed by parameter name.
    pub fn parameters(&self) -> &HashMap<String, AudioParam> {
        &self.audio_param_map
    }

    /// Message port to the matching [`AudioWorkletProcessor`].
    pub fn port(&self) -> MessagePort<'_> {
        MessagePort::from_node(self)
    }
}

/// Helper that defers construction of the user-provided processor to the
/// render thread.
enum Processor<P: AudioWorkletProcessor> {
    Uninit(Option<P::ProcessorOptions>),
    Init(P),
}

impl<P: AudioWorkletProcessor> Processor<P> {
    fn new(opts: P::ProcessorOptions) -> Self {
        Self::Uninit(Some(opts))
    }

    /// Return the processor, constructing it on first use.
    fn load(&mut self) -> &mut dyn AudioWorkletProcessor<ProcessorOptions = P::ProcessorOptions> {
        if let Processor::Uninit(opts) = self {
            *self = Self::Init(P::constructor(opts.take().unwrap()));
        }

        match self {
            Self::Init(p) => p,
            Self::Uninit(_) => unreachable!(),
        }
    }
}

struct AudioWorkletRenderer<P: AudioWorkletProcessor> {
    processor: Processor<P>,
    audio_param_map: HashMap<String, AudioParamId>,
    output_channel_count: Vec<usize>,

    // Scratch buffers, reused every render quantum to avoid allocations on the
    // render thread. The 'static lifetimes are only nominal: the slices borrow
    // from the current render quantum and are cleared before `process`
    // returns, so the borrows never escape.
    inputs_flat: Vec<&'static [f32]>,
    inputs_grouped: Vec<&'static [&'static [f32]]>,
    outputs_flat: Vec<&'static mut [f32]>,
    outputs_grouped: Vec<&'static mut [&'static mut [f32]]>,
}

// SAFETY: the renderer is sent to the render thread before the user processor
// is constructed (`Processor::Uninit` only holds the `Send` options), and the
// scratch buffers are empty at that point.
unsafe impl<P: AudioWorkletProcessor> Send for AudioWorkletRenderer<P> {}

impl<P: AudioWorkletProcessor> AudioProcessor for AudioWorkletRenderer<P> {
    fn process(
        &mut self,
        inputs: &[AudioRenderQuantum],
        outputs: &mut [AudioRenderQuantum],
        params: crate::render::AudioParamValues<'_>,
        scope: &AudioWorkletGlobalScope,
    ) -> bool {
        let processor = self.processor.load();

        // Flatten all input channels into the reusable scratch buffer. The
        // transmute only erases the lifetime; the references are dropped (via
        // `clear`) before this function returns.
        inputs
            .iter()
            .flat_map(|input| input.channels())
            .map(|input_channel| input_channel.as_ref())
            .map(|input_channel| unsafe { std::mem::transmute(input_channel) })
            .for_each(|c| self.inputs_flat.push(c));

        // Regroup the flattened channels per input.
        let mut inputs_flat = &self.inputs_flat[..];
        for input in inputs {
            let c = input.number_of_channels();
            let (left, right) = inputs_flat.split_at(c);
            let left_static = unsafe { std::mem::transmute::<&[&[f32]], &[&[f32]]>(left) };
            self.inputs_grouped.push(left_static);
            inputs_flat = right;
        }

        // Apply the desired channel count to each output. An empty
        // `output_channel_count` means "follow the input" (single-input /
        // single-output case).
        if !outputs.is_empty() && self.output_channel_count.is_empty() {
            outputs[0].set_number_of_channels(inputs[0].number_of_channels());
        } else {
            outputs
                .iter_mut()
                .zip(self.output_channel_count.iter())
                .for_each(|(output, &channel_count)| output.set_number_of_channels(channel_count));
        }

        // Channel layout handed to the processor when `output_channel_count`
        // is empty.
        let single_case = [inputs
            .first()
            .map(|i| i.number_of_channels())
            .unwrap_or_default()];
        let output_channel_count = if self.output_channel_count.is_empty() {
            &single_case[..]
        } else {
            &self.output_channel_count[..]
        };

        // Flatten and regroup the output channels, mirroring the input setup.
        outputs
            .iter_mut()
            .flat_map(|output| output.channels_mut())
            .map(|output_channel| output_channel.deref_mut())
            .map(|output_channel| unsafe { std::mem::transmute(output_channel) })
            .for_each(|c| self.outputs_flat.push(c));

        if !outputs.is_empty() {
            let mut outputs_flat = &mut self.outputs_flat[..];
            for c in output_channel_count {
                let (left, right) = outputs_flat.split_at_mut(*c);
                let left_static =
                    unsafe { std::mem::transmute::<&mut [&mut [f32]], &mut [&mut [f32]]>(left) };
                self.outputs_grouped.push(left_static);
                outputs_flat = right;
            }
        }

        let param_getter = AudioParamValues {
            values: params,
            map: &self.audio_param_map,
        };

        let tail_time = processor.process(
            &self.inputs_grouped[..],
            &mut self.outputs_grouped[..],
            param_getter,
            scope,
        );

        // Drop all lifetime-erased references before returning.
        self.inputs_grouped.clear();
        self.inputs_flat.clear();
        self.outputs_grouped.clear();
        self.outputs_flat.clear();

        tail_time
    }

    fn onmessage(&mut self, msg: &mut dyn Any) {
        self.processor.load().onmessage(msg)
    }

    fn has_side_effects(&self) -> bool {
        // A user-defined processor may have side effects beyond its audio
        // output (e.g. message handling), so it must always run.
        true
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::context::OfflineAudioContext;
    use float_eq::assert_float_eq;
    use std::sync::atomic::{AtomicBool, Ordering};
    use std::sync::Arc;

    struct TestProcessor;

    impl AudioWorkletProcessor for TestProcessor {
        type ProcessorOptions = ();

        fn constructor(_opts: Self::ProcessorOptions) -> Self {
            TestProcessor {}
        }

        fn process<'a, 'b>(
            &mut self,
            _inputs: &'b [&'a [&'a [f32]]],
            _outputs: &'b mut [&'a mut [&'a mut [f32]]],
            _params: AudioParamValues<'b>,
            _scope: &'b AudioWorkletGlobalScope,
        ) -> bool {
            true
        }
    }

    #[test]
    fn test_worklet_render() {
        let mut context = OfflineAudioContext::new(1, 128, 48000.);
        let options = AudioWorkletNodeOptions::default();
        let worklet = AudioWorkletNode::new::<TestProcessor>(&context, options);
        worklet.connect(&context.destination());
        let buffer = context.start_rendering_sync();
        assert_float_eq!(
            buffer.get_channel_data(0)[..],
            &[0.; 128][..],
            abs_all <= 0.
        );
    }

    #[test]
    fn test_worklet_inputs_outputs() {
        let matrix = [0, 1, 2];
        let mut context = OfflineAudioContext::new(1, 128, 48000.);
        for inputs in matrix {
            for outputs in matrix {
                if inputs == 0 && outputs == 0 {
                    continue;
                }
                let options = AudioWorkletNodeOptions {
                    number_of_inputs: inputs,
                    number_of_outputs: outputs,
                    ..AudioWorkletNodeOptions::default()
                };
                let worklet = AudioWorkletNode::new::<TestProcessor>(&context, options);

                if outputs > 0 {
                    worklet.connect(&context.destination());
                }
            }
        }
        let buffer = context.start_rendering_sync();
        assert_float_eq!(
            buffer.get_channel_data(0)[..],
            &[0.; 128][..],
            abs_all <= 0.
        );
    }

    #[test]
    fn test_worklet_only_input() {
        struct SetBoolWhenRunProcessor(Arc<AtomicBool>);

        impl AudioWorkletProcessor for SetBoolWhenRunProcessor {
            type ProcessorOptions = Arc<AtomicBool>;

            fn constructor(opts: Self::ProcessorOptions) -> Self {
                Self(opts)
            }

            fn process<'a, 'b>(
                &mut self,
                _inputs: &'b [&'a [&'a [f32]]],
                _outputs: &'b mut [&'a mut [&'a mut [f32]]],
                _params: AudioParamValues<'b>,
                _scope: &'b AudioWorkletGlobalScope,
            ) -> bool {
                self.0.store(true, Ordering::Relaxed);
                false
            }
        }

        let has_run = Arc::new(AtomicBool::new(false));

        let mut context = OfflineAudioContext::new(1, 128, 48000.);
        let options = AudioWorkletNodeOptions {
            number_of_inputs: 1,
            number_of_outputs: 0,
            processor_options: Arc::clone(&has_run),
            ..AudioWorkletNodeOptions::default()
        };
        let _ = AudioWorkletNode::new::<SetBoolWhenRunProcessor>(&context, options);

        let _ = context.start_rendering_sync();
        assert!(has_run.load(Ordering::Relaxed));
    }

    #[test]
    fn test_worklet_output_channel_count() {
        let mut context = OfflineAudioContext::new(1, 128, 48000.);

        let options1 = AudioWorkletNodeOptions {
            output_channel_count: vec![],
            ..AudioWorkletNodeOptions::default()
        };
        let worklet1 = AudioWorkletNode::new::<TestProcessor>(&context, options1);
        worklet1.connect(&context.destination());

        let options2 = AudioWorkletNodeOptions {
            output_channel_count: vec![1],
            ..AudioWorkletNodeOptions::default()
        };
        let worklet2 = AudioWorkletNode::new::<TestProcessor>(&context, options2);
        worklet2.connect(&context.destination());

        let options3 = AudioWorkletNodeOptions {
            number_of_outputs: 2,
            output_channel_count: vec![1, 2],
            ..AudioWorkletNodeOptions::default()
        };
        let worklet3 = AudioWorkletNode::new::<TestProcessor>(&context, options3);
        worklet3.connect(&context.destination());

        let buffer = context.start_rendering_sync();
        assert_float_eq!(
            buffer.get_channel_data(0)[..],
            &[0.; 128][..],
            abs_all <= 0.
        );
    }

    #[test]
    fn send_bound() {
        #[derive(Default)]
        struct RcProcessor {
            _rc: std::rc::Rc<()>,
        }

        impl AudioWorkletProcessor for RcProcessor {
            type ProcessorOptions = ();

            fn constructor(_opts: Self::ProcessorOptions) -> Self {
                Self::default()
            }

            fn process<'a, 'b>(
                &mut self,
                _inputs: &'b [&'a [&'a [f32]]],
                _outputs: &'b mut [&'a mut [&'a mut [f32]]],
                _params: AudioParamValues<'b>,
                _scope: &'b AudioWorkletGlobalScope,
            ) -> bool {
                true
            }
        }

        let context = OfflineAudioContext::new(1, 128, 48000.);
        let options = AudioWorkletNodeOptions::default();
        let _worklet = AudioWorkletNode::new::<RcProcessor>(&context, options);
    }
}