web_audio_api/node/delay.rs

use crate::context::{AudioContextRegistration, AudioParamId, BaseAudioContext};
use crate::param::{AudioParam, AudioParamDescriptor};
use crate::render::{
    AudioParamValues, AudioProcessor, AudioRenderQuantum, AudioWorkletGlobalScope,
};
use crate::RENDER_QUANTUM_SIZE;

use super::{AudioNode, AudioNodeOptions, ChannelConfig, ChannelInterpretation};

use std::cell::{Cell, RefCell, RefMut};
use std::rc::Rc;

/// Options for constructing a [`DelayNode`]
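///
/// A minimal sketch of overriding a single field while keeping the other defaults:
///
/// ```
/// use web_audio_api::node::DelayOptions;
///
/// let options = DelayOptions {
///     delay_time: 0.5,
///     ..Default::default()
/// };
/// assert_eq!(options.max_delay_time, 1.);
/// ```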
// dictionary DelayOptions : AudioNodeOptions {
//   double maxDelayTime = 1;
//   double delayTime = 0;
// };
#[derive(Clone, Debug)]
pub struct DelayOptions {
    pub max_delay_time: f64,
    pub delay_time: f64,
    pub audio_node_options: AudioNodeOptions,
}

impl Default for DelayOptions {
    fn default() -> Self {
        Self {
            max_delay_time: 1.,
            delay_time: 0.,
            audio_node_options: AudioNodeOptions::default(),
        }
    }
}

#[derive(Copy, Clone, Debug, Default)]
struct PlaybackInfo {
    prev_block_index: usize,
    prev_frame_index: usize,
    k: f32,
}

/// Node that delays the incoming audio signal by a certain amount
///
/// When this node is part of a cycle, the minimum achievable delay is one render quantum
/// (e.g. ~2.9ms at 44.1kHz); outside of a cycle, sub-quantum delays (down to zero) are supported.
///
/// - MDN documentation: <https://developer.mozilla.org/en-US/docs/Web/API/DelayNode>
/// - specification: <https://webaudio.github.io/web-audio-api/#DelayNode>
/// - see also: [`BaseAudioContext::create_delay`]
///
/// # Usage
///
/// ```no_run
/// use std::fs::File;
/// use web_audio_api::context::{BaseAudioContext, AudioContext};
/// use web_audio_api::node::{AudioNode, AudioScheduledSourceNode};
///
/// // create an `AudioContext` and load a sound file
/// let context = AudioContext::default();
/// let file = File::open("samples/sample.wav").unwrap();
/// let audio_buffer = context.decode_audio_data_sync(file).unwrap();
///
/// // create a delay of 0.5s
/// let delay = context.create_delay(1.);
/// delay.delay_time().set_value(0.5);
/// delay.connect(&context.destination());
///
/// let mut src = context.create_buffer_source();
/// src.set_buffer(audio_buffer);
/// // connect to both delay and destination
/// src.connect(&delay);
/// src.connect(&context.destination());
/// src.start();
/// ```
///
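/// The delayed signal can also be fed back into the delay to build a feedback loop; the writer
/// end of the node acts as a cycle breaker, so such loops are allowed. A minimal sketch, assuming
/// the same kind of setup as above (the attenuating gain keeps the feedback from building up):
///
/// ```no_run
/// use web_audio_api::context::{BaseAudioContext, AudioContext};
/// use web_audio_api::node::AudioNode;
///
/// let context = AudioContext::default();
///
/// let delay = context.create_delay(1.);
/// delay.delay_time().set_value(0.3);
///
/// let feedback = context.create_gain();
/// feedback.gain().set_value(0.5);
///
/// // feed the delayed signal back into the delay through the gain
/// delay.connect(&feedback);
/// feedback.connect(&delay);
/// delay.connect(&context.destination());
/// ```
///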
/// # Examples
///
/// - `cargo run --release --example simple_delay`
/// - `cargo run --release --example feedback_delay`
///
/*
 * For simplicity in the audio graph rendering, we have made the conscious decision to deviate from
 * the spec and split the delay node up front into a reader and a writer node (instead of during the
 * render loop - see https://webaudio.github.io/web-audio-api/#rendering-loop )
 *
 * This has a drawback: a delay of zero is only possible if the writer end is rendered before the
 * reader end in the graph. To guarantee that ordering when the node is not part of a cycle, a
 * connection from Writer to Reader is created in `DelayNode::new`. If the graph finds the node in
 * a cycle, this connection is removed (which preserves the cycle-breaker feature of the delay
 * node) and the Reader marks itself as "in_cycle" so that it clamps the minimum delay to one
 * render quantum.
 *
 * @note: there is no need to make this cancellable; once in a cycle the node keeps behaving like
 * that even if the cycle is broken later (users have to know what they are doing)
 */
#[derive(Debug)]
pub struct DelayNode {
    reader_registration: AudioContextRegistration,
    writer_registration: AudioContextRegistration,
    delay_time: AudioParam,
    channel_config: ChannelConfig,
}

impl AudioNode for DelayNode {
    /*
     * We set the writer node as 'main' registration. This means other nodes can say
     * `node.connect(delaynode)` and they will connect to the writer.
     * Below, we override the (dis)connect methods as they should operate on the reader node.
     */
    fn registration(&self) -> &AudioContextRegistration {
        &self.writer_registration
    }

    fn channel_config(&self) -> &ChannelConfig {
        &self.channel_config
    }

    fn number_of_inputs(&self) -> usize {
        1
    }

    fn number_of_outputs(&self) -> usize {
        1
    }

    /// Connect a specific output of this AudioNode to a specific input of another node.
    fn connect_from_output_to_input<'a>(
        &self,
        dest: &'a dyn AudioNode,
        output: usize,
        input: usize,
    ) -> &'a dyn AudioNode {
        assert!(
            self.context() == dest.context(),
            "InvalidAccessError - Attempting to connect nodes from different contexts",
        );

        assert!(
            self.number_of_outputs() > output,
            "IndexSizeError - output port {} is out of bounds",
            output
        );

        assert!(
            dest.number_of_inputs() > input,
            "IndexSizeError - input port {} is out of bounds",
            input
        );

        self.context().connect(
            self.reader_registration.id(),
            dest.registration().id(),
            output,
            input,
        );

        dest
    }

    /// Disconnects all outgoing connections from the AudioNode.
    fn disconnect(&self) {
        self.context()
            .disconnect(self.reader_registration.id(), None, None, None);
    }

    /// Disconnects all outputs of the AudioNode that go to a specific destination AudioNode.
    ///
    /// # Panics
    ///
    /// This function will panic when
    /// - the AudioContext of the source and destination does not match
    /// - the source node was not connected to the destination node
    fn disconnect_dest(&self, dest: &dyn AudioNode) {
        assert!(
            self.context() == dest.context(),
            "InvalidAccessError - Attempting to disconnect nodes from different contexts"
        );

        self.context().disconnect(
            self.reader_registration.id(),
            None,
            Some(dest.registration().id()),
            None,
        );
    }

    /// Disconnects all outgoing connections at the given output port from the AudioNode.
    ///
    /// # Panics
    ///
    /// This function will panic when
    /// - the output port is out of bounds for this node
    fn disconnect_output(&self, output: usize) {
        assert!(
            self.number_of_outputs() > output,
            "IndexSizeError - output port {} is out of bounds",
            output
        );

        self.context()
            .disconnect(self.reader_registration.id(), Some(output), None, None);
    }

    /// Disconnects a specific output of the AudioNode to a specific destination AudioNode
    ///
    /// # Panics
    ///
    /// This function will panic when
    /// - the AudioContext of the source and destination does not match
    /// - the output port is out of bounds for the source node
    /// - the source node was not connected to the destination node
    fn disconnect_dest_from_output(&self, dest: &dyn AudioNode, output: usize) {
        assert!(
            self.context() == dest.context(),
            "InvalidAccessError - Attempting to disconnect nodes from different contexts"
        );

        assert!(
            self.number_of_outputs() > output,
            "IndexSizeError - output port {} is out of bounds",
            output
        );

        self.context().disconnect(
            self.reader_registration.id(),
            Some(output),
            Some(dest.registration().id()),
            None,
        );
    }

    /// Disconnects a specific output of the AudioNode to a specific input of some destination
    /// AudioNode
    ///
    /// # Panics
    ///
    /// This function will panic when
    /// - the AudioContext of the source and destination does not match
    /// - the input port is out of bounds for the destination node
    /// - the output port is out of bounds for the source node
    /// - the source node was not connected to the destination node
    fn disconnect_dest_from_output_to_input(
        &self,
        dest: &dyn AudioNode,
        output: usize,
        input: usize,
    ) {
        assert!(
            self.context() == dest.context(),
            "InvalidAccessError - Attempting to disconnect nodes from different contexts"
        );

        assert!(
            self.number_of_outputs() > output,
            "IndexSizeError - output port {} is out of bounds",
            output
        );

        assert!(
            dest.number_of_inputs() > input,
            "IndexSizeError - input port {} is out of bounds",
            input
        );

        self.context().disconnect(
            self.reader_registration.id(),
            Some(output),
            Some(dest.registration().id()),
            Some(input),
        );
    }
}

impl DelayNode {
    /// Create a new DelayNode
    ///
    /// # Panics
    ///
    /// Panics when the max delay time is zero or negative, or three minutes or longer.
    pub fn new<C: BaseAudioContext>(context: &C, options: DelayOptions) -> Self {
        let sample_rate = context.sample_rate() as f64;

        // Specifies the maximum delay time in seconds allowed for the delay line.
        // If specified, this value MUST be greater than zero and less than three
        // minutes or a NotSupportedError exception MUST be thrown. If not specified,
        // then 1 will be used.
        assert!(
            options.max_delay_time > 0. && options.max_delay_time < 180.,
            "NotSupportedError - maxDelayTime MUST be greater than zero and less than three minutes",
        );

        // Allocate a large enough ring buffer to store all delayed samples.
        // We add one extra slot in the ring buffer so that the reader never reads the
        // same entry in history as the writer, even if `delay_time == max_delay_time`
        // or if `max_delay_time < quantum duration`
        let max_delay_time = options.max_delay_time;
        let num_quanta =
            (max_delay_time * sample_rate / RENDER_QUANTUM_SIZE as f64).ceil() as usize;
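        // e.g. a max_delay_time of 1. at a 44_100 Hz sample rate needs
        // ceil(44_100 / 128) = 345 quanta, plus the extra slot reserved below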
        let ring_buffer = Vec::with_capacity(num_quanta + 1);

        let shared_ring_buffer = Rc::new(RefCell::new(ring_buffer));
        let shared_ring_buffer_clone = Rc::clone(&shared_ring_buffer);

        // shared value set by the writer when it is dropped
        let last_written_index = Rc::new(Cell::<Option<usize>>::new(None));
        let last_written_index_clone = Rc::clone(&last_written_index);

        // shared value for reader/writer to determine who was rendered first,
        // this will indicate if the delay node acts as a cycle breaker
        let latest_frame_written = Rc::new(Cell::new(u64::MAX));
        let latest_frame_written_clone = Rc::clone(&latest_frame_written);

        let node = context.base().register(move |writer_registration| {
            let node = context.base().register(move |reader_registration| {
                let param_opts = AudioParamDescriptor {
                    name: String::new(),
                    min_value: 0.,
                    max_value: max_delay_time as f32,
                    default_value: 0.,
                    automation_rate: crate::param::AutomationRate::A,
                };
                let (param, proc) = context.create_audio_param(param_opts, &reader_registration);

                param.set_value(options.delay_time as f32);

                let reader_render = DelayReader {
                    delay_time: proc,
                    ring_buffer: shared_ring_buffer_clone,
                    index: 0,
                    last_written_index: last_written_index_clone,
                    in_cycle: false,
                    last_written_index_checked: None,
                    latest_frame_written: latest_frame_written_clone,
                };

                let node = DelayNode {
                    reader_registration,
                    writer_registration,
                    channel_config: options.audio_node_options.into(),
                    delay_time: param,
                };

                (node, Box::new(reader_render))
            });

            let writer_render = DelayWriter {
                ring_buffer: shared_ring_buffer,
                index: 0,
                last_written_index,
                latest_frame_written,
            };

            (node, Box::new(writer_render))
        });

        let writer_id = node.writer_registration.id();
        let reader_id = node.reader_registration.id();
        // connect Writer to Reader to guarantee the order of processing and enable
        // sub-quantum delay. If the node is found in a cycle, this connection will be
        // deleted by the graph and the minimum delay clamped to one render quantum
        context.base().mark_cycle_breaker(&node.writer_registration);
        context.base().connect(writer_id, reader_id, 0, 0);

        node
    }

    /// A-rate [`AudioParam`] representing the amount of delay (in seconds) to apply.
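    ///
    /// A minimal sketch of automating the delay time (assuming a running `AudioContext`):
    ///
    /// ```no_run
    /// use web_audio_api::context::{AudioContext, BaseAudioContext};
    ///
    /// let context = AudioContext::default();
    /// let delay = context.create_delay(1.);
    ///
    /// // ramp the delay time from 0.1s up to 0.5s over two seconds
    /// let now = context.current_time();
    /// delay.delay_time().set_value_at_time(0.1, now);
    /// delay.delay_time().linear_ramp_to_value_at_time(0.5, now + 2.);
    /// ```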
    pub fn delay_time(&self) -> &AudioParam {
        &self.delay_time
    }
}

struct DelayWriter {
    ring_buffer: Rc<RefCell<Vec<AudioRenderQuantum>>>,
    index: usize,
    latest_frame_written: Rc<Cell<u64>>,
    last_written_index: Rc<Cell<Option<usize>>>,
}

// SAFETY:
// AudioRenderQuantums are not Send but we promise the `ring_buffer` Vec is
// empty before we ship it to the render thread.
#[allow(clippy::non_send_fields_in_send_ty)]
unsafe impl Send for DelayWriter {}

trait RingBufferChecker {
    fn ring_buffer_mut(&self) -> RefMut<'_, Vec<AudioRenderQuantum>>;

    // This step guarantees the ring buffer is filled with silence buffers.
    // This allows us to simplify the code in both Writer and Reader as we know
    // `len() == capacity()` and all inner buffers are initialized with zeros.
    #[inline(always)]
    fn check_ring_buffer_size(&self, render_quantum: &AudioRenderQuantum) {
        let mut ring_buffer = self.ring_buffer_mut();

        if ring_buffer.len() < ring_buffer.capacity() {
            let len = ring_buffer.capacity();
            let mut silence = render_quantum.clone();
            silence.make_silent();

            ring_buffer.resize(len, silence);
        }
    }
}

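// When the writer is dropped (decommissioned), it publishes the index of the last quantum it
// wrote so that the reader can keep flushing the remaining delayed samples and shut itself down
// once its read cursor reaches that index.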
impl Drop for DelayWriter {
    fn drop(&mut self) {
        let last_written_index = if self.index == 0 {
            self.ring_buffer.borrow().capacity() - 1
        } else {
            self.index - 1
        };

        self.last_written_index.set(Some(last_written_index));
    }
}

impl RingBufferChecker for DelayWriter {
    #[inline(always)]
    fn ring_buffer_mut(&self) -> RefMut<'_, Vec<AudioRenderQuantum>> {
        self.ring_buffer.borrow_mut()
    }
}

impl AudioProcessor for DelayWriter {
    fn process(
        &mut self,
        inputs: &[AudioRenderQuantum],
        outputs: &mut [AudioRenderQuantum],
        _params: AudioParamValues<'_>,
        scope: &AudioWorkletGlobalScope,
    ) -> bool {
        // single input/output node
        let input = inputs[0].clone();
        let output = &mut outputs[0];

        // We must perform this check on both Writer and Reader as the order of
        // the rendering between them is not guaranteed.
        self.check_ring_buffer_size(&input);
        // `check_ring_buffer_up_down_mix` can only be done on the Writer
        // side as the Reader does not access the "real" input
        self.check_ring_buffer_up_down_mix(&input);

        // populate ring buffer
        let mut buffer = self.ring_buffer.borrow_mut();
        buffer[self.index] = input;

        // increment cursor and last written frame
        self.index = (self.index + 1) % buffer.capacity();
        self.latest_frame_written.set(scope.current_frame);

        // The writer end does not produce output,
        // clear the buffer so that it can be reused
        output.make_silent();

        // let the node be decommissioned if it has no input left
        false
    }

    fn has_side_effects(&self) -> bool {
        true // message passing
    }
}

impl DelayWriter {
    #[inline(always)]
    fn check_ring_buffer_up_down_mix(&self, input: &AudioRenderQuantum) {
        // [spec]
        // When the number of channels in a DelayNode's input changes (thus changing
        // the output channel count also), there may be delayed audio samples which
        // have not yet been output by the node and are part of its internal state.
        // If these samples were received earlier with a different channel count,
        // they MUST be upmixed or downmixed before being combined with newly received
        // input so that all internal delay-line mixing takes place using the single
        // prevailing channel layout.
        let mut ring_buffer = self.ring_buffer_mut();
        let buffer_number_of_channels = ring_buffer[0].number_of_channels();
        let input_number_of_channels = input.number_of_channels();

        if buffer_number_of_channels != input_number_of_channels {
            for render_quantum in ring_buffer.iter_mut() {
                render_quantum.mix(input_number_of_channels, ChannelInterpretation::Speakers);
            }
        }
    }
}

struct DelayReader {
    delay_time: AudioParamId,
    ring_buffer: Rc<RefCell<Vec<AudioRenderQuantum>>>,
    index: usize,
    latest_frame_written: Rc<Cell<u64>>,
    in_cycle: bool,
    last_written_index: Rc<Cell<Option<usize>>>,
    // local copy of shared `last_written_index` so as to avoid render ordering issues
    last_written_index_checked: Option<usize>,
}

// SAFETY:
// AudioRenderQuantums are not Send but we promise the `ring_buffer` Vec is
// empty before we ship it to the render thread.
#[allow(clippy::non_send_fields_in_send_ty)]
unsafe impl Send for DelayReader {}

impl RingBufferChecker for DelayReader {
    #[inline(always)]
    fn ring_buffer_mut(&self) -> RefMut<'_, Vec<AudioRenderQuantum>> {
        self.ring_buffer.borrow_mut()
    }
}

impl AudioProcessor for DelayReader {
    fn process(
        &mut self,
        _inputs: &[AudioRenderQuantum], // cannot be used
        outputs: &mut [AudioRenderQuantum],
        params: AudioParamValues<'_>,
        scope: &AudioWorkletGlobalScope,
    ) -> bool {
        // single input/output node
        let output = &mut outputs[0];
        // We must perform the ring buffer size check on both Writer and Reader as
        // the order of processing between them is not guaranteed.
        self.check_ring_buffer_size(output);

        let ring_buffer = self.ring_buffer.borrow();

        // we need to rely on the ring buffer to know the actual number of output channels
        let number_of_channels = ring_buffer[0].number_of_channels();
        output.set_number_of_channels(number_of_channels);

        if !self.in_cycle {
            // check the latest written frame by the delay writer
            let latest_frame_written = self.latest_frame_written.get();
            // if the delay writer has not rendered before us, the cycle breaker has been applied
            self.in_cycle = latest_frame_written != scope.current_frame;
            // once we store in_cycle = true, we do not want to go back to false
            // https://github.com/orottier/web-audio-api-rs/pull/198#discussion_r945326200
        }

        // compute all playback infos for this block
        let delay = params.get(&self.delay_time);
        let sample_rate = scope.sample_rate as f64;
        let dt = 1. / sample_rate;
        let quantum_duration = RENDER_QUANTUM_SIZE as f64 * dt;
        let ring_size = ring_buffer.len() as i32;
        let ring_index = self.index as i32;
        let mut playback_infos = [PlaybackInfo::default(); RENDER_QUANTUM_SIZE];

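        // When the delay param yields a single value for this block, compute the read
        // position once and advance it frame by frame; otherwise (a-rate automation)
        // compute a position per frame from the individual param values.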
        if delay.len() == 1 {
            playback_infos[0] = Self::get_playback_infos(
                f64::from(delay[0]),
                self.in_cycle,
                0.,
                quantum_duration,
                sample_rate,
                ring_size,
                ring_index,
            );

            for i in 1..RENDER_QUANTUM_SIZE {
                let PlaybackInfo {
                    prev_block_index,
                    prev_frame_index,
                    k,
                } = playback_infos[i - 1];

                let mut prev_block_index = prev_block_index;
                let mut prev_frame_index = prev_frame_index + 1;

                if prev_frame_index >= RENDER_QUANTUM_SIZE {
                    prev_block_index = (prev_block_index + 1) % ring_buffer.len();
                    prev_frame_index = 0;
                }

                playback_infos[i] = PlaybackInfo {
                    prev_block_index,
                    prev_frame_index,
                    k,
                };
            }
        } else {
            delay
                .iter()
                .zip(playback_infos.iter_mut())
                .enumerate()
                .for_each(|(index, (&d, infos))| {
                    *infos = Self::get_playback_infos(
                        f64::from(d),
                        self.in_cycle,
                        index as f64,
                        quantum_duration,
                        sample_rate,
                        ring_size,
                        ring_index,
                    );
                });
        }

        // [spec] A DelayNode in a cycle is actively processing only when the absolute
        // value of any output sample for the current render quantum is greater
        // than or equal to 2^−126 (the smallest normal f32 value).
        // @note: we use the same strategy even if not in a cycle
        let mut is_actively_processing = false;

        // render channels aligned
        for (channel_number, output_channel) in output.channels_mut().iter_mut().enumerate() {
            // store channel data locally and update pointer only when needed
            let mut block_index = playback_infos[0].prev_block_index;
            let mut channel_data = ring_buffer[block_index].channel_data(channel_number);

            output_channel
                .iter_mut()
                .zip(playback_infos.iter_mut())
                .for_each(|(o, infos)| {
                    let PlaybackInfo {
                        prev_block_index,
                        prev_frame_index,
                        k,
                    } = *infos;

                    // find next sample address
                    let mut next_block_index = prev_block_index;
                    let mut next_frame_index = prev_frame_index + 1;

                    if next_frame_index >= RENDER_QUANTUM_SIZE {
                        next_block_index = (next_block_index + 1) % ring_buffer.len();
                        next_frame_index = 0;
                    }

                    // update pointer to channel_data if needed
                    // @note: most of the time this step is not necessary, but it can
                    // be in case of an automation with increasing delay time
                    if block_index != prev_block_index {
                        block_index = prev_block_index;
                        channel_data = ring_buffer[block_index].channel_data(channel_number);
                    }

                    let prev_sample = channel_data[prev_frame_index];

                    // update pointer to channel_data if needed
                    if block_index != next_block_index {
                        block_index = next_block_index;
                        channel_data = ring_buffer[block_index].channel_data(channel_number);
                    }

                    let next_sample = channel_data[next_frame_index];

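                    // linear interpolation between the two samples surrounding the
                    // fractional read position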
                    let value = (1. - k).mul_add(prev_sample, k * next_sample);

                    if value.is_normal() {
                        is_actively_processing = true;
                    }

                    *o = value;
                });
        }

        if !is_actively_processing {
            output.make_silent();
        }

        if matches!(self.last_written_index_checked, Some(index) if index == self.index) {
            return false;
        }

        // check if the writer has been decommissioned
        // we need this local copy because if the writer has been processed
        // before the reader, the direct check against `self.last_written_index`
        // would be true earlier than we want
        let last_written_index = self.last_written_index.get();

        if last_written_index.is_some() && self.last_written_index_checked.is_none() {
            self.last_written_index_checked = last_written_index;
        }
        // increment ring buffer cursor
        self.index = (self.index + 1) % ring_buffer.capacity();

        true
    }
}

impl DelayReader {
    #[inline(always)]
    fn get_playback_infos(
        delay: f64,
        in_cycle: bool,
        sample_index: f64,
        quantum_duration: f64,
        sample_rate: f64,
        ring_size: i32,
        ring_index: i32,
    ) -> PlaybackInfo {
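        // Worked example: a delay of 1.5 samples at sample_index 0 gives position = -1.5,
        // position_floored = -2., block_offset = -1 (the previous block in the ring),
        // prev_frame_index = 126 and k = 0.5, i.e. the output interpolates between frames
        // 126 and 127 of the previous block with equal weights.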
        // param is already clamped to max_delay_time internally, so it is
        // safe to only check lower boundary
        let clamped_delay = if in_cycle {
            delay.max(quantum_duration)
        } else {
            delay
        };
        let num_samples = clamped_delay * sample_rate;
        // negative position of the playhead relative to this block start
        let position = sample_index - num_samples;
        let position_floored = position.floor();
        // find address of the frame in the ring buffer just before `position`
        let num_frames = RENDER_QUANTUM_SIZE as i32;

        // offset of the block in which the target sample is recorded
        // we need to stay in `f64` here so that `floor()` behaves as expected
        let block_offset = (position_floored / num_frames as f64).floor();
        // index of the block in which the target sample is recorded
        let mut prev_block_index = ring_index + block_offset as i32;
        // unroll the ring buffer if needed
        if prev_block_index < 0 {
            prev_block_index += ring_size;
        }

        // find frame index in the target block
        let mut frame_offset = position_floored as i32 % num_frames;
        // handle special 0 case
        if frame_offset == 0 {
            frame_offset = -num_frames;
        }

        let prev_frame_index = if frame_offset <= 0 {
            num_frames + frame_offset
        } else {
            // sub-quantum delay
            frame_offset
        };

        // as position is negative k will be what we expect
        let k = (position - position_floored) as f32;

        PlaybackInfo {
            prev_block_index: prev_block_index as usize,
            prev_frame_index: prev_frame_index as usize,
            k,
        }
    }
}

#[cfg(test)]
mod tests {
    use float_eq::assert_float_eq;

    use crate::context::OfflineAudioContext;
    use crate::node::AudioScheduledSourceNode;

    use super::*;

    #[test]
    fn test_audioparam_value_applies_immediately() {
        let context = OfflineAudioContext::new(1, 128, 48_000.);
        let options = DelayOptions {
            delay_time: 0.12,
            ..Default::default()
        };
        let src = DelayNode::new(&context, options);
        assert_float_eq!(src.delay_time.value(), 0.12, abs_all <= 0.);
    }

    #[test]
    fn test_sample_accurate() {
        for delay_in_samples in [128., 131., 197.].iter() {
            let sample_rate = 48_000.;
            let mut context = OfflineAudioContext::new(1, 256, sample_rate);

            let delay = context.create_delay(2.);
            delay.delay_time.set_value(delay_in_samples / sample_rate);
            delay.connect(&context.destination());

            let mut dirac = context.create_buffer(1, 1, sample_rate);
            dirac.copy_to_channel(&[1.], 0);

            let mut src = context.create_buffer_source();
            src.connect(&delay);
            src.set_buffer(dirac);
            src.start_at(0.);

            let result = context.start_rendering_sync();
            let channel = result.get_channel_data(0);

            let mut expected = vec![0.; 256];
            expected[*delay_in_samples as usize] = 1.;

            assert_float_eq!(channel[..], expected[..], abs_all <= 0.00001);
        }
    }

    #[test]
    fn test_sub_sample_accurate_1() {
        let delay_in_samples = 128.5;
        let sample_rate = 48_000.;
        let mut context = OfflineAudioContext::new(1, 256, sample_rate);

        let delay = context.create_delay(2.);
        delay.delay_time.set_value(delay_in_samples / sample_rate);
        delay.connect(&context.destination());

        let mut dirac = context.create_buffer(1, 1, sample_rate);
        dirac.copy_to_channel(&[1.], 0);

        let mut src = context.create_buffer_source();
        src.connect(&delay);
        src.set_buffer(dirac);
        src.start_at(0.);

        let result = context.start_rendering_sync();
        let channel = result.get_channel_data(0);

        let mut expected = vec![0.; 256];
        expected[128] = 0.5;
        expected[129] = 0.5;

        assert_float_eq!(channel[..], expected[..], abs_all <= 0.00001);
    }

    #[test]
    fn test_sub_sample_accurate_2() {
        let delay_in_samples = 128.8;
        let sample_rate = 48_000.;
        let mut context = OfflineAudioContext::new(1, 256, sample_rate);

        let delay = context.create_delay(2.);
        delay.delay_time.set_value(delay_in_samples / sample_rate);
        delay.connect(&context.destination());

        let mut dirac = context.create_buffer(1, 1, sample_rate);
        dirac.copy_to_channel(&[1.], 0);

        let mut src = context.create_buffer_source();
        src.connect(&delay);
        src.set_buffer(dirac);
        src.start_at(0.);

        let result = context.start_rendering_sync();
        let channel = result.get_channel_data(0);

        let mut expected = vec![0.; 256];
        expected[128] = 0.2;
        expected[129] = 0.8;

        assert_float_eq!(channel[..], expected[..], abs_all <= 1e-5);
    }

    #[test]
    fn test_multichannel() {
        let delay_in_samples = 128.;
        let sample_rate = 48_000.;
        let mut context = OfflineAudioContext::new(2, 2 * 128, sample_rate);

        let delay = context.create_delay(2.);
        delay.delay_time.set_value(delay_in_samples / sample_rate);
        delay.connect(&context.destination());

        let mut two_chan_dirac = context.create_buffer(2, 256, sample_rate);
        // different channels
        two_chan_dirac.copy_to_channel(&[1.], 0);
        two_chan_dirac.copy_to_channel(&[0., 1.], 1);

        let mut src = context.create_buffer_source();
        src.connect(&delay);
        src.set_buffer(two_chan_dirac);
        src.start_at(0.);

        let result = context.start_rendering_sync();

        let channel_left = result.get_channel_data(0);
        let mut expected_left = vec![0.; 256];
        expected_left[128] = 1.;
        assert_float_eq!(channel_left[..], expected_left[..], abs_all <= 1e-5);

        let channel_right = result.get_channel_data(1);
        let mut expected_right = vec![0.; 256];
        expected_right[128 + 1] = 1.;
        assert_float_eq!(channel_right[..], expected_right[..], abs_all <= 1e-5);
    }

    #[test]
    fn test_input_number_of_channels_change() {
        let delay_in_samples = 128.;
        let sample_rate = 48_000.;
        let mut context = OfflineAudioContext::new(2, 3 * 128, sample_rate);

        let delay = context.create_delay(2.);
        delay.delay_time.set_value(delay_in_samples / sample_rate);
        delay.connect(&context.destination());

        let mut one_chan_dirac = context.create_buffer(1, 128, sample_rate);
        one_chan_dirac.copy_to_channel(&[1.], 0);

        let mut src1 = context.create_buffer_source();
        src1.connect(&delay);
        src1.set_buffer(one_chan_dirac);
        src1.start_at(0.);

        let mut two_chan_dirac = context.create_buffer(2, 256, sample_rate);
        // the two channels are different
        two_chan_dirac.copy_to_channel(&[1.], 0);
        two_chan_dirac.copy_to_channel(&[0., 1.], 1);
        // start second buffer at next block
        let mut src2 = context.create_buffer_source();
        src2.connect(&delay);
        src2.set_buffer(two_chan_dirac);
        src2.start_at(delay_in_samples as f64 / sample_rate as f64);

        let result = context.start_rendering_sync();

        let channel_left = result.get_channel_data(0);
        let mut expected_left = vec![0.; 3 * 128];
        expected_left[128] = 1.;
        expected_left[256] = 1.;
        assert_float_eq!(channel_left[..], expected_left[..], abs_all <= 1e-5);

        let channel_right = result.get_channel_data(1);
        let mut expected_right = vec![0.; 3 * 128];
        expected_right[128] = 1.;
        expected_right[256 + 1] = 1.;
        assert_float_eq!(channel_right[..], expected_right[..], abs_all <= 1e-5);
    }

    #[test]
    fn test_node_stays_alive_long_enough() {
        // make sure there are no hidden ordering problems
        for _ in 0..10 {
            let sample_rate = 48_000.;
            let mut context = OfflineAudioContext::new(1, 5 * 128, sample_rate);

            // Set up a source that starts only after 3 render quanta of silence.
            // The delay writer and reader should stay alive in this period of silence.
            // We set up the nodes in a separate block {} so they are dropped in the control thread,
            // otherwise the lifecycle rules do not kick in
            {
                let delay = context.create_delay(1.);
                delay.delay_time.set_value(128. / sample_rate);
                delay.connect(&context.destination());

                let mut dirac = context.create_buffer(1, 1, sample_rate);
                dirac.copy_to_channel(&[1.], 0);

                let mut src = context.create_buffer_source();
                src.connect(&delay);
                src.set_buffer(dirac);
                // block 3 (0-based) - play buffer
                // block 4 - play silence, then the source is dropped in the render thread
                src.start_at(128. * 3. / sample_rate as f64);
            } // src and delay nodes are dropped

            let result = context.start_rendering_sync();
            let mut expected = vec![0.; 5 * 128];
            // source starts after 3 * 128 samples, then is delayed another 128
            expected[4 * 128] = 1.;

            assert_float_eq!(result.get_channel_data(0), &expected[..], abs_all <= 1e-5);
        }
    }

    #[test]
    fn test_subquantum_delay() {
        for i in 0..128 {
            let sample_rate = 48_000.;
            let mut context = OfflineAudioContext::new(1, 128, sample_rate);

            let delay = context.create_delay(1.);
            delay.delay_time.set_value(i as f32 / sample_rate);
            delay.connect(&context.destination());

            let mut dirac = context.create_buffer(1, 1, sample_rate);
            dirac.copy_to_channel(&[1.], 0);

            let mut src = context.create_buffer_source();
            src.connect(&delay);
            src.set_buffer(dirac);
            src.start_at(0.);

            let result = context.start_rendering_sync();
            let channel = result.get_channel_data(0);

            let mut expected = vec![0.; 128];
            expected[i] = 1.;

            assert_float_eq!(channel[..], expected[..], abs_all <= 1e-5);
        }
    }

    #[test]
    fn test_min_delay_when_in_loop() {
        let sample_rate = 48_000.;
        let mut context = OfflineAudioContext::new(1, 256, sample_rate);

        let delay = context.create_delay(1.);
        delay.delay_time.set_value(1. / sample_rate);
        delay.connect(&context.destination());
        // create a loop with a gain at 0 to avoid feedback
        // therefore delay_time will be clamped to one render quantum (128. / sample_rate) by the Reader
        let gain = context.create_gain();
        gain.gain().set_value(0.);
        delay.connect(&gain);
        gain.connect(&delay);

        let mut dirac = context.create_buffer(1, 1, sample_rate);
        dirac.copy_to_channel(&[1.], 0);

        let mut src = context.create_buffer_source();
        src.connect(&delay);
        src.set_buffer(dirac);
        src.start_at(0.);

        let result = context.start_rendering_sync();
        let channel = result.get_channel_data(0);

        let mut expected = vec![0.; 256];
        expected[128] = 1.;

        assert_float_eq!(channel[..], expected[..], abs_all <= 0.);
    }

    // reproduce wpt tests from
    // - the-delaynode-interface/delaynode-max-default-delay.html
    // - the-delaynode-interface/delaynode-max-nondefault-delay.html
    #[test]
    fn test_max_delay() {
        use std::f32::consts::PI;

        for &delay_time_seconds in [1., 1.5].iter() {
            let sample_rate = 44100.0;
            let render_length = 4 * sample_rate as usize;

            let mut context = OfflineAudioContext::new(1, render_length, sample_rate);

            // create 2 seconds tone buffer at 20Hz
            let tone_frequency = 20.;
            let tone_length_seconds = 2.;
            let tone_length = tone_length_seconds as usize * sample_rate as usize;
            let mut tone_buffer = context.create_buffer(1, tone_length, sample_rate);
            let tone_data = tone_buffer.get_channel_data_mut(0);

            for (i, s) in tone_data.iter_mut().enumerate() {
                *s = (tone_frequency * 2.0 * PI * i as f32 / sample_rate).sin();
            }

            let mut buffer_source = context.create_buffer_source();
            buffer_source.set_buffer(tone_buffer.clone());

            let delay = context.create_delay(delay_time_seconds); // max delay set to the tested delay time
            delay.delay_time.set_value(delay_time_seconds as f32);

            buffer_source.connect(&delay);
            delay.connect(&context.destination());
            buffer_source.start_at(0.);

            let output = context.start_rendering_sync();
            let source = tone_buffer.get_channel_data(0);
            let rendered = output.get_channel_data(0);

            let delay_time_frames = (delay_time_seconds * sample_rate as f64) as usize;
            let tone_length_frames = (tone_length_seconds * sample_rate as f64) as usize;

            for (i, s) in rendered.iter().enumerate() {
                if i < delay_time_frames {
                    assert_eq!(*s, 0.);
                } else if i >= delay_time_frames && i < delay_time_frames + tone_length_frames {
                    let j = i - delay_time_frames;
                    assert_eq!(*s, source[j]);
                } else {
                    assert_eq!(*s, 0.);
                }
            }
        }
    }

    #[test]
    fn test_max_delay_smaller_than_quantum_size() {
        // regression test that even if the declared max_delay_time is smaller than
        // a quantum duration, the node internally clamps it to the quantum duration so
        // that everything works even if the order of processing is not guaranteed
        // (i.e. when the delay is in a loop)
        for _ in 0..10 {
            let sample_rate = 48_000.;
            let mut context = OfflineAudioContext::new(1, 256, sample_rate);

            // this will be internally clamped to one render quantum (128. / sample_rate)
            let delay = context.create_delay((64. / sample_rate).into());
            // this will be clamped to 128. / sample_rate by the Reader
            delay.delay_time.set_value(64. / sample_rate);
            delay.connect(&context.destination());

            // create a loop with a gain at 0 to avoid feedback
            let gain = context.create_gain();
            gain.gain().set_value(0.);
            delay.connect(&gain);
            gain.connect(&delay);

            let mut dirac = context.create_buffer(1, 1, sample_rate);
            dirac.copy_to_channel(&[1.], 0);

            let mut src = context.create_buffer_source();
            src.connect(&delay);
            src.set_buffer(dirac);
            src.start_at(0.);

            let result = context.start_rendering_sync();
            let channel = result.get_channel_data(0);

            let mut expected = vec![0.; 256];
            expected[128] = 1.;

            assert_float_eq!(channel[..], expected[..], abs_all <= 0.);
        }
    }

    // test_max_delay_multiple_of_quantum_size_x
    // are regression tests that the delay node always has a large enough internal buffer
    // when max_delay is a multiple of the quantum size and delay == max_delay.
    // This bug only occurred when the Writer was called before the Reader,
    // which is the case when not in a loop
    #[test]
    fn test_max_delay_multiple_of_quantum_size_1() {
        // set delay and max delay time to exactly 1 render quantum
        let sample_rate = 48_000.;
        let mut context = OfflineAudioContext::new(1, 256, sample_rate);

        let max_delay = 128. / sample_rate;
        let delay = context.create_delay(max_delay.into());
        delay.delay_time.set_value(max_delay);
        delay.connect(&context.destination());

        let mut dirac = context.create_buffer(1, 1, sample_rate);
        dirac.copy_to_channel(&[1.], 0);

        let mut src = context.create_buffer_source();
        src.connect(&delay);
        src.set_buffer(dirac);
        src.start_at(0.);

        let result = context.start_rendering_sync();
        let channel = result.get_channel_data(0);

        let mut expected = vec![0.; 256];
        expected[128] = 1.;

        assert_float_eq!(channel[..], expected[..], abs_all <= 1e-5);
    }

    #[test]
    fn test_max_delay_multiple_of_quantum_size_2() {
        // set delay and max delay time to exactly 2 render quanta
        let sample_rate = 48_000.;
        let mut context = OfflineAudioContext::new(1, 3 * 128, sample_rate);

        let max_delay = 128. * 2. / sample_rate;
        let delay = context.create_delay(max_delay.into());
        delay.delay_time.set_value(max_delay);
        delay.connect(&context.destination());

        let mut dirac = context.create_buffer(1, 1, sample_rate);
        dirac.copy_to_channel(&[1.], 0);

        let mut src = context.create_buffer_source();
        src.connect(&delay);
        src.set_buffer(dirac);
        src.start_at(0.);

        let result = context.start_rendering_sync();
        let channel = result.get_channel_data(0);

        let mut expected = vec![0.; 3 * 128];
        expected[256] = 1.;

        assert_float_eq!(channel[..], expected[..], abs_all <= 1e-5);
    }

    #[test]
    fn test_subquantum_delay_dynamic_lifetime() {
        let sample_rate = 48_000.;
        let mut context = OfflineAudioContext::new(1, 3 * 128, sample_rate);

        // Set up a source that emits for 120 frames, so it deallocates after the first render
        // quantum. Delay the signal by 64 frames. Deallocation of the delay writer might trick
        // the delay reader into thinking it is part of a cycle, which would clamp the delay to a
        // full render quantum.
        {
            let delay = context.create_delay(1.);
            delay.delay_time.set_value(64_f32 / sample_rate);
            delay.connect(&context.destination());

            // emit 120 samples
            let mut src = context.create_constant_source();
            src.connect(&delay);
            src.start_at(0.);
            src.stop_at(120. / sample_rate as f64);
        } // drop all nodes, trigger dynamic lifetimes

        let result = context.start_rendering_sync();
        let channel = result.get_channel_data(0);

        let mut expected = vec![0.; 3 * 128];
        expected[64..64 + 120].fill(1.);

        assert_float_eq!(channel[..], expected[..], abs_all <= 1e-5);
    }
}