// web_audio_api/context/offline.rs
1//! The `OfflineAudioContext` type
2
3use std::sync::atomic::{AtomicU64, AtomicU8};
4use std::sync::{Arc, Mutex};
5
6use crate::buffer::AudioBuffer;
7use crate::context::{AudioContextState, BaseAudioContext, ConcreteBaseAudioContext};
8use crate::events::{
9    Event, EventDispatch, EventHandler, EventPayload, EventType, OfflineAudioCompletionEvent,
10};
11use crate::render::RenderThread;
12use crate::stats::AudioStats;
13use crate::{
14    assert_valid_buffer_length, assert_valid_number_of_channels, assert_valid_sample_rate,
15    RENDER_QUANTUM_SIZE,
16};
17
18use crate::events::EventLoop;
19use futures_channel::{mpsc, oneshot};
20use futures_util::SinkExt as _;
21
/// Type of the callbacks accepted by `OfflineAudioContext::suspend_sync`
pub(crate) type OfflineAudioContextCallback =
    dyn FnOnce(&mut OfflineAudioContext) + Send + Sync + 'static;
24
/// The `OfflineAudioContext` doesn't render the audio to the device hardware; instead, it generates
/// it, as fast as it can, and outputs the result to an `AudioBuffer`.
// the naming comes from the web audio specification
#[allow(clippy::module_name_repetitions)]
pub struct OfflineAudioContext {
    /// represents the underlying `BaseAudioContext`
    base: ConcreteBaseAudioContext,
    /// the size of the buffer in sample-frames (fixed at construction)
    length: usize,
    /// actual renderer of the audio graph, can only be called once
    // the `Option` is `take`n on the first `start_rendering*` call so that a
    // second call panics with an InvalidStateError-style message
    renderer: Mutex<Option<OfflineAudioContextRenderer>>,
    /// channel to notify resume actions on the rendering
    resume_sender: mpsc::Sender<()>,
}
39
40impl std::fmt::Debug for OfflineAudioContext {
41    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
42        f.debug_struct("OfflineAudioContext")
43            .field("length", &self.length())
44            .field("base", &self.base())
45            .finish_non_exhaustive()
46    }
47}
48
/// Bookkeeping for a not-yet-started offline render; consumed by `start_rendering*`
struct OfflineAudioContextRenderer {
    /// the rendering 'thread', fully controlled by the offline context
    renderer: RenderThread,
    /// sorted list of promises to resolve at certain render quanta (via `suspend`)
    // kept sorted by quantum index via binary-search insertion in `suspend`
    suspend_promises: Vec<(usize, oneshot::Sender<()>)>,
    /// sorted list of callbacks to run at certain render quanta (via `suspend_sync`)
    // kept sorted by quantum index via binary-search insertion in `suspend_sync`
    suspend_callbacks: Vec<(usize, Box<OfflineAudioContextCallback>)>,
    /// channel to listen for `resume` calls on a suspended context
    resume_receiver: mpsc::Receiver<()>,
    /// event loop to run after each render quantum
    event_loop: EventLoop,
}
61
impl BaseAudioContext for OfflineAudioContext {
    /// Returns the inner `ConcreteBaseAudioContext` carrying the shared context state
    fn base(&self) -> &ConcreteBaseAudioContext {
        &self.base
    }
}
67
68impl OfflineAudioContext {
    /// Creates an `OfflineAudioContext` instance
    ///
    /// # Arguments
    ///
    /// * `number_of_channels` - number of output channels to render
    /// * `length` - length of the rendering audio buffer, in sample-frames
    /// * `sample_rate` - output sample rate
    #[must_use]
    #[allow(clippy::missing_panics_doc)]
    pub fn new(number_of_channels: usize, length: usize, sample_rate: f32) -> Self {
        assert_valid_number_of_channels(number_of_channels);
        assert_valid_buffer_length(length);
        assert_valid_sample_rate(sample_rate);

        // communication channel to the render thread,
        // unbounded is fine because it does not need to be realtime safe
        let (sender, receiver) = crossbeam_channel::unbounded();

        let (node_id_producer, node_id_consumer) = llq::Queue::new().split();
        let graph = crate::render::graph::Graph::new(node_id_producer);
        // queue the initial audio graph on the control channel; the render
        // 'thread' consumes it from `receiver` before producing any audio
        let message = crate::message::ControlMessage::Startup { graph };
        sender.send(message).unwrap();

        // track number of frames - synced from render thread to control thread
        let frames_played = Arc::new(AtomicU64::new(0));
        let frames_played_clone = Arc::clone(&frames_played);
        // the context starts out Suspended; `start_rendering*` flips it to Running
        let state = Arc::new(AtomicU8::new(AudioContextState::Suspended as u8));
        let state_clone = Arc::clone(&state);

        // Communication channel for events from the render thread to the control thread.
        // Use an unbounded channel because we do not require real-time safety.
        let (event_send, event_recv) = crossbeam_channel::unbounded();
        let event_loop = EventLoop::new(event_recv);

        // setup the render 'thread', which will run inside the control thread
        let renderer = RenderThread::new(
            sample_rate,
            number_of_channels,
            receiver,
            state_clone,
            frames_played_clone,
            AudioStats::new(),
            event_send.clone(),
        );

        // first, setup the base audio context
        let base = ConcreteBaseAudioContext::new(
            sample_rate,
            number_of_channels,
            state,
            frames_played,
            sender,
            event_send,
            event_loop.clone(),
            true, // NOTE(review): presumably flags this base context as offline — confirm
            node_id_consumer,
        );

        // channel used by `resume` to wake a suspended rendering; capacity 0 is
        // sufficient because resume signals are consumed one at a time
        let (resume_sender, resume_receiver) = mpsc::channel(0);

        let renderer = OfflineAudioContextRenderer {
            renderer,
            suspend_promises: Vec::new(),
            suspend_callbacks: Vec::new(),
            resume_receiver,
            event_loop,
        };

        Self {
            base,
            length,
            renderer: Mutex::new(Some(renderer)),
            resume_sender,
        }
    }
144
145    /// Given the current connections and scheduled changes, starts rendering audio.
146    ///
147    /// This function will block the current thread and returns the rendered `AudioBuffer`
148    /// synchronously.
149    ///
150    /// This method will only adhere to scheduled suspensions via [`Self::suspend_sync`] and
151    /// will ignore those provided via [`Self::suspend`].
152    ///
153    /// # Panics
154    ///
155    /// Panics if this method is called multiple times
156    #[must_use]
157    pub fn start_rendering_sync(&mut self) -> AudioBuffer {
158        let renderer = self
159            .renderer
160            .lock()
161            .unwrap()
162            .take()
163            .expect("InvalidStateError - Cannot call `startRendering` twice");
164
165        let OfflineAudioContextRenderer {
166            renderer,
167            suspend_callbacks,
168            event_loop,
169            ..
170        } = renderer;
171
172        self.base.set_state(AudioContextState::Running);
173
174        let result = renderer.render_audiobuffer_sync(self, suspend_callbacks, &event_loop);
175
176        self.base.set_state(AudioContextState::Closed);
177        let _ = self
178            .base
179            .send_event(EventDispatch::complete(result.clone()));
180
181        // spin the event loop once more to handle the statechange/complete events
182        event_loop.handle_pending_events();
183
184        result
185    }
186
187    /// Given the current connections and scheduled changes, starts rendering audio.
188    ///
189    /// Rendering is purely CPU bound and contains no `await` points, so calling this method will
190    /// block the executor until completion or until the context is suspended.
191    ///
192    /// This method will only adhere to scheduled suspensions via [`Self::suspend`] and will
193    /// ignore those provided via [`Self::suspend_sync`].
194    ///
195    /// # Panics
196    ///
197    /// Panics if this method is called multiple times.
198    pub async fn start_rendering(&self) -> AudioBuffer {
199        // We are mixing async with a std Mutex, so be sure not to `await` while the lock is held
200        let renderer = self
201            .renderer
202            .lock()
203            .unwrap()
204            .take()
205            .expect("InvalidStateError - Cannot call `startRendering` twice");
206
207        let OfflineAudioContextRenderer {
208            renderer,
209            suspend_promises,
210            resume_receiver,
211            event_loop,
212            ..
213        } = renderer;
214
215        self.base.set_state(AudioContextState::Running);
216
217        let result = renderer
218            .render_audiobuffer(self.length, suspend_promises, resume_receiver, &event_loop)
219            .await;
220
221        self.base.set_state(AudioContextState::Closed);
222        let _ = self
223            .base
224            .send_event(EventDispatch::complete(result.clone()));
225
226        // spin the event loop once more to handle the statechange/complete events
227        event_loop.handle_pending_events();
228
229        result
230    }
231
    /// get the length of rendering audio buffer, in sample-frames
    // false positive: OfflineAudioContext is not const
    #[allow(clippy::missing_const_for_fn, clippy::unused_self)]
    #[must_use]
    pub fn length(&self) -> usize {
        self.length
    }
239
240    #[track_caller]
241    fn calculate_suspend_frame(&self, suspend_time: f64) -> usize {
242        assert!(
243            suspend_time >= 0.,
244            "InvalidStateError: suspendTime cannot be negative"
245        );
246        assert!(
247            suspend_time < self.length as f64 / self.sample_rate() as f64,
248            "InvalidStateError: suspendTime cannot be greater than or equal to the total render duration"
249        );
250        (suspend_time * self.base.sample_rate() as f64 / RENDER_QUANTUM_SIZE as f64).ceil() as usize
251    }
252
    /// Schedules a suspension of the time progression in the audio context at the specified time
    /// and returns a promise
    ///
    /// The specified time is quantized and rounded up to the render quantum size.
    ///
    /// # Panics
    ///
    /// Panics if the quantized frame number
    ///
    /// - is negative or
    /// - is less than or equal to the current time or
    /// - is greater than or equal to the total render duration or
    /// - is scheduled by another suspend for the same time
    ///
    /// # Example usage
    ///
    /// ```rust
    /// use futures::{executor, join};
    /// use futures::FutureExt as _;
    /// use std::sync::Arc;
    ///
    /// use web_audio_api::context::BaseAudioContext;
    /// use web_audio_api::context::OfflineAudioContext;
    /// use web_audio_api::node::{AudioNode, AudioScheduledSourceNode};
    ///
    /// let context = Arc::new(OfflineAudioContext::new(1, 512, 44_100.));
    /// let context_clone = Arc::clone(&context);
    ///
    /// let suspend_promise = context.suspend(128. / 44_100.).then(|_| async move {
    ///     let mut src = context_clone.create_constant_source();
    ///     src.connect(&context_clone.destination());
    ///     src.start();
    ///     context_clone.resume().await;
    /// });
    ///
    /// let render_promise = context.start_rendering();
    ///
    /// let buffer = executor::block_on(async move { join!(suspend_promise, render_promise).1 });
    /// assert_eq!(buffer.number_of_channels(), 1);
    /// assert_eq!(buffer.length(), 512);
    /// ```
    pub async fn suspend(&self, suspend_time: f64) {
        let quantum = self.calculate_suspend_frame(suspend_time);

        // the receiver half resolves once the renderer reaches `quantum`
        let (sender, receiver) = oneshot::channel();

        // We are mixing async with a std Mutex, so be sure not to `await` while the lock is held
        {
            let mut lock = self.renderer.lock().unwrap();
            let renderer = lock
                .as_mut()
                .expect("InvalidStateError - cannot suspend when rendering has already started");

            // binary search keeps `suspend_promises` sorted by quantum index;
            // a hit (Ok) means a duplicate suspension, which must panic
            let insert_pos = renderer
                .suspend_promises
                .binary_search_by_key(&quantum, |&(q, _)| q)
                .expect_err(
                    "InvalidStateError - cannot suspend multiple times at the same render quantum",
                );

            renderer
                .suspend_promises
                .insert(insert_pos, (quantum, sender));
        } // lock is dropped

        // park this future until the renderer reaches the scheduled quantum
        receiver.await.unwrap();
        self.base().set_state(AudioContextState::Suspended);
    }
321
322    /// Schedules a suspension of the time progression in the audio context at the specified time
323    /// and runs a callback.
324    ///
325    /// This is a synchronous version of [`Self::suspend`] that runs the provided callback at
326    /// the `suspendTime`. The rendering resumes automatically after the callback has run, so there
327    /// is no `resume_sync` method.
328    ///
329    /// The specified time is quantized and rounded up to the render quantum size.
330    ///
331    /// # Panics
332    ///
333    /// Panics if the quantized frame number
334    ///
335    /// - is negative or
336    /// - is less than or equal to the current time or
337    /// - is greater than or equal to the total render duration or
338    /// - is scheduled by another suspend for the same time
339    ///
340    /// # Example usage
341    ///
342    /// ```rust
343    /// use web_audio_api::context::BaseAudioContext;
344    /// use web_audio_api::context::OfflineAudioContext;
345    /// use web_audio_api::node::{AudioNode, AudioScheduledSourceNode};
346    ///
347    /// let mut context = OfflineAudioContext::new(1, 512, 44_100.);
348    ///
349    /// context.suspend_sync(128. / 44_100., |context| {
350    ///     let mut src = context.create_constant_source();
351    ///     src.connect(&context.destination());
352    ///     src.start();
353    /// });
354    ///
355    /// let buffer = context.start_rendering_sync();
356    /// assert_eq!(buffer.number_of_channels(), 1);
357    /// assert_eq!(buffer.length(), 512);
358    /// ```
359    pub fn suspend_sync<F: FnOnce(&mut Self) + Send + Sync + 'static>(
360        &mut self,
361        suspend_time: f64,
362        callback: F,
363    ) {
364        let quantum = self.calculate_suspend_frame(suspend_time);
365
366        let mut lock = self.renderer.lock().unwrap();
367        let renderer = lock
368            .as_mut()
369            .expect("InvalidStateError - cannot suspend when rendering has already started");
370
371        let insert_pos = renderer
372            .suspend_callbacks
373            .binary_search_by_key(&quantum, |(q, _c)| *q)
374            .expect_err(
375                "InvalidStateError - cannot suspend multiple times at the same render quantum",
376            );
377
378        let boxed_callback = Box::new(|ctx: &mut OfflineAudioContext| {
379            ctx.base().set_state(AudioContextState::Suspended);
380            (callback)(ctx);
381            ctx.base().set_state(AudioContextState::Running);
382        });
383
384        renderer
385            .suspend_callbacks
386            .insert(insert_pos, (quantum, boxed_callback));
387    }
388
389    /// Resumes the progression of the OfflineAudioContext's currentTime when it has been suspended
390    ///
391    /// # Panics
392    ///
393    /// Panics when the context is closed or rendering has not started
394    pub async fn resume(&self) {
395        self.base().set_state(AudioContextState::Running);
396        self.resume_sender.clone().send(()).await.unwrap()
397    }
398
399    /// Register callback to run when the rendering has completed
400    ///
401    /// Only a single event handler is active at any time. Calling this method multiple times will
402    /// override the previous event handler.
403    #[allow(clippy::missing_panics_doc)]
404    pub fn set_oncomplete<F: FnOnce(OfflineAudioCompletionEvent) + Send + 'static>(
405        &self,
406        callback: F,
407    ) {
408        let callback = move |v| match v {
409            EventPayload::Complete(v) => {
410                let event = OfflineAudioCompletionEvent {
411                    rendered_buffer: v,
412                    event: Event { type_: "complete" },
413                };
414                callback(event)
415            }
416            _ => unreachable!(),
417        };
418
419        self.base()
420            .set_event_handler(EventType::Complete, EventHandler::Once(Box::new(callback)));
421    }
422
    /// Unset the callback to run when the rendering has completed
    pub fn clear_oncomplete(&self) {
        // handler storage lives in the shared base context
        self.base().clear_event_handler(EventType::Complete);
    }
427}
428
// Unit tests covering construction, sync/async rendering, suspend/resume
// scheduling, and event-handler dispatch of the OfflineAudioContext.
#[cfg(test)]
mod tests {
    use super::*;
    use float_eq::assert_float_eq;
    use std::sync::atomic::{AtomicBool, Ordering};

    use crate::node::AudioNode;
    use crate::node::AudioScheduledSourceNode;

    #[test]
    fn test_sample_rate_length() {
        let context = OfflineAudioContext::new(1, 48000, 96000.);
        assert_float_eq!(context.sample_rate(), 96000., abs_all <= 0.);
        assert_eq!(context.length(), 48000);
    }

    #[test]
    fn render_empty_graph() {
        // 555 is deliberately not a multiple of the render quantum size
        let mut context = OfflineAudioContext::new(2, 555, 44_100.);
        assert_eq!(context.state(), AudioContextState::Suspended);
        let buffer = context.start_rendering_sync();

        assert_eq!(context.length(), 555);

        // an empty graph renders silence on every channel
        assert_eq!(buffer.number_of_channels(), 2);
        assert_eq!(buffer.length(), 555);
        assert_float_eq!(buffer.get_channel_data(0), &[0.; 555][..], abs_all <= 0.);
        assert_float_eq!(buffer.get_channel_data(1), &[0.; 555][..], abs_all <= 0.);

        assert_eq!(context.state(), AudioContextState::Closed);
    }

    #[test]
    #[should_panic]
    fn render_twice_panics() {
        let mut context = OfflineAudioContext::new(2, 555, 44_100.);
        let _ = context.start_rendering_sync();
        let _ = context.start_rendering_sync();
    }

    #[test]
    fn test_suspend_sync() {
        use crate::node::ConstantSourceNode;
        use std::sync::OnceLock;

        let len = RENDER_QUANTUM_SIZE * 4;
        let sample_rate = 48000_f64;

        let mut context = OfflineAudioContext::new(1, len, sample_rate as f32);
        // OnceLock smuggles the source node from the first callback to the second
        static SOURCE: OnceLock<ConstantSourceNode> = OnceLock::new();

        // connect a constant source at the end of the first render quantum
        context.suspend_sync(RENDER_QUANTUM_SIZE as f64 / sample_rate, |context| {
            assert_eq!(context.state(), AudioContextState::Suspended);
            let mut src = context.create_constant_source();
            src.connect(&context.destination());
            src.start();
            SOURCE.set(src).unwrap();
        });

        // disconnect it again two quanta later
        context.suspend_sync((3 * RENDER_QUANTUM_SIZE) as f64 / sample_rate, |context| {
            assert_eq!(context.state(), AudioContextState::Suspended);
            SOURCE.get().unwrap().disconnect();
        });

        let output = context.start_rendering_sync();

        // silence before the source was connected
        assert_float_eq!(
            output.get_channel_data(0)[..RENDER_QUANTUM_SIZE],
            &[0.; RENDER_QUANTUM_SIZE][..],
            abs_all <= 0.
        );
        // constant signal while connected
        assert_float_eq!(
            output.get_channel_data(0)[RENDER_QUANTUM_SIZE..3 * RENDER_QUANTUM_SIZE],
            &[1.; 2 * RENDER_QUANTUM_SIZE][..],
            abs_all <= 0.
        );
        // silence after disconnection
        assert_float_eq!(
            output.get_channel_data(0)[3 * RENDER_QUANTUM_SIZE..4 * RENDER_QUANTUM_SIZE],
            &[0.; RENDER_QUANTUM_SIZE][..],
            abs_all <= 0.
        );
    }

    #[test]
    fn render_suspend_resume_async() {
        use futures::executor;
        use futures::join;
        use futures::FutureExt as _;

        let context = Arc::new(OfflineAudioContext::new(1, 512, 44_100.));
        let context_clone = Arc::clone(&context);

        // schedule a suspension at frame 128, then connect a source and resume
        let suspend_promise = context.suspend(128. / 44_100.).then(|_| async move {
            let mut src = context_clone.create_constant_source();
            src.connect(&context_clone.destination());
            src.start();
            context_clone.resume().await;
        });

        let render_promise = context.start_rendering();

        // drive both futures; keep only the rendered buffer (`.1`)
        let buffer = executor::block_on(async move { join!(suspend_promise, render_promise).1 });

        assert_eq!(buffer.number_of_channels(), 1);
        assert_eq!(buffer.length(), 512);

        // silence up to the suspension point, constant signal afterwards
        assert_float_eq!(
            buffer.get_channel_data(0)[..128],
            &[0.; 128][..],
            abs_all <= 0.
        );
        assert_float_eq!(
            buffer.get_channel_data(0)[128..],
            &[1.; 384][..],
            abs_all <= 0.
        );
    }

    #[test]
    #[should_panic]
    fn test_suspend_negative_panics() {
        let mut context = OfflineAudioContext::new(2, 128, 44_100.);
        context.suspend_sync(-1.0, |_| ());
    }

    #[test]
    #[should_panic]
    fn test_suspend_after_duration_panics() {
        // 1.0 s is past the total render duration of 128 frames @ 44.1 kHz
        let mut context = OfflineAudioContext::new(2, 128, 44_100.);
        context.suspend_sync(1.0, |_| ());
    }

    #[test]
    #[should_panic]
    fn test_suspend_after_render_panics() {
        let mut context = OfflineAudioContext::new(2, 128, 44_100.);
        let _ = context.start_rendering_sync();
        context.suspend_sync(0.0, |_| ());
    }

    #[test]
    #[should_panic]
    fn test_suspend_identical_frame_panics() {
        let mut context = OfflineAudioContext::new(2, 128, 44_100.);
        context.suspend_sync(0.0, |_| ());
        context.suspend_sync(0.0, |_| ());
    }

    #[test]
    fn test_onstatechange() {
        let mut context = OfflineAudioContext::new(2, 555, 44_100.);

        let changed = Arc::new(AtomicBool::new(false));
        let changed_clone = Arc::clone(&changed);
        context.set_onstatechange(move |_event| {
            changed_clone.store(true, Ordering::Relaxed);
        });

        let _ = context.start_rendering_sync();

        assert!(changed.load(Ordering::Relaxed));
    }

    #[test]
    fn test_onstatechange_async() {
        use futures::executor;

        let context = OfflineAudioContext::new(2, 555, 44_100.);

        let changed = Arc::new(AtomicBool::new(false));
        let changed_clone = Arc::clone(&changed);
        context.set_onstatechange(move |_event| {
            changed_clone.store(true, Ordering::Relaxed);
        });

        let _ = executor::block_on(context.start_rendering());

        assert!(changed.load(Ordering::Relaxed));
    }

    #[test]
    fn test_oncomplete() {
        let mut context = OfflineAudioContext::new(2, 555, 44_100.);

        let complete = Arc::new(AtomicBool::new(false));
        let complete_clone = Arc::clone(&complete);
        context.set_oncomplete(move |event| {
            assert_eq!(event.rendered_buffer.length(), 555);
            complete_clone.store(true, Ordering::Relaxed);
        });

        let _ = context.start_rendering_sync();

        assert!(complete.load(Ordering::Relaxed));
    }

    #[test]
    fn test_oncomplete_async() {
        use futures::executor;

        let context = OfflineAudioContext::new(2, 555, 44_100.);

        let complete = Arc::new(AtomicBool::new(false));
        let complete_clone = Arc::clone(&complete);
        context.set_oncomplete(move |event| {
            assert_eq!(event.rendered_buffer.length(), 555);
            complete_clone.store(true, Ordering::Relaxed);
        });

        let _ = executor::block_on(context.start_rendering());

        assert!(complete.load(Ordering::Relaxed));
    }

    fn require_send_sync<T: Send + Sync>(_: T) {}

    // compile-time-ish guarantee that the public futures are Send + Sync
    #[test]
    fn test_all_futures_thread_safe() {
        let context = OfflineAudioContext::new(2, 555, 44_100.);

        require_send_sync(context.start_rendering());
        require_send_sync(context.suspend(1.));
        require_send_sync(context.resume());
    }
}
653}