web_audio_api/context/
offline.rs

1//! The `OfflineAudioContext` type
2
3use std::sync::atomic::{AtomicU64, AtomicU8};
4use std::sync::{Arc, Mutex};
5
6use crate::buffer::AudioBuffer;
7use crate::context::{AudioContextState, BaseAudioContext, ConcreteBaseAudioContext};
8use crate::events::{
9    Event, EventDispatch, EventHandler, EventPayload, EventType, OfflineAudioCompletionEvent,
10};
11use crate::render::RenderThread;
12use crate::{
13    assert_valid_buffer_length, assert_valid_number_of_channels, assert_valid_sample_rate,
14    RENDER_QUANTUM_SIZE,
15};
16
17use crate::events::EventLoop;
18use futures_channel::{mpsc, oneshot};
19use futures_util::SinkExt as _;
20
/// Boxed user callback run at a scheduled suspension point of the offline
/// rendering (see [`OfflineAudioContext::suspend_sync`]).
pub(crate) type OfflineAudioContextCallback =
    dyn FnOnce(&mut OfflineAudioContext) + Send + Sync + 'static;
23
/// The `OfflineAudioContext` doesn't render the audio to the device hardware; instead, it generates
/// it, as fast as it can, and outputs the result to an `AudioBuffer`.
// the naming comes from the web audio specification
#[allow(clippy::module_name_repetitions)]
pub struct OfflineAudioContext {
    /// represents the underlying `BaseAudioContext`
    base: ConcreteBaseAudioContext,
    /// the size of the rendered buffer in sample-frames
    length: usize,
    /// actual renderer of the audio graph; `take`n out when rendering starts,
    /// so it can only ever be used once
    renderer: Mutex<Option<OfflineAudioContextRenderer>>,
    /// channel to notify resume actions on the rendering
    resume_sender: mpsc::Sender<()>,
}
38
39impl std::fmt::Debug for OfflineAudioContext {
40    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
41        f.debug_struct("OfflineAudioContext")
42            .field("length", &self.length())
43            .field("base", &self.base())
44            .finish_non_exhaustive()
45    }
46}
47
/// Bundles everything a single offline rendering pass needs; moved out of
/// `OfflineAudioContext::renderer` when rendering starts.
struct OfflineAudioContextRenderer {
    /// the rendering 'thread', fully controlled by the offline context
    renderer: RenderThread,
    /// sorted list of promises to resolve at certain render quanta (via `suspend`)
    suspend_promises: Vec<(usize, oneshot::Sender<()>)>,
    /// sorted list of callbacks to run at certain render quanta (via `suspend_sync`)
    suspend_callbacks: Vec<(usize, Box<OfflineAudioContextCallback>)>,
    /// channel to listen for `resume` calls on a suspended context
    resume_receiver: mpsc::Receiver<()>,
    /// event loop to run after each render quantum
    event_loop: EventLoop,
}
60
impl BaseAudioContext for OfflineAudioContext {
    /// Returns the concrete base context backing this offline context.
    fn base(&self) -> &ConcreteBaseAudioContext {
        &self.base
    }
}
66
67impl OfflineAudioContext {
    /// Creates an `OfflineAudioContext` instance
    ///
    /// # Arguments
    ///
    /// * `number_of_channels` - number of output channels to render
    /// * `length` - length of the rendering audio buffer
    /// * `sample_rate` - output sample rate
    #[must_use]
    #[allow(clippy::missing_panics_doc)]
    pub fn new(number_of_channels: usize, length: usize, sample_rate: f32) -> Self {
        // validate arguments up-front; these assert (and hence panic) on invalid input
        assert_valid_number_of_channels(number_of_channels);
        assert_valid_buffer_length(length);
        assert_valid_sample_rate(sample_rate);

        // communication channel to the render thread,
        // unbounded is fine because it does not need to be realtime safe
        let (sender, receiver) = crossbeam_channel::unbounded();

        let (node_id_producer, node_id_consumer) = llq::Queue::new().split();
        let graph = crate::render::graph::Graph::new(node_id_producer);
        // the audio graph is handed to the render thread as its first message
        let message = crate::message::ControlMessage::Startup { graph };
        sender.send(message).unwrap();

        // track number of frames - synced from render thread to control thread
        let frames_played = Arc::new(AtomicU64::new(0));
        let frames_played_clone = Arc::clone(&frames_played);
        // the context starts out `Suspended` until rendering is started
        let state = Arc::new(AtomicU8::new(AudioContextState::Suspended as u8));
        let state_clone = Arc::clone(&state);

        // Communication channel for events from the render thread to the control thread.
        // Use an unbounded channel because we do not require real-time safety.
        let (event_send, event_recv) = crossbeam_channel::unbounded();
        let event_loop = EventLoop::new(event_recv);

        // setup the render 'thread', which will run inside the control thread
        let renderer = RenderThread::new(
            sample_rate,
            number_of_channels,
            receiver,
            state_clone,
            frames_played_clone,
            event_send.clone(),
        );

        // first, setup the base audio context
        let base = ConcreteBaseAudioContext::new(
            sample_rate,
            number_of_channels,
            state,
            frames_played,
            sender,
            event_send,
            event_loop.clone(),
            true, // NOTE(review): presumably flags this base context as offline — confirm
            node_id_consumer,
        );

        // channel used by `resume` to wake up a suspended rendering
        let (resume_sender, resume_receiver) = mpsc::channel(0);

        let renderer = OfflineAudioContextRenderer {
            renderer,
            suspend_promises: Vec::new(),
            suspend_callbacks: Vec::new(),
            resume_receiver,
            event_loop,
        };

        Self {
            base,
            length,
            renderer: Mutex::new(Some(renderer)),
            resume_sender,
        }
    }
142
143    /// Given the current connections and scheduled changes, starts rendering audio.
144    ///
145    /// This function will block the current thread and returns the rendered `AudioBuffer`
146    /// synchronously.
147    ///
148    /// This method will only adhere to scheduled suspensions via [`Self::suspend_sync`] and
149    /// will ignore those provided via [`Self::suspend`].
150    ///
151    /// # Panics
152    ///
153    /// Panics if this method is called multiple times
154    #[must_use]
155    pub fn start_rendering_sync(&mut self) -> AudioBuffer {
156        let renderer = self
157            .renderer
158            .lock()
159            .unwrap()
160            .take()
161            .expect("InvalidStateError - Cannot call `startRendering` twice");
162
163        let OfflineAudioContextRenderer {
164            renderer,
165            suspend_callbacks,
166            event_loop,
167            ..
168        } = renderer;
169
170        self.base.set_state(AudioContextState::Running);
171
172        let result = renderer.render_audiobuffer_sync(self, suspend_callbacks, &event_loop);
173
174        self.base.set_state(AudioContextState::Closed);
175        let _ = self
176            .base
177            .send_event(EventDispatch::complete(result.clone()));
178
179        // spin the event loop once more to handle the statechange/complete events
180        event_loop.handle_pending_events();
181
182        result
183    }
184
    /// Given the current connections and scheduled changes, starts rendering audio.
    ///
    /// Rendering is purely CPU bound and contains no `await` points, so calling this method will
    /// block the executor until completion or until the context is suspended.
    ///
    /// This method will only adhere to scheduled suspensions via [`Self::suspend`] and will
    /// ignore those provided via [`Self::suspend_sync`].
    ///
    /// # Panics
    ///
    /// Panics if this method is called multiple times.
    pub async fn start_rendering(&self) -> AudioBuffer {
        // We are mixing async with a std Mutex, so be sure not to `await` while the lock is held
        let renderer = self
            .renderer
            .lock()
            .unwrap()
            .take()
            .expect("InvalidStateError - Cannot call `startRendering` twice");

        let OfflineAudioContextRenderer {
            renderer,
            suspend_promises,
            resume_receiver,
            event_loop,
            ..
        } = renderer;

        self.base.set_state(AudioContextState::Running);

        // render the full buffer, pausing at the scheduled async suspension points
        let result = renderer
            .render_audiobuffer(self.length, suspend_promises, resume_receiver, &event_loop)
            .await;

        self.base.set_state(AudioContextState::Closed);
        // notify any `oncomplete` listener with (a clone of) the rendered buffer
        let _ = self
            .base
            .send_event(EventDispatch::complete(result.clone()));

        // spin the event loop once more to handle the statechange/complete events
        event_loop.handle_pending_events();

        result
    }
229
    /// get the length of rendering audio buffer, in sample-frames
    // false positive: OfflineAudioContext is not const
    #[allow(clippy::missing_const_for_fn, clippy::unused_self)]
    #[must_use]
    pub fn length(&self) -> usize {
        self.length
    }
237
238    #[track_caller]
239    fn calculate_suspend_frame(&self, suspend_time: f64) -> usize {
240        assert!(
241            suspend_time >= 0.,
242            "InvalidStateError: suspendTime cannot be negative"
243        );
244        assert!(
245            suspend_time < self.length as f64 / self.sample_rate() as f64,
246            "InvalidStateError: suspendTime cannot be greater than or equal to the total render duration"
247        );
248        (suspend_time * self.base.sample_rate() as f64 / RENDER_QUANTUM_SIZE as f64).ceil() as usize
249    }
250
    /// Schedules a suspension of the time progression in the audio context at the specified time
    /// and returns a promise
    ///
    /// The specified time is quantized and rounded up to the render quantum size.
    ///
    /// # Panics
    ///
    /// Panics if the quantized frame number
    ///
    /// - is negative or
    /// - is less than or equal to the current time or
    /// - is greater than or equal to the total render duration or
    /// - is scheduled by another suspend for the same time
    ///
    /// # Example usage
    ///
    /// ```rust
    /// use futures::{executor, join};
    /// use futures::FutureExt as _;
    /// use std::sync::Arc;
    ///
    /// use web_audio_api::context::BaseAudioContext;
    /// use web_audio_api::context::OfflineAudioContext;
    /// use web_audio_api::node::{AudioNode, AudioScheduledSourceNode};
    ///
    /// let context = Arc::new(OfflineAudioContext::new(1, 512, 44_100.));
    /// let context_clone = Arc::clone(&context);
    ///
    /// let suspend_promise = context.suspend(128. / 44_100.).then(|_| async move {
    ///     let mut src = context_clone.create_constant_source();
    ///     src.connect(&context_clone.destination());
    ///     src.start();
    ///     context_clone.resume().await;
    /// });
    ///
    /// let render_promise = context.start_rendering();
    ///
    /// let buffer = executor::block_on(async move { join!(suspend_promise, render_promise).1 });
    /// assert_eq!(buffer.number_of_channels(), 1);
    /// assert_eq!(buffer.length(), 512);
    /// ```
    pub async fn suspend(&self, suspend_time: f64) {
        let quantum = self.calculate_suspend_frame(suspend_time);

        // the renderer resolves this one-shot promise once the suspension point is reached
        let (sender, receiver) = oneshot::channel();

        // We are mixing async with a std Mutex, so be sure not to `await` while the lock is held
        {
            let mut lock = self.renderer.lock().unwrap();
            let renderer = lock
                .as_mut()
                .expect("InvalidStateError - cannot suspend when rendering has already started");

            // keep `suspend_promises` sorted by quantum; an existing entry at
            // the same quantum is an error
            let insert_pos = renderer
                .suspend_promises
                .binary_search_by_key(&quantum, |&(q, _)| q)
                .expect_err(
                    "InvalidStateError - cannot suspend multiple times at the same render quantum",
                );

            renderer
                .suspend_promises
                .insert(insert_pos, (quantum, sender));
        } // lock is dropped

        // wait until the renderer actually reaches the suspension point
        receiver.await.unwrap();
        self.base().set_state(AudioContextState::Suspended);
    }
319
320    /// Schedules a suspension of the time progression in the audio context at the specified time
321    /// and runs a callback.
322    ///
323    /// This is a synchronous version of [`Self::suspend`] that runs the provided callback at
324    /// the `suspendTime`. The rendering resumes automatically after the callback has run, so there
325    /// is no `resume_sync` method.
326    ///
327    /// The specified time is quantized and rounded up to the render quantum size.
328    ///
329    /// # Panics
330    ///
331    /// Panics if the quantized frame number
332    ///
333    /// - is negative or
334    /// - is less than or equal to the current time or
335    /// - is greater than or equal to the total render duration or
336    /// - is scheduled by another suspend for the same time
337    ///
338    /// # Example usage
339    ///
340    /// ```rust
341    /// use web_audio_api::context::BaseAudioContext;
342    /// use web_audio_api::context::OfflineAudioContext;
343    /// use web_audio_api::node::{AudioNode, AudioScheduledSourceNode};
344    ///
345    /// let mut context = OfflineAudioContext::new(1, 512, 44_100.);
346    ///
347    /// context.suspend_sync(128. / 44_100., |context| {
348    ///     let mut src = context.create_constant_source();
349    ///     src.connect(&context.destination());
350    ///     src.start();
351    /// });
352    ///
353    /// let buffer = context.start_rendering_sync();
354    /// assert_eq!(buffer.number_of_channels(), 1);
355    /// assert_eq!(buffer.length(), 512);
356    /// ```
357    pub fn suspend_sync<F: FnOnce(&mut Self) + Send + Sync + 'static>(
358        &mut self,
359        suspend_time: f64,
360        callback: F,
361    ) {
362        let quantum = self.calculate_suspend_frame(suspend_time);
363
364        let mut lock = self.renderer.lock().unwrap();
365        let renderer = lock
366            .as_mut()
367            .expect("InvalidStateError - cannot suspend when rendering has already started");
368
369        let insert_pos = renderer
370            .suspend_callbacks
371            .binary_search_by_key(&quantum, |(q, _c)| *q)
372            .expect_err(
373                "InvalidStateError - cannot suspend multiple times at the same render quantum",
374            );
375
376        let boxed_callback = Box::new(|ctx: &mut OfflineAudioContext| {
377            ctx.base().set_state(AudioContextState::Suspended);
378            (callback)(ctx);
379            ctx.base().set_state(AudioContextState::Running);
380        });
381
382        renderer
383            .suspend_callbacks
384            .insert(insert_pos, (quantum, boxed_callback));
385    }
386
387    /// Resumes the progression of the OfflineAudioContext's currentTime when it has been suspended
388    ///
389    /// # Panics
390    ///
391    /// Panics when the context is closed or rendering has not started
392    pub async fn resume(&self) {
393        self.base().set_state(AudioContextState::Running);
394        self.resume_sender.clone().send(()).await.unwrap()
395    }
396
397    /// Register callback to run when the rendering has completed
398    ///
399    /// Only a single event handler is active at any time. Calling this method multiple times will
400    /// override the previous event handler.
401    #[allow(clippy::missing_panics_doc)]
402    pub fn set_oncomplete<F: FnOnce(OfflineAudioCompletionEvent) + Send + 'static>(
403        &self,
404        callback: F,
405    ) {
406        let callback = move |v| match v {
407            EventPayload::Complete(v) => {
408                let event = OfflineAudioCompletionEvent {
409                    rendered_buffer: v,
410                    event: Event { type_: "complete" },
411                };
412                callback(event)
413            }
414            _ => unreachable!(),
415        };
416
417        self.base()
418            .set_event_handler(EventType::Complete, EventHandler::Once(Box::new(callback)));
419    }
420
    /// Unset the callback to run when the rendering has completed
    pub fn clear_oncomplete(&self) {
        // drop any previously registered `complete` handler
        self.base().clear_event_handler(EventType::Complete);
    }
425}
426
427#[cfg(test)]
428mod tests {
429    use super::*;
430    use float_eq::assert_float_eq;
431    use std::sync::atomic::{AtomicBool, Ordering};
432
433    use crate::node::AudioNode;
434    use crate::node::AudioScheduledSourceNode;
435
    // the constructor must faithfully store sample rate and buffer length
    #[test]
    fn test_sample_rate_length() {
        let context = OfflineAudioContext::new(1, 48000, 96000.);
        assert_float_eq!(context.sample_rate(), 96000., abs_all <= 0.);
        assert_eq!(context.length(), 48000);
    }
442
    #[test]
    fn render_empty_graph() {
        let mut context = OfflineAudioContext::new(2, 555, 44_100.);
        assert_eq!(context.state(), AudioContextState::Suspended);
        let buffer = context.start_rendering_sync();

        assert_eq!(context.length(), 555);

        // an empty graph renders pure silence in every channel
        assert_eq!(buffer.number_of_channels(), 2);
        assert_eq!(buffer.length(), 555);
        assert_float_eq!(buffer.get_channel_data(0), &[0.; 555][..], abs_all <= 0.);
        assert_float_eq!(buffer.get_channel_data(1), &[0.; 555][..], abs_all <= 0.);

        // after rendering, the context ends up in the `Closed` state
        assert_eq!(context.state(), AudioContextState::Closed);
    }
458
    // the renderer is consumed on first use, so a second call must panic
    #[test]
    #[should_panic]
    fn render_twice_panics() {
        let mut context = OfflineAudioContext::new(2, 555, 44_100.);
        let _ = context.start_rendering_sync();
        let _ = context.start_rendering_sync();
    }
466
    #[test]
    fn test_suspend_sync() {
        use crate::node::ConstantSourceNode;
        use std::sync::OnceLock;

        let len = RENDER_QUANTUM_SIZE * 4;
        let sample_rate = 48000_f64;

        let mut context = OfflineAudioContext::new(1, len, sample_rate as f32);
        // stash the source across the two `'static` callbacks
        static SOURCE: OnceLock<ConstantSourceNode> = OnceLock::new();

        // start a constant source when suspended after the first render quantum
        context.suspend_sync(RENDER_QUANTUM_SIZE as f64 / sample_rate, |context| {
            assert_eq!(context.state(), AudioContextState::Suspended);
            let mut src = context.create_constant_source();
            src.connect(&context.destination());
            src.start();
            SOURCE.set(src).unwrap();
        });

        // disconnect it again when suspended after the third render quantum
        context.suspend_sync((3 * RENDER_QUANTUM_SIZE) as f64 / sample_rate, |context| {
            assert_eq!(context.state(), AudioContextState::Suspended);
            SOURCE.get().unwrap().disconnect();
        });

        let output = context.start_rendering_sync();

        // quantum 0: silence, the source was not yet connected
        assert_float_eq!(
            output.get_channel_data(0)[..RENDER_QUANTUM_SIZE],
            &[0.; RENDER_QUANTUM_SIZE][..],
            abs_all <= 0.
        );
        // quanta 1-2: the constant source signal
        assert_float_eq!(
            output.get_channel_data(0)[RENDER_QUANTUM_SIZE..3 * RENDER_QUANTUM_SIZE],
            &[1.; 2 * RENDER_QUANTUM_SIZE][..],
            abs_all <= 0.
        );
        // quantum 3: silence again after the disconnect
        assert_float_eq!(
            output.get_channel_data(0)[3 * RENDER_QUANTUM_SIZE..4 * RENDER_QUANTUM_SIZE],
            &[0.; RENDER_QUANTUM_SIZE][..],
            abs_all <= 0.
        );
    }
509
    #[test]
    fn render_suspend_resume_async() {
        use futures::executor;
        use futures::join;
        use futures::FutureExt as _;

        let context = Arc::new(OfflineAudioContext::new(1, 512, 44_100.));
        let context_clone = Arc::clone(&context);

        // once suspended at frame 128, hook up a constant source and resume
        let suspend_promise = context.suspend(128. / 44_100.).then(|_| async move {
            let mut src = context_clone.create_constant_source();
            src.connect(&context_clone.destination());
            src.start();
            context_clone.resume().await;
        });

        let render_promise = context.start_rendering();

        // drive both futures to completion; keep only the rendered buffer
        let buffer = executor::block_on(async move { join!(suspend_promise, render_promise).1 });

        assert_eq!(buffer.number_of_channels(), 1);
        assert_eq!(buffer.length(), 512);

        // silence before the suspension point ...
        assert_float_eq!(
            buffer.get_channel_data(0)[..128],
            &[0.; 128][..],
            abs_all <= 0.
        );
        // ... constant signal once the source was connected
        assert_float_eq!(
            buffer.get_channel_data(0)[128..],
            &[1.; 384][..],
            abs_all <= 0.
        );
    }
544
    // `suspend_sync` must reject a negative suspend time
    #[test]
    #[should_panic]
    fn test_suspend_negative_panics() {
        let mut context = OfflineAudioContext::new(2, 128, 44_100.);
        context.suspend_sync(-1.0, |_| ());
    }

    // `suspend_sync` must reject a time at/after the total render duration
    #[test]
    #[should_panic]
    fn test_suspend_after_duration_panics() {
        let mut context = OfflineAudioContext::new(2, 128, 44_100.);
        context.suspend_sync(1.0, |_| ());
    }

    // suspending after rendering has consumed the renderer must panic
    #[test]
    #[should_panic]
    fn test_suspend_after_render_panics() {
        let mut context = OfflineAudioContext::new(2, 128, 44_100.);
        let _ = context.start_rendering_sync();
        context.suspend_sync(0.0, |_| ());
    }

    // two suspensions at the same render quantum must panic
    #[test]
    #[should_panic]
    fn test_suspend_identical_frame_panics() {
        let mut context = OfflineAudioContext::new(2, 128, 44_100.);
        context.suspend_sync(0.0, |_| ());
        context.suspend_sync(0.0, |_| ());
    }
574
    #[test]
    fn test_onstatechange() {
        let mut context = OfflineAudioContext::new(2, 555, 44_100.);

        let changed = Arc::new(AtomicBool::new(false));
        let changed_clone = Arc::clone(&changed);
        context.set_onstatechange(move |_event| {
            changed_clone.store(true, Ordering::Relaxed);
        });

        let _ = context.start_rendering_sync();

        // rendering changes the context state, so the handler must have fired
        assert!(changed.load(Ordering::Relaxed));
    }
589
    #[test]
    fn test_onstatechange_async() {
        use futures::executor;

        let context = OfflineAudioContext::new(2, 555, 44_100.);

        let changed = Arc::new(AtomicBool::new(false));
        let changed_clone = Arc::clone(&changed);
        context.set_onstatechange(move |_event| {
            changed_clone.store(true, Ordering::Relaxed);
        });

        let _ = executor::block_on(context.start_rendering());

        // the async rendering path must fire the statechange handler as well
        assert!(changed.load(Ordering::Relaxed));
    }
606
    #[test]
    fn test_oncomplete() {
        let mut context = OfflineAudioContext::new(2, 555, 44_100.);

        let complete = Arc::new(AtomicBool::new(false));
        let complete_clone = Arc::clone(&complete);
        context.set_oncomplete(move |event| {
            // the completion event must carry the rendered buffer
            assert_eq!(event.rendered_buffer.length(), 555);
            complete_clone.store(true, Ordering::Relaxed);
        });

        let _ = context.start_rendering_sync();

        assert!(complete.load(Ordering::Relaxed));
    }
622
    #[test]
    fn test_oncomplete_async() {
        use futures::executor;

        let context = OfflineAudioContext::new(2, 555, 44_100.);

        let complete = Arc::new(AtomicBool::new(false));
        let complete_clone = Arc::clone(&complete);
        context.set_oncomplete(move |event| {
            // the completion event must carry the rendered buffer
            assert_eq!(event.rendered_buffer.length(), 555);
            complete_clone.store(true, Ordering::Relaxed);
        });

        let _ = executor::block_on(context.start_rendering());

        assert!(complete.load(Ordering::Relaxed));
    }
640
    // helper: compile-time check that a value is `Send + Sync`
    fn require_send_sync<T: Send + Sync>(_: T) {}

    #[test]
    fn test_all_futures_thread_safe() {
        let context = OfflineAudioContext::new(2, 555, 44_100.);

        // the futures returned by the async API must be usable across threads
        require_send_sync(context.start_rendering());
        require_send_sync(context.suspend(1.));
        require_send_sync(context.resume());
    }
651}