1use std::sync::atomic::{AtomicU64, AtomicU8};
4use std::sync::{Arc, Mutex};
5
6use crate::buffer::AudioBuffer;
7use crate::context::{AudioContextState, BaseAudioContext, ConcreteBaseAudioContext};
8use crate::events::{
9 Event, EventDispatch, EventHandler, EventPayload, EventType, OfflineAudioCompletionEvent,
10};
11use crate::render::RenderThread;
12use crate::{
13 assert_valid_buffer_length, assert_valid_number_of_channels, assert_valid_sample_rate,
14 RENDER_QUANTUM_SIZE,
15};
16
17use crate::events::EventLoop;
18use futures_channel::{mpsc, oneshot};
19use futures_util::SinkExt as _;
20
/// Boxed user callback invoked when a suspension point registered with
/// `suspend_sync` is reached during synchronous rendering.
pub(crate) type OfflineAudioContextCallback =
    dyn FnOnce(&mut OfflineAudioContext) + Send + Sync + 'static;
23
/// An audio context that renders its audio graph, as fast as possible, into
/// an in-memory `AudioBuffer` instead of a live output device.
#[allow(clippy::module_name_repetitions)]
pub struct OfflineAudioContext {
    // Shared context implementation common to online and offline contexts.
    base: ConcreteBaseAudioContext,
    // Total render size in sample frames.
    length: usize,
    // The rendering machinery; `take`n on the first `start_rendering*` call
    // so rendering can only ever run once.
    renderer: Mutex<Option<OfflineAudioContextRenderer>>,
    // Sender half used by `resume()` to unpark a suspended async rendering.
    resume_sender: mpsc::Sender<()>,
}
38
39impl std::fmt::Debug for OfflineAudioContext {
40 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
41 f.debug_struct("OfflineAudioContext")
42 .field("length", &self.length())
43 .field("base", &self.base())
44 .finish_non_exhaustive()
45 }
46}
47
/// Owns everything needed to actually run an offline render; moved out of
/// the context when rendering starts.
struct OfflineAudioContextRenderer {
    // The engine that produces the audio, driven by `render_audiobuffer*`.
    renderer: RenderThread,
    // Pending async suspensions, kept sorted by render-quantum index; each
    // sender resolves the future returned by `suspend()`.
    suspend_promises: Vec<(usize, oneshot::Sender<()>)>,
    // Pending sync suspension callbacks, kept sorted by render-quantum index.
    suspend_callbacks: Vec<(usize, Box<OfflineAudioContextCallback>)>,
    // Receiver half matching `OfflineAudioContext::resume_sender`.
    resume_receiver: mpsc::Receiver<()>,
    // Event loop that delivers dispatched context events to user handlers.
    event_loop: EventLoop,
}
60
impl BaseAudioContext for OfflineAudioContext {
    /// Expose the shared base-context implementation to the
    /// `BaseAudioContext` trait methods.
    fn base(&self) -> &ConcreteBaseAudioContext {
        &self.base
    }
}
66
67impl OfflineAudioContext {
68 #[must_use]
76 #[allow(clippy::missing_panics_doc)]
77 pub fn new(number_of_channels: usize, length: usize, sample_rate: f32) -> Self {
78 assert_valid_number_of_channels(number_of_channels);
79 assert_valid_buffer_length(length);
80 assert_valid_sample_rate(sample_rate);
81
82 let (sender, receiver) = crossbeam_channel::unbounded();
85
86 let (node_id_producer, node_id_consumer) = llq::Queue::new().split();
87 let graph = crate::render::graph::Graph::new(node_id_producer);
88 let message = crate::message::ControlMessage::Startup { graph };
89 sender.send(message).unwrap();
90
91 let frames_played = Arc::new(AtomicU64::new(0));
93 let frames_played_clone = Arc::clone(&frames_played);
94 let state = Arc::new(AtomicU8::new(AudioContextState::Suspended as u8));
95 let state_clone = Arc::clone(&state);
96
97 let (event_send, event_recv) = crossbeam_channel::unbounded();
100 let event_loop = EventLoop::new(event_recv);
101
102 let renderer = RenderThread::new(
104 sample_rate,
105 number_of_channels,
106 receiver,
107 state_clone,
108 frames_played_clone,
109 event_send.clone(),
110 );
111
112 let base = ConcreteBaseAudioContext::new(
114 sample_rate,
115 number_of_channels,
116 state,
117 frames_played,
118 sender,
119 event_send,
120 event_loop.clone(),
121 true,
122 node_id_consumer,
123 );
124
125 let (resume_sender, resume_receiver) = mpsc::channel(0);
126
127 let renderer = OfflineAudioContextRenderer {
128 renderer,
129 suspend_promises: Vec::new(),
130 suspend_callbacks: Vec::new(),
131 resume_receiver,
132 event_loop,
133 };
134
135 Self {
136 base,
137 length,
138 renderer: Mutex::new(Some(renderer)),
139 resume_sender,
140 }
141 }
142
143 #[must_use]
155 pub fn start_rendering_sync(&mut self) -> AudioBuffer {
156 let renderer = self
157 .renderer
158 .lock()
159 .unwrap()
160 .take()
161 .expect("InvalidStateError - Cannot call `startRendering` twice");
162
163 let OfflineAudioContextRenderer {
164 renderer,
165 suspend_callbacks,
166 event_loop,
167 ..
168 } = renderer;
169
170 self.base.set_state(AudioContextState::Running);
171
172 let result = renderer.render_audiobuffer_sync(self, suspend_callbacks, &event_loop);
173
174 self.base.set_state(AudioContextState::Closed);
175 let _ = self
176 .base
177 .send_event(EventDispatch::complete(result.clone()));
178
179 event_loop.handle_pending_events();
181
182 result
183 }
184
185 pub async fn start_rendering(&self) -> AudioBuffer {
197 let renderer = self
199 .renderer
200 .lock()
201 .unwrap()
202 .take()
203 .expect("InvalidStateError - Cannot call `startRendering` twice");
204
205 let OfflineAudioContextRenderer {
206 renderer,
207 suspend_promises,
208 resume_receiver,
209 event_loop,
210 ..
211 } = renderer;
212
213 self.base.set_state(AudioContextState::Running);
214
215 let result = renderer
216 .render_audiobuffer(self.length, suspend_promises, resume_receiver, &event_loop)
217 .await;
218
219 self.base.set_state(AudioContextState::Closed);
220 let _ = self
221 .base
222 .send_event(EventDispatch::complete(result.clone()));
223
224 event_loop.handle_pending_events();
226
227 result
228 }
229
    /// Size of the buffer to render, in sample frames (as passed to `new`).
    #[allow(clippy::missing_const_for_fn, clippy::unused_self)]
    #[must_use]
    pub fn length(&self) -> usize {
        self.length
    }
237
238 #[track_caller]
239 fn calculate_suspend_frame(&self, suspend_time: f64) -> usize {
240 assert!(
241 suspend_time >= 0.,
242 "InvalidStateError: suspendTime cannot be negative"
243 );
244 assert!(
245 suspend_time < self.length as f64 / self.sample_rate() as f64,
246 "InvalidStateError: suspendTime cannot be greater than or equal to the total render duration"
247 );
248 (suspend_time * self.base.sample_rate() as f64 / RENDER_QUANTUM_SIZE as f64).ceil() as usize
249 }
250
    /// Schedule a suspension of the rendering at `suspend_time` (seconds)
    /// and resolve once the renderer has actually paused there.
    ///
    /// # Panics
    ///
    /// Panics when `suspend_time` is out of range (see
    /// `calculate_suspend_frame`), when rendering has already started, or
    /// when a suspension is already registered at the same render quantum.
    pub async fn suspend(&self, suspend_time: f64) {
        let quantum = self.calculate_suspend_frame(suspend_time);

        // The renderer signals through this channel when it reaches the
        // requested quantum.
        let (sender, receiver) = oneshot::channel();

        {
            // NOTE: this is a std (non-async) Mutex, so the guard must be
            // dropped before the `await` below — hence the explicit scope.
            let mut lock = self.renderer.lock().unwrap();
            let renderer = lock
                .as_mut()
                .expect("InvalidStateError - cannot suspend when rendering has already started");

            // `suspend_promises` is kept sorted by quantum; a successful
            // search means this quantum is already taken, which is an error.
            let insert_pos = renderer
                .suspend_promises
                .binary_search_by_key(&quantum, |&(q, _)| q)
                .expect_err(
                    "InvalidStateError - cannot suspend multiple times at the same render quantum",
                );

            renderer
                .suspend_promises
                .insert(insert_pos, (quantum, sender));
        }

        // Wait until the renderer reaches the quantum, then reflect the
        // suspension in the public context state.
        receiver.await.unwrap();
        self.base().set_state(AudioContextState::Suspended);
    }
319
320 pub fn suspend_sync<F: FnOnce(&mut Self) + Send + Sync + 'static>(
358 &mut self,
359 suspend_time: f64,
360 callback: F,
361 ) {
362 let quantum = self.calculate_suspend_frame(suspend_time);
363
364 let mut lock = self.renderer.lock().unwrap();
365 let renderer = lock
366 .as_mut()
367 .expect("InvalidStateError - cannot suspend when rendering has already started");
368
369 let insert_pos = renderer
370 .suspend_callbacks
371 .binary_search_by_key(&quantum, |(q, _c)| *q)
372 .expect_err(
373 "InvalidStateError - cannot suspend multiple times at the same render quantum",
374 );
375
376 let boxed_callback = Box::new(|ctx: &mut OfflineAudioContext| {
377 ctx.base().set_state(AudioContextState::Suspended);
378 (callback)(ctx);
379 ctx.base().set_state(AudioContextState::Running);
380 });
381
382 renderer
383 .suspend_callbacks
384 .insert(insert_pos, (quantum, boxed_callback));
385 }
386
    /// Resume a rendering that is paused at a `suspend()` point.
    ///
    /// # Panics
    ///
    /// Panics if the rendering side has dropped the resume channel.
    pub async fn resume(&self) {
        self.base().set_state(AudioContextState::Running);
        // `Sender::send` needs `&mut self`, so send on a clone. The channel
        // has zero capacity (see `new`), so this completes only once the
        // renderer, which holds the receiver, accepts the message.
        self.resume_sender.clone().send(()).await.unwrap()
    }
396
397 #[allow(clippy::missing_panics_doc)]
402 pub fn set_oncomplete<F: FnOnce(OfflineAudioCompletionEvent) + Send + 'static>(
403 &self,
404 callback: F,
405 ) {
406 let callback = move |v| match v {
407 EventPayload::Complete(v) => {
408 let event = OfflineAudioCompletionEvent {
409 rendered_buffer: v,
410 event: Event { type_: "complete" },
411 };
412 callback(event)
413 }
414 _ => unreachable!(),
415 };
416
417 self.base()
418 .set_event_handler(EventType::Complete, EventHandler::Once(Box::new(callback)));
419 }
420
    /// Unset the callback registered with `set_oncomplete`, if any.
    pub fn clear_oncomplete(&self) {
        self.base().clear_event_handler(EventType::Complete);
    }
425}
426
#[cfg(test)]
mod tests {
    use super::*;
    use float_eq::assert_float_eq;
    use std::sync::atomic::{AtomicBool, Ordering};

    use crate::node::AudioNode;
    use crate::node::AudioScheduledSourceNode;

    // Sample rate and length are reported exactly as given to `new`.
    #[test]
    fn test_sample_rate_length() {
        let context = OfflineAudioContext::new(1, 48000, 96000.);
        assert_float_eq!(context.sample_rate(), 96000., abs_all <= 0.);
        assert_eq!(context.length(), 48000);
    }

    // Rendering an empty graph yields a silent buffer of the requested
    // dimensions and moves the context from Suspended to Closed.
    #[test]
    fn render_empty_graph() {
        let mut context = OfflineAudioContext::new(2, 555, 44_100.);
        assert_eq!(context.state(), AudioContextState::Suspended);
        let buffer = context.start_rendering_sync();

        assert_eq!(context.length(), 555);

        assert_eq!(buffer.number_of_channels(), 2);
        assert_eq!(buffer.length(), 555);
        assert_float_eq!(buffer.get_channel_data(0), &[0.; 555][..], abs_all <= 0.);
        assert_float_eq!(buffer.get_channel_data(1), &[0.; 555][..], abs_all <= 0.);

        assert_eq!(context.state(), AudioContextState::Closed);
    }

    // The renderer is consumed on the first call, so a second call panics.
    #[test]
    #[should_panic]
    fn render_twice_panics() {
        let mut context = OfflineAudioContext::new(2, 555, 44_100.);
        let _ = context.start_rendering_sync();
        let _ = context.start_rendering_sync();
    }

    // `suspend_sync` callbacks run at the requested quantum boundaries: the
    // constant source is only connected between the two suspension points,
    // so only quanta 1 and 2 carry signal.
    #[test]
    fn test_suspend_sync() {
        use crate::node::ConstantSourceNode;
        use std::sync::OnceLock;

        let len = RENDER_QUANTUM_SIZE * 4;
        let sample_rate = 48000_f64;

        let mut context = OfflineAudioContext::new(1, len, sample_rate as f32);
        // OnceLock carries the node from the first callback into the second.
        static SOURCE: OnceLock<ConstantSourceNode> = OnceLock::new();

        context.suspend_sync(RENDER_QUANTUM_SIZE as f64 / sample_rate, |context| {
            assert_eq!(context.state(), AudioContextState::Suspended);
            let mut src = context.create_constant_source();
            src.connect(&context.destination());
            src.start();
            SOURCE.set(src).unwrap();
        });

        context.suspend_sync((3 * RENDER_QUANTUM_SIZE) as f64 / sample_rate, |context| {
            assert_eq!(context.state(), AudioContextState::Suspended);
            SOURCE.get().unwrap().disconnect();
        });

        let output = context.start_rendering_sync();

        assert_float_eq!(
            output.get_channel_data(0)[..RENDER_QUANTUM_SIZE],
            &[0.; RENDER_QUANTUM_SIZE][..],
            abs_all <= 0.
        );
        assert_float_eq!(
            output.get_channel_data(0)[RENDER_QUANTUM_SIZE..3 * RENDER_QUANTUM_SIZE],
            &[1.; 2 * RENDER_QUANTUM_SIZE][..],
            abs_all <= 0.
        );
        assert_float_eq!(
            output.get_channel_data(0)[3 * RENDER_QUANTUM_SIZE..4 * RENDER_QUANTUM_SIZE],
            &[0.; RENDER_QUANTUM_SIZE][..],
            abs_all <= 0.
        );
    }

    // Async suspend/resume: the source is inserted while the renderer is
    // parked at frame 128, so only the tail of the buffer is non-zero.
    #[test]
    fn render_suspend_resume_async() {
        use futures::executor;
        use futures::join;
        use futures::FutureExt as _;

        let context = Arc::new(OfflineAudioContext::new(1, 512, 44_100.));
        let context_clone = Arc::clone(&context);

        let suspend_promise = context.suspend(128. / 44_100.).then(|_| async move {
            let mut src = context_clone.create_constant_source();
            src.connect(&context_clone.destination());
            src.start();
            context_clone.resume().await;
        });

        let render_promise = context.start_rendering();

        // Drive both futures; keep only the rendered buffer (`.1`).
        let buffer = executor::block_on(async move { join!(suspend_promise, render_promise).1 });

        assert_eq!(buffer.number_of_channels(), 1);
        assert_eq!(buffer.length(), 512);

        assert_float_eq!(
            buffer.get_channel_data(0)[..128],
            &[0.; 128][..],
            abs_all <= 0.
        );
        assert_float_eq!(
            buffer.get_channel_data(0)[128..],
            &[1.; 384][..],
            abs_all <= 0.
        );
    }

    #[test]
    #[should_panic]
    fn test_suspend_negative_panics() {
        let mut context = OfflineAudioContext::new(2, 128, 44_100.);
        context.suspend_sync(-1.0, |_| ());
    }

    #[test]
    #[should_panic]
    fn test_suspend_after_duration_panics() {
        let mut context = OfflineAudioContext::new(2, 128, 44_100.);
        context.suspend_sync(1.0, |_| ());
    }

    #[test]
    #[should_panic]
    fn test_suspend_after_render_panics() {
        let mut context = OfflineAudioContext::new(2, 128, 44_100.);
        let _ = context.start_rendering_sync();
        context.suspend_sync(0.0, |_| ());
    }

    // Two suspensions at the same render quantum are rejected.
    #[test]
    #[should_panic]
    fn test_suspend_identical_frame_panics() {
        let mut context = OfflineAudioContext::new(2, 128, 44_100.);
        context.suspend_sync(0.0, |_| ());
        context.suspend_sync(0.0, |_| ());
    }

    // The statechange handler fires during a synchronous render.
    #[test]
    fn test_onstatechange() {
        let mut context = OfflineAudioContext::new(2, 555, 44_100.);

        let changed = Arc::new(AtomicBool::new(false));
        let changed_clone = Arc::clone(&changed);
        context.set_onstatechange(move |_event| {
            changed_clone.store(true, Ordering::Relaxed);
        });

        let _ = context.start_rendering_sync();

        assert!(changed.load(Ordering::Relaxed));
    }

    // The statechange handler fires during an asynchronous render.
    #[test]
    fn test_onstatechange_async() {
        use futures::executor;

        let context = OfflineAudioContext::new(2, 555, 44_100.);

        let changed = Arc::new(AtomicBool::new(false));
        let changed_clone = Arc::clone(&changed);
        context.set_onstatechange(move |_event| {
            changed_clone.store(true, Ordering::Relaxed);
        });

        let _ = executor::block_on(context.start_rendering());

        assert!(changed.load(Ordering::Relaxed));
    }

    // The complete handler fires after a synchronous render and receives
    // the rendered buffer.
    #[test]
    fn test_oncomplete() {
        let mut context = OfflineAudioContext::new(2, 555, 44_100.);

        let complete = Arc::new(AtomicBool::new(false));
        let complete_clone = Arc::clone(&complete);
        context.set_oncomplete(move |event| {
            assert_eq!(event.rendered_buffer.length(), 555);
            complete_clone.store(true, Ordering::Relaxed);
        });

        let _ = context.start_rendering_sync();

        assert!(complete.load(Ordering::Relaxed));
    }

    // The complete handler fires after an asynchronous render as well.
    #[test]
    fn test_oncomplete_async() {
        use futures::executor;

        let context = OfflineAudioContext::new(2, 555, 44_100.);

        let complete = Arc::new(AtomicBool::new(false));
        let complete_clone = Arc::clone(&complete);
        context.set_oncomplete(move |event| {
            assert_eq!(event.rendered_buffer.length(), 555);
            complete_clone.store(true, Ordering::Relaxed);
        });

        let _ = executor::block_on(context.start_rendering());

        assert!(complete.load(Ordering::Relaxed));
    }

    // Compile-time check helper: accepts only Send + Sync values.
    fn require_send_sync<T: Send + Sync>(_: T) {}

    // The public futures must be Send + Sync so they can be driven from any
    // executor thread.
    #[test]
    fn test_all_futures_thread_safe() {
        let context = OfflineAudioContext::new(2, 555, 44_100.);

        require_send_sync(context.start_rendering());
        require_send_sync(context.suspend(1.));
        require_send_sync(context.resume());
    }
}