use crate::context::{AudioNodeId, AudioParamId};
use crate::events::{AudioProcessingEvent, ErrorEvent, EventDispatch};
use crate::{AudioBuffer, Event, RENDER_QUANTUM_SIZE};
use super::{graph::Node, AudioRenderQuantum, NodeCollection};
use crossbeam_channel::Sender;
use std::cell::Cell;
use std::any::Any;
use std::ops::Deref;
/// Render-thread scope handed to every [`AudioProcessor`] invocation.
///
/// Exposes the timing information of the current render quantum and lets a
/// processor dispatch events back to the control thread. Mirrors (loosely)
/// the Web Audio `AudioWorkletGlobalScope` — semantics of the public fields
/// follow the names; confirm exact update points against the render loop.
#[non_exhaustive] pub struct AudioWorkletGlobalScope {
    /// Frame counter of the current render quantum.
    pub current_frame: u64,
    /// Time corresponding to `current_frame`, in seconds.
    pub current_time: f64,
    /// Sample rate of the audio context, in Hertz.
    pub sample_rate: f32,
    // Id of the node currently being processed. A `Cell` — presumably the
    // render loop rewrites it between processor invocations; TODO(review)
    // confirm against the graph driver.
    pub(crate) node_id: Cell<AudioNodeId>,
    // Outgoing channel to the control thread; all sends in this module are
    // best-effort `try_send` calls whose errors are ignored.
    pub(crate) event_sender: Sender<EventDispatch>,
}
impl std::fmt::Debug for AudioWorkletGlobalScope {
    /// Shows the public timing fields and elides the private id/channel
    /// plumbing behind `..`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Fix: the label previously read "RenderScope", which does not match
        // the type's actual name; use the real name so `{:?}` output is not
        // misleading. The intermediate `mut` binding was also unnecessary —
        // the builder calls chain directly.
        f.debug_struct("AudioWorkletGlobalScope")
            .field("current_frame", &self.current_frame)
            .field("current_time", &self.current_time)
            .field("sample_rate", &self.sample_rate)
            .finish_non_exhaustive()
    }
}
impl AudioWorkletGlobalScope {
    /// Post a message from the processor to the control thread.
    ///
    /// Delivery is best-effort: if the channel is full or disconnected the
    /// message is silently dropped.
    pub fn post_message(&self, msg: Box<dyn Any + Send + 'static>) {
        let dispatch = EventDispatch::message(self.node_id.get(), msg);
        let _ = self.event_sender.try_send(dispatch);
    }

    /// Notify the control thread that the current node has ended.
    pub(crate) fn send_ended_event(&self) {
        let dispatch = EventDispatch::ended(self.node_id.get());
        let _ = self.event_sender.try_send(dispatch);
    }

    /// Build an `AudioProcessingEvent` for the current node and forward it
    /// to the control thread (best-effort).
    pub(crate) fn send_audio_processing_event(
        &self,
        input_buffer: AudioBuffer,
        output_buffer: AudioBuffer,
        playback_time: f64,
    ) {
        let event = AudioProcessingEvent {
            input_buffer,
            output_buffer,
            playback_time,
            registration: None,
        };
        let _ = self
            .event_sender
            .try_send(EventDispatch::audio_processing(self.node_id.get(), event));
    }

    /// Convert a caught panic payload into an `ErrorEvent`, log it on
    /// stderr, and dispatch it to the control thread.
    pub(crate) fn report_error(&self, error: Box<dyn Any + Send>) {
        // Local stand-in for `std::any::type_name_of_val` — presumably kept
        // to avoid raising the crate's MSRV; TODO(review) confirm.
        pub fn type_name_of_val<T: ?Sized>(_val: &T) -> &'static str {
            std::any::type_name::<T>()
        }

        // Panic payloads are typically `String` or `&str`; fall back to the
        // payload's type name for anything else.
        let message = error
            .downcast_ref::<String>()
            .cloned()
            .or_else(|| error.downcast_ref::<&str>().map(|s| (*s).to_string()))
            .unwrap_or_else(|| type_name_of_val(&error).to_string());

        eprintln!(
            "Panic occurred in Audio Processor: '{}'. Removing node from graph.",
            &message
        );

        let event = ErrorEvent {
            message,
            error,
            event: Event {
                type_: "ErrorEvent",
            },
        };
        let _ = self
            .event_sender
            .try_send(EventDispatch::processor_error(self.node_id.get(), event));
    }
}
/// Interface for audio processing code that runs on the render thread.
pub trait AudioProcessor: Send {
    /// Render one quantum of audio.
    ///
    /// `inputs` and `outputs` hold one render quantum per port, and `params`
    /// gives access to the computed values of this node's audio params.
    /// The returned flag presumably indicates whether the processor is still
    /// actively producing output (tail-time) so the graph can retain or drop
    /// the node — TODO(review) confirm against the graph's retention logic.
    fn process(
        &mut self,
        inputs: &[AudioRenderQuantum],
        outputs: &mut [AudioRenderQuantum],
        params: AudioParamValues<'_>,
        scope: &AudioWorkletGlobalScope,
    ) -> bool;

    /// Handle an incoming message from the control thread.
    ///
    /// Default implementation only logs a warning and drops the message.
    #[allow(unused_variables)]
    fn onmessage(&mut self, msg: &mut dyn Any) {
        log::warn!("Ignoring incoming message");
    }

    /// Name used in `Debug` output; defaults to the concrete type name.
    fn name(&self) -> &'static str {
        std::any::type_name::<Self>()
    }

    /// Whether this processor has observable side effects beyond its audio
    /// output. Defaults to `false` — presumably used to decide if a node can
    /// be dropped when unobserved; TODO(review) confirm.
    fn has_side_effects(&self) -> bool {
        false
    }

    /// Hook invoked just before the processor is dropped; default is a no-op.
    fn before_drop(&mut self, _scope: &AudioWorkletGlobalScope) {}
}
impl std::fmt::Debug for dyn AudioProcessor {
    /// Renders a processor trait object as `ProcessorName { .. }`, taking
    /// the label from [`AudioProcessor::name`].
    fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut builder = formatter.debug_struct(self.name());
        builder.finish_non_exhaustive()
    }
}
/// Borrow guard exposing the first channel of a node's output buffer as a
/// `[f32]` slice, used as the return type of [`AudioParamValues::get`].
struct DerefAudioRenderQuantumChannel<'a>(std::cell::Ref<'a, Node>);

impl Deref for DerefAudioRenderQuantumChannel<'_> {
    type Target = [f32];

    fn deref(&self) -> &Self::Target {
        let quantum = self.0.get_buffer();
        // A single-valued buffer exposes just its one (constant) sample;
        // otherwise the full render quantum is visible.
        let visible = if quantum.single_valued() {
            1
        } else {
            RENDER_QUANTUM_SIZE
        };
        &quantum.channel_data(0)[..visible]
    }
}
/// Accessor for the computed audio-param values available during a call to
/// [`AudioProcessor::process`].
pub struct AudioParamValues<'a> {
    // Borrow of the render graph's node collection; param values are read
    // from the corresponding nodes' output buffers.
    nodes: &'a NodeCollection,
}
impl std::fmt::Debug for AudioParamValues<'_> {
    /// Opaque representation: the borrowed node collection is elided as `..`.
    fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut builder = formatter.debug_struct("AudioParamValues");
        builder.finish_non_exhaustive()
    }
}
impl<'a> AudioParamValues<'a> {
    /// Wrap the render graph's node collection for param-value lookups.
    pub(crate) fn from(nodes: &'a NodeCollection) -> Self {
        Self { nodes }
    }

    /// Return the computed values for the given [`AudioParamId`].
    ///
    /// The returned slice has length 1 when the param is constant over this
    /// render quantum, and a full render quantum of values otherwise (see
    /// `DerefAudioRenderQuantumChannel`).
    ///
    /// # Panics
    ///
    /// Presumably panics when `index` does not refer to a node present in
    /// the collection (`get_unchecked`) or when that node is already
    /// mutably borrowed — TODO(review) confirm against
    /// `NodeCollection::get_unchecked`.
    #[allow(clippy::missing_panics_doc)]
    pub fn get(&self, index: &AudioParamId) -> impl Deref<Target = [f32]> + '_ {
        DerefAudioRenderQuantumChannel(self.nodes.get_unchecked(index.into()).borrow())
    }

    /// Computed values of the 9 `AudioListener` params, in the order given
    /// by `LISTENER_AUDIO_PARAM_IDS`.
    pub(crate) fn listener_params(&self) -> [impl Deref<Target = [f32]> + '_; 9] {
        crate::context::LISTENER_AUDIO_PARAM_IDS.map(|p| self.get(&p))
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    struct TestNode;

    impl AudioProcessor for TestNode {
        fn process(
            &mut self,
            _inputs: &[AudioRenderQuantum],
            _outputs: &mut [AudioRenderQuantum],
            _params: AudioParamValues<'_>,
            _scope: &AudioWorkletGlobalScope,
        ) -> bool {
            todo!()
        }
    }

    #[test]
    fn test_debug_fmt() {
        // `name()` defaults to the fully qualified type name, and the
        // trait-object Debug impl appends a `{ .. }` suffix.
        let node = TestNode;
        let as_dyn: &dyn AudioProcessor = &node;
        let rendered = format!("{:?}", as_dyn);
        assert_eq!(
            rendered,
            "web_audio_api::render::processor::tests::TestNode { .. }"
        );
    }
}