use crate::context::{
AudioContextRegistration, AudioContextState, AudioNodeId, BaseAudioContext,
DESTINATION_NODE_ID, LISTENER_NODE_ID, LISTENER_PARAM_IDS,
};
use crate::events::{EventDispatch, EventHandler, EventLoop, EventType};
use crate::message::ControlMessage;
use crate::node::{AudioDestinationNode, AudioNode, ChannelConfig, ChannelConfigOptions};
use crate::param::{AudioParam, AudioParamEvent};
use crate::render::AudioProcessor;
use crate::spatial::AudioListenerParams;
use crate::AudioListener;
use crossbeam_channel::{Receiver, SendError, Sender};
use std::sync::atomic::{AtomicU64, AtomicU8, Ordering};
use std::sync::{Arc, Mutex, RwLock, RwLockWriteGuard};
/// The shared, concrete implementation behind every `BaseAudioContext`.
///
/// Cloning is cheap: it only bumps the refcount of the shared inner state, so
/// all clones refer to the same underlying context.
#[allow(clippy::module_name_repetitions)]
#[derive(Clone)]
#[doc(hidden)]
pub struct ConcreteBaseAudioContext {
    /// Shared inner state of this context (see `ConcreteBaseAudioContextInner`).
    inner: Arc<ConcreteBaseAudioContextInner>,
}
// Contexts compare by identity: two handles are equal exactly when they share
// the same inner allocation (i.e. they are clones of one another).
impl PartialEq for ConcreteBaseAudioContext {
    fn eq(&self, other: &Self) -> bool {
        let lhs = &self.inner;
        let rhs = &other.inner;
        // Pointer comparison, not structural comparison of the inner state.
        Arc::ptr_eq(lhs, rhs)
    }
}
/// Inner state of a `ConcreteBaseAudioContext`, shared behind an `Arc` by all
/// clones of the context handle.
struct ConcreteBaseAudioContextInner {
    /// Sample rate of this context, in Hz (see `current_time`).
    sample_rate: f32,
    /// Maximum number of output channels of this context.
    max_channel_count: usize,
    /// Incrementing counter handing out the next free `AudioNodeId`.
    node_id_inc: AtomicU64,
    /// Channel configuration of the destination node, filled in during `new`.
    destination_channel_config: ChannelConfig,
    /// Sender half of the control message channel; the `RwLock` lets
    /// `lock_control_msg_sender` take exclusive access while normal sends
    /// share a read lock.
    render_channel: RwLock<Sender<ControlMessage>>,
    /// `ConnectNode` messages parked until their target node is registered
    /// (flushed by `resolve_queued_control_msgs`).
    queued_messages: Mutex<Vec<ControlMessage>>,
    /// Shared counter of frames played so far; basis of `current_time`.
    frames_played: Arc<AtomicU64>,
    /// Registration messages for the `AudioListener` and its params, deferred
    /// until `ensure_audio_listener_present` is called.
    queued_audio_listener_msgs: Mutex<Vec<ControlMessage>>,
    /// Raw parts of the listener's `AudioParam`s; `None` only while `new` is
    /// still constructing the context.
    listener_params: Option<AudioListenerParams>,
    /// Whether this context is an offline (non-realtime) context.
    offline: bool,
    /// Current `AudioContextState`, stored as its `u8` discriminant.
    state: AtomicU8,
    /// Event loop holding the registered event handlers.
    event_loop: EventLoop,
    /// Sender for event dispatches; `None` when no event channel was supplied.
    event_send: Option<Sender<EventDispatch>>,
}
impl BaseAudioContext for ConcreteBaseAudioContext {
    /// Returns this concrete context itself.
    fn base(&self) -> &ConcreteBaseAudioContext {
        self
    }

    /// Registers a new node in this context.
    ///
    /// The closure `f` receives a fresh `AudioContextRegistration` and returns
    /// the control-side node together with its render-side processor. The
    /// registration message is shipped to the render thread immediately —
    /// except for the listener node and its params, whose registration is
    /// deferred until the listener is actually needed.
    fn register<
        T: AudioNode,
        F: FnOnce(AudioContextRegistration) -> (T, Box<dyn AudioProcessor>),
    >(
        &self,
        f: F,
    ) -> T {
        // Claim the next free node id (atomic, so concurrent registrations are safe).
        let node_id = AudioNodeId(self.inner.node_id_inc.fetch_add(1, Ordering::SeqCst));
        let registration = AudioContextRegistration {
            id: node_id,
            context: self.clone(),
        };

        // Let the caller build the node pair from the registration.
        let (node, processor) = f(registration);

        let register_msg = ControlMessage::RegisterNode {
            id: node_id,
            node: processor,
            inputs: node.number_of_inputs(),
            outputs: node.number_of_outputs(),
            channel_config: node.channel_config().clone(),
        };

        let is_listener_part =
            node_id == LISTENER_NODE_ID || LISTENER_PARAM_IDS.contains(&node_id.0);
        if is_listener_part {
            // Park the message; `ensure_audio_listener_present` drains this queue.
            self.inner
                .queued_audio_listener_msgs
                .lock()
                .unwrap()
                .push(register_msg);
        } else {
            // Send the registration right away, then flush any connect messages
            // that were waiting for this node to exist.
            self.send_control_msg(register_msg).unwrap();
            self.resolve_queued_control_msgs(node_id);
        }

        node
    }
}
impl ConcreteBaseAudioContext {
pub(super) fn new(
sample_rate: f32,
max_channel_count: usize,
frames_played: Arc<AtomicU64>,
render_channel: Sender<ControlMessage>,
event_channel: Option<(Sender<EventDispatch>, Receiver<EventDispatch>)>,
offline: bool,
) -> Self {
let event_loop = EventLoop::new();
let (event_send, event_recv) = match event_channel {
None => (None, None),
Some((send, recv)) => (Some(send), Some(recv)),
};
let base_inner = ConcreteBaseAudioContextInner {
sample_rate,
max_channel_count,
render_channel: RwLock::new(render_channel),
queued_messages: Mutex::new(Vec::new()),
node_id_inc: AtomicU64::new(0),
destination_channel_config: ChannelConfigOptions::default().into(),
frames_played,
queued_audio_listener_msgs: Mutex::new(Vec::new()),
listener_params: None,
offline,
state: AtomicU8::new(AudioContextState::Suspended as u8),
event_loop: event_loop.clone(),
event_send,
};
let base = Self {
inner: Arc::new(base_inner),
};
let (listener_params, destination_channel_config) = {
let dest = AudioDestinationNode::new(&base, max_channel_count);
let destination_channel_config = dest.into_channel_config();
let listener = crate::spatial::AudioListenerNode::new(&base);
let listener_params = listener.into_fields();
let AudioListener {
position_x,
position_y,
position_z,
forward_x,
forward_y,
forward_z,
up_x,
up_y,
up_z,
} = listener_params;
let listener_params = AudioListenerParams {
position_x: position_x.into_raw_parts(),
position_y: position_y.into_raw_parts(),
position_z: position_z.into_raw_parts(),
forward_x: forward_x.into_raw_parts(),
forward_y: forward_y.into_raw_parts(),
forward_z: forward_z.into_raw_parts(),
up_x: up_x.into_raw_parts(),
up_y: up_y.into_raw_parts(),
up_z: up_z.into_raw_parts(),
};
(listener_params, destination_channel_config)
};
let mut base = base;
let mut inner_mut = Arc::get_mut(&mut base.inner).unwrap();
inner_mut.listener_params = Some(listener_params);
inner_mut.destination_channel_config = destination_channel_config;
debug_assert_eq!(
base.inner.node_id_inc.load(Ordering::Relaxed),
LISTENER_PARAM_IDS.end,
);
if let Some(event_channel) = event_recv {
event_loop.run(event_channel);
}
base
}
pub(crate) fn send_control_msg(
&self,
msg: ControlMessage,
) -> Result<(), SendError<ControlMessage>> {
self.inner.render_channel.read().unwrap().send(msg)
}
pub(crate) fn send_event(&self, msg: EventDispatch) -> Result<(), SendError<EventDispatch>> {
match self.inner.event_send.as_ref() {
Some(s) => s.send(msg),
None => Err(SendError(msg)),
}
}
pub(crate) fn lock_control_msg_sender(&self) -> RwLockWriteGuard<Sender<ControlMessage>> {
self.inner.render_channel.write().unwrap()
}
pub(super) fn mark_node_dropped(&self, id: AudioNodeId) {
let magic = id == DESTINATION_NODE_ID
|| id == LISTENER_NODE_ID
|| LISTENER_PARAM_IDS.contains(&id.0);
if !magic {
let message = ControlMessage::FreeWhenFinished { id };
let _r = self.send_control_msg(message);
}
}
#[doc(hidden)]
pub fn mark_cycle_breaker(&self, reg: &AudioContextRegistration) {
let id = reg.id();
let message = ControlMessage::MarkCycleBreaker { id };
let _r = self.send_control_msg(message);
}
pub(super) fn destination_channel_config(&self) -> ChannelConfig {
self.inner.destination_channel_config.clone()
}
pub(super) fn listener(&self) -> AudioListener {
let mut ids = LISTENER_PARAM_IDS.map(|i| AudioContextRegistration {
id: AudioNodeId(i),
context: self.clone(),
});
let params = self.inner.listener_params.as_ref().unwrap();
AudioListener {
position_x: AudioParam::from_raw_parts(ids.next().unwrap(), params.position_x.clone()),
position_y: AudioParam::from_raw_parts(ids.next().unwrap(), params.position_y.clone()),
position_z: AudioParam::from_raw_parts(ids.next().unwrap(), params.position_z.clone()),
forward_x: AudioParam::from_raw_parts(ids.next().unwrap(), params.forward_x.clone()),
forward_y: AudioParam::from_raw_parts(ids.next().unwrap(), params.forward_y.clone()),
forward_z: AudioParam::from_raw_parts(ids.next().unwrap(), params.forward_z.clone()),
up_x: AudioParam::from_raw_parts(ids.next().unwrap(), params.up_x.clone()),
up_y: AudioParam::from_raw_parts(ids.next().unwrap(), params.up_y.clone()),
up_z: AudioParam::from_raw_parts(ids.next().unwrap(), params.up_z.clone()),
}
}
#[must_use]
pub(super) fn state(&self) -> AudioContextState {
self.inner.state.load(Ordering::SeqCst).into()
}
pub(super) fn set_state(&self, state: AudioContextState) {
self.inner.state.store(state as u8, Ordering::SeqCst);
}
#[must_use]
pub(super) fn sample_rate(&self) -> f32 {
self.inner.sample_rate
}
#[must_use]
#[allow(clippy::cast_precision_loss)]
pub(super) fn current_time(&self) -> f64 {
self.inner.frames_played.load(Ordering::SeqCst) as f64 / self.inner.sample_rate as f64
}
#[must_use]
pub(crate) fn max_channel_count(&self) -> usize {
self.inner.max_channel_count
}
fn resolve_queued_control_msgs(&self, id: AudioNodeId) {
let mut queued = self.inner.queued_messages.lock().unwrap();
let mut i = 0; while i < queued.len() {
if matches!(&queued[i], ControlMessage::ConnectNode {to, ..} if *to == id) {
let m = queued.remove(i);
self.send_control_msg(m).unwrap();
} else {
i += 1;
}
}
}
pub(crate) fn connect(&self, from: AudioNodeId, to: AudioNodeId, output: usize, input: usize) {
let message = ControlMessage::ConnectNode {
from,
to,
output,
input,
};
self.send_control_msg(message).unwrap();
}
pub(super) fn queue_audio_param_connect(&self, param: &AudioParam, audio_node: AudioNodeId) {
let message = ControlMessage::ConnectNode {
from: param.registration().id(),
to: audio_node,
output: 0,
input: usize::MAX, };
self.inner.queued_messages.lock().unwrap().push(message);
}
pub(crate) fn disconnect_from(&self, from: AudioNodeId, to: AudioNodeId) {
let message = ControlMessage::DisconnectNode { from, to };
self.send_control_msg(message).unwrap();
}
pub(crate) fn disconnect(&self, from: AudioNodeId) {
let message = ControlMessage::DisconnectAll { from };
self.send_control_msg(message).unwrap();
}
pub(crate) fn pass_audio_param_event(
&self,
to: &Sender<AudioParamEvent>,
event: AudioParamEvent,
) {
let message = ControlMessage::AudioParamEvent {
to: to.clone(),
event,
};
self.send_control_msg(message).unwrap();
}
pub(crate) fn connect_listener_to_panner(&self, panner: AudioNodeId) {
self.connect(LISTENER_NODE_ID, panner, 0, usize::MAX);
}
pub(crate) fn ensure_audio_listener_present(&self) {
let mut queued_audio_listener_msgs = self.inner.queued_audio_listener_msgs.lock().unwrap();
let mut released = false;
while let Some(message) = queued_audio_listener_msgs.pop() {
self.send_control_msg(message).unwrap();
released = true;
}
if released {
self.resolve_queued_control_msgs(LISTENER_NODE_ID);
self.connect(LISTENER_NODE_ID, DESTINATION_NODE_ID, 0, usize::MAX);
}
}
pub(crate) fn offline(&self) -> bool {
self.inner.offline
}
pub(crate) fn set_event_handler(&self, event: EventType, callback: EventHandler) {
self.inner.event_loop.set_handler(event, callback);
}
pub(crate) fn clear_event_handler(&self, event: EventType) {
self.inner.event_loop.clear_handler(event);
}
}