web_audio_api/spatial.rs

//! Spatialization/Panning primitives
//!
//! Required for the panning algorithm and the distance and cone effects of panner nodes

use crate::context::{AudioContextRegistration, BaseAudioContext};
use crate::node::{
    AudioNode, AudioNodeOptions, ChannelConfig, ChannelCountMode, ChannelInterpretation,
};
use crate::param::{AudioParam, AudioParamDescriptor, AudioParamInner, AutomationRate};
use crate::render::{
    AudioParamValues, AudioProcessor, AudioRenderQuantum, AudioWorkletGlobalScope,
};

use std::f32::consts::PI;
use std::sync::OnceLock;

/// AudioParam settings for the cartesian coordinates
pub(crate) const PARAM_OPTS: AudioParamDescriptor = AudioParamDescriptor {
    name: String::new(),
    min_value: f32::MIN,
    max_value: f32::MAX,
    default_value: 0.,
    automation_rate: AutomationRate::A,
};

/// Represents the position and orientation of the person listening to the audio scene
///
/// All [`PannerNode`](crate::node::PannerNode) objects spatialize in relation to the [BaseAudioContext's](crate::context::BaseAudioContext) listener.
///
/// # Usage
///
/// For example usage, check the [`PannerNode`](crate::node::PannerNode) docs.
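///
/// A minimal sketch of moving the listener (a sketch only: it assumes the `listener()`
/// accessor on the context and the plain `set_value` setter on [`AudioParam`]):
///
/// ```no_run
/// use web_audio_api::context::{AudioContext, BaseAudioContext};
///
/// let context = AudioContext::default();
///
/// // place the listener one unit to the right, keeping the default orientation
/// // (facing the negative z-axis, with the up vector along the positive y-axis)
/// context.listener().position_x().set_value(1.);
/// ```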
#[derive(Debug)]
pub struct AudioListener {
    pub(crate) position_x: AudioParam,
    pub(crate) position_y: AudioParam,
    pub(crate) position_z: AudioParam,
    pub(crate) forward_x: AudioParam,
    pub(crate) forward_y: AudioParam,
    pub(crate) forward_z: AudioParam,
    pub(crate) up_x: AudioParam,
    pub(crate) up_y: AudioParam,
    pub(crate) up_z: AudioParam,
}

impl AudioListener {
    /// Position of the listener along the x-axis (an a-rate [`AudioParam`])
    pub fn position_x(&self) -> &AudioParam {
        &self.position_x
    }
    /// Position of the listener along the y-axis (an a-rate [`AudioParam`])
    pub fn position_y(&self) -> &AudioParam {
        &self.position_y
    }
    /// Position of the listener along the z-axis (an a-rate [`AudioParam`])
    pub fn position_z(&self) -> &AudioParam {
        &self.position_z
    }
    /// x-component of the direction the listener is facing (an a-rate [`AudioParam`])
    pub fn forward_x(&self) -> &AudioParam {
        &self.forward_x
    }
    /// y-component of the direction the listener is facing (an a-rate [`AudioParam`])
    pub fn forward_y(&self) -> &AudioParam {
        &self.forward_y
    }
    /// z-component of the direction the listener is facing (an a-rate [`AudioParam`])
    pub fn forward_z(&self) -> &AudioParam {
        &self.forward_z
    }
    /// x-component of the listener's up vector (an a-rate [`AudioParam`])
    pub fn up_x(&self) -> &AudioParam {
        &self.up_x
    }
    /// y-component of the listener's up vector (an a-rate [`AudioParam`])
    pub fn up_y(&self) -> &AudioParam {
        &self.up_y
    }
    /// z-component of the listener's up vector (an a-rate [`AudioParam`])
    pub fn up_z(&self) -> &AudioParam {
        &self.up_z
    }
}

/// Wrapper for the [`AudioListener`] so it can be placed in the audio graph.
///
/// This node has no inputs; it takes the position/orientation AudioParams and copies them into
/// its 9 outputs. The outputs are connected to the PannerNodes (via an AudioParam).
///
/// The AudioListener is always connected to the AudioDestinationNode, so its position and
/// orientation params are recalculated at each render quantum.
pub(crate) struct AudioListenerNode {
    registration: AudioContextRegistration,
    fields: AudioListener,
}

impl AudioNode for AudioListenerNode {
    fn registration(&self) -> &AudioContextRegistration {
        &self.registration
    }

    fn channel_config(&self) -> &ChannelConfig {
        static INSTANCE: OnceLock<ChannelConfig> = OnceLock::new();
        INSTANCE.get_or_init(|| {
            AudioNodeOptions {
                channel_count: 1,
                channel_count_mode: ChannelCountMode::Explicit,
                channel_interpretation: ChannelInterpretation::Discrete,
            }
            .into()
        })
    }

    fn number_of_inputs(&self) -> usize {
        0
    }

    fn number_of_outputs(&self) -> usize {
        9 // return all audio params as output
    }

    fn set_channel_count(&self, _v: usize) {
        panic!("NotSupportedError - AudioListenerNode has channel count constraints");
    }
    fn set_channel_count_mode(&self, _v: ChannelCountMode) {
        panic!("NotSupportedError - AudioListenerNode has channel count mode constraints");
    }
    fn set_channel_interpretation(&self, _v: ChannelInterpretation) {
        panic!("NotSupportedError - AudioListenerNode has channel interpretation constraints");
    }
}

impl AudioListenerNode {
    pub fn new<C: BaseAudioContext>(context: &C) -> Self {
        context.base().register(move |registration| {
            // the listener faces the negative z-axis by default
            let forward_z_opts = AudioParamDescriptor {
                default_value: -1.,
                ..PARAM_OPTS
            };
            // the listener's up vector points along the positive y-axis by default
            let up_y_opts = AudioParamDescriptor {
                default_value: 1.,
                ..PARAM_OPTS
            };

            // set up one a-rate AudioParam per coordinate, in the same order as the
            // `AudioListener` fields below (position, forward, up)
            let (p1, _v1) = context.create_audio_param(PARAM_OPTS, &registration);
            let (p2, _v2) = context.create_audio_param(PARAM_OPTS, &registration);
            let (p3, _v3) = context.create_audio_param(PARAM_OPTS, &registration);
            let (p4, _v4) = context.create_audio_param(PARAM_OPTS, &registration);
            let (p5, _v5) = context.create_audio_param(PARAM_OPTS, &registration);
            let (p6, _v6) = context.create_audio_param(forward_z_opts, &registration);
            let (p7, _v7) = context.create_audio_param(PARAM_OPTS, &registration);
            let (p8, _v8) = context.create_audio_param(up_y_opts, &registration);
            let (p9, _v9) = context.create_audio_param(PARAM_OPTS, &registration);

            let node = Self {
                registration,
                fields: AudioListener {
                    position_x: p1,
                    position_y: p2,
                    position_z: p3,
                    forward_x: p4,
                    forward_y: p5,
                    forward_z: p6,
                    up_x: p7,
                    up_y: p8,
                    up_z: p9,
                },
            };
            let proc = ListenerRenderer {};

            (node, Box::new(proc))
        })
    }

    pub fn into_fields(self) -> AudioListener {
        self.fields
    }
}

struct ListenerRenderer {}

impl AudioProcessor for ListenerRenderer {
    fn process(
        &mut self,
        _inputs: &[AudioRenderQuantum],
        _outputs: &mut [AudioRenderQuantum],
        _params: AudioParamValues<'_>,
        _scope: &AudioWorkletGlobalScope,
    ) -> bool {
        // do nothing; the listener is only here to make sure the position/forward/up params
        // are rendered in order

        true // never drop
    }
}

/// Data holder for the BaseAudioContext so it can reconstruct the AudioListener on request
pub(crate) struct AudioListenerParams {
    pub position_x: AudioParamInner,
    pub position_y: AudioParamInner,
    pub position_z: AudioParamInner,
    pub forward_x: AudioParamInner,
    pub forward_y: AudioParamInner,
    pub forward_z: AudioParamInner,
    pub up_x: AudioParamInner,
    pub up_y: AudioParamInner,
    pub up_z: AudioParamInner,
}

use vecmath::{
    vec3_cross, vec3_dot, vec3_len, vec3_normalized, vec3_scale, vec3_square_len, vec3_sub, Vector3,
};

/// Direction to the source position as seen from the listener, in 3D
///
/// Returns an `(azimuth, elevation)` pair in degrees. Azimuth is 0 for a source straight ahead,
/// positive towards the listener's right and negative towards the left; elevation is 0 in the
/// horizontal plane, +90 straight above and -90 straight below the listener.
pub fn azimuth_and_elevation(
    source_position: Vector3<f32>,
    listener_position: Vector3<f32>,
    listener_forward: Vector3<f32>,
    listener_up: Vector3<f32>,
) -> (f32, f32) {
    let relative_pos = vec3_sub(source_position, listener_position);

    // Handle degenerate case if source and listener are at the same point.
    if vec3_square_len(relative_pos) <= f32::MIN_POSITIVE {
        return (0., 0.);
    }

    // Calculate the source-listener vector.
    let source_listener = vec3_normalized(relative_pos);

    // Align axes.
    let listener_right = vec3_cross(listener_forward, listener_up);

    if vec3_square_len(listener_right) == 0. {
        // Handle the case where listener’s 'up' and 'forward' vectors are linearly dependent, in
        // which case 'right' cannot be determined
        return (0., 0.);
    }

    // Determine a unit vector orthogonal to listener’s right, forward
    let listener_right_norm = vec3_normalized(listener_right);
    let listener_forward_norm = vec3_normalized(listener_forward);
    let up = vec3_cross(listener_right_norm, listener_forward_norm);

    // Determine elevation first
    let mut elevation = 90. - 180. * vec3_dot(source_listener, up).acos() / PI;
    if elevation > 90. {
        elevation = 180. - elevation;
    } else if elevation < -90. {
        elevation = -180. - elevation;
    }

    let up_projection = vec3_dot(source_listener, up);
    let projected_source = vec3_sub(source_listener, vec3_scale(up, up_projection));

    // this case is not handled by the spec, so I stole the solution from
    // https://hg.mozilla.org/mozilla-central/rev/1100a5bc013b541c635bc42bd753531e95c952e4
    if vec3_square_len(projected_source) == 0. {
        return (0., elevation);
    }
    let projected_source = vec3_normalized(projected_source);

    let mut azimuth = 180. * vec3_dot(projected_source, listener_right_norm).acos() / PI;

    // Source in front or behind the listener.
    let front_back = vec3_dot(projected_source, listener_forward_norm);
    if front_back < 0. {
        azimuth = 360. - azimuth;
    }

    // Make azimuth relative to "forward" and not "right" listener vector.
    #[allow(clippy::manual_range_contains)]
    if azimuth >= 0. && azimuth <= 270. {
        azimuth = 90. - azimuth;
    } else {
        azimuth = 450. - azimuth;
    }

    (azimuth, elevation)
}

/// Distance between two points in 3D
pub fn distance(source_position: Vector3<f32>, listener_position: Vector3<f32>) -> f32 {
    vec3_len(vec3_sub(source_position, listener_position))
}

/// Angle in degrees between the source orientation vector and the vector pointing from the
/// listener position to the source position (used for the cone effects of panner nodes)
pub fn angle(
    source_position: Vector3<f32>,
    source_orientation: Vector3<f32>,
    listener_position: Vector3<f32>,
) -> f32 {
    // handle edge case of missing source orientation
    if vec3_square_len(source_orientation) == 0. {
        return 0.;
    }
    let normalized_source_orientation = vec3_normalized(source_orientation);

    let relative_pos = vec3_sub(source_position, listener_position);
    // Handle degenerate case if source and listener are at the same point.
    if vec3_square_len(relative_pos) <= f32::MIN_POSITIVE {
        return 0.;
    }
    // Calculate the source-listener vector.
    let source_listener = vec3_normalized(relative_pos);

    let angle = 180. * vec3_dot(source_listener, normalized_source_orientation).acos() / PI;
    angle.abs()
}

#[cfg(test)]
mod tests {
    use float_eq::assert_float_eq;

    use super::*;

    // listener coordinates/directions
    const LP: [f32; 3] = [0., 0., 0.];
    const LF: [f32; 3] = [0., 0., -1.];
    const LU: [f32; 3] = [0., 1., 0.];

    #[test]
    fn azimuth_elevation_equal_pos() {
        let pos = [0., 0., 0.];
        let (azimuth, elevation) = azimuth_and_elevation(pos, LP, LF, LU);

        assert_float_eq!(azimuth, 0., abs <= 0.);
        assert_float_eq!(elevation, 0., abs <= 0.);
    }

    #[test]
    fn azimuth_elevation_horizontal_plane() {
        // horizontal plane is spanned by x-z axes

        let pos = [10., 0., 0.];
        let (azimuth, elevation) = azimuth_and_elevation(pos, LP, LF, LU);
        assert_float_eq!(azimuth, 90., abs <= 0.001);
        assert_float_eq!(elevation, 0., abs <= 0.);

        let pos = [-10., 0., 0.];
        let (azimuth, elevation) = azimuth_and_elevation(pos, LP, LF, LU);
        assert_float_eq!(azimuth, -90., abs <= 0.001);
        assert_float_eq!(elevation, 0., abs <= 0.);

        let pos = [10., 0., -10.];
        let (azimuth, elevation) = azimuth_and_elevation(pos, LP, LF, LU);
        assert_float_eq!(azimuth, 45., abs <= 0.001);
        assert_float_eq!(elevation, 0., abs <= 0.);

        let pos = [-10., 0., -10.];
        let (azimuth, elevation) = azimuth_and_elevation(pos, LP, LF, LU);
        assert_float_eq!(azimuth, -45., abs <= 0.001);
        assert_float_eq!(elevation, 0., abs <= 0.);
    }
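
    #[test]
    fn azimuth_elevation_behind() {
        // additional sketch: with the default orientation (forward = -z) a source directly
        // behind the listener hits the wrap-around branch of the azimuth computation and
        // should resolve to -180 degrees
        let pos = [0., 0., 10.];
        let (azimuth, elevation) = azimuth_and_elevation(pos, LP, LF, LU);
        assert_float_eq!(azimuth, -180., abs <= 0.001);
        assert_float_eq!(elevation, 0., abs <= 0.001);
    }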

    #[test]
    fn azimuth_elevation_vertical() {
        let pos = [0., -10., 0.];
        let (azimuth, elevation) = azimuth_and_elevation(pos, LP, LF, LU);
        assert_float_eq!(azimuth, 0., abs <= 0.001);
        assert_float_eq!(elevation, -90., abs <= 0.001);

        let pos = [0., 10., 0.];
        let (azimuth, elevation) = azimuth_and_elevation(pos, LP, LF, LU);
        assert_float_eq!(azimuth, 0., abs <= 0.001);
        assert_float_eq!(elevation, 90., abs <= 0.001);
    }

    #[test]
    fn angle_equal_pos() {
        let pos = [0., 0., 0.];
        let orientation = [1., 0., 0.];
        let angle = angle(pos, orientation, LP);

        assert_float_eq!(angle, 0., abs <= 0.);
    }

    #[test]
    fn angle_no_orientation() {
        let pos = [10., 0., 0.];
        let orientation = [0., 0., 0.];
        let angle = angle(pos, orientation, LP);

        assert_float_eq!(angle, 0., abs <= 0.);
    }

    #[test]
    fn test_angle() {
        let pos = [1., 0., 0.];
        let orientation = [0., 1., 0.];
        let angle = angle(pos, orientation, LP);

        assert_float_eq!(angle, 90., abs <= 0.);
    }

    #[test]
    fn test_angle_abs_value() {
        let pos = [1., 0., 0.];
        let orientation = [0., -1., 0.];
        let angle = angle(pos, orientation, LP);

        assert_float_eq!(angle, 90., abs <= 0.);
    }
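
    // additional sketches for the remaining helpers: `distance` is the plain Euclidean
    // distance between the two points, and `angle` collapses to 0 degrees when the source
    // orientation points along the listener-to-source direction
    #[test]
    fn test_distance() {
        let pos = [3., 4., 0.];
        let dist = distance(pos, LP);

        assert_float_eq!(dist, 5., abs <= 0.);
    }

    #[test]
    fn test_angle_aligned() {
        let pos = [1., 0., 0.];
        let orientation = [1., 0., 0.];
        let angle = angle(pos, orientation, LP);

        assert_float_eq!(angle, 0., abs <= 0.);
    }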
}