rg3d_sound/renderer/hrtf.rs
1//! Head-Related Transfer Function (HRTF) module. Provides all needed types and methods for HRTF rendering.
2//!
3//! # Overview
4//!
5//! HRTF stands for [Head-Related Transfer Function](https://en.wikipedia.org/wiki/Head-related_transfer_function)
6//! and can work only with spatial sounds. For each of such sound source after it was processed by HRTF you can
//! definitely tell from which location the sound came. In other words, HRTF improves perception of sound to
8//! the level of real life.
9//!
10//! # HRIR Spheres
11//!
12//! This library uses Head-Related Impulse Response (HRIR) spheres to create HRTF spheres. HRTF sphere is a set of
13//! points in 3D space which are connected into a mesh forming triangulated sphere. Each point contains spectrum
14//! for left and right ears which will be used to modify samples from each spatial sound source to create binaural
15//! sound. HRIR spheres can be found [here](https://github.com/mrDIMAS/hrir_sphere_builder/tree/master/hrtf_base/IRCAM)
16//!
17//! # Usage
18//!
19//! To use HRTF you need to change default renderer to HRTF renderer like so:
20//!
21//! ```no_run
22//! use rg3d_sound::context::{self, SoundContext};
23//! use rg3d_sound::renderer::hrtf::{HrtfRenderer};
24//! use rg3d_sound::renderer::Renderer;
25//! use std::path::Path;
26//! use hrtf::HrirSphere;
27//!
28//! fn use_hrtf(context: &mut SoundContext) {
29//! // IRC_1002_C.bin is HRIR sphere in binary format, can be any valid HRIR sphere
30//! // from base mentioned above.
31//! let hrir_sphere = HrirSphere::from_file("examples/data/IRC_1002_C.bin", context::SAMPLE_RATE).unwrap();
32//!
33//! context.state().set_renderer(Renderer::HrtfRenderer(HrtfRenderer::new(hrir_sphere)));
34//! }
35//! ```
36//!
37//! # Performance
38//!
//! HRTF is `heavy`. Usually it is 4-5 times slower than the default renderer; this is expected because HRTF requires some heavy
40//! math (fast Fourier transform, convolution, etc.). On Ryzen 1700 it takes 400-450 μs (0.4 - 0.45 ms) per source.
41//! In most cases this is ok, engine works in separate thread and it has around 100 ms to prepare new portion of
42//! samples for output device.
43//!
44//! # Known problems
45//!
//! This renderer still suffers from small audible clicks on very fast moving sounds; the clicks sound more like
//! "buzzing" - it is due to the fact that the HRTF is different from frame to frame, which gives "bumps" in the
//! amplitude of the signal because of the phase shift each impulse response has. This can be fixed by a short
//! cross fade between a small amount of samples from the previous frame and the same amount of samples of the
//! current one, as proposed [here](http://csoundjournal.com/issue9/newHRTFOpcodes.html).
51//!
52//! Clicks can be reproduced by using clean sine wave of 440 Hz on some source moving around listener.
53
54use crate::{
55 context,
56 context::{DistanceModel, SoundContext},
57 listener::Listener,
58 renderer::render_source_default,
59 source::SoundSource,
60};
61use hrtf::HrirSphere;
62use rg3d_core::visitor::{Visit, VisitResult, Visitor};
63use std::fmt::Debug;
64
/// See module docs.
#[derive(Clone, Debug, Default)]
pub struct HrtfRenderer {
    /// Processor that performs the actual HRTF convolution. It is `None` only
    /// for a `Default`-constructed value (e.g. a placeholder before
    /// deserialization fills it in); both rendering and saving assume it is
    /// `Some` and unwrap it.
    processor: Option<hrtf::HrtfProcessor>,
}
70
71impl Visit for HrtfRenderer {
72 fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult {
73 visitor.enter_region(name)?;
74
75 let mut resource_path = if visitor.is_reading() {
76 Default::default()
77 } else {
78 self.processor
79 .as_ref()
80 .unwrap()
81 .hrtf_sphere()
82 .source()
83 .to_owned()
84 };
85 resource_path.visit("ResourcePath", visitor)?;
86 if visitor.is_reading() {
87 self.processor = Some(hrtf::HrtfProcessor::new(
88 HrirSphere::from_file(resource_path, context::SAMPLE_RATE).unwrap(),
89 SoundContext::HRTF_INTERPOLATION_STEPS,
90 SoundContext::HRTF_BLOCK_LEN,
91 ));
92 }
93
94 visitor.leave_region()
95 }
96}
97
98impl HrtfRenderer {
99 /// Creates new HRTF renderer using specified HRTF sphere. See module docs for more info.
100 pub fn new(hrir_sphere: hrtf::HrirSphere) -> Self {
101 Self {
102 processor: Some(hrtf::HrtfProcessor::new(
103 hrir_sphere,
104 SoundContext::HRTF_INTERPOLATION_STEPS,
105 SoundContext::HRTF_BLOCK_LEN,
106 )),
107 }
108 }
109
110 pub(crate) fn render_source(
111 &mut self,
112 source: &mut SoundSource,
113 listener: &Listener,
114 distance_model: DistanceModel,
115 out_buf: &mut [(f32, f32)],
116 ) {
117 match source {
118 SoundSource::Generic(_) => {
119 render_source_default(source, listener, distance_model, out_buf)
120 }
121 SoundSource::Spatial(spatial) => {
122 let new_distance_gain = spatial.get_distance_gain(listener, distance_model);
123 let new_sampling_vector = spatial.get_sampling_vector(listener);
124
125 self.processor
126 .as_mut()
127 .unwrap()
128 .process_samples(hrtf::HrtfContext {
129 source: &spatial.generic.frame_samples,
130 output: out_buf,
131 new_sample_vector: hrtf::Vec3::new(
132 new_sampling_vector.x,
133 new_sampling_vector.y,
134 new_sampling_vector.z,
135 ),
136 prev_sample_vector: hrtf::Vec3::new(
137 spatial.prev_sampling_vector.x,
138 spatial.prev_sampling_vector.y,
139 spatial.prev_sampling_vector.z,
140 ),
141 prev_left_samples: &mut spatial.prev_left_samples,
142 prev_right_samples: &mut spatial.prev_right_samples,
143 prev_distance_gain: spatial.prev_distance_gain.unwrap_or(new_distance_gain),
144 new_distance_gain,
145 });
146
147 spatial.prev_sampling_vector = new_sampling_vector;
148 spatial.prev_distance_gain = Some(new_distance_gain);
149 }
150 }
151 }
152}