// webrtc_audio_processing/lib.rs

//! A wrapper around the WebRTC `AudioProcessing` module, built on top of
//! `webrtc-audio-processing-sys`.

#![warn(clippy::all)]
#![warn(missing_docs)]

mod config;

use std::{error, fmt, sync::Arc};
use webrtc_audio_processing_sys as ffi;

pub use config::*;
pub use ffi::NUM_SAMPLES_PER_FRAME;

/// Error returned by the underlying `ffi::AudioProcessing` calls.
#[derive(Debug)]
pub struct Error {
    /// Raw error code reported by the C++ layer.
    code: i32,
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "ffi::AudioProcessing::Error code: {}", self.code)
    }
}

impl error::Error for Error {}

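/// `Processor` provides an access point for processing audio frames. It
/// deinterleaves incoming interleaved frames, runs them through the
/// underlying `AudioProcessing` instance, and re-interleaves the result in
/// place.
///
/// A minimal usage sketch (not taken from the original documentation; it
/// assumes stereo capture and render streams and uses only the APIs defined
/// in this file):
///
/// ```no_run
/// use webrtc_audio_processing::*;
///
/// let config = InitializationConfig {
///     num_capture_channels: 2,
///     num_render_channels: 2,
///     ..InitializationConfig::default()
/// };
/// let mut processor = Processor::new(&config).unwrap();
///
/// // Interleaved frames: NUM_SAMPLES_PER_FRAME samples per channel.
/// let mut render_frame = vec![0f32; NUM_SAMPLES_PER_FRAME as usize * 2];
/// let mut capture_frame = vec![0f32; NUM_SAMPLES_PER_FRAME as usize * 2];
///
/// // Feed the far-end (render) audio first, then process the near-end
/// // (capture) audio in place.
/// processor.process_render_frame(&mut render_frame).unwrap();
/// processor.process_capture_frame(&mut capture_frame).unwrap();
/// ```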
#[derive(Clone)]
pub struct Processor {
    inner: Arc<AudioProcessing>,
    // Scratch buffers, preallocated in `new()` and reused on every frame to
    // avoid per-call allocations.
    deinterleaved_capture_frame: Vec<Vec<f32>>,
    deinterleaved_render_frame: Vec<Vec<f32>>,
}

impl Processor {
    /// Creates a new `Processor`, allocating the per-channel scratch buffers
    /// based on the channel counts in `config`.
    pub fn new(config: &ffi::InitializationConfig) -> Result<Self, Error> {
        Ok(Self {
            inner: Arc::new(AudioProcessing::new(config)?),
            deinterleaved_capture_frame: vec![
                vec![0f32; NUM_SAMPLES_PER_FRAME as usize];
                config.num_capture_channels as usize
            ],
            deinterleaved_render_frame: vec![
                vec![0f32; NUM_SAMPLES_PER_FRAME as usize];
                config.num_render_channels as usize
            ],
        })
    }

    /// Processes and modifies the audio frame from a capture device (e.g. a
    /// microphone). `frame` should hold an interleaved f32 audio frame with
    /// `NUM_SAMPLES_PER_FRAME` samples per channel.
    pub fn process_capture_frame(&mut self, frame: &mut [f32]) -> Result<(), Error> {
        Self::deinterleave(frame, &mut self.deinterleaved_capture_frame);
        self.inner.process_capture_frame(&mut self.deinterleaved_capture_frame)?;
        Self::interleave(&self.deinterleaved_capture_frame, frame);
        Ok(())
    }

    /// Processes and modifies the audio frame from a capture device. `frame`
    /// should be a non-interleaved frame, with one `Vec<f32>` of
    /// `NUM_SAMPLES_PER_FRAME` samples per channel.
    pub fn process_capture_frame_noninterleaved(
        &mut self,
        frame: &mut [Vec<f32>],
    ) -> Result<(), Error> {
        self.inner.process_capture_frame(frame)
    }

    /// Processes and modifies the audio frame to be played back (the far-end
    /// signal). `frame` should hold an interleaved f32 audio frame with
    /// `NUM_SAMPLES_PER_FRAME` samples per channel.
    pub fn process_render_frame(&mut self, frame: &mut [f32]) -> Result<(), Error> {
        Self::deinterleave(frame, &mut self.deinterleaved_render_frame);
        self.inner.process_render_frame(&mut self.deinterleaved_render_frame)?;
        Self::interleave(&self.deinterleaved_render_frame, frame);
        Ok(())
    }

    /// Processes and modifies the audio frame to be played back. `frame`
    /// should be a non-interleaved frame, with one `Vec<f32>` of
    /// `NUM_SAMPLES_PER_FRAME` samples per channel.
    pub fn process_render_frame_noninterleaved(
        &mut self,
        frame: &mut [Vec<f32>],
    ) -> Result<(), Error> {
        self.inner.process_render_frame(frame)
    }

    /// Returns statistics reported by the underlying audio processing module.
    pub fn get_stats(&self) -> Stats {
        self.inner.get_stats()
    }

    /// Applies a new `Config` to the underlying audio processing module. Can
    /// be called at any time after initialization.
    pub fn set_config(&mut self, config: Config) {
        self.inner.set_config(config);
    }

    /// Signals the processing module whether the output audio will be muted,
    /// so processing can be adjusted accordingly.
    pub fn set_output_will_be_muted(&self, muted: bool) {
        self.inner.set_output_will_be_muted(muted);
    }

    /// Signals the processing module whether a key was pressed during the
    /// current frame, so typing noise can be handled accordingly.
    pub fn set_stream_key_pressed(&self, pressed: bool) {
        self.inner.set_stream_key_pressed(pressed);
    }

    /// Splits an interleaved frame `src` into per-channel buffers `dst`.
    /// `src` must contain exactly `dst.len() * dst[0].len()` samples.
    fn deinterleave<T: AsMut<[f32]>>(src: &[f32], dst: &mut [T]) {
        let num_channels = dst.len();
        let num_samples = dst[0].as_mut().len();
        assert_eq!(src.len(), num_channels * num_samples);
        for channel_index in 0..num_channels {
            for sample_index in 0..num_samples {
                dst[channel_index].as_mut()[sample_index] =
                    src[num_channels * sample_index + channel_index];
            }
        }
    }

    /// Merges per-channel buffers `src` back into an interleaved frame `dst`.
    /// `dst` must contain exactly `src.len() * src[0].len()` samples.
    fn interleave<T: AsRef<[f32]>>(src: &[T], dst: &mut [f32]) {
        let num_channels = src.len();
        let num_samples = src[0].as_ref().len();
        assert_eq!(dst.len(), num_channels * num_samples);
        for channel_index in 0..num_channels {
            for sample_index in 0..num_samples {
                dst[num_channels * sample_index + channel_index] =
                    src[channel_index].as_ref()[sample_index];
            }
        }
    }
}

/// A low-level wrapper that owns the raw pointer to the C++
/// `ffi::AudioProcessing` instance and exposes its operations.
pub struct AudioProcessing {
    inner: *mut ffi::AudioProcessing,
}

impl AudioProcessing {
    /// Creates a new instance by calling into the C++ library. Returns an
    /// `Error` with the reported code if creation fails.
    pub fn new(config: &ffi::InitializationConfig) -> Result<Self, Error> {
        let mut code = 0;
        let inner = unsafe { ffi::audio_processing_create(config, &mut code) };
        if !inner.is_null() {
            Ok(Self { inner })
        } else {
            Err(Error { code })
        }
    }

    /// Processes a non-interleaved capture frame, one `Vec<f32>` per channel.
    pub fn process_capture_frame(&self, frame: &mut [Vec<f32>]) -> Result<(), Error> {
        // Build an array of per-channel pointers for the FFI call.
        let mut frame_ptr = frame.iter_mut().map(|v| v.as_mut_ptr()).collect::<Vec<*mut f32>>();
        unsafe {
            let code = ffi::process_capture_frame(self.inner, frame_ptr.as_mut_ptr());
            if ffi::is_success(code) {
                Ok(())
            } else {
                Err(Error { code })
            }
        }
    }

    /// Processes a non-interleaved render frame, one `Vec<f32>` per channel.
    pub fn process_render_frame(&self, frame: &mut [Vec<f32>]) -> Result<(), Error> {
        let mut frame_ptr = frame.iter_mut().map(|v| v.as_mut_ptr()).collect::<Vec<*mut f32>>();
        unsafe {
            let code = ffi::process_render_frame(self.inner, frame_ptr.as_mut_ptr());
            if ffi::is_success(code) {
                Ok(())
            } else {
                Err(Error { code })
            }
        }
    }

    /// Returns statistics reported by the underlying processing module.
    pub fn get_stats(&self) -> Stats {
        unsafe { ffi::get_stats(self.inner).into() }
    }

    /// Applies a new `Config` to the underlying processing module.
    pub fn set_config(&self, config: Config) {
        unsafe {
            ffi::set_config(self.inner, &config.into());
        }
    }

    /// Signals whether the output audio will be muted.
    pub fn set_output_will_be_muted(&self, muted: bool) {
        unsafe {
            ffi::set_output_will_be_muted(self.inner, muted);
        }
    }

    /// Signals whether a key was pressed during the current frame.
    pub fn set_stream_key_pressed(&self, pressed: bool) {
        unsafe {
            ffi::set_stream_key_pressed(self.inner, pressed);
        }
    }
}

impl Drop for AudioProcessing {
    fn drop(&mut self) {
        unsafe {
            ffi::audio_processing_delete(self.inner);
        }
    }
}

// Safety: the underlying C++ `AudioProcessing` instance manages its own
// synchronization, so the raw pointer may be shared and sent across threads.
unsafe impl Sync for AudioProcessing {}
unsafe impl Send for AudioProcessing {}

#[cfg(test)]
mod tests {
    use super::*;
    use std::{thread, time::Duration};

    #[test]
    fn test_create_failure() {
        let config =
            InitializationConfig { num_capture_channels: 0, ..InitializationConfig::default() };
        assert!(Processor::new(&config).is_err());
    }

    #[test]
    fn test_create_drop() {
        let config = InitializationConfig {
            num_capture_channels: 1,
            num_render_channels: 1,
            ..InitializationConfig::default()
        };
        let _p = Processor::new(&config).unwrap();
    }

    #[test]
    fn test_deinterleave_interleave() {
        let num_channels = 2usize;
        let num_samples = 3usize;

        let interleaved = (0..num_channels * num_samples).map(|v| v as f32).collect::<Vec<f32>>();
        let mut deinterleaved = vec![vec![-1f32; num_samples]; num_channels];
        Processor::deinterleave(&interleaved, &mut deinterleaved);
        assert_eq!(vec![vec![0f32, 2f32, 4f32], vec![1f32, 3f32, 5f32]], deinterleaved);

        let mut interleaved_out = vec![-1f32; num_samples * num_channels];
        Processor::interleave(&deinterleaved, &mut interleaved_out);
        assert_eq!(interleaved, interleaved_out);
    }

    fn sample_stereo_frames() -> (Vec<f32>, Vec<f32>) {
        let num_samples_per_frame = NUM_SAMPLES_PER_FRAME as usize;

        // Stereo render frame: a cosine played back at different volumes in
        // the left and right channels.
        let mut render_frame = Vec::with_capacity(num_samples_per_frame * 2);
        for i in 0..num_samples_per_frame {
            render_frame.push((i as f32 / 40.0).cos() * 0.4);
            render_frame.push((i as f32 / 40.0).cos() * 0.2);
        }

        // Stereo capture frame: a sine plus a scaled copy of the render
        // frame, simulating an acoustic echo of the far-end signal.
        let mut capture_frame = Vec::with_capacity(num_samples_per_frame * 2);
        for i in 0..num_samples_per_frame {
            capture_frame.push((i as f32 / 20.0).sin() * 0.4 + render_frame[i * 2] * 0.2);
            capture_frame.push((i as f32 / 20.0).sin() * 0.2 + render_frame[i * 2 + 1] * 0.2);
        }

        (render_frame, capture_frame)
    }

    #[test]
    fn test_nominal() {
        let config = InitializationConfig {
            num_capture_channels: 2,
            num_render_channels: 2,
            ..InitializationConfig::default()
        };
        let mut ap = Processor::new(&config).unwrap();

        let config = Config {
            echo_cancellation: Some(EchoCancellation {
                suppression_level: EchoCancellationSuppressionLevel::High,
                stream_delay_ms: None,
                enable_delay_agnostic: false,
                enable_extended_filter: false,
            }),
            ..Config::default()
        };
        ap.set_config(config);

        let (render_frame, capture_frame) = sample_stereo_frames();

        let mut render_frame_output = render_frame.clone();
        ap.process_render_frame(&mut render_frame_output).unwrap();

        // The render (far-end) frame should pass through unmodified.
        assert_eq!(render_frame, render_frame_output);

        let mut capture_frame_output = capture_frame.clone();
        ap.process_capture_frame(&mut capture_frame_output).unwrap();

        // Echo cancellation should have modified the capture frame.
        assert_ne!(capture_frame, capture_frame_output);

        let stats = ap.get_stats();
        assert!(stats.echo_return_loss.is_some());
        println!("{stats:#?}");
    }

    #[test]
    #[ignore]
    fn test_nominal_threaded() {
        let config = InitializationConfig {
            num_capture_channels: 2,
            num_render_channels: 2,
            ..InitializationConfig::default()
        };
        let ap = Processor::new(&config).unwrap();

        let (render_frame, capture_frame) = sample_stereo_frames();

        let mut config_ap = ap.clone();
        let config_thread = thread::spawn(move || {
            thread::sleep(Duration::from_millis(100));

            let config = Config {
                echo_cancellation: Some(EchoCancellation {
                    suppression_level: EchoCancellationSuppressionLevel::High,
                    stream_delay_ms: None,
                    enable_delay_agnostic: false,
                    enable_extended_filter: false,
                }),
                ..Config::default()
            };
            config_ap.set_config(config);
        });

        let mut render_ap = ap.clone();
        let render_thread = thread::spawn(move || {
            for _ in 0..100 {
                let mut render_frame_output = render_frame.clone();
                render_ap.process_render_frame(&mut render_frame_output).unwrap();

                thread::sleep(Duration::from_millis(10));
            }
        });

        let mut capture_ap = ap.clone();
        let capture_thread = thread::spawn(move || {
            for i in 0..100 {
                let mut capture_frame_output = capture_frame.clone();
                capture_ap.process_capture_frame(&mut capture_frame_output).unwrap();

                let stats = capture_ap.get_stats();
                if i < 5 {
                    // Echo cancellation stats are not available before the
                    // config thread has enabled echo cancellation.
                    assert!(stats.echo_return_loss.is_none());
                } else if i >= 95 {
                    // By now the config should have been applied and echo
                    // cancellation stats should be reported.
                    assert!(stats.echo_return_loss.is_some());
                }

                thread::sleep(Duration::from_millis(10));
            }
        });

        config_thread.join().unwrap();
        render_thread.join().unwrap();
        capture_thread.join().unwrap();
    }

    #[test]
    fn test_tweak_processor_params() {
        let config = InitializationConfig {
            num_capture_channels: 2,
            num_render_channels: 2,
            ..InitializationConfig::default()
        };
        let mut ap = Processor::new(&config).unwrap();

        // Exercise the parameter setters; this should not fail or panic.
        ap.set_output_will_be_muted(true);
        ap.set_stream_key_pressed(true);

        // Processing should still succeed with the tweaked parameters.
        let (render_frame, capture_frame) = sample_stereo_frames();

        let mut render_frame_output = render_frame.clone();
        ap.process_render_frame(&mut render_frame_output).unwrap();
        let mut capture_frame_output = capture_frame.clone();
        ap.process_capture_frame(&mut capture_frame_output).unwrap();
    }
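
    // A sketch (not part of the original test suite) exercising the
    // non-interleaved API. It assumes, as elsewhere in this file, that a
    // non-interleaved frame is one Vec<f32> of NUM_SAMPLES_PER_FRAME samples
    // per channel.
    #[test]
    fn test_noninterleaved_processing() {
        let config = InitializationConfig {
            num_capture_channels: 2,
            num_render_channels: 2,
            ..InitializationConfig::default()
        };
        let mut ap = Processor::new(&config).unwrap();

        let (render_frame, capture_frame) = sample_stereo_frames();

        // Split the interleaved sample frames into per-channel buffers.
        let mut render_deinterleaved = vec![vec![0f32; NUM_SAMPLES_PER_FRAME as usize]; 2];
        Processor::deinterleave(&render_frame, &mut render_deinterleaved);
        let mut capture_deinterleaved = vec![vec![0f32; NUM_SAMPLES_PER_FRAME as usize]; 2];
        Processor::deinterleave(&capture_frame, &mut capture_deinterleaved);

        ap.process_render_frame_noninterleaved(&mut render_deinterleaved).unwrap();
        ap.process_capture_frame_noninterleaved(&mut capture_deinterleaved).unwrap();
    }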
}