use webrtc_audio_processing::*;
use webrtc_audio_processing_config::{Config, EchoCanceller};
/// Demonstrates round-tripping one render and one capture frame through the
/// WebRTC audio processor with echo cancellation enabled.
fn main() {
    let sample_rate_hz = 48_000;

    // `set_config` and `process_*_frame` mutate processor state, so the
    // binding must be `mut` — the original `let ap` would not compile.
    let mut ap = Processor::new(sample_rate_hz).unwrap();

    // Enable echo cancellation with default tuning; every other processing
    // option keeps its default.
    let config = Config {
        echo_canceller: Some(EchoCanceller::default()),
        ..Default::default()
    };
    ap.set_config(config);

    let (render_frame, capture_frame) = sample_stereo_frames(&ap);

    // The render (far-end) path only feeds the canceller's reference signal;
    // the samples themselves are expected to pass through unchanged.
    let mut render_frame_output = render_frame.clone();
    ap.process_render_frame(&mut render_frame_output).unwrap();
    assert_eq!(
        render_frame, render_frame_output,
        "render_frame should not be modified."
    );

    // The capture (near-end) frame contains a scaled copy of the render
    // signal (see `sample_stereo_frames`), so echo cancellation must alter it.
    let mut capture_frame_output = capture_frame.clone();
    ap.process_capture_frame(&mut capture_frame_output).unwrap();
    assert_ne!(
        capture_frame, capture_frame_output,
        "Echo cancellation should have modified capture_frame."
    );

    println!("Successfully processed a render and capture frame through WebRTC!");
}
/// Builds one stereo render (far-end) frame and one stereo capture (near-end)
/// frame of synthetic audio, each as `[channel][sample]` vectors.
///
/// The capture frame deliberately mixes in 20% of the render signal so that
/// echo cancellation has an "echo" to remove.
fn sample_stereo_frames(processor: &Processor) -> (Vec<Vec<f32>>, Vec<Vec<f32>>) {
    let num_samples_per_frame = processor.num_samples_per_frame();

    // Preallocate each channel to the known frame length so the loop below
    // never reallocates.
    let mut render_frame = vec![Vec::with_capacity(num_samples_per_frame); 2];
    let mut capture_frame = vec![Vec::with_capacity(num_samples_per_frame); 2];
    for i in 0..num_samples_per_frame {
        // Render signal: a slow cosine, louder on the left channel.
        let render = (i as f32 / 40.0).cos();
        render_frame[0].push(render * 0.4);
        render_frame[1].push(render * 0.2);
        // Capture signal: an independent sine ("voice") plus 20% leakage of
        // the render signal on each channel.
        let voice = (i as f32 / 20.0).sin();
        capture_frame[0].push(voice * 0.4 + render_frame[0][i] * 0.2);
        capture_frame[1].push(voice * 0.2 + render_frame[1][i] * 0.2);
    }
    (render_frame, capture_frame)
}